Commit 21135416 authored by Alexander Lercher's avatar Alexander Lercher

Merge branch 'feature/community-detection-pipeline' into feature/multilayer-integration

parents f3b50d55 53aba5c1
......@@ -5,3 +5,11 @@
**/env
**/venv
**/venv3
src/modules/certificate/articonf1.key
src/modules/certificate/articonf1.crt
src/modules/certificate/articonf1-chain.crt
src/modules/security/regular_user_credentials.json
......@@ -22,3 +22,9 @@ The scripts *build.py* and *deploy.py* are used to create Docker images and depl
1. The semantic linking microservice receives the notification and GETs all traces (including the new one) from the trace retrieval microservice
1. All traces can now be processed
![Input handling image](documentation/images/input-handling.png)
## API Authentication
![Authentication diagram](documentation/images/authentication.png)
## Authentication and Business Interface
[See here](documentation/external_access.md)
\ No newline at end of file
......@@ -3,6 +3,14 @@ import sys
import importlib.util
import pathlib
'''
This script searches for all 'tests/' directories and executes all tests
by cd'ing into the dir and executing unittest discover.
It additionally installs all dependencies from a '../requirements.txt' via pip.
Use command line argument '-w' to run on windows.
'''
PY = 'py' if (len(sys.argv) > 1 and sys.argv[1] == '-w') else 'python3.7' # use -w to run on windows
ROOT = pathlib.Path(__file__).parent.parent.absolute()
TESTS_FOLDER_NAME = os.path.normpath("/tests")
......@@ -14,14 +22,15 @@ for (dirname, dirs, files) in os.walk(ROOT):
#I assume all the tests are placed in a folder named "tests"
if (TESTS_FOLDER_NAME in str(dirname)) \
and 'src' in str(dirname) \
and not(f"{TESTS_FOLDER_NAME}{os.path.normpath('/')}" in str(dirname)) \
and not("venv" in str(dirname)):
try:
print(f"Executing tests in {dirname}")
os.chdir(os.path.normpath(dirname))
# TODO do this during docker image setup
exit_val = os.system("python3.7 -m pip install -r ../requirements.txt") # install pip dependencies
exit_val = os.system("python3.7 -m unittest discover") # execute the tests
exit_val = os.system(f"{PY} -m pip install -r ../requirements.txt") # install pip dependencies
exit_val = os.system(f"{PY} -m unittest discover") # execute the tests
resultCodeList.append(exit_val) #once per folder i.e if 3 tests are in a folder and crash, there will be just one exit val
except Exception as e:
print(e)
......
# SMART RESTful API Gateway
https://articonf1.itec.aau.at:30401/api/ui/
The API Gateway serves as an interface to external applications. Here, users are authenticated and blockchain transactions are added.
## Authentication
```POST https://articonf1.itec.aau.at:30401/api/tokens``` with body:
```
{
"password": "password",
"username": "regular@articonf1.itec.aau.at"
}
```
returns the JWT token for authentication, e.g.
```
{
"token": "eyJ0eXAiOiJKV1QiLCJhbGciOiJIUzI1NiJ9..."
}
```
This token is used for authentication as _regular user_ on all microservices currently supporting authentication. Therefore the authentication header "```Bearer eyJ0eXAiOiJKV1QiLCJhbGciOiJIUzI1NiJ9...```" has to be added to the request.
## Blockchain transactions
```POST https://articonf1.itec.aau.at:30401/api/trace``` with body:
```
{
"ApplicationType": "use-case identifier as string",
"key": "value",
...
}
```
adds a blockchain transaction entry for ApplicationType with all the keys and values. These will be converted and stored in our own format for creating multilayers and communities.
# Business Logic Microservice
https://articonf1.itec.aau.at:30420/api/ui
This microservice contains use-case-specific information, such as schemas and contexts.
## Schema information
```GET https://articonf1.itec.aau.at:30420/api/use-cases/{use-case}/schema``` returns all schema mappings for the use-case identifier. The mapping is used to flatten nested input from the blockchain.
## Context information
```GET https://articonf1.itec.aau.at:30420/api/use-cases/{use-case}/layers``` returns all layers from the schema used internally for clustering.
# Role Stage Discovery Microservice
https://articonf1.itec.aau.at:30103/api/ui
This microservice contains the communities based on clusters and similarities between communities.
The endpoints are currently being refactored; they will be documented here once the refactoring is finished and processed data is available.
\ No newline at end of file
......@@ -4,6 +4,10 @@ info:
description: This is the documentation for the role stage discovery microservice.
version: "1.0.0"
# Import security definitions from global security definition
securityDefinitions:
$ref: '../security/security.yml#securityDefinitions'
consumes:
- "application/json"
produces:
......@@ -26,13 +30,15 @@ paths:
schema:
type: object
responses:
200:
'200':
description: "Successful echo of request data"
#region Layers
/layers:
post:
operationId: "routes.layers.post"
security:
- JwtRegular: []
tags:
- "Layers"
summary: "Add a new layer [TODO: or overwrite an existing one]"
......@@ -44,18 +50,20 @@ paths:
schema:
$ref: "#/definitions/Layer-UpperCase"
responses:
201:
'201':
description: "Successful operation"
400:
'400':
description: "Invalid input"
get:
operationId: "routes.layers.get"
security:
- JwtRegular: []
tags:
- "Layers"
summary: "Get all layer data"
parameters: []
responses:
200:
'200':
description: "Successful operation"
schema:
$ref: "#/definitions/LayerCollection"
......@@ -63,6 +71,8 @@ paths:
/layers/{name}:
get:
operationId: "routes.layers.get_by_name"
security:
- JwtRegular: []
tags:
- "Layers"
summary: "Get single layer data"
......@@ -73,16 +83,18 @@ paths:
required: true
type: "string"
responses:
200:
'200':
description: "Successful operation"
schema:
$ref: "#/definitions/Layer"
404:
'404':
description: "Layer not found"
/layers/{name}/nodes:
get:
operationId: "routes.layers.get_nodes"
security:
- JwtRegular: []
tags:
- "Layers"
summary: "Get all individual nodes for the layer"
......@@ -93,14 +105,16 @@ paths:
required: true
type: "string"
responses:
200:
'200':
description: "Successful operation"
schema:
$ref: "#/definitions/NodeCollection"
404:
'404':
description: "Layer not found"
post:
operationId: "routes.layers.post_nodes"
security:
- JwtRegular: []
tags:
- "Layers"
summary: "Adds a single or multiple nodes to the layer"
......@@ -117,14 +131,16 @@ paths:
schema:
$ref: "#/definitions/NodeCollection"
responses:
201:
'201':
description: "Successful operation"
400:
'400':
description: "Invalid input"
/layers/{name}/clusters:
get:
operationId: "routes.clustersets.get_by_name"
security:
- JwtRegular: []
tags:
- "Layers"
summary: "Get all clusters for the layer"
......@@ -135,16 +151,18 @@ paths:
required: true
type: "string"
responses:
200:
'200':
description: "Successful operation"
schema:
$ref: "#/definitions/ClusterCollection"
404:
'404':
description: "Layer not found"
/layers/{name}/timeslices:
get:
operationId: "routes.timeslices.get_by_name"
security:
- JwtRegular: []
tags:
- "Layers"
summary: "Get all timeslices for the layer"
......@@ -155,11 +173,11 @@ paths:
required: true
type: "string"
responses:
200:
'200':
description: "Successful operation"
schema:
$ref: "#/definitions/TimeSliceCollection"
404:
'404':
description: "Layer not found"
#endregion
......@@ -168,12 +186,14 @@ paths:
/rfc/run:
post:
operationId: "routes.functions.run_agi_clustering_and_graph_creation"
security:
- JwtRegular: []
tags:
- "Remote function calls"
summary: "Insert locations from AGI, create clusters for starting time and location layers, create graphs for the location clusters"
parameters: []
responses:
204:
'204':
description: "Successful operation"
#endregion
......@@ -182,12 +202,14 @@ paths:
/connectedClusters:
get:
operationId: "routes.connClusters.get_conn_clusters"
security:
- JwtRegular: []
tags:
- "Connected"
summary: "Get connected Clusters data"
description: "Returns a dictionary of cluster. The clusters contain the associated connected clusters and connected nodes data."
responses:
200:
'200':
description: "Successful operation"
schema:
$ref: "#/definitions/ConnectedDict"
......@@ -195,6 +217,8 @@ paths:
/clusterSimilarity:
get:
operationId: "routes.similarity.get_similarity"
security:
- JwtRegular: []
tags:
- "Similarity"
summary: "Get data of the similarity between clusters."
......@@ -212,7 +236,7 @@ paths:
description: "Data is returned in batches of size 1000. Returns a dictionary where the key is a tuple of cluster_labels (i.e. [0,319]) and the value is the computed similarity between 2 clusters in the tuple, in regard to each layer in the input. \n Note: the tuple clusters have the same layer and the computed similarity is in regard to clusters from OTHER layers."
responses:
200:
'200':
description: "Successful operation"
schema:
$ref: "#/definitions/ClusterSimilarityArray"
......@@ -220,12 +244,14 @@ paths:
/clusterRunArray:
get:
operationId: "routes.connRun.get_connected_run"
security:
- JwtRegular: []
tags:
- "RunId"
summary: "Get RunId"
description: "Returns the RunId and the associated datetime when a connection of clusters/simillarity of clusters was computed."
responses:
200:
'200':
description: "Successful operation"
schema:
$ref: "#/definitions/ClusterRunArray"
......
......@@ -13,15 +13,26 @@ LOGGER = logging.getLogger(__name__)
#############################
import connexion
from security import swagger_util
from pathlib import Path
# load swagger config
app = connexion.App(__name__, specification_dir='configs/')
app.add_api('swagger.yml')
app.add_api(swagger_util.get_bundled_specs(Path("configs/swagger.yml")),
resolver = connexion.RestyResolver("cms_rest_api"))
@app.route('/', methods=['GET'])
def api_root():
return 'Endpoint of role-stage-discovery-microservice!'
# SSL configuration
try:
    # when running locally, this should point to ../../../modules/certificate
certificate_path = os.environ['ARTICONF_CERTIFICATE_PATH']
except KeyError:
certificate_path = '/srv/articonf/'
context = (os.path.normpath(f'{certificate_path}/articonf1.crt'), os.path.normpath(f'{certificate_path}/articonf1.key')) # certificate and key files
# start app
if __name__ == '__main__':
app.run(host='0.0.0.0', port=5000, debug=True)
app.run(host='0.0.0.0', port=5000, ssl_context=context)
......@@ -27,6 +27,7 @@ mccabe==0.6.1
networkx==2.4
numpy==1.18.1
openapi-spec-validator==0.2.8
prance==0.19.0
pycodestyle==2.5.0
pylint==2.4.4
pymongo==3.10.1
......@@ -38,6 +39,7 @@ requests==2.22.0
rope==0.16.0
scikit-learn==0.22.1
scipy==1.4.1
semver==2.10.2
six==1.14.0
swagger-ui-bundle==0.0.6
typed-ast==1.4.1
......
import unittest
import sys
for path in ['../', './', '../../../modules/']:
for path in ['../', './', '../../../modules/', '../../../../modules']:
sys.path.insert(1, path)
from db.entities.connected_node import NodeC
......
......@@ -32,6 +32,14 @@ spec:
image: alexx882/role-stage-discovery-microservice
ports:
- containerPort: 5000
volumeMounts:
- mountPath: /srv/articonf
name: articonf
volumes:
- name: articonf
hostPath:
path: /srv/articonf
type: Directory
---
apiVersion: v1
kind: Service
......
''' This script provides information about the environment SMART runs in. '''
import os
def is_running_locally():
    '''
    Returns True iff the environment variable ARTICONF_LOCAL is set to '1'.

    Set env var ARTICONF_LOCAL=1 to run locally; any other value (or an
    unset variable) means the code is running on the server.
    '''
    # os.environ values are always str, so a direct comparison suffices;
    # .get() avoids the KeyError dance of the original try/except.
    return os.environ.get('ARTICONF_LOCAL') == '1'
\ No newline at end of file
## Rabbit MQ
# RABBIT_MQ_HOSTNAME = 'rabbit-mq'
#RABBIT_MQ_PORT = 5672
RABBIT_MQ_HOSTNAME = 'articonf1.itec.aau.at'
RABBIT_MQ_PORT = 30302
'''Contains all networking constants for microservices to communicate to each other.'''
from env_info import is_running_locally
# run locally if env var ARTICONF_LOCAL=1
server = not is_running_locally()
#region Rabbit MQ
if server:
RABBIT_MQ_HOSTNAME = 'rabbit-mq'
RABBIT_MQ_PORT = 5672
else:
RABBIT_MQ_HOSTNAME = 'articonf1.itec.aau.at'
RABBIT_MQ_PORT = 30302
#endregion Rabbit MQ
#region Transaction Hub In
## Trace Retrieval
TRACE_RETRIEVAL_HOSTNAME = 'trace-retrieval'
TRACE_RETRIEVAL_REST_PORT = 80
TRACE_RETRIEVAL_DB_HOSTNAME = f'{TRACE_RETRIEVAL_HOSTNAME}-db'
TRACE_RETRIEVAL_DB_PORT = 27017
if server:
TRACE_RETRIEVAL_HOSTNAME = 'trace-retrieval'
TRACE_RETRIEVAL_REST_PORT = 80
TRACE_RETRIEVAL_DB_HOSTNAME = f'{TRACE_RETRIEVAL_HOSTNAME}-db'
TRACE_RETRIEVAL_DB_PORT = 27017
else:
TRACE_RETRIEVAL_HOSTNAME = 'articonf1.itec.aau.at'
TRACE_RETRIEVAL_REST_PORT = 30001
TRACE_RETRIEVAL_DB_HOSTNAME = 'articonf1.itec.aau.at'
TRACE_RETRIEVAL_DB_PORT = 30003
#endregion Transaction Hub In
#region Data Hub
## Semantic Linking
SEMANTIC_LINKING_HOSTNAME = 'semantic-linking'
SEMANTIC_LINKING_REST_PORT = 80
SEMANTIC_LINKING_DB_HOSTNAME = f'articonf1.itec.aau.at'
SEMANTIC_LINKING_DB_PORT = 30102
if server:
SEMANTIC_LINKING_HOSTNAME = 'semantic-linking'
SEMANTIC_LINKING_REST_PORT = 80
SEMANTIC_LINKING_DB_HOSTNAME = f'{SEMANTIC_LINKING_HOSTNAME}-db'
SEMANTIC_LINKING_DB_PORT = 27017
else:
SEMANTIC_LINKING_HOSTNAME = 'articonf1.itec.aau.at'
SEMANTIC_LINKING_REST_PORT = 30101
SEMANTIC_LINKING_DB_HOSTNAME = 'articonf1.itec.aau.at'
SEMANTIC_LINKING_DB_PORT = 30102
## Role Stage Discovery
ROLESTAGE_DISCOVERY_HOSTNAME = 'role-stage-discovery'
ROLESTAGE_DISCOVERY_REST_PORT = 30103
ROLESTAGE_DISCOVERY_DB_HOSTNAME = f'articonf1.itec.aau.at'
ROLESTAGE_DISCOVERY_DB_PORT = 30104
\ No newline at end of file
if server:
ROLESTAGE_DISCOVERY_HOSTNAME = 'role-stage-discovery'
ROLESTAGE_DISCOVERY_REST_PORT = 80
ROLESTAGE_DISCOVERY_DB_HOSTNAME = f'{ROLESTAGE_DISCOVERY_HOSTNAME}-db'
ROLESTAGE_DISCOVERY_DB_PORT = 27017
else:
ROLESTAGE_DISCOVERY_HOSTNAME = 'articonf1.itec.aau.at'
ROLESTAGE_DISCOVERY_REST_PORT = 30103
ROLESTAGE_DISCOVERY_DB_HOSTNAME = 'articonf1.itec.aau.at'
ROLESTAGE_DISCOVERY_DB_PORT = 30104
#endregion Data Hub
#region Rest Gateway
if server:
REST_GATEWAY_HOSTNAME = 'rest-gateway'
REST_GATEWAY_REST_PORT = 80
REST_GATEWAY_DB_HOSTNAME = f'{REST_GATEWAY_HOSTNAME}-db'
REST_GATEWAY_DB_PORT = 27017
else:
REST_GATEWAY_HOSTNAME = 'articonf1.itec.aau.at'
REST_GATEWAY_REST_PORT = 30401
REST_GATEWAY_DB_HOSTNAME = 'articonf1.itec.aau.at'
REST_GATEWAY_DB_PORT = 30402
#endregion Rest Gateway
#region Participation Hub
## Business Logic
if server:
BUSINESS_LOGIC_HOSTNAME = 'business-logic'
BUSINESS_LOGIC_DB_HOSTNAME = f'{BUSINESS_LOGIC_HOSTNAME}-db'
BUSINESS_LOGIC_REST_PORT = 80
BUSINESS_LOGIC_DB_PORT = 27017
else:
BUSINESS_LOGIC_HOSTNAME = 'articonf1.itec.aau.at'
BUSINESS_LOGIC_DB_HOSTNAME = 'articonf1.itec.aau.at'
BUSINESS_LOGIC_REST_PORT = 30420
BUSINESS_LOGIC_DB_PORT = 30421
#endregion Participation Hub
# Shared security definitions, imported by each microservice's swagger.yml
# via a $ref to this file (see e.g. the role-stage-discovery config).
securityDefinitions:
  # Any authenticated user: the JWT from the Authorization header is
  # verified by security.security_util.verifyTokenRegular.
  JwtRegular:
    type: apiKey
    name: Authorization
    in: header
    x-apikeyInfoFunc: "security.security_util.verifyTokenRegular"
  # Admin-only access: verified by security.security_util.verifyTokenAdmin,
  # which additionally checks the user's role.
  JwtAdmin:
    type: apiKey
    name: Authorization
    in: header
    x-apikeyInfoFunc: "security.security_util.verifyTokenAdmin"
# Global import resolved at deploy time; the IDE may flag it as unresolved -- this is expected.
import network_constants
import requests
import json
from typing import Dict, List
import logging
class TokenStash:
    '''
    Caches already-verified tokens to mitigate the traffic
    to the user-microservice.
    '''
    # Class-level caches shared by all callers:
    # token -> username, and token -> role.
    trusted_tokens = {}
    roles = {}

    @staticmethod
    def add(token: str, username: str, role: str):
        '''
        adds a verified token to the stash
        '''
        TokenStash.trusted_tokens[token] = username
        TokenStash.roles[token] = role

    @staticmethod
    def is_token_cached(token: str) -> str:
        '''
        returns the associated username to a token, None otherwise
        '''
        # EAFP: hit both caches at once; a miss in either means "not cached".
        try:
            return {
                "sub": TokenStash.trusted_tokens[token],
                "role": TokenStash.roles[token],
            }
        except KeyError:
            return None
def decodeToken(token: str, roles: List[str] = None) -> Dict:
    '''
    verifies the passed token on the user-microservice and returns a dictionary with the
    subject entry if the verification was successful, an error is raised otherwise

    @params:
        token - Required : JWT token from authorization header, must start with "Bearer "
        roles - Optional : User must have at least one of these roles

    Returns {"sub": username, "role": role} on success.
    Raises ValueError on a malformed token, a non-200 verification response,
    a missing field in the response, or a role mismatch.
    '''
    # None default instead of a shared mutable [] default argument.
    if roles is None:
        roles = []

    if not token.startswith("Bearer "):
        raise ValueError('Invalid JWT token (must be a Bearer string)')
    token = token[7:]

    # Re-use a previously verified token to avoid a round trip per request.
    cached_data = TokenStash.is_token_cached(token)
    if cached_data is not None:
        return cached_data

    url = f'https://{network_constants.REST_GATEWAY_HOSTNAME}:{network_constants.REST_GATEWAY_REST_PORT}/api/tokens/{token}'
    # NOTE(review): verify=False disables TLS certificate validation --
    # presumably because the gateway uses a self-signed certificate; confirm.
    response = requests.post(
        url,
        verify=False,
        proxies={"http": None, "https": None}
    )

    if response.status_code != 200:
        raise ValueError(
            f"Validation of token failed ({response.status_code})!")

    data = json.loads(response.text)
    if not "username" in data or not "role" in data:
        raise ValueError(
            f"Validation of token failed (missing field in verification response)!")

    if len(roles) > 0 and data["role"] not in roles:
        raise ValueError(
            f"Validation of token failed (wrong role)!")

    # Cache the successful verification for subsequent requests.
    TokenStash.add(token, data["username"], data["role"])
    return {"sub": data["username"], "role": data["role"]}
def _verify(token: str, roles: List[str] = None):
    '''
    Decodes and validates the token, returning the token info dict on
    success and None on any failure.

    Swallowing the exception here is deliberate: the connexion apikey
    handlers below must return None (not raise) to reject a request.
    '''
    # None default instead of a shared mutable [] default argument.
    try:
        return decodeToken(token, roles=roles if roles is not None else [])
    except Exception as e:
        logging.getLogger(__name__).error("Token invalid: " + str(e))
        return None
def verifyTokenRegular(token, required_scopes):
    '''connexion x-apikeyInfoFunc for JwtRegular: accepts any valid token, regardless of role.'''
    return _verify(token)


def verifyTokenAdmin(token, required_scopes):
    '''connexion x-apikeyInfoFunc for JwtAdmin: accepts only tokens with role "a".'''
    # presumably "a" is the role string stored for administrators -- confirm
    # against the user microservice's role model.
    return _verify(token, roles=["a"])
from typing import Dict, Any
from pathlib import Path
import prance
def get_bundled_specs(main_file: Path) -> Dict[str, Any]:
    '''
    Parses the given swagger.yml file and resolves all references in it,
    which makes it possible to split the API configuration
    across several files.
    '''
    resolving_parser = prance.ResolvingParser(
        str(main_file.absolute()),
        lazy=True,
        backend='openapi-spec-validator',
    )
    resolving_parser.parse()
    return resolving_parser.specification
\ No newline at end of file
FROM python:3
LABEL maintainer="Alexander Lercher"
# University proxy needed for outbound traffic during the image build.
ENV http_proxy http://proxy.uni-klu.ac.at:3128/
ENV https_proxy http://proxy.uni-klu.ac.at:3128/
RUN apt-get update
EXPOSE 5000
WORKDIR /app
# Install dependencies first so this layer is cached across code changes.
COPY src/participation-hub/business-logic-microservice/app/requirements.txt /app/
RUN pip install -r requirements.txt
# Shared modules are copied first, then the service code on top of them.
COPY src/modules/ /app/
COPY src/participation-hub/business-logic-microservice/app/ /app/
RUN chmod a+x main.py
CMD ["python", "./main.py"]
\ No newline at end of file
# Business Logic Microservice
The business logic microservice serves as an interface for the individual use cases. Here, schema information for all use cases is stored to enable context-aware processing.
## Technologies
- Python 3.x
- Docker
- Kubernetes
\ No newline at end of file
swagger: "2.0"
info:
title: Business Logic microservice
description: This is the documentation for the business logic microservice.
version: "1.0.0"
# Import security definitions from separate file
securityDefinitions:
$ref: '../security/security.yml#securityDefinitions'
consumes:
- "application/json"
produces:
- "application/json"
basePath: "/api"
paths:
/use-cases:
delete:
security:
- JwtRegular: []
operationId: "routes.layer.delete_uses_cases"
tags:
- "Use-Cases"
summary: "Delete all use-cases"
description: "Delete all use-cases"
responses:
'200':
description: "Successful Request"
paths:
/use-cases/{use_case}/schema/mapping:
put:
security:
- JwtRegular: []
operationId: "routes.layer.add_mapping_to_schema"
tags:
- "Schemas"
summary: "Adds an attribute mapping to the Schema of the Use-Case"
      description: "Adds an attribute mapping to each Layer of the Use-Case"
parameters:
- name: "use_case"
in: "path"
description: "Name of the Use-Case the Schema belongs to"
required: true
type: "string"
- in: body
name: "Object"
required: true
schema:
$ref: '#/definitions/Mapping'
responses:
'200':
description: "Successful Request"
/use-cases/{use_case}/schema:
get:
security:
- JwtRegular: []
operationId: "routes.layer.schema_for_use_case"
tags:
- "Schemas"
summary: "Get the Schema assigned to the Use-Case"
description: "Get the Schema assigned to the Use-Case"
parameters:
- name: "use_case"
in: "path"
description: "Name of the Use-Case the Schema belongs to"
required: true
type: "string"
responses:
'200':
description: "Successful Request"
'404':
description: "Schema does not exist"
delete:
security:
- JwtRegular: []
operationId: "routes.layer.delete_schema_for_use_case"
tags:
- "Schemas"
summary: "Delete the Schema assigned to the Use-Case"
description: "Delete the Schema assigned to the Use-Case"
parameters:
- name: "use_case"
in: "path"
description: "Name of the Use-Case the Schema belongs to"
required: true
type: "string"
responses:
'200':
description: "Successful Request"
'404':
description: "Schema does not exist"
/schemas:
get:
security:
- JwtRegular: []
operationId: "routes.layer.all_schemas"
tags:
- "Schemas"
summary: "Get all Use-Cases"
description: "Get all Use-Cases"
responses:
'200':
description: "Successful Request"
/use-cases:
delete:
security:
- JwtRegular: []
operationId: "routes.layer.delete_uses_cases"
tags:
- "Use-Cases"
summary: "Delete all Use-Cases"
description: "Delete all Use-Cases"
responses:
'200':
description: "Successful Request"
get:
security:
- JwtRegular: []
operationId: "routes.layer.use_cases"
tags:
- "Use-Cases"
summary: "Retrieves all Use-Cases"
description: "Retrieves all Use-Cases"
responses:
'200':
description: "Successful Request"
/use-cases/{use_case}/layers:
get:
security:
- JwtRegular: []
operationId: "routes.layer.get_all_for_use_case"
tags:
- "Layers"
summary: "Retrieves all layers belonging to the given Use-Case"
description: "Retrieves all layers belonging to the given Use-Case"
parameters:
- name: "use_case"
in: "path"
description: "Name of the Use-Case the layer belongs to"
required: true
type: "string"
responses:
'200':
description: "Successful Request"
'404':
description: "Use-Case does not exist"
/use-cases/{use_case}/cluster-mapping/layers/{name}:
delete:
security:
- JwtRegular: []
operationId: "routes.layer.delete_cluster_mapping"
tags:
- "Layers"
summary: "Deletes a cluster-attribute mapping from the selected layer"
description: "Deletes a cluster-attribute mapping from the selected layer"
parameters:
- name: "name"
in: "path"
description: "Name of the layer (must exist)"
required: true
type: "string"
- name: "use_case"
in: "path"
description: "Name of the Use-Case the layer belongs to"
required: true
type: "string"
- in: body
name: "Object"
required: true
schema:
type: object
properties:
attribute:
type: string
example: "end_time"
description: "Internal name of the attribute"
responses:
'200':
description: "Successful Request"
'404':
description: "Layer does not exist"
'400':
description: "Field in request is missing or attribute does not exist in the Layer"
put:
security:
- JwtRegular: []
operationId: "routes.layer.add_cluster_mapping"
tags:
- "Layers"
summary: "Selects a property of the Layer as cluster property"
description: "Selects a property of the Layer as cluster property"
parameters:
- name: "name"
in: "path"
description: "Name of the layer (must exist)"
required: true
type: "string"
- name: "use_case"
in: "path"
description: "Name of the Use-Case the layer belongs to"
required: true
type: "string"
- in: body
name: "Object"
required: true
schema:
type: object
properties:
attribute:
type: string
example: "end_time"
description: "Internal name of the attribute"
responses:
'200':
description: "Successful Request"
'404':
description: "Layer does not exist"
'400':
description: "Field in request is missing or attribute does not exist in the Layer"
/use-cases/{use_case}/layers/use-case:
put:
security:
- JwtRegular: []
operationId: "routes.layer.update_use_case_for_all"
tags:
- "Layers"
summary: "Update the Use-Case of all Layers belonging to the Use-Case in the query"
description: "Update the Use-Case of all Layers belonging to the Use-Case in the query"
parameters:
- name: "use_case"
in: "path"
description: "Name of the Use-Case the Layer belongs to"
required: true
type: "string"
- in: body
name: "Object"
required: true
schema:
type: object
properties:
use_case:
type: string
example: "air-bnb"
responses:
'200':
description: "Successful Request"
'400':
description: "Field in request is missing"
/use-cases/{use_case}/layers/mapping:
put:
security:
- JwtRegular: []
operationId: "routes.layer.add_mapping_for_all"
tags:
- "Layers"
      summary: "Adds an attribute mapping to each Layer of the Use-Case"
      description: "Adds an attribute mapping to each Layer of the Use-Case"
parameters:
- name: "use_case"
in: "path"
description: "Name of the Use-Case the Layer belongs to"
required: true
type: "string"
- in: body
name: "Object"
required: true
schema:
$ref: '#/definitions/LayerMapping'
responses:
'200':
description: "Successful Request"
'400':
description: "Field in request is missing"
delete:
security:
- JwtRegular: []
operationId: "routes.layer.delete_mapping_for_all"
tags:
- "Layers"
summary: "Deletes an attribute mapping for all Layers of the Use-Case"
description: "Deletes an attribute mapping for all Layers of the Use-Case"
parameters:
- name: "use_case"
in: "path"
description: "Name of the Use-Case the Layer belongs to"
required: true
type: "string"
- in: body
name: "Object"
required: true
schema:
type: object
properties:
internal:
type: string
example: "end_time"
responses:
'200':
description: "Successful Request"
'400':
description: "Field in request is missing"
/use-cases/{use_case}/layers/{name}/mapping:
delete:
security:
- JwtRegular: []
operationId: "routes.layer.delete_mapping"
tags:
- "Layers"
summary: "Deletes an attribute mapping from the selected Layer"
description: "Deletes an attribute mapping from the selected Layer"
parameters:
- name: "name"
in: "path"
description: "Name of the Layer (must exist)"
required: true
type: "string"
- name: "use_case"
in: "path"
description: "Name of the Use-Case the Layer belongs to"
required: true
type: "string"
- in: body
name: "Object"
required: true
schema:
$ref: '#/definitions/LayerMapping'
responses:
'200':
description: "Successful Request"
'404':
description: "Layer does not exist"
'400':
description: "Field in request is missing"
put:
security:
- JwtRegular: []
operationId: "routes.layer.add_mapping"
tags:
- "Layers"
summary: "Adds an attribute mapping to the selected Layer"
description: "Adds an attribute mapping to the selected Layer"
parameters:
- name: "name"
in: "path"
description: "Name of the Layer (must exist)"
required: true
type: "string"
- name: "use_case"
in: "path"
description: "Name of the Use-Case the Layer belongs to"
required: true
type: "string"
- in: body
name: "Object"
required: true
schema:
$ref: '#/definitions/LayerMapping'
responses:
'200':
description: "Successful Request"
'404':
description: "Layer does not exist"
'400':
description: "Field in request is missing"
/use-cases/{use_case}/layers/{name}:
get:
security:
- JwtRegular: []
operationId: "routes.layer.one"
tags:
- "Layers"
summary: "Retrieve one Layer from the DB"
description: "Retrieve one Layer from the DB"
parameters:
- name: "name"
in: "path"
description: "Name of the Layer"
required: true
type: "string"
- name: "use_case"
in: "path"
description: "Name of the Use-Case the Layer belongs to"
required: true
type: "string"
responses:
'200':
description: "Successful Request"
'404':
description: "Layer does not exist"
post:
security:
- JwtRegular: []
operationId: "routes.layer.add"
tags:
- "Layers"
summary: "Adds an empty Layer to the DB"
description: "Adds an empty Layer to the DB"
parameters:
- name: "name"
in: "path"
description: "Name of the new Layer"
required: true
type: "string"
- name: "use_case"
in: "path"
description: "Name of the Use-Case the Layer belongs to"
required: true
type: "string"
responses:
'200':
description: "Successful Request"
'400':
description: "Layer already exists"
delete:
security:
- JwtRegular: []
operationId: "routes.layer.delete_one"
tags:
- "Layers"
summary: "Delete one Layer from the DB"
description: "Delete one Layer from the DB"
parameters:
- name: "name"
in: "path"
description: "Name of the layer to delete"
required: true
type: "string"
- name: "use_case"
in: "path"
description: "Name of the Use-Case the layer belongs to"
required: true
type: "string"
responses:
'200':
description: "Successful Request"
'404':
description: "Layer does not exist"
/layers:
get:
security:
- JwtRegular: []
operationId: "routes.layer.all"
tags:
- "Layers"
summary: "Retrieve all Layers from the DB"
description: "Retrieve all Layers from the DB"
responses:
'200':
description: "Successful Request"
delete:
security:
- JwtRegular: []
operationId: "routes.layer.delete_all_layers"
tags:
- "Layers"
summary: "Delete all Layers from the DB"
description: "Delete all Layers from the DB"
responses:
'200':
description: "Successful Request"
post:
security:
- JwtRegular: []
operationId: "routes.layer.add_complete"
tags:
- "Layers"
summary: "Adds a complete Layer (including its properties) to the DB"
description: "Adds a complete Layer (including its properties) to the DB"
parameters:
- in: body
name: "Object"
required: true
schema:
$ref: '#/definitions/Layer'
responses:
'200':
description: "Successful Request"
'400':
description: "Bad structure in request body or Layer already exists"
/debug:
post:
security:
- JwtRegular: []
operationId: "debug.echo"
tags:
- "Echo"
summary: "Echo function for debugging purposes"
description: "Echoes the input back to the caller."
parameters:
- in: body
name: "Object"
required: true
schema:
type: object
responses:
'200':
description: "Successful echo of request data"
definitions:
LayerMapping:
type: "object"
required:
- internal
properties:
internal:
type: string
example: "end_time"
Mapping:
type: "object"
required:
- internal
- external
properties:
internal:
type: string
example: "end_time"
external:
type: string
example: "arrival_unix_timestamp_utc"
Layer:
type: "object"
required:
- name
- use_case
- cluster_properties
- properties
properties:
name:
type: string
example: "layer1"
use_case:
type: string
example: "car-sharing"
cluster_properties:
type: array
items:
type: string
example: "internal_property_1"
properties:
type: array
items:
type: string
example: "internal_property_1"
\ No newline at end of file
swagger: "2.0"
info:
title: Business Logic microservice
description: This is the documentation for the business logic microservice.
version: "1.0.0"
consumes:
- "application/json"
produces:
- "application/json"
basePath: "/api"
paths:
/use-cases/{use_case}/schema/mapping:
put:
operationId: "routes.layer.add_mapping_to_schema"
tags:
- "Schemas"
summary: "Adds an attribute mapping to the Schema of the Use-Case"
      description: "Adds an attribute mapping to each Layer of the Use-Case"
parameters:
- name: "use_case"
in: "path"
description: "Name of the Use-Case the Schema belongs to"
required: true
type: "string"
- in: body
name: "Object"
required: true
schema:
$ref: '#/definitions/Mapping'
responses:
'200':
description: "Successful Request"
/use-cases/{use_case}/schema:
get:
operationId: "routes.layer.schema_for_use_case"
tags:
- "Schemas"
summary: "Get the Schema assigned to the Use-Case"
description: "Get the Schema assigned to the Use-Case"
parameters:
- name: "use_case"
in: "path"
description: "Name of the Use-Case the Schema belongs to"
required: true
type: "string"
responses:
'200':
description: "Successful Request"
'404':
description: "Schema does not exist"
delete:
operationId: "routes.layer.delete_schema_for_use_case"
tags:
- "Schemas"
summary: "Delete the Schema assigned to the Use-Case"
description: "Delete the Schema assigned to the Use-Case"
parameters:
- name: "use_case"
in: "path"
description: "Name of the Use-Case the Schema belongs to"
required: true
type: "string"
responses:
'200':
description: "Successful Request"
'404':
description: "Schema does not exist"
/schemas:
get:
operationId: "routes.layer.all_schemas"
tags:
- "Schemas"
summary: "Get all Use-Cases"
description: "Get all Use-Cases"
responses:
'200':
description: "Successful Request"
/use-cases:
delete:
operationId: "routes.layer.delete_uses_cases"
tags:
- "Use-Cases"
summary: "Delete all Use-Cases"
description: "Delete all Use-Cases"
responses:
'200':
description: "Successful Request"
get:
operationId: "routes.layer.use_cases"
tags:
- "Use-Cases"
summary: "Retrieves all Use-Cases"
description: "Retrieves all Use-Cases"
responses:
'200':
description: "Successful Request"
/use-cases/{use_case}/layers:
get:
operationId: "routes.layer.get_all_for_use_case"
tags:
- "Layers"
summary: "Retrieves all layers belonging to the given Use-Case"
description: "Retrieves all layers belonging to the given Use-Case"
parameters:
- name: "use_case"
in: "path"
description: "Name of the Use-Case the layer belongs to"
required: true
type: "string"
responses:
'200':
description: "Successful Request"
'404':
description: "Use-Case does not exist"
/use-cases/{use_case}/cluster-mapping/layers/{name}:
delete:
operationId: "routes.layer.delete_cluster_mapping"
tags:
- "Layers"
summary: "Deletes a cluster-attribute mapping from the selected layer"
description: "Deletes a cluster-attribute mapping from the selected layer"
parameters:
- name: "name"
in: "path"
description: "Name of the layer (must exist)"
required: true
type: "string"
- name: "use_case"
in: "path"
description: "Name of the Use-Case the layer belongs to"
required: true
type: "string"
- in: body
name: "Object"
required: true
schema:
type: object
properties:
attribute:
type: string
example: "end_time"
description: "Internal name of the attribute"
responses:
'200':
description: "Successful Request"
'404':
description: "Layer does not exist"
'400':
description: "Field in request is missing or attribute does not exist in the Layer"
put:
operationId: "routes.layer.add_cluster_mapping"
tags:
- "Layers"
summary: "Selects a property of the Layer as cluster property"
description: "Selects a property of the Layer as cluster property"
parameters:
- name: "name"
in: "path"
description: "Name of the layer (must exist)"
required: true
type: "string"
- name: "use_case"
in: "path"
description: "Name of the Use-Case the layer belongs to"
required: true
type: "string"
- in: body
name: "Object"
required: true
schema:
type: object
properties:
attribute:
type: string
example: "end_time"
description: "Internal name of the attribute"
responses:
'200':
description: "Successful Request"
'404':
description: "Layer does not exist"
'400':
description: "Field in request is missing or attribute does not exist in the Layer"
/use-cases/{use_case}/layers/use-case:
put:
operationId: "routes.layer.update_use_case_for_all"
tags:
- "Layers"
summary: "Update the Use-Case of all Layers belonging to the Use-Case in the query"
description: "Update the Use-Case of all Layers belonging to the Use-Case in the query"
parameters:
- name: "use_case"
in: "path"
description: "Name of the Use-Case the Layer belongs to"
required: true
type: "string"
- in: body
name: "Object"
required: true
schema:
type: object
properties:
use_case:
type: string
example: "air-bnb"
responses:
'200':
description: "Successful Request"
'400':
description: "Field in request is missing"
/use-cases/{use_case}/layers/mapping:
put:
operationId: "routes.layer.add_mapping_for_all"
tags:
- "Layers"
summary: "Adds an attribute mapping to the each Layer of the Use-Case"
description: "Adds an attribute mapping to the each Layer of the Use-Case"
parameters:
- name: "use_case"
in: "path"
description: "Name of the Use-Case the Layer belongs to"
required: true
type: "string"
- in: body
name: "Object"
required: true
schema:
$ref: '#/definitions/LayerMapping'
responses:
'200':
description: "Successful Request"
'400':
description: "Field in request is missing"
delete:
operationId: "routes.layer.delete_mapping_for_all"
tags:
- "Layers"
summary: "Deletes an attribute mapping for all Layers of the Use-Case"
description: "Deletes an attribute mapping for all Layers of the Use-Case"
parameters:
- name: "use_case"
in: "path"
description: "Name of the Use-Case the Layer belongs to"
required: true
type: "string"
- in: body
name: "Object"
required: true
schema:
type: object
properties:
internal:
type: string
example: "end_time"
responses:
'200':
description: "Successful Request"
'400':
description: "Field in request is missing"
/use-cases/{use_case}/layers/{name}/mapping:
delete:
operationId: "routes.layer.delete_mapping"
tags:
- "Layers"
summary: "Deletes an attribute mapping from the selected Layer"
description: "Deletes an attribute mapping from the selected Layer"
parameters:
- name: "name"
in: "path"
description: "Name of the Layer (must exist)"
required: true
type: "string"
- name: "use_case"
in: "path"
description: "Name of the Use-Case the Layer belongs to"
required: true
type: "string"
- in: body
name: "Object"
required: true
schema:
$ref: '#/definitions/LayerMapping'
responses:
'200':
description: "Successful Request"
'404':
description: "Layer does not exist"
'400':
description: "Field in request is missing"
put:
operationId: "routes.layer.add_mapping"
tags:
- "Layers"
summary: "Adds an attribute mapping to the selected Layer"
description: "Adds an attribute mapping to the selected Layer"
parameters:
- name: "name"
in: "path"
description: "Name of the Layer (must exist)"
required: true
type: "string"
- name: "use_case"
in: "path"
description: "Name of the Use-Case the Layer belongs to"
required: true
type: "string"
- in: body
name: "Object"
required: true
schema:
$ref: '#/definitions/LayerMapping'
responses:
'200':
description: "Successful Request"
'404':
description: "Layer does not exist"
'400':
description: "Field in request is missing"
/use-cases/{use_case}/layers/{name}:
get:
operationId: "routes.layer.one"
tags:
- "Layers"
summary: "Retrieve one Layer from the DB"
description: "Retrieve one Layer from the DB"
parameters:
- name: "name"
in: "path"
description: "Name of the Layer"
required: true
type: "string"
- name: "use_case"
in: "path"
description: "Name of the Use-Case the Layer belongs to"
required: true
type: "string"
responses:
'200':
description: "Successful Request"
'404':
description: "Layer does not exist"
post:
operationId: "routes.layer.add"
tags:
- "Layers"
summary: "Adds an empty Layer to the DB"
description: "Adds an empty Layer to the DB"
parameters:
- name: "name"
in: "path"
description: "Name of the new Layer"
required: true
type: "string"
- name: "use_case"
in: "path"
description: "Name of the Use-Case the Layer belongs to"
required: true
type: "string"
responses:
'200':
description: "Successful Request"
'400':
description: "Layer already exists"
delete:
operationId: "routes.layer.delete_one"
tags:
- "Layers"
summary: "Delete one Layer from the DB"
description: "Delete one Layer from the DB"
parameters:
- name: "name"
in: "path"
description: "Name of the layer to delete"
required: true
type: "string"
- name: "use_case"
in: "path"
description: "Name of the Use-Case the layer belongs to"
required: true
type: "string"
responses:
'200':
description: "Successful Request"
'404':
description: "Layer does not exist"
/layers:
get:
operationId: "routes.layer.all"
tags:
- "Layers"
summary: "Retrieve all Layers from the DB"
description: "Retrieve all Layers from the DB"
responses:
'200':
description: "Successful Request"
delete:
operationId: "routes.layer.delete_all_layers"
tags:
- "Layers"
summary: "Delete all Layers from the DB"
description: "Delete all Layers from the DB"
responses:
'200':
description: "Successful Request"
post:
operationId: "routes.layer.add_complete"
tags:
- "Layers"
summary: "Adds a complete Layer (including its properties) to the DB"
description: "Adds a complete Layer (including its properties) to the DB"
parameters:
- in: body
name: "Object"
required: true
schema:
$ref: '#/definitions/Layer'
responses:
'200':
description: "Successful Request"
'400':
description: "Bad structure in request body or Layer already exists"
/debug:
post:
operationId: "debug.echo"
tags:
- "Echo"
summary: "Echo function for debugging purposes"
description: "Echoes the input back to the caller."
parameters:
- in: body
name: "Object"
required: true
schema:
type: object
responses:
'200':
description: "Successful echo of request data"
definitions:
LayerMapping:
type: "object"
required:
- internal
properties:
internal:
type: string
example: "end_time"
Mapping:
type: "object"
required:
- internal
- external
properties:
internal:
type: string
example: "end_time"
external:
type: string
example: "arrival_unix_timestamp_utc"
Layer:
type: "object"
required:
- name
- use_case
- cluster_properties
- properties
properties:
name:
type: string
example: "layer1"
use_case:
type: string
example: "car-sharing"
cluster_properties:
type: array
items:
type: string
example: "internal_property_1"
properties:
type: array
items:
type: string
example: "internal_property_1"
\ No newline at end of file
from typing import Dict, List
class LayerAdapter:
    '''
    Maps an internal layer (a named set of attributes) to the attributes
    of the dataset that correspond to each one.
    '''

    def __init__(self, name: str, use_case: str, properties: List[str], cluster_properties: List[str]):
        '''
        Creates a new LayerAdapter.

        @params:
            name               - Required : unique identifier for the layer
            use_case           - Required : identifier of the use-case this layer belongs to
            properties         - Required : list of attribute mappings from the Schema
            cluster_properties - Required : subset of properties used for clustering

        Raises ValueError if any cluster property is not contained in properties.
        '''
        self.name = name
        self.use_case = use_case
        self.properties = properties

        unknown = [p for p in cluster_properties if p not in properties]
        if unknown:
            raise ValueError(f"{unknown[0]} is no property in the layer!")
        self.cluster_properties = cluster_properties

    def add_mapping(self, internal: str):
        '''
        Adds a new attribute mapping to the layer (no-op when already present).

        @params:
            internal - Required: internal string identifier, e.g. "startTime"
        '''
        if internal not in self.properties:
            self.properties.append(internal)

    def delete_mapping(self, internal: str):
        '''
        Removes a property from the layer, including its cluster mapping if any.

        @params:
            internal - Required: internal string identifier, e.g. "startTime"

        Raises ValueError if the property does not exist.
        '''
        if internal not in self.properties:
            raise ValueError(f"Attribute {internal} is not an internal attribute!")

        if internal in self.cluster_properties:
            self.delete_cluster_mapping(internal)
        self.properties.remove(internal)

    def add_cluster_mapping(self, attribute: str):
        '''
        Marks an existing property as a clustering attribute. The cluster
        attributes are always a subset of self.properties.

        @params:
            attribute - Required: internal string identifier, e.g. "startTime"

        Raises ValueError if the attribute is not a property of the layer.
        '''
        if attribute not in self.properties:
            raise ValueError(f"Attribute {attribute} is not an internal attribute!")

        if attribute not in self.cluster_properties:
            self.cluster_properties.append(attribute)

    def delete_cluster_mapping(self, attribute: str):
        '''
        Unmarks a property as a clustering attribute.

        @params:
            attribute - Required: internal string identifier, e.g. "startTime"

        Raises ValueError if the attribute is not a property of the layer.
        '''
        if attribute not in self.properties:
            raise ValueError(f"Attribute {attribute} is not an internal attribute!")

        self.cluster_properties.remove(attribute)

    def to_serializable_dict(self) -> Dict:
        '''Returns a plain dict representation suitable for JSON/Mongo storage.'''
        return {
            "name": self.name,
            "properties": self.properties,
            "cluster_properties": self.cluster_properties,
            "use_case": self.use_case,
        }

    @staticmethod
    def from_serializable_dict(user_dict: Dict):
        '''
        Creates a LayerAdapter from a dict containing the keys
        name, use_case, properties and cluster_properties.
        '''
        return LayerAdapter(
            user_dict["name"],
            user_dict["use_case"],
            user_dict["properties"],
            user_dict["cluster_properties"],
        )
from typing import Dict
class Schema:
    '''
    Holds the attribute mappings (internal name -> dataset name) of one use-case.
    '''

    def __init__(self, use_case: str, mappings: Dict[str, str]):
        self.use_case = use_case
        self.mappings = mappings

    def add_mapping(self, internal: str, external: str):
        '''Adds a mapping; an existing internal name is never overwritten.'''
        self.mappings.setdefault(internal, external)

    def to_serializable_dict(self) -> Dict:
        '''Returns a plain dict representation for storage.'''
        return {"use_case": self.use_case, "mappings": self.mappings}

    @staticmethod
    def from_serializable_dict(serializable_dict: Dict):
        '''Reconstructs a Schema from its dict representation.'''
        return Schema(serializable_dict["use_case"], serializable_dict["mappings"])
\ No newline at end of file
from typing import Dict, List
class UseCase:
    '''
    Represents a single use-case, identified solely by its name.
    '''

    def __init__(self, name: str):
        '''
        Creates a new UseCase.

        @params:
            name - Required : unique identifier for the use-case
        '''
        self.name = name

    def to_serializable_dict(self) -> Dict:
        '''Returns a plain dict representation for storage.'''
        return {"name": self.name}

    @staticmethod
    def from_serializable_dict(user_dict: Dict):
        '''Creates a UseCase from a dict containing the key "name".'''
        return UseCase(user_dict["name"])
# global imports (dont't worry, red is normal)
import network_constants as netconst
from database.MongoRepositoryBase import MongoRepositoryBase
from db.entities.layer_adapter import LayerAdapter
from db.entities.use_case import UseCase
from db.entities.schema import Schema
import pymongo
import json
from typing import List, Dict
class Repository(MongoRepositoryBase):
    '''MongoDB repository for use-cases, layer adapters and schemas.'''

    def __init__(self):
        # Connects to the business-logic Mongo instance defined in network_constants.
        super().__init__(netconst.BUSINESS_LOGIC_DB_HOSTNAME,
                         netconst.BUSINESS_LOGIC_DB_PORT,
                         'rest-gateway-db')

        # collection names
        self._adapter_collection = 'layer_adapters'
        self._use_case_collection = 'use_cases'
        self._schema_collection = 'schemas'

    def add_schema(self, schema: Schema):
        '''Inserts the schema, creating its use-case first if necessary.'''
        self.put_use_case(schema.use_case)
        super().insert_entry(self._schema_collection, schema.to_serializable_dict())

    def all_schemas(self) -> List[Schema]:
        '''Returns all stored schemas.'''
        result = super().get_entries(self._schema_collection, projection={'_id': False})
        return [Schema.from_serializable_dict(row) for row in list(result)]

    def schema_for_use_case(self, use_case: str) -> Schema:
        '''
        Returns the schema of the given use-case, or None if there is none.
        Raises ValueError if more than one schema is stored for the use-case.
        (Annotation fixed: this returns a Schema/None, not List[Dict].)
        '''
        result = super().get_entries(self._schema_collection, projection={'_id': False}, selection={"use_case": use_case})
        result = list(result)

        if len(result) > 1:
            raise ValueError("No more than 1 Schema allowed per use-case!")

        if len(result) == 1:
            return Schema.from_serializable_dict(result[0])

        return None

    def delete_schema_with_use_case(self, use_case: str):
        '''Deletes the (single) schema belonging to the use-case.'''
        collection = self._database[self._schema_collection]
        collection.delete_one({"use_case": use_case})

    def delete_all_schemas(self):
        '''Deletes every stored schema.'''
        collection = self._database[self._schema_collection]
        collection.delete_many({})

    def update_schema(self, schema: Schema):
        '''Overwrites the stored schema with the same use-case.'''
        collection = self._database[self._schema_collection]
        collection.update_one({"use_case": schema.use_case}, {"$set": schema.to_serializable_dict()})

    def delete_all_use_cases_with_name(self, name: str):
        '''Deletes all use-case documents with the given name.'''
        collection = self._database[self._use_case_collection]
        collection.delete_many({"name": name})

    def delete_all_layers(self):
        '''Deletes every stored layer adapter.'''
        collection = self._database[self._adapter_collection]
        collection.delete_many({})

    def delete_all_use_cases(self):
        '''Deletes every use-case document (schemas and layers are not touched here).'''
        collection = self._database[self._use_case_collection]
        collection.delete_many({})

    def all_use_cases(self) -> List[UseCase]:
        '''Returns all stored use-cases as UseCase objects.'''
        dicts = list(super().get_entries(self._use_case_collection, projection={'_id': False}))
        return [UseCase.from_serializable_dict(d) for d in dicts]

    def put_use_case(self, use_case_name: str):
        '''Inserts the use-case unless one with that name already exists.'''
        use_cases = self.all_use_cases()
        existing_use_cases = list(filter(lambda use_case: use_case.name == use_case_name, use_cases))

        if len(existing_use_cases) == 0:
            use_case = UseCase(use_case_name)
            super().insert_entry(self._use_case_collection, use_case.to_serializable_dict())

    def add(self, adapter: LayerAdapter):
        '''Inserts the layer adapter, creating its use-case first if necessary.'''
        self.put_use_case(adapter.use_case)
        super().insert_entry(self._adapter_collection, adapter.to_serializable_dict())

    def one_by_name_and_usecase(self, name: str, use_case: str) -> List[Dict]:
        '''
        Returns the matching layer documents as raw dicts.
        (Annotation fixed: this never returned a single LayerAdapter.)
        NOTE(review): unlike the other queries, no projection is applied here,
        so the documents may include Mongo's '_id' field — confirm intended.
        '''
        return list(super().get_entries(self._adapter_collection, selection={"name": name, "use_case": use_case}))

    def update_use_case(self, adapter: LayerAdapter, use_case: str):
        '''Overwrites the layer stored under (adapter.name, use_case); used when the use-case itself changes.'''
        collection = self._database[self._adapter_collection]
        collection.update_one({"name": adapter.name, "use_case": use_case}, {"$set": adapter.to_serializable_dict()})

    def update(self, adapter: LayerAdapter):
        '''Overwrites the layer stored under (adapter.name, adapter.use_case).'''
        collection = self._database[self._adapter_collection]
        collection.update_one({"name": adapter.name, "use_case": adapter.use_case}, {"$set": adapter.to_serializable_dict()})

    def delete_all_with_name_and_use_case(self, name: str, use_case: str):
        '''Deletes all layers matching both name and use-case.'''
        collection = self._database[self._adapter_collection]
        collection.delete_many({"name": name, "use_case": use_case})

    def all(self) -> List[Dict]:
        '''Returns all layer documents as raw dicts (without '_id').'''
        result = super().get_entries(self._adapter_collection, projection={'_id': False})
        return list(result)

    def all_for_use_case(self, use_case: str) -> List[Dict]:
        '''Returns all layer documents of the use-case as raw dicts (without '_id').'''
        result = super().get_entries(self._adapter_collection, projection={'_id': False}, selection={"use_case": use_case})
        return list(result)
\ No newline at end of file
# add modules folder to interpreter path
import sys
import os
from pathlib import Path
from typing import Dict, Any

modules_path = '../../../modules/'
if os.path.exists(modules_path):
    sys.path.insert(1, modules_path)

# load swagger config
import connexion
from security import swagger_util

app = connexion.App(__name__, specification_dir='configs/')

from db.entities.layer_adapter import LayerAdapter


@app.route('/', methods=['GET'])
def api_root():
    '''Liveness endpoint at the service root.'''
    return 'Endpoint of business-logic-microservice!'


# SSL configuration: use the path from the environment if set, else the default mount.
try:
    certificate_path = os.environ['ARTICONF_CERTIFICATE_PATH']
except KeyError:
    certificate_path = '/srv/articonf/'

context = (os.path.normpath(f'{certificate_path}/articonf1.crt'),
           os.path.normpath(f'{certificate_path}/articonf1.key'))  # certificate and key files

# Local mode: the ARTICONF_LOCAL env var selects the local swagger spec.
try:
    local = os.environ['ARTICONF_LOCAL']
    # BUGFIX: only report local mode after the env lookup succeeded — previously
    # this message was printed even when falling back to the production spec.
    print("Running with local settings...")
    app.add_api(swagger_util.get_bundled_specs(Path("configs/swagger_local.yml")),
                resolver=connexion.RestyResolver("cms_rest_api"))
except KeyError:
    app.add_api(swagger_util.get_bundled_specs(Path("configs/swagger.yml")),
                resolver=connexion.RestyResolver("cms_rest_api"))

# start app
if __name__ == '__main__':
    app.run(host='0.0.0.0', port=5000, debug=False, ssl_context=context)
astroid==2.4.2
attrs==19.3.0
autopep8==1.5.4
certifi==2020.6.20
chardet==3.0.4
click==7.1.2
clickclick==1.2.2
colorama==0.4.3
connexion==2.7.0
Flask==1.1.2
idna==2.10
importlib-metadata==1.7.0
inflection==0.5.0
isort==4.3.21
itsdangerous==1.1.0
Jinja2==2.11.2
jsonschema==3.2.0
lazy-object-proxy==1.4.3
MarkupSafe==1.1.1
mccabe==0.6.1
openapi-spec-validator==0.2.9
prance==0.19.0
pycodestyle==2.6.0
pylint==2.5.3
pymongo==3.11.0
pyrsistent==0.16.0
PyYAML==5.3.1
requests==2.24.0
semver==2.10.2
six==1.15.0
swagger-ui-bundle==0.0.8
toml==0.10.1
typed-ast==1.4.1
urllib3==1.25.10
Werkzeug==1.0.1
wrapt==1.12.1
zipp==3.1.0
#global imports
from db.entities.layer_adapter import LayerAdapter
from services.layer_adapter_service import LayerAdapterService
import json
from flask import Response, request
def use_cases():
    '''Returns all use-cases as serializable dicts.'''
    return [adapter.to_serializable_dict() for adapter in LayerAdapterService.all_use_cases()]
def delete_uses_cases():
    '''Deletes all use-cases together with their schemas and layers.'''
    # NOTE: the typo in the name ("uses_cases") must stay — the swagger spec
    # references this handler as operationId "routes.layer.delete_uses_cases".
    LayerAdapterService.delete_all_use_cases()
    return Response(status=200)
def all():
    '''Returns all layers of all use-cases as serializable dicts.'''
    return LayerAdapterService.all()
def all_schemas():
    '''Returns all schemas as serializable dicts.'''
    return [schema.to_serializable_dict() for schema in LayerAdapterService.all_schemas()]
def schema_for_use_case(use_case: str):
    '''Returns the schema of the given use-case (200), or 404 if none exists.'''
    schema = LayerAdapterService.schema_for_use_case(use_case)

    if schema != None:
        return Response(status=200, response=json.dumps(schema.to_serializable_dict()))

    return Response(status=404, response=f"Schema {use_case} does not exist")
def add_mapping_to_schema(use_case: str):
    '''
    Adds an internal->external attribute mapping to the use-case's schema,
    creating the schema first if it does not exist yet.
    The request body must contain the fields "internal" and "external".
    '''
    data = request.json

    if "internal" not in data or "external" not in data:
        return Response(status=400, response=f"Field missing! Fields required: (internal, external)")

    schema = LayerAdapterService.schema_for_use_case(use_case)
    if schema == None:
        # lazily create an empty schema for the use-case
        print("schema not there, creating it...")
        schema = LayerAdapterService.put_schema(use_case)

    schema.add_mapping(data["internal"], data["external"])
    LayerAdapterService.update_schema(schema)
    return Response(status=200)
def delete_schema_for_use_case(use_case: str):
    '''Deletes the schema of the given use-case (200), or 404 if none exists.'''
    schema = LayerAdapterService.schema_for_use_case(use_case)
    if schema == None:
        return Response(status=404, response=f"Schema {use_case} does not exist")

    LayerAdapterService.delete_schema(use_case)
    return Response(status=200)
##########
# LAYERS #
##########
def get_all_for_use_case(use_case: str):
    '''
    Returns all layers assigned to the given use-case as serializable dicts.
    '''
    return LayerAdapterService.all_for_use_case(use_case)
def add(name: str, use_case: str):
    '''
    add an empty layer to the DB
    @params:
        name     - Required : unique identifier for the layer
        use_case - Required : String-identifier for the use-case
    Returns 200 on success, 400 if the layer already exists.
    '''
    if LayerAdapterService.one(name, use_case) != None:
        return Response(status=400, response=f"Layer with name '{name}' already exists!")

    LayerAdapterService.add(name, use_case)
    return Response(status=200)
def add_complete():
    '''
    Adds a layer that already contains attribute mappings to the DB.

    The request body must contain the fields name, use_case, properties and
    cluster_properties; every property must exist in the use-case's schema.

    Returns 200 on success, 400 for a malformed body, an already existing
    layer, or a schema violation.
    '''
    data = request.json

    if "name" not in data or "properties" not in data or "cluster_properties" not in data or "use_case" not in data:
        # BUGFIX: "use_case" is checked above but was missing from this message
        return Response(status=400, response=f"Field missing! Fields required: (name, properties, cluster_properties, use_case)")

    layer = LayerAdapterService.one(data["name"], data["use_case"])
    if layer != None:
        return Response(status=400, response=f'Layer with name "{data["name"]}" already exists!')

    try:
        layer_new = LayerAdapter.from_serializable_dict(data)
    except BaseException as e:
        print(f"Exception: {e}")
        return Response(status=400)

    # reject layers whose properties are not part of the use-case's schema
    try:
        LayerAdapterService.check_layer(layer_new)
    except ValueError as e:
        return Response(status=400, response=f"{e}")

    LayerAdapterService.add_complete(layer_new)
    return Response(status=200)
def one(name: str, use_case: str):
    '''
    fetch a single layer from the DB
    @params:
        name     - Required : unique identifier for the layer
        use_case - Required : String-identifier for the use-case
    Returns 200 with the serialized layer, or 404 if it does not exist.
    '''
    layer = LayerAdapterService.one(name, use_case)
    if layer == None:
        return Response(status=404, response=f"Layer with name '{name}' does not exist!")

    return Response(status=200, response=json.dumps(layer.to_serializable_dict()))
def add_mapping_for_all(use_case: str):
    '''
    add a new mapping to each layer of the use-case
    @params:
        use_case - Required : String-identifier for the use-case
    Body must contain "internal"; the attribute must exist in the schema.
    Returns 200 on success, 400 on a missing field or schema violation.
    '''
    data = request.json

    if "internal" not in data:
        return Response(status=400, response=f"Field missing! Fields required: (internal)")

    # check if schema contains mapping
    schema = LayerAdapterService.put_schema(use_case)
    if data["internal"] not in schema.mappings:
        return Response(status=400, response=f'{data["internal"]} is not existent in the schema!')

    layers = LayerAdapterService.all_for_use_case(use_case)
    layers = [LayerAdapter.from_serializable_dict(d) for d in layers]

    for layer in layers:
        layer.add_mapping(data["internal"])

        try:
            LayerAdapterService.check_layer(layer)
        except ValueError as e:
            # NOTE(review): layers updated in earlier iterations are not rolled back
            return Response(status=400, response=f"{e}")

        LayerAdapterService.update(layer)

    return Response(status=200)
def add_mapping(name: str, use_case: str):
    '''
    add a new mapping to the layer identified by name
    @params:
        name     - Required : unique identifier for the layer
        use_case - Required : String-identifier for the use-case
    Body must contain "internal"; the attribute must exist in the schema.
    Returns 200 on success, 404 for an unknown layer, 400 otherwise.
    '''
    layer = LayerAdapterService.one(name, use_case)
    if layer == None:
        return Response(status=404, response=f"Layer with name '{name}' does not exist!")

    data = request.json
    if "internal" not in data:
        return Response(status=400, response=f"Field missing! Fields required: (internal)")

    # check if schema contains mapping
    schema = LayerAdapterService.put_schema(use_case)
    if data["internal"] not in schema.mappings:
        return Response(status=400, response=f'{data["internal"]} is not existent in the schema!')

    layer.add_mapping(data["internal"])
    LayerAdapterService.update(layer)
    return Response(status=200)
def update_use_case_for_all(use_case: str):
    '''
    update the use-case for each layer in the use-case

    this changes the use-case of all layers, meaning the use-case used
    for this request might not be usable after the request
    @params:
        use_case - Required : String-identifier for the use-case
    Body must contain "use_case" (the new identifier).
    '''
    data = request.json

    if "use_case" not in data:
        return Response(status=400, response=f"Field missing! Fields required: (use_case)")

    layers = LayerAdapterService.all_for_use_case(use_case)
    layers = [LayerAdapter.from_serializable_dict(d) for d in layers]

    for layer in layers:
        layer.use_case = data["use_case"]

        try:
            LayerAdapterService.check_layer(layer)
        except ValueError as e:
            # NOTE(review): layers updated in earlier iterations are not rolled back
            return Response(status=400, response=f"{e}")

        LayerAdapterService.update_use_case(layer, use_case)

    return Response(status=200)
def delete_mapping_for_all(use_case: str):
    '''
    delete a mapping from each layer in the use-case
    @params:
        use_case - Required : String-identifier for the use-case
    Layers that do not contain the mapping are skipped silently.
    '''
    data = request.json

    if "internal" not in data:
        return Response(status=400, response=f"Field missing! Fields required: (internal)")

    layers = LayerAdapterService.all_for_use_case(use_case)
    layers = [LayerAdapter.from_serializable_dict(d) for d in layers]

    for layer in layers:
        try:
            layer.delete_mapping(data["internal"])
        except ValueError:
            # mapping not present in this layer -> nothing to delete
            continue

        LayerAdapterService.update(layer)

    return Response(status=200)
def delete_mapping(name: str, use_case: str):
    '''
    delete a mapping from the layer identified by the internal representation
    @params:
        name     - Required : unique identifier for the layer
        use_case - Required : String-identifier for the use-case
    Returns 200 on success, 404 for an unknown layer, 400 otherwise.
    '''
    layer = LayerAdapterService.one(name, use_case)
    if layer == None:
        return Response(status=404, response=f"Layer with name '{name}' does not exist!")

    data = request.json
    if "internal" not in data:
        return Response(status=400, response=f"Field missing! Fields required: (internal)")

    try:
        layer.delete_mapping(data["internal"])
        LayerAdapterService.update(layer)
    except ValueError:
        return Response(status=400, response=f'{data["internal"]} is not a property of the layer!')

    return Response(status=200)
def add_cluster_mapping(name: str, use_case: str):
    '''
    add a mapped property to the list of properties to cluster with
    @params:
        name     - Required : unique identifier for the layer
        use_case - Required : String-identifier for the use-case
    Body must contain "attribute", which must be a property of the layer.
    Returns 200 on success, 404 for an unknown layer, 400 otherwise.
    '''
    layer = LayerAdapterService.one(name, use_case)
    if layer == None:
        return Response(status=404, response=f"Layer with name '{name}' does not exist!")

    data = request.json
    if "attribute" not in data:
        return Response(status=400, response=f"Field missing! Fields required: (attribute)")

    try:
        layer.add_cluster_mapping(data["attribute"])
        LayerAdapterService.update(layer)
        return Response(status=200)
    except ValueError:
        # BUGFIX: was a bare "except:", which also swallowed unrelated errors
        # (e.g. DB failures) and mislabeled them as an unknown attribute;
        # add_cluster_mapping raises ValueError for unknown attributes,
        # matching delete_cluster_mapping's handling.
        return Response(status=400, response=f'{data["attribute"]} is no attribute in the layer!')
def delete_cluster_mapping(name: str, use_case: str):
    '''
    remove a mapped property from the list of properties to cluster with
    @params:
        name     - Required : unique identifier for the layer
        use_case - Required : String-identifier for the use-case
    Returns 200 on success, 404 for an unknown layer, 400 otherwise.
    '''
    layer = LayerAdapterService.one(name, use_case)
    if layer == None:
        return Response(status=404, response=f"Layer with name '{name}' does not exist!")

    data = request.json
    if "attribute" not in data:
        return Response(status=400, response=f"Field missing! Fields required: (attribute)")

    try:
        layer.delete_cluster_mapping(data["attribute"])
        LayerAdapterService.update(layer)
        return Response(status=200)
    except ValueError as e:
        print(e)
        return Response(status=400, response=f'{data["attribute"]} is no attribute in the layer!')
def delete_one(name: str, use_case: str):
    '''
    delete a layer and all its mappings from the Db
    @params:
        name     - Required : unique identifier for the layer
        use_case - Required : String-identifier for the use-case
    Returns 200 on success, 404 for an unknown layer.
    '''
    layer = LayerAdapterService.one(name, use_case)
    if layer == None:
        return Response(status=404, response=f"Layer with name '{name}' does not exist!")

    LayerAdapterService.delete(layer)
    return Response(status=200)
def delete_all_layers():
    '''
    delete all layers from the DB
    '''
    LayerAdapterService.delete_all_layers()
    return Response(status=200)
\ No newline at end of file
#global imports
from db.repository import Repository
from db.entities.layer_adapter import LayerAdapter
from db.entities.schema import Schema
from typing import List
class LayerAdapterService:
    # single shared repository instance; connects to Mongo when the module is imported
    _repository = Repository()

    @staticmethod
    def all_schemas() -> List[Schema]:
        '''Returns all stored schemas.'''
        return LayerAdapterService._repository.all_schemas()

    @staticmethod
    def schema_for_use_case(use_case: str) -> Schema:
        '''Returns the schema of the use-case (or None), ensuring the use-case exists first.'''
        LayerAdapterService._repository.put_use_case(use_case)
        return LayerAdapterService._repository.schema_for_use_case(use_case)

    @staticmethod
    def put_schema(use_case: str) -> Schema:
        '''Returns the schema of the use-case, creating an empty one if none exists.'''
        schema = LayerAdapterService.schema_for_use_case(use_case)

        if schema == None:
            schema = Schema(use_case, mappings={})
            LayerAdapterService._repository.add_schema(schema)

        return schema

    @staticmethod
    def check_layer(layer: LayerAdapter):
        '''
        checks if the given layer has correct mappings regarding the schema of the use_case

        Raises ValueError if a layer property is not present in the schema.
        '''
        schema = LayerAdapterService.put_schema(layer.use_case)

        for p in layer.properties:
            if p not in schema.mappings:
                raise ValueError(f'{p} is not existent in the schema!')

    @staticmethod
    def delete_schema(use_case: str):
        '''Deletes the schema of the use-case.'''
        LayerAdapterService._repository.delete_schema_with_use_case(use_case)

    @staticmethod
    def update_schema(schema: Schema):
        '''Overwrites the stored schema with the same use-case.'''
        LayerAdapterService._repository.update_schema(schema)

    @staticmethod
    def all_use_cases() -> List['UseCase']:
        '''Returns all use-cases (annotation fixed: the repository returns UseCase objects, not strings).'''
        return LayerAdapterService._repository.all_use_cases()

    @staticmethod
    def delete_all_use_cases():
        '''Deletes all use-cases together with all schemas and all layers.'''
        LayerAdapterService._repository.delete_all_use_cases()
        LayerAdapterService._repository.delete_all_schemas()
        LayerAdapterService._repository.delete_all_layers()

    @staticmethod
    def delete_use_case(name: str):
        '''Deletes all use-case documents with the given name.'''
        LayerAdapterService._repository.delete_all_use_cases_with_name(name)

    @staticmethod
    def all() -> List[dict]:
        '''
        Return all currently defined layers as serializable dicts
        (annotation fixed: the repository returns raw dicts, not LayerAdapter objects).
        '''
        return LayerAdapterService._repository.all()

    @staticmethod
    def all_for_use_case(use_case: str) -> List[dict]:
        '''
        Return all currently defined layers that belong to the
        given use-case, as serializable dicts.
        '''
        return LayerAdapterService._repository.all_for_use_case(use_case)

    @staticmethod
    def update(layer: LayerAdapter):
        '''
        Overwrite the stored instance with the given one which is
        identified by the layer name
        @params:
            layer - Required : layer object holding the current data
        '''
        LayerAdapterService._repository.update(layer)

    @staticmethod
    def update_use_case(layer: LayerAdapter, use_case: str):
        '''
        Overwrite the stored instance identified by (layer.name, use_case);
        used when the layer's use-case itself is being changed.
        @params:
            layer    - Required : layer object holding the current data
            use_case - Required : the use-case the layer is currently stored under
        '''
        LayerAdapterService._repository.update_use_case(layer, use_case)

    @staticmethod
    def add(name: str, use_case: str):
        '''
        Add a new layer to the DB. Attribute mapping and cluster
        attributes will be empty per default
        @params:
            name     - Required : Unique name for a layer.
            use_case - Required : String-identifier for the use-case
        '''
        adapter_new = LayerAdapter(name, use_case, [], [])
        LayerAdapterService._repository.add(adapter_new)

    @staticmethod
    def add_complete(layer: LayerAdapter):
        '''
        Add a new layer to the DB. Attribute mappings and cluster
        attributes of the given layer are used
        @params:
            layer - Required : layer object holding correct data
        '''
        LayerAdapterService._repository.add(layer)

    @staticmethod
    def delete(layer: LayerAdapter):
        '''
        delete all layers with the given name and use-case.
        @params:
            layer - Required : layer object to remove from the DB
        '''
        use_case = layer.use_case
        LayerAdapterService._repository.delete_all_with_name_and_use_case(layer.name, use_case)

    @staticmethod
    def delete_all_layers():
        '''Deletes every stored layer.'''
        LayerAdapterService._repository.delete_all_layers()

    @staticmethod
    def one(name: str, use_case: str) -> LayerAdapter:
        '''
        Retrieve a single layer from the DB. Returns None if no layer
        was found under this name.
        @params:
            name     - Required : Unique name for a layer
            use_case - Required : String-identifier for the use-case
        '''
        result = LayerAdapterService._repository.one_by_name_and_usecase(name, use_case)

        if len(result) == 1:
            return LayerAdapter.from_serializable_dict(result[0])
        else:
            return None
import unittest
from db.entities.layer_adapter import LayerAdapter
class Layer_Adapter_Test(unittest.TestCase):
    '''Smoke test for the LayerAdapter entity.'''

    def test_valid_adapter(self):
        # NOTE(review): constructor arity differs from LayerAdapterService.add,
        # which calls LayerAdapter(name, use_case, [], []) — confirm the signature.
        adapter1 = LayerAdapter("layer1", {"a": "b", "c": "d"}, ["a"])
        # BUG FIX: to_serializable_dict was referenced without parentheses, so the
        # bound method object (not the serialized dict) was printed and nothing
        # was actually asserted.
        self.assertIsInstance(adapter1.to_serializable_dict(), dict)


if __name__ == '__main__':
    unittest.main()
\ No newline at end of file
apiVersion: v1
kind: Service
metadata:
name: business-logic
spec:
type: LoadBalancer
selector:
app: business-logic
ports:
- name: http
port: 80
targetPort: 5000
nodePort: 30420
protocol: TCP
---
apiVersion: apps/v1
kind: Deployment
metadata:
name: business-logic
spec:
replicas: 1
selector:
matchLabels:
app: business-logic
template:
metadata:
labels:
app: business-logic
spec:
containers:
- name: business-logic
image: alexx882/business-logic-microservice
ports:
- containerPort: 5000
volumeMounts:
- mountPath: /srv/articonf
name: articonf
volumes:
- name: articonf
hostPath:
path: /srv/articonf
type: Directory
---
apiVersion: v1
kind: Service
metadata:
name: business-logic-db
spec:
type: LoadBalancer
selector:
app: business-logic-db
ports:
- name: http
port: 27017
targetPort: 27017
nodePort: 30421
protocol: TCP
---
apiVersion: apps/v1
kind: Deployment
metadata:
name: business-logic-db
spec:
replicas: 1
selector:
matchLabels:
app: business-logic-db
template:
metadata:
labels:
app: business-logic-db
spec:
containers:
- name: business-logic-db
image: mongo
env:
- name: MONGO_INITDB_ROOT_USERNAME
value: root
- name: MONGO_INITDB_ROOT_PASSWORD
value: root
ports:
- containerPort: 27017
\ No newline at end of file
......@@ -4,6 +4,10 @@ info:
description: This is the documentation for the RESTful API gateway.
version: "1.0.0"
# Import security definitions from separate file
securityDefinitions:
$ref: '../security/security.yml#securityDefinitions'
consumes:
- "application/json"
produces:
......@@ -11,11 +15,129 @@ produces:
basePath: "/api"
# Paths supported by the server application
paths:
/secret:
get:
security:
- JwtRegular: []
operationId: "routes.user.secret"
tags:
- "User"
summary: "Testpage for authentication"
description: "Should only be accessible with a valid JWT token in the 'authorization' header"
responses:
'200':
description: "OK"
'401':
description: "No or an invalid token was provided"
/tokens/{token}:
post:
operationId: "routes.user.verify"
tags:
- "User"
summary: "Verifies a user token"
description: "Verifies a user token"
parameters:
- name: "token"
in: "path"
description: "Target token that will be verified"
required: true
type: "string"
responses:
'200':
description: "Verification successful"
'401':
description: "Invalid token"
/tokens:
post:
operationId: "routes.user.authenticate"
tags:
- "User"
summary: "Authenticates user at the backend"
description: "Authenticates user at the backend creating a JWT token in the backend"
parameters:
- in: body
name: "Object"
required: true
schema:
$ref: '#/definitions/TokenRequest'
responses:
'200':
description: "Authentication successful"
schema:
$ref: "#/definitions/TokenReply"
'400':
description: "Wrong credentials"
/users/username/{username}:
delete:
security:
- JwtAdmin: []
operationId: "routes.user.delete"
tags:
- "User"
summary: "Deletes a user identified by the username from the database"
description: "Deletes a user identified by the username from the database"
parameters:
- name: "username"
in: "path"
description: "Username of the user to be deleted"
required: true
type: "string"
responses:
'200':
description: "Deletion succeeded"
'400':
description: "User does not exist"
/users:
get:
security:
- JwtAdmin: []
operationId: "routes.user.all"
tags:
- "User"
summary: "Retrieves all users from the database"
description: "Retrieves all users from the database"
responses:
'200':
description: complete user object including numeric ID
schema:
type: array
items:
$ref: "#/definitions/User"
'400':
description: wrong username or password
post:
security:
- JwtAdmin: []
operationId: "routes.user.add"
tags:
- "User"
summary: "Adds a new user to the database"
description: "Adds a new user to the database"
parameters:
- in: body
name: "Object"
required: true
schema:
type: object
properties:
username:
type: string
example: "username@domain.com"
password:
type: string
example: "secure_passw0rd"
responses:
'200':
description: "User was added to the database"
'400':
description: "User already exists"
/debug:
post:
operationId: "rest.debug.echo"
operationId: "routes.debug.echo"
tags:
- "Echo"
summary: "Echo function for debugging purposes"
......@@ -27,12 +149,12 @@ paths:
schema:
type: object
responses:
200:
'200':
description: "Successful echo of request data"
/trace:
post:
operationId: "rest.blockchain_trace.receive"
operationId: "routes.blockchain_trace.receive"
tags:
- "Blockchain Trace"
summary: "Add a new blockchain trace to SMART"
......@@ -45,12 +167,56 @@ paths:
schema:
$ref: "#/definitions/BlockchainTrace"
responses:
201:
'201':
description: "Successfully added"
400:
'400':
description: "Invalid input"
definitions:
TokenReply:
type: "object"
required:
- token
properties:
token:
type: string
example: "eyJ0eXAiOiJKV1QiLCJhbGciOiJIUzI1NiJ9.eyJ1c2VybmFtZSI6InVzZXJuYW1lQGRvbWFpbi5jb20iLCJjcmVhdGVkX2F0IjoiMjAyMC0wNy0xNSAxNTo0Mzo0OC43MjQ4MjciLCJ2YWxpZF91bnRpbCI6IjIwMjAtMDctMTYgMTU6NDM6NDguNzI0ODI3In0.aR2Xe3pXj_MBS9UJKqhiq4u9M6Bv41ILPaKpA8BVzIY"
TokenRequest:
type: "object"
required:
- username
- password
properties:
username:
type: string
example: "username@domain.com"
password:
type: string
example: "secure_passw0rd"
User:
type: "object"
required:
- username
- password
- role
- created_at
- last_login
properties:
username:
type: string
example: "username@domain.com"
password:
type: string
example: "secure_passw0rd"
role:
type: string
example: "u"
created_at:
type: string
example: "2020-07-14 14:37:31.670671"
last_login:
type: string
example: "2020-07-14 14:37:31.670671"
BlockchainTrace:
type: "object"
properties:
......
swagger: "2.0"
info:
title: RESTful API Gateway
description: This is the documentation for the RESTful API gateway.
version: "1.0.0"
consumes:
- "application/json"
produces:
- "application/json"
basePath: "/api"
# Paths supported by the server application
paths:
/secret:
get:
operationId: "routes.user.secret"
tags:
- "User"
summary: "Testpage for authentication"
description: "Should only be accessible with a valid JWT token in the 'authorization' header"
responses:
'200':
description: "OK"
'401':
description: "No or an invalid token was provided"
/tokens/{token}:
post:
operationId: "routes.user.verify"
tags:
- "User"
summary: "Verifies a user token"
description: "Verifies a user token"
parameters:
- name: "token"
in: "path"
description: "Target token that will be verified"
required: true
type: "string"
responses:
'200':
description: "Verification successful"
'401':
description: "Invalid token"
/tokens:
post:
operationId: "routes.user.authenticate"
tags:
- "User"
summary: "Authenticates user at the backend"
description: "Authenticates user at the backend creating a JWT token in the backend"
parameters:
- in: body
name: "Object"
required: true
schema:
$ref: '#/definitions/TokenRequest'
responses:
'200':
description: "Authentication successful"
schema:
$ref: "#/definitions/TokenReply"
'400':
description: "Wrong credentials"
/users/username/{username}:
delete:
operationId: "routes.user.delete"
tags:
- "User"
summary: "Deletes a user identified by the username from the database"
description: "Deletes a user identified by the username from the database"
parameters:
- name: "username"
in: "path"
description: "Username of the user to be deleted"
required: true
type: "string"
responses:
'200':
description: "Deletion succeeded"
'400':
description: "User does not exist"
/users:
get:
operationId: "routes.user.all"
tags:
- "User"
summary: "Retrieves all users from the database"
description: "Retrieves all users from the database"
responses:
'200':
description: complete user object including numeric ID
schema:
type: array
items:
$ref: "#/definitions/User"
'400':
description: wrong username or password
post:
operationId: "routes.user.add"
tags:
- "User"
summary: "Adds a new user to the database"
description: "Adds a new user to the database"
parameters:
- in: body
name: "Object"
required: true
schema:
type: object
properties:
username:
type: string
example: "username@domain.com"
password:
type: string
example: "secure_passw0rd"
responses:
'200':
description: "User was added to the database"
'400':
description: "User already exists"
/debug:
post:
operationId: "routes.debug.echo"
tags:
- "Echo"
summary: "Echo function for debugging purposes"
description: "Echoes the input back to the caller."
parameters:
- in: body
name: "Object"
required: true
schema:
type: object
responses:
'200':
description: "Successful echo of request data"
/trace:
post:
operationId: "routes.blockchain_trace.receive"
tags:
- "Blockchain Trace"
summary: "Add a new blockchain trace to SMART"
description: "Receives a new blockchain trace to store in SMART."
parameters:
- in: body
name: "BlockchainTrace"
description: "The trace to be added"
required: true
schema:
$ref: "#/definitions/BlockchainTrace"
responses:
'201':
description: "Successfully added"
'400':
description: "Invalid input"
definitions:
TokenReply:
type: "object"
required:
- token
properties:
token:
type: string
example: "eyJ0eXAiOiJKV1QiLCJhbGciOiJIUzI1NiJ9.eyJ1c2VybmFtZSI6InVzZXJuYW1lQGRvbWFpbi5jb20iLCJjcmVhdGVkX2F0IjoiMjAyMC0wNy0xNSAxNTo0Mzo0OC43MjQ4MjciLCJ2YWxpZF91bnRpbCI6IjIwMjAtMDctMTYgMTU6NDM6NDguNzI0ODI3In0.aR2Xe3pXj_MBS9UJKqhiq4u9M6Bv41ILPaKpA8BVzIY"
TokenRequest:
type: "object"
required:
- username
- password
properties:
username:
type: string
example: "username@domain.com"
password:
type: string
example: "secure_passw0rd"
User:
type: "object"
required:
- username
- password
- role
- created_at
- last_login
properties:
username:
type: string
example: "username@domain.com"
password:
type: string
example: "secure_passw0rd"
role:
type: string
example: "u"
created_at:
type: string
example: "2020-07-14 14:37:31.670671"
last_login:
type: string
example: "2020-07-14 14:37:31.670671"
BlockchainTrace:
type: "object"
properties:
TransactionId:
type: string
format: uuid
Timestamp:
type: "string"
format: "date-time"
ApplicationType:
type: "string"
TransactionFrom:
type: "string"
format: "uuid"
TransactionFromLatLng:
type: "string"
TransactionTo:
type: "string"
format: "uuid"
TransactionToLatLng:
type: "string"
TransferredAsset:
type: "string"
ResourceIds:
type: "string"
ResourceMd5:
type: "string"
ResourceState:
type: "string"
Metadata:
type: "string"
import json
from typing import Dict
from datetime import datetime
class User:
    '''
    Represents a single user account in the SMART system.
    '''

    def __init__(self, username: str, password: str, role: str = "u"):
        '''
        Creates a user with the given credentials.

        @params:
            username - Required : unique identifier for the user i.e. an E-Mail address
            password - Required : raw, unhashed password used to authenticate the user later
            role     - Optional : privilege indicator; "u" denotes a standard user
        '''
        self.username = username
        self.password = password
        # both timestamps start at creation time; last_login is updated on login
        self.created_at = str(datetime.now())
        self.last_login = str(datetime.now())
        self.role = role

    def to_serializable_dict(self) -> Dict:
        '''Returns a plain dict suitable for JSON/Mongo serialization.'''
        return dict(
            username=self.username,
            password=self.password,
            role=self.role,
            created_at=self.created_at,
            last_login=self.last_login,
        )

    @staticmethod
    def from_serializable_dict(user_dict: Dict):
        '''Reconstructs a User from a dict produced by to_serializable_dict.'''
        restored = User(user_dict["username"], user_dict["password"], user_dict["role"])
        restored.created_at = user_dict["created_at"]
        restored.last_login = user_dict["last_login"]
        return restored

    def __repr__(self):
        return json.dumps(self.to_serializable_dict())

    def __str__(self):
        return f"User({self.__repr__()})"
# global imports (don't worry, red is normal)
import network_constants as netconst
from database.MongoRepositoryBase import MongoRepositoryBase
import pymongo
import json
from db.entities.user import User
from typing import List
class Repository(MongoRepositoryBase):
    '''MongoDB repository for user accounts of the REST gateway.'''

    def __init__(self):
        super().__init__(netconst.REST_GATEWAY_DB_HOSTNAME,
                         netconst.REST_GATEWAY_DB_PORT,
                         'rest-gateway-db')
        # name of the MongoDB collection holding user documents
        self._user_collection = 'user'

    def one_by_username(self, username: str) -> List:
        # NOTE(review): despite the former '-> User' annotation this returns a
        # (possibly empty) list of raw user dicts — annotation corrected.
        return list(super().get_entries(self._user_collection, selection={"username": username}))

    def add(self, user: User):
        '''Inserts the serialized user document into the collection.'''
        super().insert_entry(self._user_collection, user.to_serializable_dict())

    def update(self, user: User):
        '''Overwrites the stored document identified by the user's username.'''
        collection = self._database[self._user_collection]
        collection.update_one({"username": user.username}, {"$set": user.to_serializable_dict()})

    def delete_all_with_username(self, username: str):
        '''Removes every document whose username matches.'''
        collection = self._database[self._user_collection]
        collection.delete_many({"username": username})

    # TODO maybe movable to MongoRepositoryBase?
    def all(self) -> List:
        # NOTE(review): returns raw dicts with '_id' stripped, not User
        # instances — confirm against callers before tightening the annotation.
        result = super().get_entries(self._user_collection, projection={'_id': False})
        return list(result)
\ No newline at end of file
# add modules folder to interpreter path
import sys
import os
import prance
from pathlib import Path
from typing import Dict, Any
# make the shared 'modules' folder importable when running from source;
# skipped silently when the relative path does not exist (e.g. in Docker)
modules_path = '../../modules/'
if os.path.exists(modules_path):
    sys.path.insert(1, modules_path)
......@@ -12,16 +16,34 @@ logging.basicConfig(level=logging.INFO, format=LOG_FORMAT)
LOGGER = logging.getLogger(__name__)
#################################
import connexion
from security import swagger_util
from env_info import is_running_locally
# load swagger config
app = connexion.App(__name__, specification_dir='configs/')
app.add_api('swagger.yml')
@app.route('/', methods=['GET'])
def api_root():
    # plain landing text so a GET on '/' confirms the gateway is running
    return 'Endpoint of SMART RESTful API Gateway!'
# SSL configuration
try:
certificate_path = os.environ['ARTICONF_CERTIFICATE_PATH']
except KeyError:
certificate_path = '/srv/articonf/'
context = (os.path.normpath(f'{certificate_path}/articonf1.crt'), os.path.normpath(f'{certificate_path}/articonf1.key')) # certificate and key files
if is_running_locally():
# Local Mode
print("Running with local settings...")
app.add_api(swagger_util.get_bundled_specs(Path("configs/swagger_local.yml")),
resolver = connexion.RestyResolver("cms_rest_api"))
context = 'adhoc'
else:
app.add_api(swagger_util.get_bundled_specs(Path("configs/swagger.yml")),
resolver = connexion.RestyResolver("cms_rest_api"))
# start app
if __name__ == '__main__':
app.run(host='0.0.0.0', port=5000, debug=True, use_reloader=False) # disable reloader so only subscribed once to rabbitmq
\ No newline at end of file
app.run(host='0.0.0.0', port=5000, debug=True, use_reloader=False, ssl_context=context) # disable reloader so only subscribed once to rabbitmq
\ No newline at end of file
flask
connexion[swagger-ui]
pika
deprecated
astroid==2.4.2
attrs==19.3.0
autopep8==1.5.3
bcrypt==3.1.7
certifi==2020.6.20
cffi==1.14.0
chardet==3.0.4
click==7.1.2
clickclick==1.2.2
colorama==0.4.3
connexion==2.7.0
cryptography==2.9.2
Deprecated==1.2.10
Flask==1.1.2
idna==2.10
importlib-metadata==1.7.0
inflection==0.5.0
isort==4.3.21
itsdangerous==1.1.0
Jinja2==2.11.2
jsonschema==3.2.0
lazy-object-proxy==1.4.3
MarkupSafe==1.1.1
mccabe==0.6.1
openapi-spec-validator==0.2.8
pika==1.1.0
prance==0.19.0
pycodestyle==2.6.0
pycparser==2.20
PyJWT==1.7.1
pylint==2.5.3
pymongo==3.10.1
pyOpenSSL==19.1.0
pyrsistent==0.16.0
PyYAML==5.3.1
requests==2.24.0
rope==0.17.0
semver==2.10.2
six==1.15.0
swagger-ui-bundle==0.0.6
toml==0.10.1
typed-ast==1.4.1
urllib3==1.25.9
Werkzeug==1.0.1
wrapt==1.12.1
zipp==3.1.0
......@@ -16,15 +16,5 @@ def receive():
return Response(status=400)
def isBlockchainTraceValid(trace) -> bool:
    '''
    Checks whether the given trace dict is acceptable for ingestion.

    Trace schemas differ for every use case, so no global schema is enforced;
    only the mandatory 'ApplicationType' key is required.

    @params:
        trace - Required : dict decoded from the incoming JSON body
    :returns: True if the trace contains the required key
    '''
    # BUG FIX: merge residue left the old full-schema check as a first, dead
    # 'return' above the intended minimal check — only the minimal check
    # (matched by the unit tests) is kept.
    return 'ApplicationType' in trace
from flask import request
def echo():
    # returns the request's JSON body unchanged (connexion routes /debug here)
    return request.json
def test():
    '''Simple liveness endpoint used for manual debugging.'''
    greeting = "Hello there!"
    return greeting
# global imports (don't worry, red is normal)
from db.entities.user import User
from services.user_service import UserService
from services.login_wrapper import login_required
from services.token_service import TokenService
from flask import request, Response
import bcrypt
import jwt
import json
def secret():
    '''Dummy protected endpoint used to verify that JWT authentication works.'''
    message = "Pineapple does not belong to pizza!"
    return message
def verify(token):
    '''
    verifies the validity of a JWT token.
    performs the following tests (in this order):
    - is the JWT token parsable? (it has not been damaged + the signature is valid)
    - does the payload contain all necessary fields?
    - does the user specified by the payload exist?
    - is the expiration/creation date sound?
    '''
    try:
        # TokenService.verify expects the "Bearer " prefix, so prepend it here
        user = TokenService.verify("Bearer "+token)
        return Response(status = 200, response=json.dumps(user.to_serializable_dict()))
    except ValueError as e:
        # any failed validation step surfaces as 401 with the reason as body
        return Response(status=401, response=str(e))
def authenticate():
    '''
    takes the credentials from the request body and generates a JWT token out of them
    '''
    data = request.json
    username = data["username"]

    try:
        user = UserService.get_by_credentials(username, data["password"])
        # record the successful login (updates the user's last_login timestamp)
        TokenService.successful_authentication(user)
        return {"token": TokenService.generate_token(user)}
    except ValueError as e:
        # return 400 if the user does not exist or the password is wrong
        return Response(status = 400, response=str(e))
def delete(username):
    '''
    deletes a user from the DB. should be protected later
    '''
    try:
        UserService.delete(username)
        return Response(status = 200)
    except ValueError as e:
        # return 400 if the user does not exist
        return Response(status = 400, response=str(e))
def add():
    '''
    adds a new user to the DB. expects the provided password to be plaintext i.e. it should not
    be encrypted, encoded or hashed
    '''
    data = request.json

    # force the "regular user" role so this public endpoint can never create admins
    data["role"] = "u"
    username = data["username"]

    try :
        UserService.add(username, data["password"], data["role"])
    except ValueError as e:
        # return 400 if the user already exists
        return Response(status = 400, response=str(e))

    return Response(status=200)
def all():
    '''
    returns all users stored in the DB
    '''
    # NOTE(review): reaches into UserService._repository directly and returns
    # str(list-of-dicts) instead of JSON — consider a proper service method.
    users = UserService._repository.all()
    return str(users)
from functools import wraps
from flask import g, request, redirect, url_for
def login_required(f):
    '''
    Decorator that rejects requests lacking HTTP authorization credentials
    by redirecting to the gateway's "forbidden" endpoint.

    @params:
        f - Required : view function to wrap
    '''
    @wraps(f)
    def decorated_function(*args, **kwargs):
        auth = request.authorization
        # 'is None' instead of '== None' — identity check per PEP 8
        if auth is None:
            return redirect(url_for('/api.rest_user_forbidden', next=request.url))
        return f(*args, **kwargs)
    return decorated_function
# global imports (dont't worry, red is normal)
from db.entities.user import User
from services.user_service import UserService
import jwt
from datetime import datetime, timedelta
from typing import Dict
TOKEN_VALIDITY_IN_DAYS = 1
def verifyTokenRegular(token, required_scopes) -> Dict:
    '''
    Connexion token-info hook for endpoints accessible to any valid user.

    @params:
        token           - Required : raw authorization header value ("Bearer ...")
        required_scopes - Required : scopes requested by the spec (unused here)
    :returns: token-info dict {"sub": username}, or None to reject the request
    '''
    try:
        user = TokenService.verify(token)
    except ValueError as e:
        print(f'ERROR DURING TOKEN VALIDATION: {str(e)}')
        return None

    # 'not in' idiom instead of 'not x in y'; reject unknown roles
    if user.role not in UserService._valid_roles:
        return None

    TokenService.successful_authentication(user)
    return {"sub": user.username}
def verifyTokenAdmin(token, required_scopes) -> Dict:
    '''
    Connexion token-info hook for admin-only endpoints.

    @params:
        token           - Required : raw authorization header value ("Bearer ...")
        required_scopes - Required : scopes requested by the spec (unused here)
    :returns: token-info dict {"sub": username}, or None to reject the request
    '''
    try:
        user = TokenService.verify(token)
    except ValueError as e:
        print(f'ERROR DURING TOKEN VALIDATION: {str(e)}')
        return None

    # '!=' idiom instead of 'not x == y'; only the admin role is accepted
    if user.role != "a":
        print(f"Required Role:'a', Provided Role: '{user.role}'")
        return None

    TokenService.successful_authentication(user)
    return {"sub": user.username}
class TokenService:
    '''Creates, validates and decodes JWT tokens for SMART users.'''

    @staticmethod
    def successful_authentication(user: User):
        '''Records a successful authentication by updating the user's last_login.'''
        user.last_login = str(datetime.now())
        UserService.update(user)

    @staticmethod
    def read_secret() -> str:
        '''Reads the JWT signing secret from disk, stripped of newlines.'''
        with open('/srv/articonf/jwt_secret.txt', 'r') as file:
            secret = file.read().replace('\n', '')
        return secret

    @staticmethod
    def generate_token(user: User) -> str:
        '''
        creates a JWT token for a user which has the following fields:
        - username
        - created_at
        - valid_until
        '''
        created_at = datetime.now()
        # use the module-level validity constant instead of a hard-coded duration
        valid_until = created_at + timedelta(days=TOKEN_VALIDITY_IN_DAYS)

        return jwt.encode(
            {
                'username': user.username,
                'created_at': str(created_at),
                'valid_until': str(valid_until),
            },
            TokenService.read_secret(),
            algorithm='HS256'
        ).decode("utf-8")

    @staticmethod
    def verify(token : str, **kwargs) -> User:
        '''
        verifies the validity of a JWT token. Raises a ValueError if one of the tests fails
        performs the following tests (in this order):
        - is the JWT token parsable? (it has not been damaged + the signature is valid)
        - does the payload contain all necessary fields?
        - does the user specified by the payload exist?
        - is the expiration/creation date sound?
        @params:
            token - Required : JWT token from authorization header, must start with "Bearer "
        '''
        if not token.startswith("Bearer "):
            raise ValueError('Invalid JWT token (must be a Bearer string)')

        token = token[7:]

        try:
            payload = jwt.decode(
                token,
                TokenService.read_secret(),
                algorithms=['HS256']
            )
        except Exception:
            # narrow 'except Exception' instead of a bare 'except:'
            raise ValueError('Invalid JWT token (decoding failed)')

        # BUG FIX: previously this *returned* the error string instead of raising,
        # so callers received a str where a User was expected.
        if "username" not in payload or "created_at" not in payload or "valid_until" not in payload:
            raise ValueError('Invalid JWT token (missing fields)')

        user = UserService.get_by_username(payload["username"])

        # check if token has already expired
        token_created_at = datetime.strptime(payload["created_at"], '%Y-%m-%d %H:%M:%S.%f')
        valid_until = datetime.strptime(payload["valid_until"], '%Y-%m-%d %H:%M:%S.%f')
        now = datetime.now()

        if now <= token_created_at or now >= valid_until:
            raise ValueError('Invalid JWT token (token expired)')

        return user
# global imports (don't worry, red is normal)
from db.repository import Repository
from db.entities.user import User
from datetime import datetime
import bcrypt
class UserService:
    '''Business-logic layer for user accounts: lookup, authentication, CRUD.'''

    _repository = Repository()

    # u ... regular user
    # a ... admin user
    _valid_roles = ["u", "a"]

    @staticmethod
    def update(user: User):
        '''Persists the given user's current state.'''
        UserService._repository.update(user)

    @staticmethod
    def get_by_username(username: str) -> User:
        '''
        fetches the given user from the database
        throws a ValueError if the user does not exist
        @params:
            username - Required : string identifier for the user i.e. an email address
        '''
        user = UserService._repository.one_by_username(username)

        # raise if the user does not exist (callers translate this to HTTP 400)
        if len(user) == 0:
            raise ValueError(f'User with username "{username}" does not exist')

        return User.from_serializable_dict(user[0])

    @staticmethod
    def get_by_credentials(username, password) -> User:
        '''
        fetches the given user from the database and checks if the password matches the stored one
        throws a ValueError if the user does not exist or the password is wrong
        @params:
            username - Required : string identifier for the user i.e. an email address
            password - Required : passphrase used to authenticate later, raw plaintext
        '''
        user = UserService.get_by_username(username)
        hashed_password = user.password

        # bcrypt strings embed algorithm + salt + hash; checkpw re-derives and compares
        if not bcrypt.checkpw(password.encode("utf-8"), hashed_password.encode("utf-8")):
            raise ValueError(f'Wrong credentials for user "{username}"')

        return user

    @staticmethod
    def delete(username):
        '''
        deletes the given user from the database
        throws a ValueError if the user does not exist
        @params:
            username - Required : string identifier for the user i.e. an email address
        '''
        reference_users = UserService._repository.one_by_username(username)

        # raise if the user does not exist (callers translate this to HTTP 400)
        if len(reference_users) == 0:
            raise ValueError(f'User with username "{username}" does not exist')

        UserService._repository.delete_all_with_username(username)

    @staticmethod
    def add(username, password, role="u"):
        '''
        adds the given user to the database
        throws a ValueError if the user already exists or the role is invalid
        @params:
            username - Required : string identifier for the user i.e. an email address
            password - Required : passphrase used to authenticate later, raw plaintext
            role     - Optional : user type, one of UserService._valid_roles (default "u")
        '''
        if role not in UserService._valid_roles:
            raise ValueError(f'Role "{role}" is invalid. Must be one of {UserService._valid_roles}')

        reference_users = UserService._repository.one_by_username(username)
        if len(reference_users) > 0:
            raise ValueError(f'User with username "{username}" already exists')

        created_at = str(datetime.now())
        # datetime.min marks "never logged in yet"
        last_login = str(datetime.min)

        # hash the password using the BCrypt algorithm, which generates a string that
        # contains the algorithm, the salt and the hash
        password = bcrypt.hashpw(password.encode("utf-8"), bcrypt.gensalt()).decode("utf-8")

        user_new = User(username, password, role=role)
        user_new.created_at = created_at
        user_new.last_login = last_login

        UserService._repository.add(user_new)
\ No newline at end of file
# add modules folder to interpreter path
import sys
import os
# candidate locations of the shared modules relative to different run contexts;
# BUG FIX: a first assignment of modules_paths (merge leftover) was immediately
# overwritten by the second and has been removed.
modules_paths = ['./', '../', '../../../modules/', '../../modules']
for path in modules_paths:
    if os.path.exists(path):
        sys.path.insert(1, path)
......@@ -5,45 +5,24 @@ from messaging.DummyMessageManager import DummyMessageManager
# init dummy message manager so no connection to rabbitmq is established
_ = DummyMessageManager.get_instance()
import rest.blockchain_trace as blockchain_trace
import routes.blockchain_trace as blockchain_trace
class Test_BlockchainTrace(unittest.TestCase):
def _get_valid_input(self):
def _get_min_valid_input(self):
return {
"ApplicationType": "string",
"Metadata": {},
"ResourceIds": "string",
"ResourceMd5": "string",
"ResourceState": "string",
"Timestamp": "2019-08-27T14:00:48.587Z",
"TransactionFrom": "string",
"TransactionFromLatLng": "string",
"TransactionId": "string",
"TransactionTo": "string",
"TransactionToLatLng": "string",
"TransferredAsset": "string"
"ApplicationType": "string"
}
def test_isBlockchainTraceValid_validInputAndTypes(self):
input = self._get_valid_input()
input = self._get_min_valid_input()
self.assertTrue(blockchain_trace.isBlockchainTraceValid(input), "Trace should be valid")
# def test_isBlockchainTraceValid_invalidMetadataInputType(self):
# input = self._get_valid_input()
# input["Metadata"] = "string"
# self.assertFalse(blockchain_trace.isBlockchainTraceValid(input), "Metadata type should be invalid")
# def test_isBlockchainTraceValid_invalidTransactionFromLatLngInputType(self):
# input = self._get_valid_input()
# input["TransactionFromLatLng"] = ["55.1", "44.1"]
# self.assertFalse(blockchain_trace.isBlockchainTraceValid(input), "TransactionFromLatLng type should be invalid")
def test_isBlockchainTraceValid_emptyInput(self):
input = {}
self.assertFalse(blockchain_trace.isBlockchainTraceValid(input), "Empty input should not be accepted")
def test_isBlockchainTraceValid_missingKeys(self):
def test_isBlockchainTraceValid_additionalKeys(self):
input = {
"ApplicationType": "string",
"Metadata": {},
......@@ -57,7 +36,7 @@ class Test_BlockchainTrace(unittest.TestCase):
"TransactionToLatLng": "string",
"TransferredAsset": "string"
}
self.assertFalse(blockchain_trace.isBlockchainTraceValid(input), "Input should not be accepted because timestamp is missing")
self.assertTrue(blockchain_trace.isBlockchainTraceValid(input), "Input should be accepted")
if __name__ == '__main__':
unittest.main()
\ No newline at end of file
......@@ -32,3 +32,51 @@ spec:
image: alexx882/rest-gateway
ports:
- containerPort: 5000
volumeMounts:
- mountPath: /srv/articonf
name: articonf
volumes:
- name: articonf
hostPath:
path: /srv/articonf
type: Directory
---
apiVersion: v1
kind: Service
metadata:
name: rest-gateway-db
spec:
type: LoadBalancer
selector:
app: rest-gateway-db
ports:
- name: http
port: 27017
targetPort: 27017
nodePort: 30402
protocol: TCP
---
apiVersion: apps/v1
kind: Deployment
metadata:
name: rest-gateway-db
spec:
replicas: 1
selector:
matchLabels:
app: rest-gateway-db
template:
metadata:
labels:
app: rest-gateway-db
spec:
containers:
- name: rest-gateway-db
image: mongo
env:
- name: MONGO_INITDB_ROOT_USERNAME
value: root
- name: MONGO_INITDB_ROOT_PASSWORD
value: root
ports:
- containerPort: 27017
# global import, red is normal don't worry
import network_constants
import os
import json
import requests
class TokenManager:
    '''Lazily fetches and caches a JWT token for the regular service user (singleton).'''

    _instance = None

    @staticmethod
    def getInstance():
        ''' Static access method. '''
        # 'is None' instead of '== None' — identity check per PEP 8
        if TokenManager._instance is None:
            TokenManager._instance = TokenManager()
        return TokenManager._instance

    def __init__(self):
        # cached token; fetched from the gateway on first getToken() call
        self._token = None

    def getToken(self) -> str:
        if self._token is None:
            # BUG FIX: the original try/except KeyError wrapped a plain assignment,
            # which can never raise KeyError. Fall back to the deployment path when
            # the local development credentials file is not present.
            credentials_path = '../../../modules/security/'
            if not os.path.exists(f'{credentials_path}regular_user_credentials.json'):
                credentials_path = '/srv/articonf/'

            print("Looking for credentials at ... "+str(credentials_path))

            with open(f'{credentials_path}regular_user_credentials.json') as file:
                credentials = json.loads(file.read())

            url = f'https://{network_constants.REST_GATEWAY_HOSTNAME}:{network_constants.REST_GATEWAY_REST_PORT}/api/tokens'

            # NOTE(review): verify=False disables TLS certificate validation —
            # presumably because of the self-signed articonf1 cert; confirm.
            response = requests.post(
                url,
                verify = False,
                proxies = { "http":None, "https":None },
                json = credentials
            )

            data = json.loads(response.text)
            self._token = data["token"]

        return self._token
\ No newline at end of file
......@@ -13,9 +13,35 @@ basePath: "/api"
# Paths supported by the server application
paths:
/transactions:
delete:
operationId: "routes.transactions.delete_all_transactions"
tags:
- "Transactions"
summary: "Delete all Transactions in the DB"
description: "Delete all Transactions in the DB"
responses:
'200':
description: "Successful Request"
/transactions/use_case/{use_case}:
get:
operationId: "routes.transactions.all_for_use_case"
tags:
- "Transactions"
summary: "Retrieves all Transactions in the given Use-Case"
description: "Retrieves all Transactions in the given Use-Case"
parameters:
- in: path
name: "use_case"
required: true
type: "string"
responses:
'200':
description: "Successful Request"
/debug:
post:
operationId: "rest.debug.echo"
operationId: "routes.debug.echo"
tags:
- "Echo"
summary: "Echo function for debugging purposes"
......@@ -27,12 +53,12 @@ paths:
schema:
type: object
responses:
200:
'200':
description: "Successful echo of request data"
/trace:
post:
operationId: "rest.blockchain_trace.post"
operationId: "routes.blockchain_trace.post"
tags:
- "Blockchain Trace"
summary: "Add a new blockchain trace to SMART"
......@@ -45,19 +71,19 @@ paths:
schema:
$ref: "#/definitions/BlockchainTrace"
responses:
201:
'201':
description: "Successful operation"
400:
'400':
description: "Invalid input"
get:
operationId: "rest.blockchain_trace.get"
operationId: "routes.blockchain_trace.get"
tags:
- "Blockchain Trace"
summary: "Get blockchain traces"
description: "Returns all blockchain traces in the database"
parameters: []
responses:
200:
'200':
description: "Successful operation"
schema:
$ref: "#/definitions/BlockchainTrace"
......
from typing import Dict
class Transaction:
    '''Wraps one use-case transaction together with its raw property payload.'''

    def __init__(self, use_case: str, properties: Dict):
        self.use_case = use_case
        self.properties = properties

    def id(self):
        '''Returns the transaction's unique identifier from its properties.'''
        return self.properties["UniqueID"]

    def to_serializable_dict(self):
        '''Returns a plain dict suitable for JSON/Mongo serialization.'''
        return {
            "use_case": self.use_case,
            "id": self.id(),
            "properties": self.properties,
        }

    @staticmethod
    def from_serializable_dict(data: Dict):
        '''Reconstructs a Transaction from a dict produced by to_serializable_dict.'''
        return Transaction(data["use_case"], data["properties"])
\ No newline at end of file
# global imports (don't worry, red is normal)
import network_constants as netconst
from database.MongoRepositoryBase import MongoRepositoryBase
from database.entities.transaction import Transaction
import pymongo
import json
from typing import List, Dict
class Repository(MongoRepositoryBase):
    '''MongoDB repository for use-case transactions.'''

    def __init__(self):
        # NOTE(review): the database name 'rest-gateway-db' looks copy-pasted from
        # the gateway repository — confirm it is intended for the business-logic DB.
        super().__init__(netconst.BUSINESS_LOGIC_DB_HOSTNAME,
                         netconst.BUSINESS_LOGIC_DB_PORT,
                         'rest-gateway-db')
        # name of the MongoDB collection holding transaction documents
        self._transaction_collection = 'transactions'

    def delete_all_transactions(self):
        '''Removes every transaction document — destructive, for resets/tests.'''
        collection = self._database[self._transaction_collection]
        collection.delete_many({})

    def add_transaction(self, transaction: Transaction):
        '''Inserts the transaction unless one with the same unique id already exists.'''
        reference = self.get_transaction_with_id(transaction.id())
        # 'is None' instead of '== None' — identity check per PEP 8
        if reference is None:
            super().insert_entry(self._transaction_collection, transaction.to_serializable_dict())

    def all_transactions_for_use_case(self, use_case: str) -> List[Transaction]:
        '''Returns all transactions stored for the given use-case.'''
        result = super().get_entries(self._transaction_collection, projection={'_id': False}, selection={"use_case": use_case})
        # iterate the cursor directly; the intermediate list() was unnecessary
        return [Transaction.from_serializable_dict(row) for row in result]

    def get_transaction_with_id(self, unique_id: str) -> Transaction:
        '''Returns the single transaction with the given unique id, or None.'''
        result = list(super().get_entries(self._transaction_collection, projection={'_id': False}, selection={"id": unique_id}))

        if len(result) == 1:
            return Transaction.from_serializable_dict(result[0])

        return None
\ No newline at end of file
import json
from authentication.token_manager import TokenManager
import network_constants
from database.entities.transaction import Transaction
from database.repository import Repository
import json
import hashlib
import logging
import requests
from typing import Dict
LOGGER = logging.getLogger(__name__)
class MessageHandler:
......@@ -12,6 +21,8 @@ class MessageHandler:
_mongo_repo = None
_message_sender = None
_repository = Repository()
    def __init__(self, mongo_repo, message_sender):
        # mongo_repo: trace storage backend; message_sender: outbound queue client
        self._mongo_repo = mongo_repo
        self._message_sender = message_sender
......@@ -43,9 +54,84 @@ class MessageHandler:
LOGGER.info(result)
return result
def _resolve_path(self, data: Dict, path:str) -> Dict:
'''
resolves a path without concatenation in a json dictionary
@params
data - Required: Dictionary that is the decoded json string
path - Required: path of multiple keys seperated by "//" and list indizes "[5]"
'''
path_pieces = path.split("//")
value = data
# resolve all pieces of the path in order
for i in range(0,len(path_pieces)):
piece = path_pieces[i]
# is the current path piece in the form attribute[index]?
if piece[-1] == "]":
start = piece.index("[")
# stem ... attribute name
# index ... list index
stem = piece[:start]
index = int(piece[start+1:-1])
value = value[stem][index]
else:
value = value[piece]
return value
    def handle_blockchain_transaction(self, transaction):
        '''
        Processes one raw blockchain transaction (a json string):
        stores the raw trace, fetches the use case's schema mappings from the
        business logic service, flattens the transaction content accordingly,
        hashes its UniqueID, stores it, and notifies the semantic linking service.
        '''
        # keep the raw, unmodified trace in Mongo first
        self._mongo_repo.insert_trace(transaction)
        # JWT used to authenticate against the business logic REST API
        jwt_token = TokenManager.getInstance().getToken()
        transaction_data = json.loads(transaction)
        # the use case determines which schema mappings apply
        use_case = transaction_data["content"]["ApplicationType"]
        # query schema information
        url = f'https://{network_constants.BUSINESS_LOGIC_HOSTNAME}:{network_constants.BUSINESS_LOGIC_REST_PORT}/api/use-cases/{use_case}/schema'
        print(f"CALLING: {url}")
        # NOTE(review): certificate verification is disabled and proxies are
        # bypassed — presumably for internal cluster traffic; confirm.
        response = requests.get(
            url,
            verify = False,
            proxies = { "http":None, "https":None },
            headers = { "Authorization": f"Bearer {jwt_token}"}
        )
        print(f"RESPONSE: {response.text}")
        data = json.loads(response.text)
        # mappings: target attribute name -> source path(s); multiple source
        # paths are joined with "+" in a single mapping value
        mappings = data["mappings"]
        flattened = {}
        # iterate over schema mappings and resolve paths
        for mapping in mappings.keys():
            full_path = mappings[mapping]
            concat_paths = full_path.split("+")
            values = []
            for path in concat_paths:
                values.append(
                    self._resolve_path(transaction_data["content"], path)
                )
            # "+"-concatenated paths are string-joined; assumes each resolved
            # value is a str — TODO confirm against the schema definitions
            if len(values) > 1:
                final_value = "".join(values)
            else:
                final_value = values[0]
            flattened[mapping] = final_value
        # hash the UniqueID (presumably for anonymization — confirm); requires
        # the schema to always map a "UniqueID" attribute
        flattened["UniqueID"] = hashlib.sha256(flattened["UniqueID"].encode("utf-8")).hexdigest()
        # rebinds the parameter: from here on `transaction` is the entity object
        transaction = Transaction(use_case, flattened)
        MessageHandler._repository.add_transaction(transaction)
        # inform semantic linking microservice
        msg = {'type': 'new-traces-available'}
        self._message_sender.send_message('datahub', 'semantic-linking', json.dumps(msg))
\ No newline at end of file
self._message_sender.send_message('datahub', 'semantic-linking', json.dumps(transaction.to_serializable_dict()))
\ No newline at end of file
......@@ -2,3 +2,5 @@ from flask import request
def echo():
    '''Echoes the request's JSON body back to the caller (debugging endpoint).'''
    return request.json
\ No newline at end of file
#global imports
from database.entities.transaction import Transaction
from database.repository import Repository
import json
from flask import Response, request
_repository = Repository()
def all_for_use_case(use_case: str):
    '''Returns every stored transaction of the given use case as a serializable dict.'''
    stored = _repository.all_transactions_for_use_case(use_case)
    serialized = []
    for transaction in stored:
        serialized.append(transaction.to_serializable_dict())
    return serialized
def delete_all_transactions():
    '''Deletes every stored transaction and acknowledges with HTTP 200.'''
    _repository.delete_all_transactions()
    return Response(status=200)
\ No newline at end of file
# add modules folder to interpreter path
import sys
import os
# candidate locations of the shared 'modules' folder, relative to the cwd
# (removed a dead duplicate assignment that was immediately overwritten)
modules_paths = ['./', '../', '../../../../modules/', '../../../modules/']

# make every candidate that actually exists importable; index 1 keeps the
# script's own directory first on sys.path
for path in modules_paths:
    if os.path.exists(path):
        sys.path.insert(1, path)
......@@ -22,6 +22,27 @@ class Test_MessageHandler(unittest.TestCase):
self.msg_sender = DummyMessageSender.get_instance()
self.handler = MessageHandler(self.repo, self.msg_sender)
def _get_valid_message(self) -> str:
message_values = \
{ 'type': 'blockchain-transaction',
'content':
{
"ApplicationType": "string",
"Metadata": {},
"ResourceIds": "string",
"ResourceMd5": "string",
"ResourceState": "string",
"Timestamp": "2019-08-27T14:00:48.587Z",
"TransactionFrom": "string",
"TransactionFromLatLng": "string",
"TransactionId": "string",
"TransactionTo": "string",
"TransactionToLatLng": "string",
"TransferredAsset": "string"
}
}
return json.dumps(message_values)
    def test_handleGeneric_emptyMessage_NotJsonError(self):
        # an empty string is not valid json, so the handler must report MSG_NOT_JSON
        res = self.handler.handle_generic('')
        self.assertEqual(self.handler.MSG_NOT_JSON, res)
......@@ -60,32 +81,12 @@ class Test_MessageHandler(unittest.TestCase):
res = self.handler.handle_generic(message)
self.assertEqual(self.handler.MSG_NOT_PROCESSED, res)
    def _get_valid_message(self) -> str:
        '''Builds a json-encoded blockchain-transaction message with placeholder content.'''
        message_values = \
        { 'type': 'blockchain-transaction',
        'content':
            {
            "ApplicationType": "string",
            "Metadata": {},
            "ResourceIds": "string",
            "ResourceMd5": "string",
            "ResourceState": "string",
            "Timestamp": "2019-08-27T14:00:48.587Z",
            "TransactionFrom": "string",
            "TransactionFromLatLng": "string",
            "TransactionId": "string",
            "TransactionTo": "string",
            "TransactionToLatLng": "string",
            "TransferredAsset": "string"
            }
        }
        return json.dumps(message_values)
    def test_handleGeneric_correctTraceContent_ProcessedResult(self):
        # a well-formed trace message must be reported as fully processed
        res = self.handler.handle_generic(self._get_valid_message())
        self.assertEqual(self.handler.MSG_TRACE_PROCESSED, res)
def test_handleGeneric_correctTraceContent_AddedToRepo(self):
# TODO repo should contain processed datapoint
msg = self._get_valid_message()
_ = self.handler.handle_generic(msg)
......@@ -93,6 +94,7 @@ class Test_MessageHandler(unittest.TestCase):
self.assertEqual(trace, self.repo.last_trace)
def test_handleGeneric_correctTraceContent_NotificationSentCorrectly(self):
# TODO message queue should contain new message with datapoint as content
msg = self._get_valid_message()
_ = self.handler.handle_generic(msg)
......
import unittest
import manage_sys_paths
import json
from messaging.MessageHandler import MessageHandler
class DummyMongoRepo:
    '''Test double for the Mongo repository used by the MessageHandler.

    Instead of persisting anything, it only remembers the most recently
    inserted trace so tests can assert on it.
    '''

    # the most recently inserted trace; None until insert_trace is called
    last_trace = None

    def insert_trace(self, trace):
        '''Records the trace as the last one seen.'''
        self.last_trace = trace
from messaging.DummyMessageManager import DummyMessageManager as DummyMessageSender
class Test_MessageHandler(unittest.TestCase):
    '''Exercises the MessageHandler against dummy repo / message-sender doubles.'''

    handler = None
    repo = None
    msg_sender = None

    def setUp(self):
        # fresh doubles for every test; the handler under test wires them together
        self.repo = DummyMongoRepo()
        self.msg_sender = DummyMessageSender.get_instance()
        self.handler = MessageHandler(self.repo, self.msg_sender)

    def _get_valid_message(self) -> str:
        '''A json message whose content matches the smart-energy use case.'''
        content = {
            "ApplicationType": "smart-energy",
            "Customer": 13,
            "Postcode": 2261,
            "Timestamp": "01.07.2012 00:30",
            "Solar_Production_kWh": 0.0,
            "Energy_Consumption_kWh": 0.23399999999999999,
            "Heating_Consumption_kWh": 0.23399999999999999,
            "Price_AUD/MWh": 57.04,
            "Total_Demand_MWh": 8097.93,
            "Latitude": -33.362679,
            "Longitude": 151.447302,
        }
        return json.dumps({'type': 'blockchain-transaction', 'content': content})

    def _get_pizza_message(self) -> str:
        '''A json message for the debug use case with nested objects and lists.'''
        content = {
            "ApplicationType": "debug",
            "name": "Margherita",
            "dough": {
                "type": "wheat",
                "cheese": False,
            },
            "sauces": [
                {
                    "name": "tomato"
                },
                {
                    "name": "caramel"
                }
            ]
        }
        return json.dumps({'type': 'blockchain-transaction', 'content': content})

    # def test_handleGeneric_correctTraceContent_NotificationSentCorrectly(self):
    #     msg = self._get_valid_message()
    #     _ = self.handler.handle_blockchain_transaction(msg)

    def test_HandlePizzaMessage(self):
        print("STARTING THE TEST...")
        pizza_msg = self._get_pizza_message()
        _ = self.handler.handle_blockchain_transaction(pizza_msg)
# run all tests in this module when executed directly
if __name__ == '__main__':
    unittest.main()
\ No newline at end of file
......@@ -18,3 +18,7 @@ COPY templates/ /app/templates/
RUN chmod a+x main.py
CMD ["python", "./main.py"]
# docker build -t alexx882/hello-articonf .
# docker run --name articonf-home -p 80:5000 -v /srv/articonf:/srv/articonf -d alexx882/hello-articonf
# docker run --name articonf-home-ssl -p 443:5000 -v /srv/articonf:/srv/articonf -d alexx882/hello-articonf
\ No newline at end of file
import os
from flask import Flask, render_template
app = Flask(__name__)
......@@ -6,4 +7,11 @@ app = Flask(__name__)
def hello_world():
    '''Serves the landing page template.'''
    return render_template('index.html')
app.run(host='0.0.0.0', port=5000, debug=True)
\ No newline at end of file
# SSL configuration: the certificate directory can be overridden via the
# ARTICONF_CERTIFICATE_PATH environment variable, falling back to the
# default deployment location.
certificate_path = os.environ.get('ARTICONF_CERTIFICATE_PATH', '/srv/articonf/')

# certificate and private-key files handed to Flask's ssl_context
context = (os.path.normpath(f'{certificate_path}/articonf1.crt'),
           os.path.normpath(f'{certificate_path}/articonf1.key'))

app.run(host='0.0.0.0', port=5000, debug=False, ssl_context=context)
Markdown is supported
0% or
You are about to add 0 people to the discussion. Proceed with caution.
Finish editing this message first!
Please register or to comment