Commit 891abc69 authored by Bogdan

Initial Federated Learning Submit

parent d670155e
FROM python:3
LABEL maintainer="Alexander Lercher"
ENV http_proxy http://proxy.uni-klu.ac.at:3128/
ENV https_proxy http://proxy.uni-klu.ac.at:3128/
RUN apt-get update
EXPOSE 5000
WORKDIR /app
COPY src/participation-hub/federated-learning-microservice/app/requirements.txt /app/
RUN pip install -r requirements.txt
COPY src/modules/ /app/
COPY src/participation-hub/federated-learning-microservice/app/ /app/
RUN chmod a+x main.py
CMD ["python", "./main.py"]
\ No newline at end of file
# Federated Learning Microservice
The federated learning microservice serves as an interface for the individual use cases. Schema information for all use cases is stored here to enable context-aware processing.
## Technologies
- Python 3.x
- Docker
- Kubernetes
\ No newline at end of file
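For illustration, a stored layer definition is a plain document with the fields produced by LayerAdapter.to_serializable_dict() further below; the concrete names in this sketch are examples only.

# Hypothetical example of a stored layer document (field names match
# LayerAdapter.to_serializable_dict(); the values are illustrative only).
example_layer = {
    "name": "user_demographics",        # assumed layer name
    "use_case": "car_sharing",          # assumed use-case identifier
    "table": "users",                   # assumed table identifier
    "properties": ["age", "city"],      # mapped internal properties
    "cluster_properties": ["city"]      # subset used for clustering
}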
swagger: "2.0"
info:
title: Federated Learning microservice
description: This is the documentation for the federated learning microservice.
version: "1.0.0"
consumes:
- "application/json"
produces:
- "application/json"
basePath: "/api"
# Import security definitions from seperate file
securityDefinitions:
$ref: '../security/security.yml#securityDefinitions'
paths:
$ref: 'routes.yml#paths'
\ No newline at end of file
swagger: "2.0"
info:
title: Federated Learning microservice
description: This is the documentation for the federated learning microservice.
version: "1.0.0"
consumes:
- "application/json"
produces:
- "application/json"
basePath: "/api"
# Import security definitions from seperate file
securityDefinitions:
$ref: '../../../../modules/security/security_local.yml#securityDefinitions'
paths:
$ref: 'routes.yml#paths'
\ No newline at end of file
from typing import Dict, List


class LayerAdapter:
    '''
    Represents the mapping from an internal layer with a set of attributes to the
    attributes of the dataset that correspond to each one.
    '''
    def __init__(self, name: str, use_case: str, table: str, properties: List[str], cluster_properties: List[str]):
        '''
        Creates a new instance of LayerAdapter.
        @params:
            name               - Required : unique identifier for the layer
            use_case           - Required : identifier for the use-case this layer belongs to
            table              - Required : identifier of the table this layer belongs to
            properties         - Required : list of mappings from the schema
            cluster_properties - Required : subset of properties used for clustering
        '''
        self.name = name
        self.properties = properties
        self.use_case = use_case
        self.table = table

        for prop in cluster_properties:
            if prop not in properties:
                raise ValueError(f"{prop} is not a property of the layer!")

        self.cluster_properties = cluster_properties

    def add_mapping(self, internal: str):
        '''
        Adds a new mapping to the layer, consisting of an internal representation.
        @params:
            internal - Required : string identifier used internally, e.g. "startTime"
        '''
        if internal not in self.properties:
            self.properties.append(internal)

    def delete_mapping(self, internal: str):
        '''
        Removes a property from the layer. Raises a ValueError if the property identified
        by the internal representation does not exist.
        @params:
            internal - Required : string identifier used internally, e.g. "startTime"
        '''
        if internal not in self.properties:
            raise ValueError(f"Attribute {internal} is not an internal attribute!")

        if internal in self.cluster_properties:
            self.delete_cluster_mapping(internal)

        self.properties.remove(internal)

    def add_cluster_mapping(self, attribute: str):
        '''
        Adds an attribute of the internal representation to the set of attributes to cluster by.
        The set of cluster attributes is always a subset of self.properties.
        @params:
            attribute - Required : string identifier used internally, e.g. "startTime"
        '''
        if attribute not in self.properties:
            raise ValueError(f"Attribute {attribute} is not an internal attribute!")

        if attribute not in self.cluster_properties:
            self.cluster_properties.append(attribute)

    def delete_cluster_mapping(self, attribute: str):
        '''
        Removes an attribute from the set of cluster attributes. Raises a ValueError if the
        attribute does not exist as an internal representation.
        @params:
            attribute - Required : string identifier used internally, e.g. "startTime"
        '''
        if attribute not in self.properties:
            raise ValueError(f"Attribute {attribute} is not an internal attribute!")

        self.cluster_properties.remove(attribute)

    def to_serializable_dict(self) -> Dict:
        return {
            "name": self.name,
            "properties": self.properties,
            "cluster_properties": self.cluster_properties,
            "use_case": self.use_case,
            "table": self.table
        }

    @staticmethod
    def from_serializable_dict(user_dict: Dict):
        '''
        Creates a LayerAdapter object from a dictionary. It has to contain the following keys:
        - name
        - use_case
        - table
        - properties
        - cluster_properties
        '''
        return LayerAdapter(
            user_dict["name"],
            user_dict["use_case"],
            user_dict["table"],
            user_dict["properties"],
            user_dict["cluster_properties"]
        )
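A minimal usage sketch of LayerAdapter follows; it is not part of the committed file, and the layer, use-case, and table names are illustrative only.

# Sketch: build a LayerAdapter and manipulate its mappings.
# "demographics", "debug" and "pizza" are illustrative identifiers.
from db.entities.layer_adapter import LayerAdapter

adapter = LayerAdapter("demographics", "debug", "pizza", ["price", "size"], ["size"])
adapter.add_mapping("dough")            # add a new internal property
adapter.add_cluster_mapping("price")    # mark an existing property for clustering
adapter.delete_mapping("size")          # also removes it from cluster_properties
print(adapter.to_serializable_dict())

# round trip through the serializable form
same = LayerAdapter.from_serializable_dict(adapter.to_serializable_dict())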
from typing import Dict


class Table:

    def __init__(self, use_case: str, name: str, mappings: Dict[str, str]):
        self.use_case = use_case
        self.name = name
        self.mappings = mappings

    def to_serializable_dict(self) -> Dict:
        return {
            "name": self.name,
            "use_case": self.use_case,
            "mappings": self.mappings
        }

    def add_mapping(self, internal: str, external: str):
        if internal not in self.mappings.keys():
            self.mappings[internal] = external

    @staticmethod
    def from_serializable_dict(data: Dict):
        return Table(
            data["use_case"],
            data["name"],
            data["mappings"]
        )
from typing import Dict, List


class UseCase:
    '''
    Represents a use-case that layers and tables belong to.
    '''
    def __init__(self, name: str):
        '''
        Creates a new instance of UseCase.
        @params:
            name - Required : unique identifier for the use-case
        '''
        self.name = name

    def to_serializable_dict(self) -> Dict:
        return {
            "name": self.name,
        }

    @staticmethod
    def from_serializable_dict(user_dict: Dict):
        '''
        Creates a UseCase object from a dictionary. It has to contain the following key:
        - name
        '''
        return UseCase(
            user_dict["name"],
        )
# global imports (don't worry, red is normal)
import network_constants as netconst
from database.MongoRepositoryBase import MongoRepositoryBase
from db.entities.layer_adapter import LayerAdapter
from db.entities.use_case import UseCase

import pymongo
import json
from typing import List, Dict


class Repository(MongoRepositoryBase):
    '''This is a repository for MongoDB.'''

    def __init__(self):
        super().__init__(netconst.BUSINESS_LOGIC_DB_HOSTNAME,
                         netconst.BUSINESS_LOGIC_DB_PORT,
                         'business-logic-db')

        self._adapter_collection = 'layer_adapters'
        self._use_case_collection = 'use_cases'

    def all(self) -> List[LayerAdapter]:
        result = super().get_entries(self._adapter_collection, projection={'_id': False})
        return [LayerAdapter.from_serializable_dict(row) for row in list(result)]

    def all_for_use_case(self, use_case: str) -> List[LayerAdapter]:
        result = super().get_entries(self._adapter_collection, projection={'_id': False}, selection={"use_case": use_case})
        return [LayerAdapter.from_serializable_dict(row) for row in list(result)]

    def all_for_use_case_and_table(self, use_case: str, table: str) -> List[LayerAdapter]:
        result = super().get_entries(self._adapter_collection, projection={'_id': False}, selection={"use_case": use_case, "table": table})
        return [LayerAdapter.from_serializable_dict(row) for row in list(result)]

    def one(self, name: str, use_case: str, table: str) -> LayerAdapter:
        result = list(super().get_entries(self._adapter_collection, selection={"name": name, "use_case": use_case, "table": table}))
        if len(result) == 1:
            return LayerAdapter.from_serializable_dict(result[0])
        return None

    def delete_all(self):
        collection = self._database[self._adapter_collection]
        collection.delete_many({})

    def add(self, adapter: LayerAdapter):
        super().insert_entry(self._adapter_collection, adapter.to_serializable_dict())

    def update_use_case(self, adapter: LayerAdapter, use_case: str):
        collection = self._database[self._adapter_collection]
        collection.update_one({"name": adapter.name, "use_case": use_case, "table": adapter.table}, {"$set": adapter.to_serializable_dict()})

    def update(self, adapter: LayerAdapter):
        collection = self._database[self._adapter_collection]
        collection.update_one({"name": adapter.name, "use_case": adapter.use_case, "table": adapter.table}, {"$set": adapter.to_serializable_dict()})

    def delete(self, adapter: LayerAdapter):
        collection = self._database[self._adapter_collection]
        collection.delete_many({"name": adapter.name, "use_case": adapter.use_case, "table": adapter.table})
\ No newline at end of file
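A hedged sketch of how the repository is used together with LayerAdapter; it assumes a reachable MongoDB at the host and port configured in network_constants and that the shared modules folder is on the interpreter path. The identifiers are illustrative only.

# Sketch only: assumes network_constants points at a running business-logic MongoDB
# and that the shared modules folder is on sys.path.
from db.repository import Repository
from db.entities.layer_adapter import LayerAdapter

repo = Repository()
layer = LayerAdapter("demo_layer", "debug", "pizza", ["price"], ["price"])  # illustrative names
repo.add(layer)

fetched = repo.one("demo_layer", "debug", "pizza")
if fetched is not None:
    fetched.add_mapping("size")
    repo.update(fetched)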
# global imports (don't worry, red is normal)
import network_constants as netconst
from database.MongoRepositoryBase import MongoRepositoryBase
from db.entities.table import Table

from typing import Dict, List


class TableRepository(MongoRepositoryBase):

    def __init__(self):
        super().__init__(netconst.BUSINESS_LOGIC_DB_HOSTNAME,
                         netconst.BUSINESS_LOGIC_DB_PORT,
                         'business-logic-db')

        self._collection = 'tables'

    def get_all(self) -> List[Table]:
        result = super().get_entries(self._collection, projection={'_id': False})
        return [Table.from_serializable_dict(row) for row in list(result)]

    def get_all_for_use_case(self, use_case: str) -> List[Table]:
        result = super().get_entries(self._collection, selection={'use_case': use_case}, projection={'_id': False})
        return [Table.from_serializable_dict(row) for row in result]

    def get_for_use_case_and_name(self, use_case: str, name: str) -> Table:
        result = list(super().get_entries(self._collection, selection={"use_case": use_case, "name": name}, projection={'_id': False}))
        if len(result) == 1:
            return Table.from_serializable_dict(result[0])
        if len(result) == 0:
            return None
        raise ValueError("No more than one table with the same name is allowed per use-case!")

    def add(self, table: Table):
        super().insert_entry(self._collection, table.to_serializable_dict())

    def delete_for_use_case(self, use_case: str):
        collection = self._database[self._collection]
        collection.delete_many({"use_case": use_case})

    def delete_all(self):
        collection = self._database[self._collection]
        collection.delete_many({})

    def update(self, table: Table):
        collection = self._database[self._collection]
        collection.update_one({"use_case": table.use_case, "name": table.name}, {"$set": table.to_serializable_dict()})
\ No newline at end of file
# global imports (don't worry, red is normal)
import network_constants as netconst
from database.MongoRepositoryBase import MongoRepositoryBase
from db.entities.use_case import UseCase

import pymongo
import json
from typing import List, Dict


class UseCaseRepository(MongoRepositoryBase):
    '''This is a repository for MongoDB.'''

    def __init__(self):
        super().__init__(netconst.BUSINESS_LOGIC_DB_HOSTNAME,
                         netconst.BUSINESS_LOGIC_DB_PORT,
                         'business-logic-db')

        self._use_case_collection = 'use_cases'

    def all(self) -> List[UseCase]:
        dicts = list(super().get_entries(self._use_case_collection, projection={'_id': False}))
        return [UseCase.from_serializable_dict(d) for d in dicts]

    def delete_all(self):
        collection = self._database[self._use_case_collection]
        collection.delete_many({})

    def delete_all_with_name(self, name: str):
        collection = self._database[self._use_case_collection]
        collection.delete_many({"name": name})

    def get_by_name(self, name: str):
        result = list(super().get_entries(self._use_case_collection, {"name": name}))

        if len(result) == 1:
            return UseCase.from_serializable_dict(result[0])
        if len(result) == 0:
            return None
        raise ValueError("More than one use-case with this name exists in the DB!")

    def put(self, use_case_name: str):
        use_cases = self.all()
        existing_use_cases = list(filter(lambda use_case: use_case.name == use_case_name, use_cases))

        if len(existing_use_cases) == 0:
            use_case = UseCase(use_case_name)
            super().insert_entry(self._use_case_collection, use_case.to_serializable_dict())
\ No newline at end of file
from flask import request


def echo():
    return request.json
\ No newline at end of file
# add modules folder to interpreter path
import sys
import os
from pathlib import Path
from typing import Dict, Any

modules_path = '../../../modules/'
if os.path.exists(modules_path):
    sys.path.insert(1, modules_path)

# load swagger config
import connexion
from security import swagger_util
from env_info import is_running_locally, get_resources_path
from flask import request
from flask import redirect

app = connexion.App(__name__, specification_dir='configs/')

from db.entities.layer_adapter import LayerAdapter


@app.app.before_request
def before_request():
    if request.url.startswith('http://'):
        url = request.url.replace('http://', 'https://', 1)
        code = 301
        return redirect(url, code=code)


@app.route('/', methods=['GET'])
def api_root():
    return redirect('/api/ui')


# SSL configuration
certificate_path = get_resources_path()
context = (os.path.normpath(f'{certificate_path}/articonf1.crt'), os.path.normpath(f'{certificate_path}/articonf1.key'))  # certificate and key files

if is_running_locally():
    print("Running locally...")
    app.add_api(swagger_util.get_bundled_specs(Path("configs/swagger_local.yml")),
                resolver=connexion.RestyResolver("cms_rest_api"))
else:
    app.add_api(swagger_util.get_bundled_specs(Path("configs/swagger.yml")),
                resolver=connexion.RestyResolver("cms_rest_api"))

# start app
if __name__ == '__main__':
    app.run(host='0.0.0.0', port=5000, debug=False, ssl_context=context)
astroid==2.4.2
attrs==19.3.0
autopep8==1.5.4
certifi==2020.6.20
cffi==1.14.2
chardet==3.0.4
click==7.1.2
clickclick==1.2.2
colorama==0.4.3
connexion==2.7.0
coverage==5.3.1
cryptography==3.1
Flask==1.1.2
idna==2.10
importlib-metadata==1.7.0
inflection==0.5.0
isort==4.3.21
itsdangerous==1.1.0
Jinja2==2.11.2
jsonschema==3.2.0
lazy-object-proxy==1.4.3
MarkupSafe==1.1.1
mccabe==0.6.1
openapi-spec-validator==0.2.9
prance==0.19.0
pycodestyle==2.6.0
pycparser==2.20
pylint==2.5.3
pymongo==3.11.0
pyrsistent==0.16.0
PyYAML==5.3.1
requests==2.24.0
rope==0.17.0
semver==2.10.2
six==1.15.0
swagger-ui-bundle==0.0.8
toml==0.10.1
typed-ast==1.4.1
urllib3==1.25.10
Werkzeug==1.0.1
wrapt==1.12.1
zipp==3.1.0
# global imports
from db.entities.layer_adapter import LayerAdapter
from db.repository import Repository
from db.table_repository import TableRepository
from db.use_case_repository import UseCaseRepository
from services.layer_adapter_service import LayerAdapterService

import json
from flask import Response, request

table_repository = TableRepository()
layer_repository = Repository()
use_case_repository = UseCaseRepository()


def all():
    return [layer.to_serializable_dict() for layer in layer_repository.all()]


def delete_all_layers():
    '''
    Deletes all layers from the DB.
    '''
    layer_repository.delete_all()
    return Response(status=200)


def add_complete():
    '''
    Adds a layer that already contains attribute mappings to the DB.
    '''
    data = request.json

    if "name" not in data or "properties" not in data or "cluster_properties" not in data or "use_case" not in data or "table" not in data:
        return Response(status=400, response=f"Field missing! Fields required: (name, properties, cluster_properties, use_case, table), present: ({data.keys()})")

    use_case = data["use_case"]
    table_name = data["table"]
    layer_name = data["name"]

    table = table_repository.get_for_use_case_and_name(use_case, table_name)
    if table is None:
        return Response(status=400, response="Table does not exist!")

    layer = layer_repository.one(layer_name, use_case, table.name)
    if layer is not None:
        return Response(status=400, response=f'Layer with name "{data["name"]}" already exists!')

    # check if the table contains each mapping
    for prop in data["properties"]:
        if prop not in table.mappings.keys():
            return Response(status=400, response=f"'{prop}' is not a property of the table!")

    try:
        layer_new = LayerAdapter.from_serializable_dict(data)
    except BaseException as e:
        print(f"Exception: {e}")
        return Response(status=400, response=f"{e}")

    try:
        LayerAdapterService.add_complete(layer_new)
    except ValueError as e:
        return Response(status=400, response=f"{e}")

    return Response(status=200)


def add_cluster_mapping(use_case: str, table: str, name: str):
    '''
    Adds a mapped property to the list of properties to cluster with.
    @params:
        use_case - Required : string identifier for the use-case the layer belongs to
        table    - Required : unique identifier of the table the layer belongs to
        name     - Required : unique identifier for the layer
    '''
    layer = layer_repository.one(name, use_case, table)
    if layer is None:
        return Response(status=404, response=f"Layer with name '{name}' does not exist!")

    data = request.json
    if "attribute" not in data:
        return Response(status=400, response="Field missing! Fields required: (attribute)")

    try:
        layer.add_cluster_mapping(data["attribute"])
        layer_repository.update(layer)
        return Response(status=200)
    except ValueError:
        return Response(status=400, response=f'{data["attribute"]} is not an attribute of the layer!')


def delete_cluster_mapping(use_case: str, table: str, name: str):
    '''
    Removes a mapped property from the list of properties to cluster with.
    @params:
        use_case - Required : string identifier for the use-case the layer belongs to
        table    - Required : unique identifier of the table the layer belongs to
        name     - Required : unique identifier for the layer
    '''
    layer = layer_repository.one(name, use_case, table)
    if layer is None:
        return Response(status=404, response=f"Layer with name '{name}' does not exist!")

    data = request.json
    if "attribute" not in data:
        return Response(status=400, response="Field missing! Fields required: (attribute)")

    try:
        layer.delete_cluster_mapping(data["attribute"])
        layer_repository.update(layer)
        return Response(status=200)
    except ValueError as e:
        print(e)
        return Response(status=400, response=f'{data["attribute"]} is not an attribute of the layer!')


def get_all_for_use_case(use_case: str):
    '''
    Gets all layers assigned to the given use-case.
    '''
    use_case_repository.put(use_case)
    return [layer.to_serializable_dict() for layer in layer_repository.all_for_use_case(use_case)]


def one(use_case: str, table: str, name: str):
    '''
    Fetches a single layer from the DB.
    @params:
        use_case - Required : string identifier for the use-case the layer belongs to
        table    - Required : unique identifier of the table the layer belongs to
        name     - Required : unique identifier for the layer
    '''
    layer = layer_repository.one(name, use_case, table)
    if layer is None:
        return Response(status=404, response=f"Layer with name '{name}' does not exist!")

    return Response(status=200, response=json.dumps(layer.to_serializable_dict()))


def delete_mapping(use_case: str, table: str, name: str):
    '''
    Deletes a mapping, identified by its internal representation, from the layer.
    @params:
        use_case - Required : string identifier for the use-case the layer belongs to
        table    - Required : unique identifier of the table the layer belongs to
        name     - Required : unique identifier for the layer
    '''
    use_case_repository.put(use_case)

    layer = layer_repository.one(name, use_case, table)
    if layer is None:
        return Response(status=404, response=f"Layer with name '{name}' does not exist!")

    data = request.json
    if "internal" not in data:
        return Response(status=400, response="Field missing! Fields required: (internal)")

    try:
        layer.delete_mapping(data["internal"])
        layer_repository.update(layer)
    except ValueError:
        return Response(status=400, response=f'{data["internal"]} is not a property of the layer!')

    return Response(status=200)


def add_mapping(name: str, table: str, use_case: str):
    '''
    Adds a new mapping to the layer identified by name.
    @params:
        use_case - Required : string identifier for the use-case the layer belongs to
        table    - Required : unique identifier of the table the layer belongs to
        name     - Required : unique identifier for the layer
    '''
    use_case_repository.put(use_case)

    layer = layer_repository.one(name, use_case, table)
    if layer is None:
        return Response(status=404, response=f"Layer with name '{name}' does not exist!")

    data = request.json
    if "internal" not in data:
        return Response(status=400, response="Field missing! Fields required: (internal)")

    # check if the table contains the mapping
    table_reference = table_repository.get_for_use_case_and_name(use_case, table)
    if table_reference is None:
        return Response(status=400, response="Table does not exist.")

    if data["internal"] not in table_reference.mappings:
        return Response(status=400, response=f'{data["internal"]} does not exist in the table!')

    layer.add_mapping(data["internal"])
    layer_repository.update(layer)
    return Response(status=200)


def delete_one(use_case: str, table: str, name: str):
    '''
    Deletes a layer and all its mappings from the DB.
    @params:
        use_case - Required : string identifier for the use-case the layer belongs to
        table    - Required : unique identifier of the table the layer belongs to
        name     - Required : unique identifier for the layer
    '''
    layer = layer_repository.one(name, use_case, table)
    if layer is None:
        return Response(status=404, response=f"Layer with name '{name}' does not exist!")

    layer_repository.delete(layer)
    return Response(status=200)


def get_layers_for_use_case_and_table(use_case: str, table: str):
    return [layer.to_serializable_dict() for layer in layer_repository.all_for_use_case_and_table(use_case, table)]
\ No newline at end of file
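For reference, add_complete() above expects a JSON body containing the fields it validates. Below is a hedged client-side sketch; the endpoint URL is an assumption, since the actual paths are defined in configs/routes.yml, which is not part of this excerpt, and the field values are illustrative only.

# Sketch: POST body matching the fields checked in add_complete().
# The URL below is an assumption; the real path is defined in configs/routes.yml.
import requests

body = {
    "name": "demo_layer",             # illustrative
    "use_case": "debug",              # illustrative
    "table": "pizza",                 # illustrative; the table must already exist
    "properties": ["price", "size"],  # must all appear in the table's mappings
    "cluster_properties": ["price"]   # subset of properties
}
resp = requests.post("https://localhost:5000/api/use-cases/debug/layers", json=body, verify=False)
print(resp.status_code, resp.text)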
# global imports
from db.table_repository import TableRepository
from db.use_case_repository import UseCaseRepository
from db.entities.table import Table

from flask import Response, request

table_repository = TableRepository()
use_case_repository = UseCaseRepository()


def all():
    return [t.to_serializable_dict() for t in table_repository.get_all()]


def all_for_use_case(use_case: str):
    return [t.to_serializable_dict() for t in table_repository.get_all_for_use_case(use_case)]


def put_mapping(use_case: str, name: str):
    body = request.json
    if "internal" not in body or "external" not in body:
        return Response(status=400, response="Field missing! Fields required: (internal, external)")

    table = table_repository.get_for_use_case_and_name(use_case, name)
    if table is None:
        print("Table not there, creating it...")
        table = Table(use_case, name, {})
        table_repository.add(table)

    table.add_mapping(body["internal"], body["external"])
    table_repository.update(table)
    return Response(status=200)


def add_complete(use_case: str):
    body = request.json
    body["use_case"] = use_case

    # check if fields are present
    if "name" not in body or "mappings" not in body:
        return Response(status=400, response="Field missing! Fields required: (name, mappings)")

    # check if the table already exists
    table_reference = table_repository.get_for_use_case_and_name(body["use_case"], body["name"])
    if table_reference is not None:
        return Response(status=400, response="Table already exists!")

    use_case_repository.put(body["use_case"])

    table_new = Table.from_serializable_dict(body)
    table_repository.add(table_new)
    return Response(status=200)


def delete_all_for_use_case(use_case: str):
    table_repository.delete_for_use_case(use_case)
    return Response(status=200)


def delete_all():
    table_repository.delete_all()
    return Response(status=200)
\ No newline at end of file
# global imports
from db.entities.layer_adapter import LayerAdapter
from db.use_case_repository import UseCaseRepository
from db.table_repository import TableRepository
from db.repository import Repository
from services.layer_adapter_service import LayerAdapterService

import json
from flask import Response, request

use_case_repository = UseCaseRepository()
table_repository = TableRepository()
repository = Repository()


def all():
    return [use_case.to_serializable_dict() for use_case in use_case_repository.all()]


def delete_all():
    use_case_repository.delete_all()
    repository.delete_all()
    table_repository.delete_all()
    return Response(status=200)


def add():
    body = request.json
    if "name" not in body.keys():
        return Response(status=400, response="Field missing! Fields required: (name)")

    name = body["name"]

    # check if the use-case already exists
    reference = use_case_repository.get_by_name(name)
    if reference is not None:
        return Response(status=400, response="Use-case already exists!")

    use_case_repository.put(name)
    return Response(status=200)
# global imports
from db.repository import Repository
from db.table_repository import TableRepository
from db.use_case_repository import UseCaseRepository
from db.entities.layer_adapter import LayerAdapter
from db.entities.table import Table

from typing import List


class LayerAdapterService:
    _table_repository = TableRepository()
    _layer_repository = Repository()
    _use_case_repository = UseCaseRepository()

    @staticmethod
    def check_layer(layer: LayerAdapter):
        '''
        Checks whether the given layer has correct mappings with regard to the schema of its use-case.
        '''
        # TODO implement with tables
        # schema = LayerAdapterService._schema_repository.put(layer.use_case)
        # for p in layer.properties:
        #     if p not in schema.mappings:
        #         raise ValueError(f'{p} is not existent in the schema!')

    @staticmethod
    def add_complete(layer: LayerAdapter):
        '''
        Adds a new layer to the DB. Attribute mappings and cluster attributes of the given layer
        are used. Before inserting, the layer is checked for consistency with the schema.
        @params:
            layer - Required : layer object holding correct data
        '''
        LayerAdapterService.check_layer(layer)
        LayerAdapterService._layer_repository.add(layer)

    @staticmethod
    def delete_all_use_cases():
        # TODO
        LayerAdapterService._layer_repository.delete_all()
        LayerAdapterService._table_repository.delete_all()
        LayerAdapterService._use_case_repository.delete_all()
\ No newline at end of file
import unittest
import sys

for path in ['../', './']:
    sys.path.insert(1, path)

#####################################
### Don't include for test report ###
#####################################

try:
    class TestCoverage(unittest.TestCase):

        def test_init_main(self):
            try:
                # python -m unittest discover
                # add modules folder to interpreter path
                import sys
                import os
                from pathlib import Path
                from typing import Dict, Any

                modules_path = '../../../modules/'
                if os.path.exists(modules_path):
                    sys.path.insert(1, modules_path)

                # load swagger config
                import connexion
                from security import swagger_util
                from env_info import is_running_locally, get_resources_path
                from flask import request
                from flask import redirect

                app = connexion.App(__name__, specification_dir='configs/')

                from db.entities.layer_adapter import LayerAdapter
            except Exception as e:
                print("Exception found:")
                print(e)

            try:
                import main
            except Exception as e:
                print("Exception found:")
                print(e)

        def test_db_main(self):
            try:
                import network_constants as netconst
                from database.MongoRepositoryBase import MongoRepositoryBase
                from db.entities import layer_adapter
                from db.entities import table
                from db.entities import use_case
                import pymongo
                import json
                from typing import List, Dict
            except Exception as e:
                print("Exception found:")
                print(e)

            try:
                from db import repository
                from db import table_repository
                from db import use_case_repository
            except Exception as e:
                print("Exception found:")
                print(e)

        def test_routes(self):
            try:
                from routes import layer
            except Exception as e:
                print("Exception found:")
                print(e)

            try:
                from routes import tables
            except Exception as e:
                print("Exception found:")
                print(e)

            try:
                from routes import use_case
            except Exception as e:
                print("Exception found:")
                print(e)

        def test_services(self):
            try:
                from services import layer_adapter_service
            except Exception as e:
                print("Exception found:")
                print(e)

        def test_use_case_scripts(self):
            try:
                import network_constants as nc
                from security.token_manager import TokenManager
                import requests
                from typing import List
                from _add_use_case_scripts import requestPost
            except Exception as e:
                print("Exception found:")
                print(e)

            #######
            # from _add_use_case_scripts.bank-app import add_bank_app_schema  ## error: not importing? invalid folder name?
            # from _add_use_case_scripts.bank-app.tables import add_bank_app_schema

            try:
                from _add_use_case_scripts.car_sharing import add_carsharing_schema
                from _add_use_case_scripts.car_sharing.tables import add_car
                from _add_use_case_scripts.car_sharing.tables import add_hash
                from _add_use_case_scripts.car_sharing.tables import add_offer
                from _add_use_case_scripts.car_sharing.tables import add_publication
                from _add_use_case_scripts.car_sharing.tables import add_travel
                from _add_use_case_scripts.car_sharing.tables import add_user
            except Exception as e:
                print("Exception found:")
                print(e)

            try:
                from _add_use_case_scripts.crowd_journalism import add_crowdjournalism_schema
                from _add_use_case_scripts.crowd_journalism.tables import add_classification
                from _add_use_case_scripts.crowd_journalism.tables import add_event
                from _add_use_case_scripts.crowd_journalism.tables import add_purchase
                from _add_use_case_scripts.crowd_journalism.tables import add_tag
                from _add_use_case_scripts.crowd_journalism.tables import add_video
            except Exception as e:
                print("Exception found:")
                print(e)

            try:
                from _add_use_case_scripts.debug import add_debug_schema
                from _add_use_case_scripts.debug.tables import add_pizza_table
            except Exception as e:
                print("Exception found:")
                print(e)

            # from _add_use_case_scripts.smart-energy import add_smart_energy_schema
            # from _add_use_case_scripts.smart-energy.tables import add_smart_energy

            try:
                from _add_use_case_scripts.vialog import add_vialog_schema
                from _add_use_case_scripts.vialog.tables import add_user
                from _add_use_case_scripts.vialog.tables import add_video
            except Exception as e:
                print("Exception found:")
                print(e)

    if __name__ == '__main__':
        unittest.main()
except Exception as e:
    print("Exception found:")
    print(e)
\ No newline at end of file
# add modules folder to interpreter path
import sys
import os

modules_paths = ['./', '../', '../../../modules/']
for path in modules_paths:
    if os.path.exists(path):
        sys.path.insert(1, path)

import unittest
import manage_sys_paths

from db.entities.layer_adapter import LayerAdapter


class Test_Layer_Adapter(unittest.TestCase):

    def test_LayerAdapter_newLayerAdapterObj_validInstantiation(self):
        adapter1 = LayerAdapter("layer1", "use_case", "table", ["a", "c"], ["a"])
        print(adapter1.to_serializable_dict())


if __name__ == '__main__':
    unittest.main()
apiVersion: v1
kind: Service
metadata:
  name: federated-learning
spec:
  type: LoadBalancer
  selector:
    app: federated-learning
  ports:
  - name: http
    port: 80
    targetPort: 5000
    nodePort: 30422
    protocol: TCP
---
apiVersion: apps/v1
kind: Deployment
metadata:
  name: federated-learning
spec:
  replicas: 1
  selector:
    matchLabels:
      app: federated-learning
  template:
    metadata:
      labels:
        app: federated-learning
    spec:
      containers:
      - name: federated-learning
        image: alexx882/federated-learning-microservice
        ports:
        - containerPort: 5000
        volumeMounts:
        - mountPath: /srv/articonf
          name: articonf
      volumes:
      - name: articonf
        hostPath:
          path: /srv/articonf
          type: Directory
---
apiVersion: v1
kind: Service
metadata:
  name: federated-learning-db
spec:
  type: LoadBalancer
  selector:
    app: federated-learning-db
  ports:
  - name: http
    port: 27017
    targetPort: 27017
    nodePort: 30423
    protocol: TCP
---
apiVersion: apps/v1
kind: Deployment
metadata:
  name: federated-learning-db
spec:
  replicas: 1
  selector:
    matchLabels:
      app: federated-learning-db
  template:
    metadata:
      labels:
        app: federated-learning-db
    spec:
      containers:
      - name: federated-learning-db
        image: mongo
        env:
        - name: MONGO_INITDB_ROOT_USERNAME
          value: root
        - name: MONGO_INITDB_ROOT_PASSWORD
          value: root
        ports:
        - containerPort: 27017
        volumeMounts:
        - mountPath: /data/db
          name: dbdata
      volumes:
      - name: dbdata
\ No newline at end of file