Commit b5c28936 authored by Manuel's avatar Manuel

Merge branch 'develop' into feature/dashboard

parents cf1c7d37 72b17cf4
......@@ -23,3 +23,5 @@ src/dashboard/.dart_tool/
src/dashboard/build/61d113a1f91ed254ee3636485549491e/
src/dashboard/build/
reports/
......@@ -61,7 +61,7 @@ def delete_deployment(name) -> int:
if __name__ == '__main__':
deployment_file_paths = []
for p, _, f in os.walk('./'):
for p, _, f in os.walk('./src/'):
for file in f:
if 'deployment.yml' == file:
deployment_file_paths.append(os.path.normpath(p))
......
......@@ -2,7 +2,8 @@ import os
import sys
import importlib.util
import pathlib
import shutil
import re
'''
This script searches for all 'tests/' directories and executes all tests
by cd'ing into the dir and executing unittest discover.
......@@ -12,29 +13,107 @@ Use command line argument '-w' to run on windows.
PY = sys.argv[2] if (len(sys.argv) > 1 and sys.argv[1] == '-py') else 'python3.7' # use -py to use your own python command
ROOT = pathlib.Path(__file__).parent.parent.absolute()
REPORTS = ROOT / 'reports'
TESTS_FOLDER_NAME = os.path.normpath("/tests")
print("Creating VENV")
os.system(f"{PY} -m venv venv")
PY = f"~/smart/venv/bin/{PY}"
print("\nSearching for tests at the path: "+ str(ROOT))
count = 0
resultCodeList = []
microservice_coverage_paths_set = set()
for (dirname, dirs, files) in os.walk(ROOT):
#I assume all the tests are placed in a folder named "tests"
if (TESTS_FOLDER_NAME in str(dirname)) \
and 'src' in str(dirname) \
and not(f"{TESTS_FOLDER_NAME}{os.path.normpath('/')}" in str(dirname)) \
and not("venv" in str(dirname)):
and not("venv" in str(dirname)) \
and not("Lib" in str(dirname)):
try:
print(f"Executing tests in {dirname}")
os.chdir(os.path.normpath(dirname))
# TODO do this during docker image setup
exit_val = os.system(f"{PY} -m pip install -r ../requirements.txt") # install pip dependencies
exit_val = os.system(f"{PY} -m unittest discover") # execute the tests
#resultCodeList.append(exit_val)
#exit_val = os.system(f"{PY} -m unittest discover") # execute the tests
exit_val = os.system(f"{PY} -m coverage run --append --omit=*/site-packages*,*/dist-packages* -m unittest discover") #TEST CODE COVERAGE
microservice_coverage_paths_set.add(os.path.normpath(dirname))
resultCodeList.append(exit_val) #once per folder i.e if 3 tests are in a folder and crash, there will be just one exit val
except Exception as e:
print(e)
continue
try:
cur_dir = pathlib.Path(os.path.normpath(dirname)).parent.absolute()
filename_regular_expresion = re.compile('(test_.*)|(TEST_.*)')
for filename in os.listdir(cur_dir):
if filename_regular_expresion.match(filename):
#gets here only if there is a test file which matches the regular expression in the app folder,
#cur_dir = os.path(dirname).parent()
os.chdir(cur_dir)
print(f"Executing coverage test in {cur_dir}")
exit_val = os.system(f"{PY} -m coverage run --append --omit=*/site-packages* -m unittest discover")
microservice_coverage_paths_set.add(os.path.normpath(cur_dir))
except Exception as e:
print(e)
continue
#CHANGE FOLDER TO REPORTS, in order to combine the coverage
try:
if not os.path.exists(REPORTS):
os.makedirs(REPORTS)
except:
pass
try:
os.chdir(REPORTS)
target = REPORTS
target = os.path.normpath( str(target) + f'/.coverage' )
os.remove(target) #Try to Remove old coverage file, if exists
except Exception as e:
pass
print("Combinging coverages")
counter = 0
for path in microservice_coverage_paths_set:
try:
path += '/.coverage'
original = os.path.normpath( path )
target = REPORTS
target = os.path.normpath( str(target) + f'/.coverage.{counter}' )
counter += 1
shutil.copyfile(original,target) #copy new generated coverage files
os.remove(original)
except Exception as e:
print(e)
continue
try:
coverage_xml_path = os.path.normpath( str(REPORTS) + '/coverage.xml')
os.remove(coverage_xml_path)
#coverage_html_path = os.path.normpath( str(REPORTS) + '/htmlcov' )
#os.rmdir(coverage_html_path)
except Exception as e:
print(e)
print("Generating Combined report")
os.system(f"{PY} -m coverage combine")
os.system(f"{PY} -m coverage xml")
os.system(f"{PY} -m coverage html") #if you want to generate the html as well
firstError = -1
i = 0
......@@ -48,4 +127,5 @@ while i < len(resultCodeList):
if(firstError<0): #no errors found
sys.exit(0)
else:
sys.exit(1) #return code>0
\ No newline at end of file
sys.exit(1) #return code>0
......@@ -24,6 +24,7 @@ This token is used for authentication as _regular user_ on all microservices cur
```
{
"ApplicationType": "use-case identifier as string",
"docType": "use-case-table identifier as string",
"key": "value",
...
}
......@@ -91,4 +92,4 @@ Returns the computed similarity. Two clusters belonging to the SAME layer will b
## Connected Cluster
Intermediary data-structure used only by the function which computes the similarity. Clusters are connected only to other clusters belonging to a DIFFERENT layer.
```GET https://articonf1.itec.aau.at:30103/api/use_cases/{use_case}/tables{table}/connectedClusters``` returns all connected clusters for the given use-case and table.
```GET https://articonf1.itec.aau.at:30103/api/use_cases/{use_case}/tables/{table}/connectedClusters``` returns all connected clusters for the given use-case and table.
FROM python:3
LABEL maintainer="Alexander Lercher"
ENV http_proxy http://proxy.uni-klu.ac.at:3128/
ENV https_proxy http://proxy.uni-klu.ac.at:3128/
LABEL maintainer="Manuel Herold"
RUN apt-get update
RUN pip install flask
......
FROM python:3
LABEL maintainer="Alexander Lercher"
ENV http_proxy http://proxy.uni-klu.ac.at:3128/
ENV https_proxy http://proxy.uni-klu.ac.at:3128/
RUN apt-get update
RUN pip install flask
RUN pip install connexion[swagger-ui]
......
FROM python:3
LABEL maintainer="Alexander Lercher"
ENV http_proxy http://proxy.uni-klu.ac.at:3128/
ENV https_proxy http://proxy.uni-klu.ac.at:3128/
LABEL maintainer="Manuel Herold"
RUN apt-get update
RUN pip install flask
......
FROM python:3
LABEL maintainer="Alexander Lercher"
ENV http_proxy http://proxy.uni-klu.ac.at:3128/
ENV https_proxy http://proxy.uni-klu.ac.at:3128/
RUN apt-get update
EXPOSE 5000
......
......@@ -18,9 +18,11 @@ from pathlib import Path
from env_info import is_running_locally, get_resources_path
from flask import request
from flask import redirect
from flask_cors import CORS
# load swagger config
app = connexion.App(__name__, specification_dir='configs/')
CORS(app.app)
@app.app.before_request
def before_request():
......
......@@ -8,11 +8,13 @@ Click==7.0
clickclick==1.2.2
colorama==0.4.3
connexion==2.6.0
coverage==5.3.1
cryptography==3.1
cycler==0.10.0
decorator==4.4.1
Deprecated==1.2.7
Flask==1.1.1
Flask-Cors==3.0.10
idna==2.8
importlib-metadata==1.5.0
inflection==0.3.1
......
import unittest
import sys
for path in ['../', './']:
sys.path.insert(1, path)
#####################################
### Don't include for test report ###
#####################################
try:
class TestCoverage(unittest.TestCase):
def test_init_main(self):
try:
# python -m unittest discover
from db.entities import Cluster
from datetime import date, datetime
import json
# add modules folder to interpreter path
import sys
import os
modules_path = '../../../modules/'
if os.path.exists(modules_path):
sys.path.insert(1, modules_path)
### init logging ###
import logging
LOG_FORMAT = ('%(levelname) -5s %(asctime)s %(name)s:%(funcName) -35s %(lineno) -5d: %(message)s')
logging.basicConfig(level=logging.INFO, format=LOG_FORMAT)
LOGGER = logging.getLogger(__name__)
#############################
import connexion
from security import swagger_util
from pathlib import Path
from env_info import is_running_locally, get_resources_path
from flask import request
from flask import redirect
except Exception as e:
print ("Exception found:")
print (e)
try:
import main #error when importing main, ModuleNotFoundError: No module named 'security'
#exec(open('main.py').read())
except Exception as e:
print ("Exception found:")
print (e)
def test_init_run_clustering(self):
try:
import sys
import os
modules_path = '../../../modules/'
if os.path.exists(modules_path):
sys.path.insert(1, modules_path)
import json
from db.entities import Layer, Cluster
from typing import List, Dict, Tuple, Any
from db.repository import Repository
from processing.clustering import Clusterer, ClusterResult
except Exception as e:
print ("Exception found:")
print (e)
try:
import run_clustering
except Exception as e:
print ("Exception found:")
print (e)
def test_init_run_node(self):
try:
import sys
import os
modules_path = '../../../modules/'
if os.path.exists(modules_path):
sys.path.insert(1, modules_path)
import json
import urllib3
urllib3.disable_warnings(urllib3.exceptions.InsecureRequestWarning)
except Exception as e:
print ("Exception found:")
print (e)
try:
import processing.fetching.fetching as f
import run_node_fetching
except Exception as e:
print ("Exception found:")
print (e)
def test_init_run_similarity(self):
try:
import processing.similarityFiles.similarityMain as SimilarityCalc
from db.repository import Repository
import run_similarity_calc
except Exception as e:
print ("Exception found:")
print (e)
def test_init_run_time(self):
try:
import sys
import os
modules_path = '../../../modules/'
if os.path.exists(modules_path):
sys.path.insert(1, modules_path)
import json
from datetime import datetime, date
from db.repository import Repository
from db.entities import ClusterSet, Cluster, Layer, TimeSlice
from typing import Tuple, Dict, Any, List
except Exception as e:
print ("Exception found:")
print (e)
try:
import run_time_slicing
except Exception as e:
print ("Exception found:")
print (e)
if __name__ == '__main__':
unittest.main()
except Exception as e:
print ("Exception found:")
print (e)
\ No newline at end of file
import unittest
import sys
for path in ['../', './']:
......@@ -19,6 +20,5 @@ class TestCluster(unittest.TestCase):
self.assertEqual(1, c.cluster_label)
self.assertEqual([1, 2, 3], c.nodes)
if __name__ == '__main__':
unittest.main()
......@@ -11,7 +11,9 @@ class TestClusterResult(unittest.TestCase):
converter:ClusterResultConverter = None
def setUp(self):
self.converter = ClusterResultConverter()
def test_result_undefined_feature(self):
cluster_groups = self._get_some_cluster_groups_1d()
......@@ -29,11 +31,13 @@ class TestClusterResult(unittest.TestCase):
cluster_groups=cluster_groups,
features=['v']
)
self.assert_correct_cluster_result_len(cluster_groups, cluster_res)
self.assert_correct_cluster_result_labels(['-1.0 -- 1.0','10.0 -- 11.0','2.0 -- 2.0'], cluster_res)
def test_result_2d_features(self):
cluster_groups = self._get_some_cluster_groups_2d()
cluster_res = self.converter.convert_to_cluster_results(
cluster_groups=cluster_groups,
......@@ -42,6 +46,7 @@ class TestClusterResult(unittest.TestCase):
self.assert_correct_cluster_result_len(cluster_groups, cluster_res)
self.assert_correct_cluster_result_labels([str((0.0,0.0)), str((10.5,10.5)), str((2.0,2.0)), str((3.0,6.0))], cluster_res)
#region Custom Assertions
......@@ -52,6 +57,7 @@ class TestClusterResult(unittest.TestCase):
self.assertEqual(len(expected[i]), len(actual[i].nodes))
self.assertEqual(expected[i], actual[i].nodes)
def assert_correct_cluster_result_labels(self, expected: List[str], actual: Dict[Any, ClusterResult]):
self.assertEqual(len(expected), len(actual))
for i in range(len(expected)):
......
import unittest
import sys
for path in ['../', './']:
sys.path.insert(1, path)
# python -m unittest discover
from processing.clustering import Clusterer, ClusterResult
import numpy as np
......@@ -187,8 +190,9 @@ class TestClusterer(unittest.TestCase):
self.fail(f"Cluster key ({k}, {type(k)}) not in result.")
self.assertListEqual(expected[k], actual[k].nodes)
#endregion helper methods
if __name__ == '__main__':
unittest.main()
......@@ -23,7 +23,7 @@ import json
class TestSimilarity(unittest.TestCase):
'''Tests the similarity calculation which works without object orientation.'''
def test_integration_similarityCalculation(self):
def test_integration_calculateSimilarity_ClustersDict_CorrectValue(self):
'''
Only for testing, can be deleted at any time.\n
Served as a testing example to make sure the computations are correct
......
FROM python:3
LABEL maintainer="Alexander Lercher"
ENV http_proxy http://proxy.uni-klu.ac.at:3128/
ENV https_proxy http://proxy.uni-klu.ac.at:3128/
RUN apt-get update
EXPOSE 5000
......
......@@ -20,6 +20,7 @@ from env_info import is_running_locally, get_resources_path
from messaging.ReconnectingMessageManager import ReconnectingMessageManager
from messaging.MessageHandler import MessageHandler
from flask import request
from flask_cors import CORS
from flask import redirect
# init message handler
......@@ -30,6 +31,7 @@ def message_received_callback(channel, method, properties, body):
# load swagger config
app = connexion.App(__name__, specification_dir='configs/')
CORS(app.app)
@app.app.before_request
def before_request():
......
......@@ -7,8 +7,10 @@ click==7.1.2
clickclick==1.2.2
colorama==0.4.3
connexion==2.7.0
coverage==5.3.1
cryptography==3.1
Flask==1.1.2
Flask-Cors==3.0.10
idna==2.9
importlib-metadata==1.6.1
inflection==0.5.0
......
import unittest
import sys
for path in ['../', './']:
sys.path.insert(1, path)
#####################################
### Don't include for test report ###
#####################################
try:
class TestCoverage(unittest.TestCase):
def test_init_main(self):
try:
# add modules folder to interpreter path
import sys
import os
import prance
from pathlib import Path
modules_path = '../../../modules/'
if os.path.exists(modules_path):
sys.path.insert(1, modules_path)
# init logging to file
import logging
LOG_FORMAT = ('%(levelname) -5s %(asctime)s %(name)s:%(funcName) -35s %(lineno) -5d: %(message)s')
logging.basicConfig(level=logging.INFO, format=LOG_FORMAT)
LOGGER = logging.getLogger(__name__)
#################################
import connexion
from security import swagger_util
from env_info import is_running_locally, get_resources_path
from messaging.ReconnectingMessageManager import ReconnectingMessageManager
from messaging.MessageHandler import MessageHandler
from flask import request
from flask import redirect
# init message handler
from db.repository import Repository
except Exception as e:
print ("Exception found:")
print (e)
try:
import main
except Exception as e:
print ("Exception found:")
print (e)
def test_routes(self):
try:
from routes import debug
except Exception as e:
print ("Exception found:")
print (e)
try:
from routes import layers
except Exception as e:
print ("Exception found:")
print (e)
try:
from routes import nodes
except Exception as e:
print ("Exception found:")
print (e)
def test_messaging(self):
try:
import network_constants as netconst
from security.token_manager import TokenManager
from db.entities import Layer
import json
import requests
from typing import Dict, List
from threading import Thread
import logging
except Exception as e:
print ("Exception found:")
print (e)
try:
from messaging import MessageHandler
except Exception as e:
print ("Exception found:")
print (e)
def test_db(self):
try:
import network_constants as netconst
from database.MongoRepositoryBase import MongoRepositoryBase
from db.entities import Layer
import pymongo
import json
from typing import List, Dict
# init logging to file
import logging
except Exception as e:
print ("Exception found:")
print (e)
try:
from db import repository
from db.entities import layer
except Exception as e:
print ("Exception found:")
print (e)
if __name__ == '__main__':
unittest.main()
except Exception as e:
print ("Exception found:")
print (e)
\ No newline at end of file
......@@ -67,8 +67,8 @@ class Test_Pipeline(unittest.TestCase):
}
}
}
def testTraceProcessing(self):
#original name testTraceProcessing
def test_handle_new_trace_newTraceMsg_correctlyInserted(self):
msg = self._buildTraceMessage()
self.handler.handle_new_trace(msg["content"])
self.assertEqual(len(self.handler._repository.layernodes),1)
......
......@@ -84,4 +84,31 @@ else:
BUSINESS_LOGIC_REST_PORT = 30420
BUSINESS_LOGIC_DB_PORT = 30421
## Federated Learning
# Hostnames/ports differ between in-cluster access ("server") and external access.
if server:
FEDERATED_LEARNING_HOSTNAME = 'federated-learning'
# DB access currently disabled; kept for future use (typo fixed: was {EDERATED_...}).
#FEDERATED_LEARNING_DB_HOSTNAME = f'{FEDERATED_LEARNING_HOSTNAME}-db'
FEDERATED_LEARNING_REST_PORT = 80
#FEDERATED_LEARNING_DB_PORT = 27017
else:
FEDERATED_LEARNING_HOSTNAME = 'articonf1.itec.aau.at'
#FEDERATED_LEARNING_DB_HOSTNAME = 'articonf1.itec.aau.at'
FEDERATED_LEARNING_REST_PORT = 30422
#FEDERATED_LEARNING_DB_PORT = 30423
#endregion Participation Hub
#region Federated Training
## Federated Training
if server:
FEDERATED_TRAINING_HOSTNAME = 'gpu3.itec.aau.at'
#FEDERATED_TRAINING_DB_HOSTNAME = f'{FEDERATED_TRAINING_HOSTNAME}-db'
FEDERATED_TRAINING_REST_PORT = 30424
#FEDERATED_TRAINING_DB_PORT = 27017
else:
# NOTE(review): host and port are identical in both branches here — confirm intended.
FEDERATED_TRAINING_HOSTNAME = 'gpu3.itec.aau.at'
#FEDERATED_TRAINING_DB_HOSTNAME = 'articonf1.itec.aau.at'
FEDERATED_TRAINING_REST_PORT = 30424
#FEDERATED_TRAINING_DB_PORT = 30425
#endregion Federated Training
\ No newline at end of file
FROM python:3
LABEL maintainer="Alexander Lercher"
ENV http_proxy http://proxy.uni-klu.ac.at:3128/
ENV https_proxy http://proxy.uni-klu.ac.at:3128/
RUN apt-get update
EXPOSE 5000
......
......@@ -4,4 +4,4 @@ from _add_use_case_scripts.car_sharing.tables import add_offer
from _add_use_case_scripts.car_sharing.tables import add_publication
from _add_use_case_scripts.car_sharing.tables import add_travel
from _add_use_case_scripts.car_sharing.tables import add_user
from _add_use_case_scripts.car_sharing.tables import requestPost
\ No newline at end of file
from _add_use_case_scripts import requestPost
\ No newline at end of file
import sys
import os
from pathlib import Path
from typing import Dict, Any
import requests
modules_paths = ['.', '../../../modules/']
for modules_path in modules_paths:
if os.path.exists(modules_path):
sys.path.insert(1, modules_path)
# import _add_use_case_scripts.car_sharing.tables.add_user as user
from _add_use_case_scripts.car_sharing_official.tables import add_car, add_hash, add_media, add_offer, add_offerEndPlaces, add_publication, add_travel, add_travelCancelledBy
from _add_use_case_scripts.car_sharing_official.tables import add_travelFinishedBy, add_travelStartedBy, add_travelSuggestedEndPlaces, add_travelUsers, add_user
import network_constants as nc
from security.token_manager import TokenManager
def add_use_case(use_case: str):
    """Register the given use-case on the business-logic REST service.

    Args:
        use_case: name of the use-case to create. (Previously this argument
            was immediately overwritten with a hard-coded value, making it
            dead; it is now honored. The existing caller passes the same
            value, so behavior is unchanged for current usage.)
    """
    jwt = TokenManager.getInstance().getToken()
    url = f"https://articonf1.itec.aau.at:30420/api/use-cases"
    response = requests.post(
        url,
        verify=False,  # server uses a self-signed certificate
        proxies={"http": None, "https": None},  # bypass any configured proxy
        headers={"Authorization": f"Bearer {jwt}"},
        json={"name": use_case},
    )
    print(url + ": " + str(response.content))
if __name__ == "__main__":
use_case = "car-sharing-official"
# disable ssl warnings :)
requests.packages.urllib3.disable_warnings()
add_use_case(use_case)
add_car.main(use_case)
add_hash.main(use_case)
add_media.main(use_case)
add_offer.main(use_case)
add_offerEndPlaces.main(use_case)
add_publication.main(use_case)
add_travel.main(use_case)
add_travelCancelledBy.main(use_case)
add_travelFinishedBy.main(use_case)
add_travelStartedBy.main(use_case)
add_travelSuggestedEndPlaces.main(use_case)
add_travelUsers.main(use_case)
add_user.main(use_case)
\ No newline at end of file
from _add_use_case_scripts.car_sharing_official.tables import add_car
from _add_use_case_scripts.car_sharing_official.tables import add_hash
from _add_use_case_scripts.car_sharing_official.tables import add_offer
from _add_use_case_scripts.car_sharing_official.tables import add_offerEndPlaces
from _add_use_case_scripts.car_sharing_official.tables import add_publication
from _add_use_case_scripts.car_sharing_official.tables import add_travel
from _add_use_case_scripts.car_sharing_official.tables import add_travelCancelledBy
from _add_use_case_scripts.car_sharing_official.tables import add_travelFinishedBy
from _add_use_case_scripts.car_sharing_official.tables import add_travelStartedBy
from _add_use_case_scripts.car_sharing_official.tables import add_travelSuggestedEndPlaces
from _add_use_case_scripts.car_sharing_official.tables import add_travelUsers
from _add_use_case_scripts.car_sharing_official.tables import add_user
from _add_use_case_scripts import requestPost
from _add_use_case_scripts.requestPost import postLayersToSwagger, postTableToSwagger
def add_table(use_case: str, table_name:str):
    """Register the car table's column mappings with the server.

    Every plain attribute maps to itself; the license plate additionally
    serves as the row's unique identifier.
    """
    attribute_names = (
        "brand",
        "carLicensePlate",
        "colour",
        "deleted",
        "model",
        "numberOfEvaluations",
        "observations",
        "ownerId",
        "seats",
        "state",
        "sumOfEvaluations",
        "year",
    )
    # identity mapping for every plain attribute
    mappings = {name: name for name in attribute_names}
    # the license plate doubles as the unique row identifier
    mappings["UniqueID"] = "carLicensePlate"
    table_definition = {
        "name": table_name,
        "mappings": mappings,
    }
    postTableToSwagger(use_case, table_definition)
def add_layers(use_case:str, table_name: str):
layers = [
{
"use_case": use_case,
"table": table_name,
"name": "GeneralCar_layer",
"properties": [
"UniqueID",
"brand",
"carLicensePlate",
"colour",
"deleted",
"model",
"numberOfEvaluations",
"observations",
"ownerId",
"seats",
"state",
"sumOfEvaluations",
"year"
],
"cluster_properties": [
"brand",
"model",
"seats",
"year",
"ownerId",
"state"
]
},
{
"use_case": use_case,
"table": table_name,
"name": "Owner_layer",
"properties": [
"UniqueID",
"brand",
"carLicensePlate",
"colour",
"deleted",
"model",
"numberOfEvaluations",
"observations",
"ownerId",
"seats",
"state",
"sumOfEvaluations",
"year"
],
"cluster_properties": [
"ownerId",
"UniqueID"
]
},
{
"use_case": use_case,
"table": table_name,
"name": "CarAge_layer",
"properties": [
"UniqueID",
"brand",
"carLicensePlate",
"colour",
"deleted",
"model",
"numberOfEvaluations",
"observations",
"ownerId",
"seats",
"state",
"sumOfEvaluations",
"year"
],
"cluster_properties": [
"year",
"state"
]
},
{
"use_case": use_case,
"table": table_name,
"name": "Model_layer",
"properties": [
"UniqueID",
"brand",
"carLicensePlate",
"colour",
"deleted",
"model",
"numberOfEvaluations",
"observations",
"ownerId",
"seats",
"state",
"sumOfEvaluations",
"year"
],
"cluster_properties": [
"brand",
"model",
"seats"
]
},
{
"use_case": use_case,
"table": table_name,
"name": "CarState_layer",
"properties": [
"UniqueID",
"ownerId",
"deleted",
"state"
],
"cluster_properties": [
"deleted",
"state"
]
},
{
"use_case": use_case,
"table": table_name,
"name": "CarEvaluation_layer",
"properties": [
"UniqueID",
"brand",
"colour",
"deleted",
"model",
"numberOfEvaluations",
"observations",
"ownerId",
"seats",
"state",
"sumOfEvaluations",
"year"
],
"cluster_properties": [
"numberOfEvaluations",
"sumOfEvaluations"
]
}
]
postLayersToSwagger(use_case,layers)
def main(use_case: str):
    """Register the car table and all of its clustering layers."""
    print("CAR")
    target_table = "car"
    add_table(use_case, target_table)
    add_layers(use_case, target_table)
\ No newline at end of file
from _add_use_case_scripts.requestPost import postLayersToSwagger, postTableToSwagger
def add_table(use_case: str, table_name: str):
    """Register the hash table's column mappings with the server.

    Besides the raw ``connection`` attribute, several fields are flattened
    out of the nested connection/document structure ('//' denotes nesting).
    """
    # the single pass-through attribute
    mappings = {"connection": "connection"}
    # fields extracted from the nested document/connection structure
    mappings["UniqueID"] = "documents[0]//id"
    mappings["database"] = "connection//database"
    mappings["databaseType"] = "connection//databaseType"
    mappings["host"] = "connection//host"
    mappings["port"] = "connection//port"
    mappings["documentCollection"] = "documents[0]//collection"
    mappings["documentHash"] = "documents[0]//hash"
    mappings["documentId"] = "documents[0]//id"
    table_definition = {
        "name": table_name,
        "mappings": mappings,
    }
    postTableToSwagger(use_case, table_definition)
def add_layers(use_case:str, table_name: str):
layers = [
{
"use_case": use_case,
"table": table_name,
"name": "document_Layer",
"properties": [
"UniqueID",
"documentCollection",
"documentHash",
"documentId"
],
"cluster_properties": [
"documentId",
"documentCollection"
]
},
{
"use_case": use_case,
"table": table_name,
"name": "hostAndPort_Layer",
"properties": [
"UniqueID",
"database",
"databaseType",
"host",
"port"
],
"cluster_properties": [
"host",
"port"
]
},
{
"use_case": use_case,
"table": table_name,
"name": "databaseType_Layer",
"properties": [
"UniqueID",
"database",
"databaseType",
"host",
"port"
],
"cluster_properties": [
"databaseType"
]
}
]
postLayersToSwagger(use_case,layers)
def main(use_case: str):
print("HASH")
table_name = "hash"
add_table(use_case,table_name)
add_layers(use_case,table_name)
\ No newline at end of file
from _add_use_case_scripts.requestPost import postLayersToSwagger, postTableToSwagger
def add_table(use_case: str, table_name: str):
    """Register the media table's column mappings with the server.

    Plain attributes map to themselves; a media item is unique per
    publication, hence the composite ``publication+id`` key.
    """
    plain_fields = (
        "data",
        "extension",
        "id",
        "name",
        "publication",
        "type",
    )
    mappings = {field: field for field in plain_fields}
    # composite key: media id is only unique within its publication
    mappings["UniqueID"] = "publication+id"
    table_definition = {
        "name": table_name,
        "mappings": mappings,
    }
    postTableToSwagger(use_case, table_definition)
def add_layers(use_case:str, table_name: str):
    """Register the media table's clustering layers with the server.

    Three layers cluster media items by extension, type, and name
    respectively.
    """
    layers = [
        {
            "use_case": use_case,
            "table": table_name,
            "name": "MediaExtension_Layer",
            "properties": [
                "UniqueID",
                "data",
                "extension",
                "id",
                "name",
                "publication",
                "type"
            ],
            "cluster_properties": [
                "extension"
            ]
        },
        {
            "use_case": use_case,
            "table": table_name,
            "name": "MediaType_Layer",
            "properties": [
                "UniqueID",
                "data",
                "extension",
                "id",
                "name",
                "publication",
                "type"
            ],
            "cluster_properties": [
                "type"
            ]
        },
        {
            "use_case": use_case,
            "table": table_name,
            "name": "MediaName_Layer",
            # FIX: "UniqueID" was missing here, unlike every sibling layer;
            # added for consistency so nodes in this layer carry their id.
            "properties": [
                "UniqueID",
                "data",
                "extension",
                "id",
                "name",
                "publication",
                "type"
            ],
            "cluster_properties": [
                "name"
            ]
        }
    ]
    postLayersToSwagger(use_case, layers)
def main(use_case: str):
print("MEDIA")
table_name = "media"
add_table(use_case,table_name)
add_layers(use_case, table_name)
\ No newline at end of file
from _add_use_case_scripts.requestPost import postLayersToSwagger, postTableToSwagger
def add_table(use_case: str, table_name: str):
'''
take the columns and add the mappings at the server
replace all "/"'s in the internal representation with a "_"
'''
columns = [
"available",
"deleted",
"deposit",
"endDate",
"id",
"priceForKm",
"priceForTime",
"startDate"
]
columns = { c : c for c in columns }
columns["UniqueID"] = "id"
columns["carID"] = "car//carLicensePlate"
columns["carOwner"] = "car//ownerId"
columns["startPlaceAddress"] = "startPlace//address"
columns["startPlaceLatitude"] = "startPlace//latitude"
columns["startPlaceLongitude"] = "startPlace//longitude"
table = {
"name": table_name,
"mappings": columns
}
postTableToSwagger(use_case,table)
def add_layers(use_case:str, table_name: str):
layers = [
{
"use_case": use_case,
"table": table_name,
"name": "OfferStartPlace_layer",
"properties": [
"UniqueID",
"carID",
"carOwner",
"startPlaceAddress",
"startPlaceLatitude",
"startPlaceLongitude",
"startDate"
],
"cluster_properties": [
"carOwner",
"startPlaceLatitude",
"startPlaceLongitude"
]
},
{
"use_case": use_case,
"table": table_name,
"name": "OfferedCar_layer",
"properties": [
"UniqueID",
"carID",
"carOwner"
],
"cluster_properties": [
"carID",
"carOwner"
]
},
{
"use_case": use_case,
"table": table_name,
"name": "OfferPrice_layer",
"properties": [
"UniqueID",
"carOwner",
"priceForKm",
"priceForTime",
"deposit"
],
"cluster_properties": [
"carOwner",
"priceForKm",
"priceForTime",
"deposit"
]
},
{
"use_case": use_case,
"table": table_name,
"name": "OfferTime_layer",
"properties": [
"UniqueID",
"carOwner",
"startDate",
"endDate"
],
"cluster_properties": [
"carOwner",
"startDate",
"endDate"
]
},
{
"use_case": use_case,
"table": table_name,
"name": "OfferStatus_layer",
"properties": [
"UniqueID",
"carOwner",
"available",
"deleted"
],
"cluster_properties": [
"carOwner",
"available",
"deleted"
]
}
]
postLayersToSwagger(use_case,layers)
def main(use_case: str):
print("OFFER")
table_name = "offer"
add_table(use_case,table_name)
add_layers(use_case,table_name)
\ No newline at end of file
from _add_use_case_scripts.requestPost import postLayersToSwagger, postTableToSwagger
def add_table(use_case: str, table_name: str):
'''
take the columns and add the mappings at the server
replace all "/"'s in the internal representation with a "_"
'''
#columns = [
# # None
#]
#columns = { c : c for c in columns }
columns = dict()
#TODO unique id is dependent on offferId -> error?
columns["offerId"] = "offer//id"
columns["UniqueID"] = "offerId+endPlacesLatitude+endPlacesLongitude"
columns["endPlacesAddress"] = "endPlaces[0]//address"
columns["endPlacesLatitude"] = "endPlaces[0]//latitude"
columns["endPlacesLongitude"] = "endPlaces[0]//longitude"
table = {
"name": table_name,
"mappings": columns
}
postTableToSwagger(use_case,table)
def add_layers(use_case:str, table_name: str):
layers = [
{
"use_case": use_case,
"table": table_name,
"name": "OfferEndPlace_layer",
"properties": [
"UniqueID",
"offerId",
"endPlacesAddress",
"endPlacesLatitude",
"endPlacesLongitude"
],
"cluster_properties": [
"endPlacesLatitude",
"endPlacesLongitude"
]
},
{
"use_case": use_case,
"table": table_name,
"name": "OfferEndAddress_layer",
"properties": [
"UniqueID",
"offerId",
"endPlacesAddress",
"endPlacesLatitude",
"endPlacesLongitude"
],
"cluster_properties": [
"endPlacesAddress"
]
}
]
postLayersToSwagger(use_case,layers)
def main(use_case: str):
print("OFFER_ENDPLACES")
table_name = "offerEndPlaces"
add_table(use_case,table_name)
add_layers(use_case,table_name)
\ No newline at end of file
from _add_use_case_scripts.requestPost import postLayersToSwagger, postTableToSwagger
def add_table(use_case: str, table_name: str):
'''
take the columns and add the mappings at the server
replace all "/"'s in the internal representation with a "_"
'''
columns = [
"answerTo",
"date",
"deleted",
"id",
"labels",
"text",
"user"
]
columns = { c : c for c in columns }
columns["UniqueID"] = "id"
columns["latitude"] = "location//latitude"
columns["longitude"] = "location//longitude"
columns["likesNumber"] = "likesUsers//length"
columns["reportsNumber"] = "usersReports//length"
table = {
"name": table_name,
"mappings": columns
}
postTableToSwagger(use_case,table)
def add_layers(use_case:str, table_name: str):
layers = [
{
"use_case": use_case,
"table": table_name,
"name": "PublicationLocation_Layer",
"properties": [
"UniqueID",
"user",
"latitude",
"longitude"
],
"cluster_properties": [
"latitude",
"longitude"
]
},
{
"use_case": use_case,
"table": table_name,
"name": "PublicationTime_Layer",
"properties": [
"UniqueID",
"user",
"latitude",
"longitude",
"date"
],
"cluster_properties": [
"date"
]
},
{
"use_case": use_case,
"table": table_name,
"name": "PublicationUser_Layer",
"properties": [
"UniqueID",
"text",
"user",
"labels"
],
"cluster_properties": [
"user"
]
},
{
"use_case": use_case,
"table": table_name,
"name": "PublicationStatus_Layer",
"properties": [
"UniqueID",
"text",
"user",
"deleted",
"answerTo"
],
"cluster_properties": [
"deleted",
"answerTo"
]
},
{
"use_case": use_case,
"table": table_name,
"name": "PublicationLikeReport_Layer",
"properties": [
"UniqueID",
"text",
"user",
"deleted",
"answerTo",
"likesNumber",
"reportsNumber"
],
"cluster_properties": [
"likesNumber",
"reportsNumber"
]
}
]
postLayersToSwagger(use_case,layers)
def main(use_case: str):
print("PUBLICATION")
table_name = "publication"
add_table(use_case,table_name)
add_layers(use_case, table_name)
\ No newline at end of file
from _add_use_case_scripts.requestPost import postLayersToSwagger, postTableToSwagger
def add_table(use_case: str, table_name: str):
'''
take the columns and add the mappings at the server
replace all "/"'s in the internal representation with a "_"
'''
columns = [
"carLicensePlate",
"carOwner",
"depositBalance",
"endDate",
"id",
"observations",
"offerId",
"priceBalance",
"rentForTime",
"rewardBalance",
"seats",
"startDate",
"status",
"totalPrice",
]
columns = { c : c for c in columns }
columns["UniqueID"] = "id"
columns["endPlaceAddress"] = "endPlace//address"
columns["endPlaceLatitude"] = "endPlace//latitude"
columns["endPlaceLongitude"] = "endPlace//longitude"
columns["startPlaceAddress"] = "startPlace//address"
columns["startPlaceLatitude"] = "startPlace//latitude"
columns["startPlaceLongitude"] = "startPlace//longitude"
columns["checkedByUser"] = "checkedBy[0]//user"
columns["checkedByMoment"] = "checkedBy[0]//moment"
columns["checkedByLatitude"] = "checkedBy[0]//coordinate//latitude"
columns["checkedByLongitude"] = "checkedBy[0]//coordinate//longitude"
columns["checkedByState"] = "checkedBy[0]//state"
columns["scoreNumber"] = "score//length"
columns["scoreSum"] = "score//sum"
table = {
"name": table_name,
"mappings": columns
}
postTableToSwagger(use_case,table)
def add_layers(use_case: str, table_name: str):
    '''
    Register the clustering layers for the travel table at the server.

    @params:
        use_case   - Required : use-case the layers belong to
        table_name - Required : table the layers are defined on
    '''
    layers = [
        {
            "use_case": use_case,
            "table": table_name,
            "name": "StartingPoint_Layer",
            "properties": [
                'UniqueID',
                'startPlaceLatitude',
                'startPlaceLongitude',
                'startDate',
                'endDate'
            ],
            # fixed: 'startPlaceLatitude' was listed twice — location layers
            # cluster on both coordinates (cf. CheckedByLocation_Layer below)
            "cluster_properties": [
                'startPlaceLatitude',
                'startPlaceLongitude'
            ]
        },{
            "use_case": use_case,
            "table": table_name,
            "name": "StartingTime_Layer",
            "properties": [
                'UniqueID',
                'startDate',
                'endDate'
            ],
            "cluster_properties": [
                'startDate'
            ]
        },{
            "use_case": use_case,
            "table": table_name,
            "name": "CheckedByLocation_Layer",
            "properties": [
                'UniqueID',
                'checkedByUser',
                'checkedByMoment',
                'checkedByLatitude',
                'checkedByLongitude',
                'checkedByState',
                'startPlaceLatitude',
                'startPlaceLongitude'
            ],
            "cluster_properties": [
                'checkedByLatitude',
                'checkedByLongitude'
            ]
        },{
            "use_case": use_case,
            "table": table_name,
            "name": "CheckedByMoment_Layer",
            "properties": [
                'UniqueID',
                'checkedByUser',
                'checkedByMoment',
                'checkedByLatitude',
                'checkedByLongitude',
                'checkedByState',
                'startDate',
                'startPlaceLatitude',
                'startPlaceLongitude'
            ],
            "cluster_properties": [
                'checkedByMoment',
                'startDate'
            ]
        },{
            "use_case": use_case,
            "table": table_name,
            "name": "FinishedTime_Layer",
            "properties": [
                'UniqueID',
                'startDate',
                'endDate'
            ],
            "cluster_properties": [
                'endDate'
            ]
        },{
            "use_case": use_case,
            "table": table_name,
            "name": "Price_Layer",
            "properties": [
                'UniqueID',
                'totalPrice',
                'priceBalance',
                'depositBalance',
                'rewardBalance'
            ],
            "cluster_properties": [
                'totalPrice',
                'priceBalance',
                'depositBalance',
                'rewardBalance'
            ]
        },
        {
            "use_case": use_case,
            "table": table_name,
            "name": "Status_Layer",
            "properties": [
                'UniqueID',
                'status',
                'rentForTime'
            ],
            "cluster_properties": [
                'status',
                'rentForTime'
            ]
        },
        {
            "use_case": use_case,
            "table": table_name,
            "name": "Score_Layer",
            "properties": [
                'UniqueID',
                'scoreSum',
                'scoreNumber'
            ],
            "cluster_properties": [
                'scoreSum',
                'scoreNumber'
            ]
        }
    ]
    postLayersToSwagger(use_case, layers)
def main(use_case: str):
    """Register the travel table and its clustering layers for the use case."""
    print("TRAVEL")
    table = "travel"
    add_table(use_case, table)
    add_layers(use_case, table)
from _add_use_case_scripts.requestPost import postLayersToSwagger, postTableToSwagger
def add_table(use_case: str, table_name: str):
    '''
    Register the travelCancelledBy table's column mappings at the server.

    Nested document paths use "//" as the separator ("/" in the internal
    representation is replaced with "_").
    '''
    mappings = {"travelId": "travelId"}
    # composite primary key
    mappings["UniqueID"] = "travelId+user"
    # fields flattened out of the first cancelledBy entry
    mappings["moment"] = "travel//cancelledBy[0]//moment"
    mappings["latitude"] = "travel//cancelledBy[0]//cancellationPlace//latitude"
    mappings["longitude"] = "travel//cancelledBy[0]//cancellationPlace//longitude"
    mappings["user"] = "travel//cancelledBy[0]//user"
    postTableToSwagger(use_case, {"name": table_name, "mappings": mappings})
def add_layers(use_case: str, table_name: str):
    '''
    Register the clustering layers for the travelCancelledBy table.

    @params:
        use_case   - Required : use-case the layers belong to
        table_name - Required : table the layers are defined on
    '''
    # both layers expose the same properties; only the clustering differs
    properties = [
        'UniqueID',
        'travelId',
        'moment',
        'latitude',
        'longitude',
        'user'
    ]
    layers = [
        {
            "use_case": use_case,
            "table": table_name,
            "name": "CancelledByAtLocation_Layer",
            "properties": properties,
            # fixed: 'latitude' was listed twice — cluster on both coordinates
            "cluster_properties": [
                'latitude',
                'longitude'
            ]
        },
        {
            "use_case": use_case,
            "table": table_name,
            "name": "CancelledByAtMoment_Layer",
            "properties": properties,
            "cluster_properties": [
                'moment'
            ]
        }
    ]
    postLayersToSwagger(use_case, layers)
def main(use_case: str):
    """Register the travelCancelledBy table and its layers for the use case."""
    print("TRAVEL_CancelledBY")
    table = "travelCancelledBy"
    add_table(use_case, table)
    add_layers(use_case, table)
from _add_use_case_scripts.requestPost import postLayersToSwagger, postTableToSwagger
def add_table(use_case: str, table_name: str):
    '''
    Register the travelFinishedBy table's column mappings at the server.

    Nested document paths use "//" as the separator ("/" in the internal
    representation is replaced with "_").
    '''
    mappings = {"travelId": "travelId"}
    # composite primary key
    mappings["UniqueID"] = "travelId+user"
    # fields flattened out of the first finishedBy entry
    mappings["moment"] = "travel//finishedBy[0]//moment"
    mappings["latitude"] = "travel//finishedBy[0]//endPlace//latitude"
    mappings["longitude"] = "travel//finishedBy[0]//endPlace//longitude"
    mappings["user"] = "travel//finishedBy[0]//user"
    postTableToSwagger(use_case, {"name": table_name, "mappings": mappings})
def add_layers(use_case: str, table_name: str):
    '''
    Register the clustering layers for the travelFinishedBy table.

    @params:
        use_case   - Required : use-case the layers belong to
        table_name - Required : table the layers are defined on
    '''
    # both layers expose the same properties; only the clustering differs
    properties = [
        'UniqueID',
        'travelId',
        'moment',
        'latitude',
        'longitude',
        'user'
    ]
    layers = [
        {
            "use_case": use_case,
            "table": table_name,
            "name": "FinishedByAtLocation_Layer",
            "properties": properties,
            # fixed: 'latitude' was listed twice — cluster on both coordinates
            "cluster_properties": [
                'latitude',
                'longitude'
            ]
        },
        {
            "use_case": use_case,
            "table": table_name,
            "name": "FinishedByAtMoment_Layer",
            "properties": properties,
            "cluster_properties": [
                'moment'
            ]
        }
    ]
    postLayersToSwagger(use_case, layers)
def main(use_case: str):
    """Register the travelFinishedBy table and its layers for the use case."""
    print("TRAVEL_FinishedBy")
    table = "travelFinishedBy"
    add_table(use_case, table)
    add_layers(use_case, table)
from _add_use_case_scripts.requestPost import postLayersToSwagger, postTableToSwagger
def add_table(use_case: str, table_name: str):
    '''
    Register the travelStartedBy table's column mappings at the server.

    Nested document paths use "//" as the separator ("/" in the internal
    representation is replaced with "_").
    '''
    mappings = {"travelId": "travelId"}
    # composite primary key
    mappings["UniqueID"] = "travelId+user"
    # fields flattened out of the first startedBy entry
    mappings["moment"] = "travel//startedBy[0]//moment"
    mappings["latitude"] = "travel//startedBy[0]//startPlace//latitude"
    mappings["longitude"] = "travel//startedBy[0]//startPlace//longitude"
    mappings["user"] = "travel//startedBy[0]//user"
    postTableToSwagger(use_case, {"name": table_name, "mappings": mappings})
def add_layers(use_case: str, table_name: str):
    '''
    Register the clustering layers for the travelStartedBy table.

    @params:
        use_case   - Required : use-case the layers belong to
        table_name - Required : table the layers are defined on
    '''
    # both layers expose the same properties; only the clustering differs
    properties = [
        'UniqueID',
        'travelId',
        'moment',
        'latitude',
        'longitude',
        'user'
    ]
    layers = [
        {
            "use_case": use_case,
            "table": table_name,
            "name": "StartedByAtLocation_Layer",
            "properties": properties,
            # fixed: 'latitude' was listed twice — cluster on both coordinates
            "cluster_properties": [
                'latitude',
                'longitude'
            ]
        },
        {
            "use_case": use_case,
            "table": table_name,
            "name": "StartedByAtMoment_Layer",
            "properties": properties,
            "cluster_properties": [
                'moment'
            ]
        }
    ]
    postLayersToSwagger(use_case, layers)
def main(use_case: str):
    """Register the travelStartedBy table and its layers for the use case."""
    print("TRAVEL_STARTEDBY")
    table = "travelStartedBy"
    add_table(use_case, table)
    add_layers(use_case, table)
from _add_use_case_scripts.requestPost import postLayersToSwagger, postTableToSwagger
def add_table(use_case: str, table_name: str):
    '''
    Register the travelSuggestedEndPlaces table's column mappings at the server.

    Nested document paths use "//" as the separator ("/" in the internal
    representation is replaced with "_").
    '''
    mappings = {"travelId": "travelId"}
    # composite primary key
    mappings["UniqueID"] = "travelId+user"
    # fields flattened out of the first suggestedEndPlaces entry
    mappings["latitude"] = "travel//suggestedEndPlaces[0]//endPlace//latitude"
    mappings["longitude"] = "travel//suggestedEndPlaces[0]//endPlace//longitude"
    mappings["reward"] = "travel//suggestedEndPlaces[0]//reward"
    mappings["user"] = "travel//suggestedEndPlaces[0]//suggestedBy"
    postTableToSwagger(use_case, {"name": table_name, "mappings": mappings})
def add_layers(use_case: str, table_name: str):
    '''
    Register the clustering layers for the travelSuggestedEndPlaces table.

    @params:
        use_case   - Required : use-case the layers belong to
        table_name - Required : table the layers are defined on
    '''
    # both layers expose the same properties; only the clustering differs
    properties = [
        'UniqueID',
        'travelId',
        'reward',
        'latitude',
        'longitude',
        'user'
    ]
    layers = [
        {
            "use_case": use_case,
            "table": table_name,
            "name": "SuggestedEndPlacesAtLocation_Layer",
            "properties": properties,
            # fixed: 'latitude' was listed twice — cluster on both coordinates
            "cluster_properties": [
                'latitude',
                'longitude'
            ]
        },
        {
            "use_case": use_case,
            "table": table_name,
            "name": "SuggestedEndPlacesAtReward_Layer",
            "properties": properties,
            "cluster_properties": [
                'reward'
            ]
        }
    ]
    postLayersToSwagger(use_case, layers)
def main(use_case: str):
    """Register the travelSuggestedEndPlaces table and its layers for the use case."""
    print("TRAVEL_suggestedEndPlaces")
    table = "travelSuggestedEndPlaces"
    add_table(use_case, table)
    add_layers(use_case, table)
from _add_use_case_scripts.requestPost import postLayersToSwagger, postTableToSwagger
def add_table(use_case: str, table_name: str):
    '''
    Register the travelUsers table's column mappings at the server.

    Nested document paths use "//" as the separator ("/" in the internal
    representation is replaced with "_").
    '''
    columns = {"travelId": "travelId"}
    # composite primary key
    columns["UniqueID"] = "travelId+user"
    # NOTE(review): "userIdisDriver" looks like a fused path ("userId" +
    # "isDriver") — confirm the intended source field against the documents.
    columns["isDriver"] = "travel//users[0]//userIdisDriver"
    # fixed: source path was "ravel//users[0]//passengers" (missing leading 't')
    columns["passengersNr"] = "travel//users[0]//passengers"
    columns["user"] = "travel//users[0]//userId"
    table = {
        "name": table_name,
        "mappings": columns
    }
    postTableToSwagger(use_case, table)
def add_layers(use_case: str, table_name: str):
    '''Register the clustering layers for the travelUsers table.'''
    # every layer exposes the same properties; only the clustering differs
    shared_props = [
        'UniqueID',
        'travelId',
        'isDriver',
        'passengersNr',
        'user'
    ]

    def build(name, cluster_props):
        return {
            "use_case": use_case,
            "table": table_name,
            "name": name,
            "properties": list(shared_props),
            "cluster_properties": cluster_props
        }

    layers = [
        build("Driver_layer", ['isDriver']),
        build("Passangers_layer", ['passengersNr']),
    ]
    postLayersToSwagger(use_case, layers)
def main(use_case: str):
    """Register the travelUsers table and its layers for the use case."""
    print("TRAVEL_USERS")
    table = "travelUsers"
    add_table(use_case, table)
    add_layers(use_case, table)
from _add_use_case_scripts.requestPost import postLayersToSwagger, postTableToSwagger
def add_table(use_case: str, table_name: str):
    '''
    Register the user table's column mappings at the server.

    All columns map to themselves; "UniqueID" aliases the primary key "id".
    '''
    fields = [
        "id",  # pk
        "balance",
        "numberOfTripsDone",
        "role",
        "sumOfTripsScores"
    ]
    mappings = {name: name for name in fields}
    mappings["UniqueID"] = "id"
    postTableToSwagger(use_case, {"name": table_name, "mappings": mappings})
def add_layers(use_case: str, table_name: str):
    '''Register the clustering layers for the user table.'''
    # every layer exposes the same properties; only the clustering differs
    shared_props = [
        "UniqueID",
        "balance",
        "role",
        "numberOfTripsDone",
        "sumOfTripsScores"
    ]

    def build(name, cluster_props):
        return {
            "use_case": use_case,
            "table": table_name,
            "name": name,
            "properties": list(shared_props),
            "cluster_properties": cluster_props
        }

    layers = [
        build("User_layer", ["balance", "numberOfTripsDone", "sumOfTripsScores"]),
        build("UserBalance_layer", ["balance", "sumOfTripsScores"]),
        build("UserVeterancy_layer", ["numberOfTripsDone", "sumOfTripsScores"]),
    ]
    postLayersToSwagger(use_case, layers)
def main(use_case: str):
    """Register the user table and its clustering layers for the use case."""
    print("USER")
    table = "user"
    add_table(use_case, table)
    add_layers(use_case, table)
\ No newline at end of file
import sys
import os
from pathlib import Path
from typing import Dict, Any
import requests
modules_path = '../../../modules/'
if os.path.exists(modules_path):
sys.path.insert(1, modules_path)
import network_constants as nc
from security.token_manager import TokenManager
import tables.add_reddit as reddit
def add_use_case(use_case: str):
    '''Create the use case on the business-logic service via its REST API.'''
    token = TokenManager.getInstance().getToken()
    endpoint = "https://articonf1.itec.aau.at:30420/api/use-cases"
    res = requests.post(
        endpoint,
        verify=False,  # server uses a self-signed certificate
        proxies={"http": None, "https": None},
        headers={"Authorization": f"Bearer {token}"},
        json={"name": use_case},
    )
    print(endpoint + ": " + str(res.status_code))
if __name__ == "__main__":
    # Register the reddit use case, then create its table and layers.
    use_case = "reddit"
    # disable ssl warnings :)
    requests.packages.urllib3.disable_warnings()
    add_use_case(use_case)
    reddit.main(use_case)
\ No newline at end of file
import network_constants as nc
from security.token_manager import TokenManager
import requests
def add_table(use_case: str, table_name: str):
    '''
    Register the reddit table's column mappings at the server.

    @params:
        use_case   - Required : use-case the table belongs to
        table_name - Required : name of the table to create
    '''
    # fixed: a hard-coded `use_case = "reddit"` shadowed the parameter so a
    # caller-supplied use case was silently ignored; a dead `columns = {}`
    # assignment was removed as well.
    columns = [
        # "doctype",
        "id",
        "user_id",
        "title",
        "content",
        "permalink",
        "upvotes",
        "percentage_upvoted",
        "n_comments",
        "subreddit",
        "created_at"
    ]
    columns = { c : c for c in columns }
    # composite primary key
    columns["UniqueID"] = "user_id+subreddit+id"
    table = {
        "name": table_name,
        "mappings": columns
    }
    url = f"https://articonf1.itec.aau.at:30420/api/use-cases/{use_case}/tables"
    jwt = TokenManager.getInstance().getToken()
    response = requests.post(
        url,
        verify=False,  # server uses a self-signed certificate
        proxies = { "http":None, "https":None },
        headers = { "Authorization": f"Bearer {jwt}"},
        json = table
    )
    print(url+": "+str(response.status_code))
def add_layers(use_case: str, table_name: str):
    '''
    Register the clustering layers for the reddit table at the server.

    Every layer exposes the same post properties but clusters on a different
    subset (votes, upvote percentage, comment count, creation time).

    @params:
        use_case   - Required : use-case the layers belong to
        table_name - Required : table the layers are defined on
    '''
    layers = [
        # { #subreddit is string cannot cluster
        #     "use_case": use_case,
        #     "table": table_name,
        #     "name": "Subreddit_Layer",
        #     "properties": [
        #         "UniqueID",
        #         "subreddit",
        #         "user_id",
        #         "title",
        #         "content",
        #         "permalink",
        #         "upvotes",
        #         "percentage_upvoted",
        #         "n_comments"
        #     ],
        #     "cluster_properties": [
        #         "subreddit"
        #     ]
        # },
        {
            "use_case": use_case,
            "table": table_name,
            "name": "Upvotes_Layer", #TODO Probably do something like Total Votes? so we can get a popularity?
            "properties": [
                "UniqueID",
                "subreddit",
                "user_id",
                "title",
                "content",
                "permalink",
                "upvotes",
                "percentage_upvoted",
                "n_comments",
                "created_at"
            ],
            "cluster_properties": [
                "upvotes",
                "percentage_upvoted"
            ]
        },
        {
            "use_case": use_case,
            "table": table_name,
            "name": "Percentage_Layer",
            "properties": [
                "UniqueID",
                "subreddit",
                "user_id",
                "title",
                "content",
                "permalink",
                "upvotes",
                "percentage_upvoted",
                "n_comments",
                "created_at"
            ],
            "cluster_properties": [
                "percentage_upvoted"
            ]
        },
        {
            "use_case": use_case,
            "table": table_name,
            "name": "Engagement_Layer",
            "properties": [
                "UniqueID",
                "subreddit",
                "user_id",
                "title",
                "content",
                "permalink",
                "upvotes",
                "percentage_upvoted",
                "n_comments",
                "created_at"
            ],
            "cluster_properties": [
                "n_comments"
            ]
        },
        {
            "use_case": use_case,
            "table": table_name,
            "name": "Time_Layer",
            "properties": [
                "UniqueID",
                "subreddit",
                "user_id",
                "title",
                "content",
                "permalink",
                "upvotes",
                "percentage_upvoted",
                "n_comments",
                "created_at"
            ],
            "cluster_properties": [
                "created_at"
            ]
        }
    ]
    jwt = TokenManager.getInstance().getToken()
    # fixed: the (loop-invariant) endpoint URL was rebuilt on every iteration
    url = "https://articonf1.itec.aau.at:30420/api/layers"
    # the endpoint accepts one layer per request, so post them one by one
    for layer in layers:
        response = requests.post(
            url,
            verify=False,  # server uses a self-signed certificate
            proxies = { "http":None, "https":None },
            headers = { "Authorization": f"Bearer {jwt}"},
            json = layer
        )
        print(url+": "+str(response.status_code))
def main(use_case: str):
    """Register the reddit table and its clustering layers for the use case."""
    print("reddit")
    table = "reddit"
    add_table(use_case, table)
    add_layers(use_case, table)
......@@ -9,7 +9,7 @@ for modules_path in modules_paths:
if os.path.exists(modules_path):
sys.path.insert(1, modules_path)
from _add_use_case_scripts.vialog.tables import add_user, add_video
from _add_use_case_scripts.vialog.tables import add_user, add_video, add_change
import network_constants as nc
from security.token_manager import TokenManager
......@@ -38,4 +38,4 @@ if __name__ == "__main__":
add_use_case(use_case)
add_user.main(use_case)
add_video.main(use_case)
\ No newline at end of file
add_change.main(use_case)
\ No newline at end of file
import sys
import os
from pathlib import Path
from typing import Dict, Any
import requests
modules_paths = ['.', '../../../modules/']
for modules_path in modules_paths:
if os.path.exists(modules_path):
sys.path.insert(1, modules_path)
from _add_use_case_scripts.vialog.tables import add_user, add_video, add_change
import network_constants as nc
from security.token_manager import TokenManager
def add_use_case(use_case: str):
    '''Create the use case on the business-logic service via its REST API.'''
    token = TokenManager.getInstance().getToken()
    endpoint = "https://articonf1.itec.aau.at:30420/api/use-cases"
    res = requests.post(
        endpoint,
        verify=False,  # server uses a self-signed certificate
        proxies={"http": None, "https": None},
        headers={"Authorization": f"Bearer {token}"},
        json={"name": use_case},
    )
    # prints the full response body (not just the status code)
    print(endpoint + ": " + str(res.content))
if __name__ == "__main__":
    # Register the use case, then create its tables and layers.
    use_case = "vialog-new-enum"
    # disable ssl warnings :)
    requests.packages.urllib3.disable_warnings()
    add_use_case(use_case)
    # NOTE(review): add_user is imported at the top of this file but never
    # invoked here — confirm the user table is intentionally omitted.
    add_video.main(use_case)
    add_change.main(use_case)
\ No newline at end of file
from _add_use_case_scripts.requestPost import postLayersToSwagger, postTableToSwagger
def add_table(use_case: str, table_name: str):
    '''
    Register the change table's column mappings at the server.

    enum(...) marks source values the backend resolves as enumerations;
    "/" in the internal representation is replaced with "_".
    '''
    plain_fields = [
        "changeType",
        "changedValue",
        "previousValue",
        "newValue",
    ]
    mappings = {name: name for name in plain_fields}
    # composite primary key plus enum-typed overrides
    mappings["UniqueID"] = "videoId+changeId"
    mappings["changeType"] = "enum(changeType)"
    mappings["changedValue"] = "enum(changedValue)"
    postTableToSwagger(use_case, {"name": table_name, "mappings": mappings})
def add_layers(use_case: str, table_name: str):
    '''Register the clustering layers for the change table.'''
    # NOTE(review): unlike the other tables' layers these do not list
    # "UniqueID" in their properties — confirm the backend tolerates that.
    def build(name, props, cluster_props):
        return {
            "use_case": use_case,
            "table": table_name,
            "name": name,
            "properties": props,
            "cluster_properties": cluster_props
        }

    layers = [
        build("Changetype_Layer",
              ["changeType"],
              ["changeType"]),
        build("Changedvalue_Layer",
              ["changedValue", "previousValue", "newValue"],
              ["changedValue"]),
    ]
    postLayersToSwagger(use_case, layers)
def main(use_case: str):
    """Register the change table and its clustering layers for the use case."""
    # fixed: progress line said "user" (copy-paste from add_user) although
    # this module sets up the "change" table
    print("change")
    table_name = "change"
    add_table(use_case, table_name)
    add_layers(use_case, table_name)
\ No newline at end of file
......@@ -6,27 +6,32 @@ def add_table(use_case: str, table_name: str):
'''
columns = [
# "docType",
"videoId",
"Video_Token",
"replyTo",
"Created",
"Duration",
"videoResolution",
"Label",
"ThreadId",
"Position",
"ModifiedDate",
"Views",
"ModeratedBy",
"CommunityManagerNotes",
"Rewards",
"Video_State",
"Video_Type"
'videoId',
'objType',
'eventName',
'video_token',
'replyTo',
'created',
'duration',
'videoResolution',
'label',
'threadId',
'position',
'views',
'moderatedBy',
'moderationDate',
'communityManagerNotes',
'rewards',
'video_state',
'video_type'
]
columns = { c : c for c in columns }
columns["UniqueID"] = "videoId"
columns["moderatedBy"] = "enum(moderatedBy)"
columns["video_state"] = "enum(video_state)"
columns["video_type"] = "enum(video_type)"
columns["videoResolution"] = "enum(videoResolution)"
table = {
"name": table_name,
......@@ -43,14 +48,14 @@ def add_layers(use_case:str, table_name: str):
"name": "Manager_Layer",
"properties": [
"UniqueID",
"ModifiedDate",
"ModeratedBy",
"Video_State",
"Video_Type"
"moderationDate",
"moderatedBy",
"video_state",
"video_type"
],
"cluster_properties": [
"ModeratedBy",
"Video_State"
"moderatedBy",
"video_state"
]
},
{
......@@ -59,16 +64,16 @@ def add_layers(use_case:str, table_name: str):
"name": "Video_Popularity_Layer",
"properties": [
"UniqueID",
"Label",
"Created",
"Views",
"Rewards",
"Video_State",
"Video_Type"
"label",
"created",
"views",
"rewards",
"video_state",
"video_type"
],
"cluster_properties": [
"Views",
"Video_Type"
"views",
"video_type"
]
},
{
......@@ -77,15 +82,15 @@ def add_layers(use_case:str, table_name: str):
"name": "Video_Age_Layer",
"properties": [
"UniqueID",
"Label",
"Created",
"Views",
"Rewards",
"Video_State",
"Video_Type"
"label",
"created",
"views",
"rewards",
"video_state",
"video_type"
],
"cluster_properties": [
"Created"
"created"
]
},
{
......@@ -94,15 +99,15 @@ def add_layers(use_case:str, table_name: str):
"name": "Rewards_Layer",
"properties": [
"UniqueID",
"Label",
"Created",
"Views",
"Rewards",
"Video_State",
"Video_Type"
"label",
"created",
"views",
"rewards",
"video_state",
"video_type"
],
"cluster_properties": [
"Rewards"
"rewards"
]
},
{
......@@ -111,14 +116,14 @@ def add_layers(use_case:str, table_name: str):
"name": "Video_Lenght_Layer",
"properties": [
"UniqueID",
"Created",
"Views",
"Duration",
"Video_State",
"Video_Type"
"created",
"views",
"duration",
"video_state",
"video_type"
],
"cluster_properties": [
"Duration"
"duration"
]
},
{
......@@ -127,11 +132,11 @@ def add_layers(use_case:str, table_name: str):
"name": "Video_Resolution_Layer",
"properties": [
"UniqueID",
"Created",
"Views",
"created",
"views",
"videoResolution",
"Video_State",
"Video_Type"
"video_state",
"video_type"
],
"cluster_properties": [
"videoResolution"
......
......@@ -5,7 +5,7 @@ paths:
/use-cases:
post:
security:
- JwtRegular: []
- JwtAdmin: []
operationId: "routes.use_case.add"
tags:
- "Use-Cases"
......@@ -46,9 +46,17 @@ paths:
- "Use-Cases"
summary: "Delete all Use-Cases"
description: "Delete all Use-Cases"
parameters:
- in: query
name: confirmation
type: string
required: true
description: "'yes' to confirm deletion"
responses:
'200':
description: "Successful Request"
'403':
description: "Confirmation required"
#####
# TABLES
#####
......@@ -66,15 +74,23 @@ paths:
description: "Successful Request"
delete:
security:
- JwtRegular: []
- JwtAdmin: []
operationId: "routes.tables.delete_all"
tags:
- "Tables"
summary: "Delete all Tables"
description: "Delete all Tables"
parameters:
- in: query
name: confirmation
type: string
required: true
description: "'yes' to confirm deletion"
responses:
'200':
description: "Successful Request"
'403':
description: "Confirmation required"
/use-cases/{use_case}/tables:
get:
security:
......@@ -95,7 +111,7 @@ paths:
description: "Successful Request"
post:
security:
- JwtRegular: []
- JwtAdmin: []
operationId: "routes.tables.add_complete"
tags:
- "Tables"
......@@ -131,15 +147,22 @@ paths:
description: "Name of the Use-Case the Table belongs to"
required: true
type: "string"
- in: query
name: confirmation
type: string
required: true
description: "'yes' to confirm deletion"
responses:
'200':
description: "Successful Request"
'400':
description: "Table with the name already exists or missing fields in the request."
'403':
description: "Confirmation required"
/use-cases/{use_case}/tables/{name}/mapping:
put:
security:
- JwtRegular: []
- JwtAdmin: []
operationId: "routes.tables.put_mapping"
tags:
- "Tables"
......@@ -168,8 +191,6 @@ paths:
# END-TABLES
#####
#####
# LAYERS
#####
......@@ -193,12 +214,20 @@ paths:
- "Layers"
summary: "Delete all Layers from the DB"
description: "Delete all Layers from the DB"
parameters:
- in: query
name: confirmation
type: string
required: true
description: "'yes' to confirm deletion"
responses:
'200':
description: "Successful Request"
'403':
description: "confirmation required"
post:
security:
- JwtRegular: []
- JwtAdmin: []
operationId: "routes.layer.add_complete"
tags:
- "Layers"
......@@ -243,7 +272,7 @@ paths:
/use-cases/{use_case}/tables/{table}/layers/{name}/cluster-mapping:
put:
security:
- JwtRegular: []
- JwtAdmin: []
operationId: "routes.layer.add_cluster_mapping"
tags:
- "Layers"
......@@ -284,7 +313,7 @@ paths:
description: "Field in request is missing or attribute does not exist in the Layer"
delete:
security:
- JwtRegular: []
- JwtAdmin: []
operationId: "routes.layer.delete_cluster_mapping"
tags:
- "Layers"
......@@ -316,6 +345,11 @@ paths:
type: string
example: "end_time"
description: "Internal name of the attribute"
- in: query
name: confirmation
type: string
required: true
description: "'yes' to confirm deletion"
responses:
'200':
description: "Successful Request"
......@@ -323,6 +357,8 @@ paths:
description: "Layer does not exist"
'400':
description: "Field in request is missing or attribute does not exist in the Layer"
'403':
description: "Confirmation required"
/use-cases/{use_case}/layers:
get:
security:
......@@ -375,7 +411,7 @@ paths:
description: "Layer does not exist"
delete:
security:
- JwtRegular: []
- JwtAdmin: []
operationId: "routes.layer.delete_one"
tags:
- "Layers"
......@@ -397,15 +433,22 @@ paths:
description: "Name of the Layer"
required: true
type: "string"
- in: query
name: confirmation
type: string
required: true
description: "'yes' to confirm deletion"
responses:
'200':
description: "Successful Request"
'404':
description: "Layer does not exist"
'403':
description: "Confirmation required"
/use-cases/{use_case}/layers/{name}/mapping:
put:
security:
- JwtRegular: []
- JwtAdmin: []
operationId: "routes.layer.add_mapping"
tags:
- "Layers"
......@@ -458,6 +501,11 @@ paths:
required: true
schema:
$ref: '#/definitions/LayerMapping'
- in: query
name: confirmation
type: string
required: true
description: "'yes' to confirm deletion"
responses:
'200':
description: "Successful Request"
......@@ -465,6 +513,8 @@ paths:
description: "Layer does not exist"
'400':
description: "Field in request is missing"
'403':
description: "Confirmation required"
definitions:
LayerMapping:
......
......@@ -14,8 +14,10 @@ from security import swagger_util
from env_info import is_running_locally, get_resources_path
from flask import request
from flask import redirect
from flask_cors import CORS
app = connexion.App(__name__, specification_dir='configs/')
CORS(app.app)
from db.entities.layer_adapter import LayerAdapter
......
......@@ -8,8 +8,10 @@ click==7.1.2
clickclick==1.2.2
colorama==0.4.3
connexion==2.7.0
coverage==5.3.1
cryptography==3.1
Flask==1.1.2
Flask-Cors==3.0.10
idna==2.10
importlib-metadata==1.7.0
inflection==0.5.0
......
......@@ -16,10 +16,16 @@ def all():
return [layer.to_serializable_dict() for layer in layer_repository.all()]
def delete_all_layers():
def delete_all_layers(confirmation: str):
'''
delete all layers from the DB
@params:
confirmation - Required : "yes" to confirm deletion
'''
if confirmation != "yes":
return Response(status=403)
layer_repository.delete_all()
return Response(status=200)
......@@ -142,7 +148,7 @@ def one(use_case: str, table: str, name: str):
return Response(status=200, response=json.dumps(layer.to_serializable_dict()))
def delete_mapping(use_case: str, table: str, name: str):
def delete_mapping(use_case: str, table: str, name: str, confirmation: str):
'''
delete a mapping from the layer identified by the internal representation
......@@ -150,6 +156,7 @@ def delete_mapping(use_case: str, table: str, name: str):
use_case - Required : String-identifier for the Use-Case the Layer belongs to
table - Required : unique identifier of the Table the Layer belongs to
name - Required : unique identifier for the Layer
confirmation- Required : "yes" to confirm deletion
'''
use_case_repository.put(use_case)
......@@ -206,7 +213,7 @@ def add_mapping(name: str, table: str, use_case: str):
return Response(status=200)
def delete_one(use_case: str, table: str, name: str):
def delete_one(use_case: str, table: str, name: str, confirmation: str):
'''
delete a layer and all its mappings from the Db
......@@ -214,8 +221,11 @@ def delete_one(use_case: str, table: str, name: str):
use_case - Required : String-identifier for the Use-Case the Layer belongs to
table - Required : unique identifier of the Table the Layer belongs to
name - Required : unique identifier for the Layer
confirmation- Required : "yes" to confirm deletion
'''
if confirmation != "yes":
return Response(status=403)
layer = layer_repository.one(name, use_case, table)
if layer == None:
......
......@@ -51,10 +51,18 @@ def add_complete(use_case: str):
return Response(status=200)
def delete_all_for_use_case(use_case: str):
def delete_all_for_use_case(use_case: str, confirmation: str):
if confirmation != "yes":
return Response(status=403)
table_repository.delete_for_use_case(use_case)
return Response(status=200)
def delete_all():
def delete_all(confirmation: str):
if confirmation != "yes":
return Response(status=403)
table_repository.delete_all()
return Response(status=200)
\ No newline at end of file
......@@ -15,7 +15,11 @@ repository = Repository()
def all():
return [use_case.to_serializable_dict() for use_case in use_case_repository.all()]
def delete_all():
def delete_all(confirmation: str):
if confirmation != "yes":
return Response(status=403)
use_case_repository.delete_all()
repository.delete_all()
table_repository.delete_all()
......
import unittest
import sys
# Make sibling source directories importable regardless of the cwd the
# test runner was started from.
for path in ['../', './']:
    sys.path.insert(1, path)
#####################################
### Don't include for test report ###
#####################################
try:
    class TestCoverage(unittest.TestCase):
        # Import-smoke "tests": each method imports a slice of the service so
        # a coverage run records those modules as executed. Import errors are
        # printed rather than raised, so a single failing import does not
        # abort the run — note these tests therefore never fail on their own.
        def test_init_main(self):
            # Mirrors the import/bootstrap sequence of the service's main.py.
            try:
                # python -m unittest discover
                # add modules folder to interpreter path
                import sys
                import os
                from pathlib import Path
                from typing import Dict, Any
                modules_path = '../../../modules/'
                if os.path.exists(modules_path):
                    sys.path.insert(1, modules_path)
                # load swagger config
                import connexion
                from security import swagger_util
                from env_info import is_running_locally, get_resources_path
                from flask import request
                from flask import redirect
                app = connexion.App(__name__, specification_dir='configs/')
                from db.entities.layer_adapter import LayerAdapter
            except Exception as e:
                print ("Exception found:")
                print (e)
            try:
                import main
            except Exception as e:
                print ("Exception found:")
                print (e)
        def test_db_main(self):
            # Imports the database layer: entities first, then repositories.
            try:
                import network_constants as netconst
                from database.MongoRepositoryBase import MongoRepositoryBase
                from db.entities import layer_adapter
                from db.entities import table
                from db.entities import use_case
                import pymongo
                import json
                from typing import List, Dict
            except Exception as e:
                print ("Exception found:")
                print (e)
            try:
                from db import repository
                from db import table_repository
                from db import use_case_repository
            except Exception as e:
                print ("Exception found:")
                print (e)
        def test_routes(self):
            # Imports each REST route module independently so one failure
            # does not hide the others.
            try:
                from routes import layer
            except Exception as e:
                print ("Exception found:")
                print (e)
            try:
                from routes import tables
            except Exception as e:
                print ("Exception found:")
                print (e)
            try:
                from routes import use_case
            except Exception as e:
                print ("Exception found:")
                print (e)
        def test_services(self):
            # Imports the service layer.
            try:
                from services import layer_adapter_service
            except Exception as e:
                print ("Exception found:")
                print (e)
        def test_use_case_scripts(self):
            # Imports the per-use-case schema scripts, one use case per try
            # block so failures are attributable.
            try:
                import network_constants as nc
                from security.token_manager import TokenManager
                import requests
                from typing import List
                from _add_use_case_scripts import requestPost
            except Exception as e:
                print ("Exception found:")
                print (e)
            #######
            #from _add_use_case_scripts.bank-app import add_bank_app_schema ##eror not importing? invalid folder name?
            #from _add_use_case_scripts.bank-app.tables import add_bank_app_schema
            try:
                from _add_use_case_scripts.car_sharing import add_carsharing_schema
                from _add_use_case_scripts.car_sharing.tables import add_car
                from _add_use_case_scripts.car_sharing.tables import add_hash
                from _add_use_case_scripts.car_sharing.tables import add_offer
                from _add_use_case_scripts.car_sharing.tables import add_publication
                from _add_use_case_scripts.car_sharing.tables import add_travel
                from _add_use_case_scripts.car_sharing.tables import add_user
            except Exception as e:
                print ("Exception found:")
                print (e)
            try:
                from _add_use_case_scripts.crowd_journalism import add_crowdjournalism_schema
                from _add_use_case_scripts.crowd_journalism.tables import add_classification
                from _add_use_case_scripts.crowd_journalism.tables import add_event
                from _add_use_case_scripts.crowd_journalism.tables import add_purchase
                from _add_use_case_scripts.crowd_journalism.tables import add_tag
                from _add_use_case_scripts.crowd_journalism.tables import add_video
            except Exception as e:
                print ("Exception found:")
                print (e)
            try:
                from _add_use_case_scripts.debug import add_debug_schema
                from _add_use_case_scripts.debug.tables import add_pizza_table
            except Exception as e:
                print ("Exception found:")
                print (e)
            #from _add_use_case_scripts.smart-energy import add_smart_energy_schema
            #from _add_use_case_scripts.smart-energy.tables import add_smart_energy
            try:
                from _add_use_case_scripts.vialog import add_vialog_schema
                from _add_use_case_scripts.vialog.tables import add_user
                from _add_use_case_scripts.vialog.tables import add_video
            except Exception as e:
                print ("Exception found:")
                print (e)
    if __name__ == '__main__':
        unittest.main()
# Outer guard: even a failure while *defining* the test class is only
# printed, so the coverage run continues.
except Exception as e:
    print ("Exception found:")
    print (e)
\ No newline at end of file
......@@ -4,7 +4,7 @@ from db.entities.layer_adapter import LayerAdapter
class Test_Layer_Adapter(unittest.TestCase):
def test_valid_adapter(self):
def test_LayerAdapter_newLayerAdapterObj_validInstantiation(self):
adapter1 = LayerAdapter("layer1", "use_case", "table", ["a", "c"], ["a"])
print(adapter1.to_serializable_dict)
......
# Container image for the federated-learning microservice.
FROM python:3
LABEL maintainer="Alexander Lercher"
# Proxy settings for building inside the university network (disabled).
#ENV http_proxy http://proxy.uni-klu.ac.at:3128/
#ENV https_proxy http://proxy.uni-klu.ac.at:3128/
RUN apt-get update
# Port the service is exposed on.
EXPOSE 5000
WORKDIR /app
# Install Python dependencies first so this layer is cached across code changes.
COPY src/participation-hub/federated-learning-microservice/app/requirements.txt /app/
RUN pip install -r requirements.txt
# Shared modules are merged into the same directory as the app sources.
COPY src/modules/ /app/
COPY src/participation-hub/federated-learning-microservice/app/ /app/
RUN chmod a+x main.py
CMD ["python", "./main.py"]
\ No newline at end of file
# Federated Learning Microservice
The Federated Learning microservice serves as an interface to the Federated Training microservice, which handles all the federated processing in the back end.
## Technologies
- Python 3.x
- Docker
- Kubernetes
\ No newline at end of file
swagger: '2.0'
paths:
#####
# Owners
#####
/Owners/use_case/{use_case}/last_train:
get:
security:
- JwtRegular: []
operationId: "routes.owners.last"
tags:
- "Owners"
summary: "Get last train session data"
description: "Get last train session data"
parameters:
- name: "use_case"
in: "path"
description: "Name of the Use-Case to get the last training session from (must already exist)"
required: true
type: "string"
responses:
'200':
description: "Successful Request"
'404':
description: "Use case train session data does not exist"
/Owners/use_cases/{use_case}/upload_and_train:
post:
security:
- JwtAdmin: []
operationId: "routes.owners.upload_and_train"
tags:
- "Owners"
summary: "Upload the files required for the federated training"
description: "Upload the files required for the federated training"
consumes:
- multipart/form-data
parameters:
- name: "use_case"
in: "path"
description: "Name of the Use-Case to upload to (must already exist)"
required: true
type: "string"
- name: "global_hyperparameters"
in: "formData"
description: "File containing the global hyperparameters"
required: true
type: "file"
- name: "preprocessing"
in: "formData"
description: "File containing the preprocessing"
required: true
type: "file"
- name: "model"
in: "formData"
description: "File containing the keras model"
required: true
type: "file"
- name: "dataset_file1"
in: "formData"
        description: "File1 of the dataset. Functionality is use-case dependent. (i.e. True Data)"
required: true
type: "file"
- name: "dataset_file2"
in: "formData"
        description: "File2 of the dataset. Functionality is use-case dependent. (i.e. False Data)"
required: true
type: "file"
#- in: body
# name: "Object"
# required: true
# schema:
# $ref: '#/definitions/LayerMapping'
responses:
'200':
description: "Successful Request"
'404':
description: "Use case does not exist"
'400':
description: "Field in request is missing"
#####
# Developers
#####
/Developers/use_case/{use_case}/last_train:
get:
security:
- JwtRegular: []
operationId: "routes.developers.last"
tags:
- "Developers"
summary: "Get last train session data"
description: "Get last train session data"
parameters:
- name: "use_case"
in: "path"
description: "Name of the Use-Case to get the last training session from (must already exist)"
required: true
type: "string"
responses:
'200':
description: "Successful Request"
'404':
description: "Use case train session data does not exist"
/Developers/use_cases/{use_case}/developer_id/{developer_id}/upload_and_train:
post:
security:
- JwtAdmin: []
operationId: "routes.developers.upload_and_train"
tags:
- "Developers"
summary: "Upload the files required and starts the federated training"
description: "Upload the files required and starts the federated training"
consumes:
- multipart/form-data
parameters:
- name: "use_case"
in: "path"
description: "Name of the Use-Case to upload to (must already exist)"
required: true
type: "string"
- name: "developer_id"
in: "path"
description: "Id used to uniquely identify the developer who starts the training"
required: true
type: "integer"
- name: "dataset_file1"
in: "formData"
        description: "File1 of the dataset. Functionality is use-case dependent. (i.e. True Data)"
required: true
type: "file"
- name: "dataset_file2"
in: "formData"
        description: "File2 of the dataset. Functionality is use-case dependent. (i.e. False Data)"
required: true
type: "file"
#- in: body
# name: "Object"
# required: true
# schema:
# $ref: '#/definitions/LayerMapping'
responses:
'200':
description: "Successful Request"
'404':
description: "Use case does not exist"
'400':
description: "Field in request is missing"
#####
# Users
#####
/Users/use_case/{use_case}/data_entry/{data_entry}/check_article:
post:
security:
- JwtAdmin: []
operationId: "routes.users.check_article"
tags:
- "Users"
summary: "Use the trained model to evaluate an input"
description: "Use the trained model to evaluate an input"
parameters:
- name: "use_case"
in: "path"
description: "Name of the Use-Case to upload to (must already exist)"
required: true
type: "string"
- in: "path"
name: "data_entry"
required: true
type: "string"
description: "Data to be verified/parsed by the trained model. I.e: ''Santa Claus is actually real. A new study revealed...'' etc. "
responses:
'200':
description: "Successful Request"
'404':
description: "Use case does not exist"
'400':
description: "Field in request is missing"
# definitions:
# Data_entry:
# type: "object"
# required:
# - internal
# properties:
# internal:
# type: string
# example: "todo, create the preprocessing part"
\ No newline at end of file
swagger: "2.0"
info:
title: Federated Learning microservice
description: This is the documentation for the federated learning microservice.
version: "1.0.0"
consumes:
- "application/json"
produces:
- "application/json"
basePath: "/api"
# Import security definitions from a separate file
securityDefinitions:
$ref: '../security/security.yml#securityDefinitions'
paths:
$ref: 'routes.yml#paths'
\ No newline at end of file
swagger: "2.0"
info:
title: Business Logic microservice
description: This is the documentation for the business logic microservice.
version: "1.0.0"
consumes:
- "application/json"
produces:
- "application/json"
basePath: "/api"
# Import security definitions from a separate file
securityDefinitions:
$ref: '../../../../modules/security/security_local.yml#securityDefinitions'
paths:
$ref: 'routes.yml#paths'
\ No newline at end of file
from flask import request
def echo():
    """Echo endpoint: respond with the request's parsed JSON body, unmodified."""
    payload = request.json
    return payload
\ No newline at end of file
server_weights
<float32[784,10],float32[10]>@SERVER
mean_client_wieghts
<float32[784,10],float32[10]>@SERVER
## Starting...
## Preprocessing the federated_train_data
## Declaring the model
## Declaring the federated algorithm
tf_dataset_type
<x=float32[?,784],y=int32[?,1]>*
model_weights_type
<float32[784,10],float32[10]>
server_state type
<class 'list'>
<class 'numpy.ndarray'>
FINISHEEED
server_state[1]
[ 5.4024562e-04 -1.6237081e-03 6.2275940e-04 1.4378619e-05
-3.4344319e-04 4.4040685e-04 -6.7906491e-05 -3.0773325e-04
1.3574951e-04 5.8925571e-04]
server2_state[1]
[ 5.8093132e-04 -2.8670396e-05 1.1061553e-04 -1.5197636e-04
-4.6668845e-04 2.7149473e-04 -1.8408171e-04 5.8942172e-05
-3.8304061e-04 1.9247324e-04]
merged_state[1]
[ 5.6058844e-04 -8.2618924e-04 3.6668745e-04 -6.8798872e-05
-4.0506583e-04 3.5595079e-04 -1.2599411e-04 -1.2439553e-04
-1.2364556e-04 3.9086447e-04]
## Evaluation of the model
server_weights
<float32[784,10],float32[10]>@SERVER
mean_client_wieghts
<float32[784,10],float32[10]>@SERVER
## Starting...
## Preprocessing the federated_train_data
## Declaring the model
## Declaring the federated algorithm
tf_dataset_type
<x=float32[?,784],y=int32[?,1]>*
model_weights_type
<float32[784,10],float32[10]>
server_state type
<class 'list'>
<class 'numpy.ndarray'>
FINISHEEED
server_state[1]
[ 5.40245732e-04 -1.62370806e-03 6.22759399e-04 1.43785965e-05
-3.43443331e-04 4.40406700e-04 -6.79065852e-05 -3.07733397e-04
1.35749448e-04 5.89255709e-04]
server2_state[1]
[ 5.8093149e-04 -2.8670451e-05 1.1061558e-04 -1.5197645e-04
-4.6668845e-04 2.7149462e-04 -1.8408173e-04 5.8942172e-05
-3.8304055e-04 1.9247322e-04]
merged_state[1]
[ 5.6058861e-04 -8.2618924e-04 3.6668748e-04 -6.8798923e-05
-4.0506589e-04 3.5595067e-04 -1.2599415e-04 -1.2439561e-04
-1.2364556e-04 3.9086447e-04]
## Evaluation of the model
server_weights
<float32[784,10],float32[10]>@SERVER
mean_client_wieghts
<float32[784,10],float32[10]>@SERVER
server_weights
<float32[784,10],float32[10]>@SERVER
mean_client_wieghts
<float32[784,10],float32[10]>@SERVER
## Starting...
## Preprocessing the federated_train_data
## Declaring the model
## Declaring the federated algorithm
tf_dataset_type
<x=float32[?,784],y=int32[?,1]>*
model_weights_type
<float32[784,10],float32[10]>
server_state type
<class 'list'>
<class 'numpy.ndarray'>
FINISHEEED
server_state[1]
[ 5.4024585e-04 -1.6237078e-03 6.2275951e-04 1.4378643e-05
-3.4344324e-04 4.4040682e-04 -6.7906469e-05 -3.0773322e-04