Commit f96b335c authored by Alfonso Orta

Merge branch 'develop' into 'staging'

Develop

See merge request !25
parents fae52f42 b87fc012
@@ -17,3 +17,5 @@ src/modules/security/regular_user_credentials.json
src/modules/security/default_users.json
resources/
reports/
@@ -2,6 +2,8 @@ import os
import sys
import importlib.util
import pathlib
import shutil
import re
'''
This script searches for all 'tests/' directories and executes all tests
@@ -11,30 +13,98 @@ Use command line argument '-w' to run on windows.
'''
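# A hedged usage sketch (the script filename below is an assumption, not taken from the diff):
#   python3.7 run_all_tests.py                -> runs every discovered test suite with the default 'python3.7'
#   python3.7 run_all_tests.py -py python3.8  -> '-py <command>' overrides the python command used for the tests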
PY = sys.argv[2] if (len(sys.argv) > 2 and sys.argv[1] == '-py') else 'python3.7' # use '-py <command>' to supply your own python command
COVERAGE = "coverage run --append --omit=*/site-packages*"
ROOT = pathlib.Path(__file__).parent.parent.absolute()
REPORTS = ROOT / 'reports'
TESTS_FOLDER_NAME = os.path.normpath("/tests")
print("\nSearching for tests at the path: "+ str(ROOT))
count = 0
resultCodeList = []
coverage_paths_set = set()
for (dirname, dirs, files) in os.walk(ROOT):
#I assume all the tests are placed in a folder named "tests"
if (TESTS_FOLDER_NAME in str(dirname)) \
and 'src' in str(dirname) \
and not(f"{TESTS_FOLDER_NAME}{os.path.normpath('/')}" in str(dirname)) \
and not("venv" in str(dirname)):
and not("venv" in str(dirname)) \
and not("Lib" in str(dirname)):
try:
print(f"Executing tests in {dirname}")
os.chdir(os.path.normpath(dirname))
# TODO do this during docker image setup
exit_val = os.system(f"{PY} -m pip install -r ../requirements.txt") # install pip dependencies
exit_val = os.system(f"{PY} -m unittest discover") # execute the tests
#resultCodeList.append(exit_val)
#exit_val = os.system(f"{PY} -m unittest discover") # execute the tests
exit_val = os.system(f"coverage run --append --omit=*/site-packages* -m unittest discover") #TEST CODE COVERAGE
coverage_paths_set.add(os.path.normpath(dirname))
resultCodeList.append(exit_val) # one exit value per folder, i.e. if 3 tests in a folder crash there is still just one exit value
except Exception as e:
print(e)
continue
try:
cur_dir = pathlib.Path(os.path.normpath(dirname)).parent.absolute()
filename_regular_expression = re.compile('(test_.*)|(TEST_.*)')
for filename in os.listdir(cur_dir):
if filename_regular_expression.match(filename):
# gets here only if there is a test file in the app folder which matches the regular expression
#cur_dir = os.path(dirname).parent()
os.chdir(cur_dir)
print(f"Executing coverage test in {cur_dir}")
exit_val = os.system(f"coverage run --append --omit=*/site-packages* -m unittest discover")
coverage_paths_set.add(os.path.normpath(cur_dir))
except Exception as e:
print(e)
continue
# Change directory to REPORTS in order to combine the coverage data
try:
if not os.path.exists(REPORTS):
os.makedirs(REPORTS)
except:
pass
os.chdir(REPORTS)
target = os.path.join(str(REPORTS), '.coverage')
try:
os.remove(target) #Try to Remove old coverage file, if exists
except Exception as e:
pass
print("Combinging coverages")
counter = 0
for path in coverage_paths_set:
try:
original = os.path.join(path, '.coverage')
target = os.path.join(str(REPORTS), f'.coverage.{counter}')
counter += 1
shutil.copyfile(original, target) # copy the newly generated coverage data file into reports/
os.remove(original)
except Exception as e:
print(e)
continue
print("Generating Combined report")
os.system("coverage combine")
os.system("coverage xml")
os.system("coverage html") #if you want to generate the html as well
firstError = -1
i = 0
@@ -48,4 +118,5 @@ while i < len(resultCodeList):
if(firstError<0): #no errors found
sys.exit(0)
else:
sys.exit(1) #return code>0
\ No newline at end of file
sys.exit(1) #return code>0
import unittest
import sys
for path in ['../', './']:
sys.path.insert(1, path)
class TestCoverage(unittest.TestCase):
def test_init_main(self):
# python -m unittest discover
from db.entities import Cluster
from datetime import date, datetime
import json
# add modules folder to interpreter path
import sys
import os
modules_path = '../../../modules/'
if os.path.exists(modules_path):
sys.path.insert(1, modules_path)
### init logging ###
import logging
LOG_FORMAT = ('%(levelname) -5s %(asctime)s %(name)s:%(funcName) -35s %(lineno) -5d: %(message)s')
logging.basicConfig(level=logging.INFO, format=LOG_FORMAT)
LOGGER = logging.getLogger(__name__)
#############################
import connexion
from security import swagger_util
from pathlib import Path
from env_info import is_running_locally, get_resources_path
from flask import request
from flask import redirect
import main #error when importing main, ModuleNotFoundError: No module named 'security'
#exec(open('main.py').read())
def test_init_run_clustering(self):
import sys
import os
modules_path = '../../../modules/'
if os.path.exists(modules_path):
sys.path.insert(1, modules_path)
import json
from db.entities import Layer, Cluster
from typing import List, Dict, Tuple, Any
from db.repository import Repository
from processing.clustering import Clusterer, ClusterResult
import run_clustering
def test_init_run_node(self):
import sys
import os
modules_path = '../../../modules/'
if os.path.exists(modules_path):
sys.path.insert(1, modules_path)
import json
import urllib3
urllib3.disable_warnings(urllib3.exceptions.InsecureRequestWarning)
import processing.fetching.fetching as f
import run_node_fetching
def test_init_run_similarity(self):
import processing.similarityFiles.similarityMain as SimilarityCalc
from db.repository import Repository
import run_similarity_calc
def test_init_run_time(self):
import sys
import os
modules_path = '../../../modules/'
if os.path.exists(modules_path):
sys.path.insert(1, modules_path)
import json
from datetime import datetime, date
from db.repository import Repository
from db.entities import ClusterSet, Cluster, Layer, TimeSlice
from typing import Tuple, Dict, Any, List
import run_time_slicing
if __name__ == '__main__':
unittest.main()
@@ -9,12 +9,14 @@ for modules_path in modules_paths:
sys.path.insert(1, modules_path)
from messaging.MessageHandler import MessageHandler
from db.repository import Repository
# file to read the data from
CSV_FILE = r'Energy_Dataset.csv'
handler = MessageHandler()
CSV_FILE = r'dummy_upload\smart_energy\Energy_Dataset.csv'
handler = MessageHandler(Repository())
processed_transactions = []
def upload_transaction(transaction):
'''{"type": "new-trace",
"content": {"use_case": "smart-energy", "table": "smart-energy", "id": "dd2c5146c919b046d77a32a5cf553d5133163562f7b7e1298c878c575d516025",
@@ -28,7 +30,31 @@ def upload_transaction(transaction):
'id': uid,
'properties': transaction,
}
handler.handle_new_trace(t)
# handler.handle_new_trace(t)
processed_transactions.append(t)
def store_transactions_for_mirsat():
'''
Stores the processed transactions as if they had been returned
by the Trace Retrieval microservice once the message queue bug is fixed.
'''
flattened_transactions = []
for transaction in processed_transactions:
transaction = transaction['properties']
transaction['use_case'] = transaction['ApplicationType']
del transaction['ApplicationType']
transaction['table'] = transaction['docType']
del transaction['docType']
flattened_transactions.append(transaction)
import json
with open('flattened_smart_energy_data.json', 'w') as file:
file.write(json.dumps(flattened_transactions))
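# An illustrative before/after for the flattening above (values are made up; only the key renames
# 'ApplicationType' -> 'use_case' and 'docType' -> 'table' come from the code):
#   before: {'id': '<uid>', 'properties': {'ApplicationType': 'smart-energy', 'docType': 'smart-energy', 'Timestamp': 1}}
#   after:  {'use_case': 'smart-energy', 'table': 'smart-energy', 'Timestamp': 1}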
if __name__ == '__main__':
@@ -56,3 +82,4 @@ if __name__ == '__main__':
upload_transaction(transaction)
store_transactions_for_mirsat()
\ No newline at end of file
import unittest
import sys
for path in ['../', './']:
sys.path.insert(1, path)
class TestCoverage(unittest.TestCase):
def test_init_main(self):
# add modules folder to interpreter path
import sys
import os
import prance
from pathlib import Path
modules_path = '../../../modules/'
if os.path.exists(modules_path):
sys.path.insert(1, modules_path)
# init logging to file
import logging
LOG_FORMAT = ('%(levelname) -5s %(asctime)s %(name)s:%(funcName) -35s %(lineno) -5d: %(message)s')
logging.basicConfig(level=logging.INFO, format=LOG_FORMAT)
LOGGER = logging.getLogger(__name__)
#################################
import connexion
from security import swagger_util
from env_info import is_running_locally, get_resources_path
from messaging.ReconnectingMessageManager import ReconnectingMessageManager
from messaging.MessageHandler import MessageHandler
from flask import request
from flask import redirect
# init message handler
from db.repository import Repository
import main
def test_routes(self):
from routes import debug
from routes import layers
from routes import nodes
def test_messaging(self):
import network_constants as netconst
from security.token_manager import TokenManager
from db.entities import Layer
import json
import requests
from typing import Dict, List
from threading import Thread
import logging
from messaging import MessageHandler
def test_db(self):
import network_constants as netconst
from database.MongoRepositoryBase import MongoRepositoryBase
from db.entities import Layer
import pymongo
import json
from typing import List, Dict
# init logging to file
import logging
from db import repository
from db.entities import layer
if __name__ == '__main__':
unittest.main()
import pika
from pika import spec
from threading import Thread
import network_constants as netconst
@@ -47,10 +48,18 @@ class MessageManager:
if self._error_callback != None:
self._error_callback("Connection closed")
def _message_confirmed_callback(self, frame):
if isinstance(frame.method, spec.Basic.Ack):
LOGGER.debug("message acknowledged")
return
else:
LOGGER.warning("Message was rejected by broker!")
def _channel_created_callback(self, channel: pika.channel.Channel):
# Assign both channels
if self._send_channel == None:
self._send_channel = channel
self._send_channel.confirm_delivery(ack_nack_callback=self._message_confirmed_callback)
else:
self._receive_channel = channel
LOGGER.info("RabbitMQ connection established")
@@ -91,8 +100,11 @@ class MessageManager:
def send_message(self, exchange_name, routing_key, message):
'''Sends a message to the exchange'''
self._send_channel.basic_publish(exchange_name, routing_key, message)
try:
self._send_channel.basic_publish(exchange_name, routing_key, message, mandatory=True)
except pika.exceptions.UnroutableError:
raise # re-raise so the caller (ReconnectingMessageManager) can retry the publish
def disconnect(self):
'''Stops listening for messages and closes the connection'''
try:
@@ -100,4 +112,4 @@ class MessageManager:
self._connection.close()
LOGGER.info("RabbitMQ connection closed")
except pika.exceptions.ConnectionWrongStateError:
LOGGER.warning("RabbitMQ connection already closed")
LOGGER.warning("RabbitMQ connection already closed")
\ No newline at end of file
@@ -3,6 +3,8 @@ import time
from messaging.MessageManager import MessageManager
import logging
import json
import pika
LOGGER = logging.getLogger(__name__)
class ReconnectingMessageManager:
@@ -91,5 +93,13 @@ class ReconnectingMessageManager:
'''Sends a message to the exchange'''
try:
self._message_manager.send_message(exchange_name, routing_key, message)
except pika.exceptions.UnroutableError:
message = json.loads(message)
if message["retries"] < 10:
message["retries"] += 1
message = json.dumps(message)
self.send_message(exchange_name, routing_key, message)
else:
LOGGER.error("Message went through too many send retries and was not delivered")
except:
LOGGER.error("Error while sending message")
import unittest
import sys
for path in ['../', './']:
sys.path.insert(1, path)
class TestCoverage(unittest.TestCase):
def test_init_main(self):
# python -m unittest discover
# add modules folder to interpreter path
import sys
import os
from pathlib import Path
from typing import Dict, Any
modules_path = '../../../modules/'
if os.path.exists(modules_path):
sys.path.insert(1, modules_path)
# load swagger config
import connexion
from security import swagger_util
from env_info import is_running_locally, get_resources_path
from flask import request
from flask import redirect
app = connexion.App(__name__, specification_dir='configs/')
from db.entities.layer_adapter import LayerAdapter
import main
def test_db_main(self):
import network_constants as netconst
from database.MongoRepositoryBase import MongoRepositoryBase
from db.entities import layer_adapter
from db.entities import table
from db.entities import use_case
import pymongo
import json
from typing import List, Dict
from db import repository
from db import table_repository
from db import use_case_repository
def test_routes(self):
from routes import layer
from routes import tables
from routes import use_case
def test_services(self):
from services import layer_adapter_service
def test_use_case_scripts(self):
import network_constants as nc
from security.token_manager import TokenManager
import requests
from typing import List
from _add_use_case_scripts import requestPost
#######
#from _add_use_case_scripts.bank-app import add_bank_app_schema ## error: not importing? invalid folder name? (a hyphen is not valid in a Python module name)
#from _add_use_case_scripts.bank-app.tables import add_bank_app_schema
from _add_use_case_scripts.car_sharing import add_carsharing_schema
from _add_use_case_scripts.car_sharing.tables import add_car
from _add_use_case_scripts.car_sharing.tables import add_hash
from _add_use_case_scripts.car_sharing.tables import add_offer
from _add_use_case_scripts.car_sharing.tables import add_publication
from _add_use_case_scripts.car_sharing.tables import add_travel
from _add_use_case_scripts.car_sharing.tables import add_user
from _add_use_case_scripts.crowd_journalism import add_crowdjournalism_schema
from _add_use_case_scripts.crowd_journalism.tables import add_classification
from _add_use_case_scripts.crowd_journalism.tables import add_event
from _add_use_case_scripts.crowd_journalism.tables import add_purchase
from _add_use_case_scripts.crowd_journalism.tables import add_tag
from _add_use_case_scripts.crowd_journalism.tables import add_video
from _add_use_case_scripts.debug import add_debug_schema
from _add_use_case_scripts.debug.tables import add_pizza_table
#from _add_use_case_scripts.smart-energy import add_smart_energy_schema
#from _add_use_case_scripts.smart-energy.tables import add_smart_energy
from _add_use_case_scripts.vialog import add_vialog_schema
from _add_use_case_scripts.vialog.tables import add_user
from _add_use_case_scripts.vialog.tables import add_video
if __name__ == '__main__':
unittest.main()
@@ -9,7 +9,7 @@ def receive():
body = request.json
if isBlockchainTraceValid(body):
message = {'type': 'blockchain-transaction', 'content': body}
message = {'type': 'blockchain-transaction', 'retries': 0, 'content': body}
message_sender.send_message('inhub', 'trace-retrieval', json.dumps(message))
return Response(status=201)
@@ -25,11 +25,17 @@ class Repository(MongoRepositoryBase):
collection.delete_many({})
def add_transaction(self, transaction: Transaction):
'''
Adds a transaction to the MongoDB repository.
@throws
KeyError - Duplicate transaction ID
'''
reference = self.get_transaction_with_id(transaction.id())
if reference == None:
super().insert_entry(self._transaction_collection, transaction.to_serializable_dict())
else:
raise ValueError(f"ID {transaction['UniqueID']} already exists")
raise KeyError(f"ID {transaction['UniqueID']} already exists")
def all_transactions_for_use_case(self, use_case: str) -> List[Transaction]:
result = super().get_entries(self._transaction_collection, projection={'_id': False}, selection={"use_case": use_case})
@@ -140,7 +140,9 @@ class MessageHandler:
# check if there is a doctype in the message
if "docType" not in transaction_message.keys():
LOGGER.error("Transaction has no docType, ignoring it.")
LOGGER.error("Transaction has no docType, storing it under docType 'unknown'.")
transaction_message["docType"] = "unknown"
self._mongo_repo.add_failed_transaction(transaction_message)
return
use_case = transaction_message["ApplicationType"]
@@ -149,7 +151,7 @@ class MessageHandler:
try:
tables = self._rest_fetcher.fetch_schema_information(use_case)
except ValueError as e:
print(f"{e}")
LOGGER.error(f"{e}\nStoring it as a failed transaction.")
self._mongo_repo.add_failed_transaction(transaction_message)
return
@@ -162,7 +164,8 @@ class MessageHandler:
# abort if table does not exist.
if target_table == None:
LOGGER.error(f"There is no table '{docType}', ignoring the message.")
LOGGER.error(f"There is no table '{docType}', storing it as a failed transaction.")
self._mongo_repo.add_failed_transaction(transaction_message)
return
mappings = table["mappings"]
@@ -189,8 +192,11 @@ class MessageHandler:
try:
self._mongo_repo.add_transaction(transaction)
except ValueError as e:
LOGGER.error(f"{e}, ignoring node")
except KeyError as e:
LOGGER.error(f"{e}")
self._mongo_repo.add_failed_transaction(transaction_message)
return
msg = {
"type": "new-trace",
@@ -18,7 +18,7 @@ class RestFetcher:
)
if response.status_code != 200:
raise ValueError("Error while retrieving schema information")
raise ValueError(f"Error while retrieving schema information.\tStatus-code:{response.status_code}")
tables = json.loads(response.text)
return tables