Commit e7b081ad authored by Alfonso Orta

Merge branch 'develop' into 'staging'

Develop

See merge request !38
parents 108a70c8 5c630a95
@@ -24,6 +24,7 @@ This token is used for authentication as _regular user_ on all microservices cur
 ```
 {
     "ApplicationType": "use-case identifier as string",
+    "docType": "use-case-table identifier as string",
     "key": "value",
     ...
 }
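Every trace pushed to the platform therefore has to carry both identifiers. As a minimal sketch (not part of this diff), assuming the REST gateway endpoint and bearer-token handling used by the upload scripts later in this merge request, such a transaction could be posted like this; all concrete values are placeholders:

```python
import requests

JWT_TOKEN = "<regular-user token>"  # placeholder; obtain a real token from the REST gateway
GATEWAY_URL = "https://articonf1.itec.aau.at:30401/api/trace"

transaction = {
    "ApplicationType": "vialog-new-enum",  # use-case identifier (example value)
    "docType": "change",                   # use-case-table identifier (example value)
    "videoId": "42",                       # arbitrary payload keys follow
    "changeId": 0,
}

res = requests.post(
    GATEWAY_URL,
    json=transaction,
    headers={"Authorization": f"Bearer {JWT_TOKEN}"},
    verify=False,  # the scripts in this MR also skip TLS verification for the self-signed cert
)
print(res.status_code)
```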
@@ -91,4 +92,4 @@ Returns the computed similarity. Two clusters belonging to the SAME layer will b
 ## Connected Cluster
 Intermediary data-structure used only by the function which computes the similarity. Clusters are connected only to other clusters belonging to a DIFFERENT layer.
-```GET https://articonf1.itec.aau.at:30103/api/use_cases/{use_case}/tables{table}/connectedClusters``` returns all connected clusters for the given use-case and table.
+```GET https://articonf1.itec.aau.at:30103/api/use_cases/{use_case}/tables/{table}/connectedClusters``` returns all connected clusters for the given use-case and table.
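As a rough usage sketch (not part of this diff), the corrected endpoint can be queried with an authenticated GET, mirroring the `httpget` helper used in the analysis scripts further down; the identifiers and token below are assumed placeholders:

```python
import requests

TOKEN = "<regular-user token>"                 # placeholder
use_case, table = "vialog-new-enum", "video"   # example identifiers, not taken from this diff

res = requests.get(
    f"https://articonf1.itec.aau.at:30103/api/use_cases/{use_case}/tables/{table}/connectedClusters",
    headers={"Authorization": f"Bearer {TOKEN}"},
    verify=False,  # self-signed certificate, as in the other scripts
)
print(len(res.json()), "connected clusters returned")
```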
@@ -18,9 +18,11 @@ from pathlib import Path
 from env_info import is_running_locally, get_resources_path
 from flask import request
 from flask import redirect
+from flask_cors import CORS
 # load swagger config
 app = connexion.App(__name__, specification_dir='configs/')
+CORS(app.app)
 @app.app.before_request
 def before_request():
...
@@ -14,6 +14,7 @@ cycler==0.10.0
 decorator==4.4.1
 Deprecated==1.2.7
 Flask==1.1.1
+Flask-Cors==3.0.10
 idna==2.8
 importlib-metadata==1.5.0
 inflection==0.3.1
...
@@ -20,6 +20,7 @@ from env_info import is_running_locally, get_resources_path
 from messaging.ReconnectingMessageManager import ReconnectingMessageManager
 from messaging.MessageHandler import MessageHandler
 from flask import request
+from flask_cors import CORS
 from flask import redirect
 # init message handler
@@ -30,6 +31,7 @@ def message_received_callback(channel, method, properties, body):
 # load swagger config
 app = connexion.App(__name__, specification_dir='configs/')
+CORS(app.app)
 @app.app.before_request
 def before_request():
...
@@ -10,6 +10,7 @@ connexion==2.7.0
 coverage==5.3.1
 cryptography==3.1
 Flask==1.1.2
+Flask-Cors==3.0.10
 idna==2.9
 importlib-metadata==1.6.1
 inflection==0.5.0
...
@@ -9,7 +9,7 @@ for modules_path in modules_paths:
     if os.path.exists(modules_path):
         sys.path.insert(1, modules_path)
-from _add_use_case_scripts.vialog.tables import add_user, add_video
+from _add_use_case_scripts.vialog.tables import add_user, add_video, add_change
 import network_constants as nc
 from security.token_manager import TokenManager
@@ -38,4 +38,4 @@ if __name__ == "__main__":
     add_use_case(use_case)
     add_user.main(use_case)
     add_video.main(use_case)
+    add_change.main(use_case)
\ No newline at end of file
import sys
import os
from pathlib import Path
from typing import Dict, Any
import requests

modules_paths = ['.', '../../../modules/']
for modules_path in modules_paths:
    if os.path.exists(modules_path):
        sys.path.insert(1, modules_path)

from _add_use_case_scripts.vialog.tables import add_user, add_video, add_change
import network_constants as nc
from security.token_manager import TokenManager

def add_use_case(use_case: str):
    #use_case = "vialog"
    jwt = TokenManager.getInstance().getToken()
    url = f"https://articonf1.itec.aau.at:30420/api/use-cases"
    response = requests.post(
        url,
        verify=False,
        proxies = { "http":None, "https":None },
        headers = { "Authorization": f"Bearer {jwt}"},
        json = {"name": use_case}
    )
    print(url+": "+str(response.content))

if __name__ == "__main__":
    use_case = "vialog-new-enum"

    # disable ssl warnings :)
    requests.packages.urllib3.disable_warnings()

    add_use_case(use_case)
    add_video.main(use_case)
    add_change.main(use_case)
\ No newline at end of file
from _add_use_case_scripts.requestPost import postLayersToSwagger, postTableToSwagger

def add_table(use_case: str, table_name: str):
    '''
    take the columns and add the mappings at the server
    replace all "/"'s in the internal representation with a "_"
    '''
    columns = [
        "changeType",
        "changedValue",
        "previousValue",
        "newValue",
    ]

    columns = { c : c for c in columns }
    columns["UniqueID"] = "videoId+changeId"
    columns["changeType"] = "enum(changeType)"
    columns["changedValue"] = "enum(changedValue)"

    table = {
        "name": table_name,
        "mappings": columns
    }

    postTableToSwagger(use_case,table)

def add_layers(use_case:str, table_name: str):
    layers = [
        {
            "use_case": use_case,
            "table": table_name,
            "name": "Changetype_Layer",
            "properties": [
                "changeType",
            ],
            "cluster_properties": [
                "changeType",
            ]
        },
        {
            "use_case": use_case,
            "table": table_name,
            "name": "Changedvalue_Layer",
            "properties": [
                "changedValue",
                "previousValue",
                "newValue"
            ],
            "cluster_properties": [
                "changedValue"
            ]
        }
    ]

    postLayersToSwagger(use_case,layers)

def main(use_case: str):
    print("user")
    table_name = "change"

    add_table(use_case,table_name)
    add_layers(use_case,table_name)
\ No newline at end of file
@@ -6,27 +6,32 @@ def add_table(use_case: str, table_name: str):
     '''
     columns = [
-        # "docType",
-        "videoId",
-        "Video_Token",
-        "replyTo",
-        "Created",
-        "Duration",
-        "videoResolution",
-        "Label",
-        "ThreadId",
-        "Position",
-        "ModifiedDate",
-        "Views",
-        "ModeratedBy",
-        "CommunityManagerNotes",
-        "Rewards",
-        "Video_State",
-        "Video_Type"
+        'videoId',
+        'objType',
+        'eventName',
+        'video_token',
+        'replyTo',
+        'created',
+        'duration',
+        'videoResolution',
+        'label',
+        'threadId',
+        'position',
+        'views',
+        'moderatedBy',
+        'moderationDate',
+        'communityManagerNotes',
+        'rewards',
+        'video_state',
+        'video_type'
     ]
     columns = { c : c for c in columns }
     columns["UniqueID"] = "videoId"
+    columns["moderatedBy"] = "enum(moderatedBy)"
+    columns["video_state"] = "enum(video_state)"
+    columns["video_type"] = "enum(video_type)"
+    columns["videoResolution"] = "enum(videoResolution)"
     table = {
         "name": table_name,
@@ -43,14 +48,14 @@ def add_layers(use_case:str, table_name: str):
             "name": "Manager_Layer",
             "properties": [
                 "UniqueID",
-                "ModifiedDate",
-                "ModeratedBy",
-                "Video_State",
-                "Video_Type"
+                "moderationDate",
+                "moderatedBy",
+                "video_state",
+                "video_type"
             ],
             "cluster_properties": [
-                "ModeratedBy",
-                "Video_State"
+                "moderatedBy",
+                "video_state"
             ]
         },
         {
@@ -59,16 +64,16 @@ def add_layers(use_case:str, table_name: str):
             "name": "Video_Popularity_Layer",
             "properties": [
                 "UniqueID",
-                "Label",
-                "Created",
-                "Views",
-                "Rewards",
-                "Video_State",
-                "Video_Type"
+                "label",
+                "created",
+                "views",
+                "rewards",
+                "video_state",
+                "video_type"
             ],
             "cluster_properties": [
-                "Views",
-                "Video_Type"
+                "views",
+                "video_type"
             ]
         },
         {
@@ -77,15 +82,15 @@ def add_layers(use_case:str, table_name: str):
            "name": "Video_Age_Layer",
            "properties": [
                "UniqueID",
-                "Label",
-                "Created",
-                "Views",
-                "Rewards",
-                "Video_State",
-                "Video_Type"
+                "label",
+                "created",
+                "views",
+                "rewards",
+                "video_state",
+                "video_type"
            ],
            "cluster_properties": [
-                "Created"
+                "created"
            ]
        },
        {
@@ -94,15 +99,15 @@ def add_layers(use_case:str, table_name: str):
            "name": "Rewards_Layer",
            "properties": [
                "UniqueID",
-                "Label",
-                "Created",
-                "Views",
-                "Rewards",
-                "Video_State",
-                "Video_Type"
+                "label",
+                "created",
+                "views",
+                "rewards",
+                "video_state",
+                "video_type"
            ],
            "cluster_properties": [
-                "Rewards"
+                "rewards"
            ]
        },
        {
@@ -111,14 +116,14 @@ def add_layers(use_case:str, table_name: str):
            "name": "Video_Lenght_Layer",
            "properties": [
                "UniqueID",
-                "Created",
-                "Views",
-                "Duration",
-                "Video_State",
-                "Video_Type"
+                "created",
+                "views",
+                "duration",
+                "video_state",
+                "video_type"
            ],
            "cluster_properties": [
-                "Duration"
+                "duration"
            ]
        },
        {
@@ -127,11 +132,11 @@ def add_layers(use_case:str, table_name: str):
            "name": "Video_Resolution_Layer",
            "properties": [
                "UniqueID",
-                "Created",
-                "Views",
+                "created",
+                "views",
                "videoResolution",
-                "Video_State",
-                "Video_Type"
+                "video_state",
+                "video_type"
            ],
            "cluster_properties": [
                "videoResolution"
...
@@ -21,12 +21,14 @@ import connexion
 from security import swagger_util
 from env_info import is_running_locally, get_resources_path
 from flask import request
+from flask_cors import CORS
 from flask import redirect
 # start app
 if __name__ == '__main__':
     app = connexion.App(__name__, specification_dir='configs/')
+    CORS(app.app)
     @app.app.before_request
     def before_request():
...
import requests
import json

videoListUrl = "https://dev758755.vialog.app/Videos/Meta/ListAll"
videoUrl = "https://dev758755.vialog.app/stat/events?type=video&id="

# token from Rest Gateway to authorize
JWT_TOKEN = 'eyJ0eXAiOiJKV1QiLCJhbGciOiJIUzI1NiJ9.eyJ1c2VybmFtZSI6InJlZ3VsYXJAaXRlYy5hYXUuYXQiLCJjcmVhdGVkX2F0IjoiMjAyMS0wNC0wNyAxMjo0OTo0MS43MTkzNjQiLCJ2YWxpZF91bnRpbCI6IjIwMjEtMDQtMDggMTI6NDk6NDEuNzE5MzY0In0.FN6qqBQeJSmXtS0-0dBiL-ojz6Ou7E5Tc9macrrhM4A'
useCase = "vialog-new-enum"

def send_transaction_to_rest_gateway(transaction: dict):
    res = requests.post(
        url = 'https://articonf1.itec.aau.at:30401/api/trace',
        json = transaction,
        headers = {"Authorization": f"Bearer {JWT_TOKEN}"},
        verify = False # ignore ssl error
    )
    print(res)

videosRequest = requests.get(videoListUrl)

empty = set()
html = set()

if (videosRequest.status_code != 200):
    print(f"Status: {videosRequest.status_code}")

dataCount = 0
for video in videosRequest.json():
    dataCount += 1
    id = video["videoId"]
    videoRequest = requests.get(f"{videoUrl}{id}")
    if videoRequest.status_code != 200:
        print(f"Status: {videoRequest.status_code}")
    if videoRequest.text != "" and not videoRequest.text.startswith("<!DOCTYPE html>"):
        print(f"\n\n{videoUrl}{id}")
        historyList = sorted(videoRequest.json()[0]["History"], key=lambda k: k['moderationDate'], reverse=True)
        historyList.append(empty)
        i = 0
        changeList = []
        for eventMap in historyList:
            if historyList[i+1] == empty:
                break
            if i == 0:
                lastState = eventMap
                lastState["ApplicationType"] = useCase
                lastState["docType"] = "video"
                print(f"{lastState}\n\n\n")
                send_transaction_to_rest_gateway(lastState)
            if eventMap["eventName"] == 'r1eabcbdc8f5378b2ba71a1b6fe2038b Created' or eventMap["eventName"] == 'Created':
                change = {"changeType": "Created", "changedValue": "video_state", "previousValue": "", "newValue":"Created"}
            elif eventMap["eventName"] == "Restore":
                change = {"changeType": "Restore", "changedValue": "", "previousValue": "", "newValue":""}
            elif eventMap["eventName"] == "PositionChange":
                change = {"changeType": "PositionChange", "changedValue": "position", "previousValue": historyList[i+1]["position"], "newValue": eventMap["position"]}
            elif eventMap["eventName"] == "Hide":
                change = {"changeType": "Hide", "changedValue": "video_state", "previousValue": historyList[i+1]["video_state"], "newValue": eventMap["video_state"]}
            elif eventMap["eventName"] == "Publish":
                change = {"changeType": "Publish", "changedValue": "video_state", "previousValue": historyList[i+1]["video_state"], "newValue":eventMap["video_state"]}
            elif eventMap["eventName"] == "CMNote":
                change = {"changeType": "CMNote", "changedValue": "communityManagerNotes", "previousValue": historyList[i+1]["communityManagerNotes"], "newValue":eventMap["communityManagerNotes"]}
            elif eventMap["eventName"] == "Move":
                change = {"changeType": "Move", "changedValue": "position", "previousValue": historyList[i+1]["position"], "newValue":eventMap["position"]}
            elif eventMap["eventName"] == "VideoType":
                change = {"changeType": "VideoType", "changedValue": "video_type", "previousValue": historyList[i+1]["video_type"], "newValue":eventMap["video_type"]}
            elif eventMap["eventName"] == "Delete":
                change = {"changeType": "Delete", "changedValue": "", "previousValue": "", "newValue": ""}
            elif eventMap["eventName"] == "Copy":
                change = {"changeType": "Copy", "changedValue": "", "previousValue": "", "newValue":""}
            elif eventMap["eventName"] == "CustomLabel":
                change = {"changeType": "CustomLabel", "changedValue": "label", "previousValue": historyList[i+1]["label"], "newValue":eventMap["label"]}

            change["videoId"] = id
            change["changeId"] = i
            change["timestamp"] = eventMap["moderationDate"]
            change["ApplicationType"] = useCase
            change["docType"] = "change"
            print(change)
            send_transaction_to_rest_gateway(change)
            i += 1
    elif videoRequest.text == "":
        empty.add(id)
    else:
        html.add(id)

print(f"empty: {empty}\n\n")
print(f"html page: {html}\n\n")
print(f"history: {historyList}")
print(dataCount)
@@ -9,10 +9,10 @@ click==7.1.2
 clickclick==1.2.2
 colorama==0.4.3
 connexion==2.7.0
-coverage==5.3.1
 cryptography==2.9.2
 Deprecated==1.2.10
 Flask==1.1.2
+Flask-Cors==3.0.10
 idna==2.10
 importlib-metadata==1.7.0
 inflection==0.5.0
...
@@ -31,7 +31,9 @@ class Repository(MongoRepositoryBase):
         @throws
         KeyError - Duplicate transaction ID
         '''
-        reference = self.get_transaction_with_id(transaction.id())
+        use_case = transaction.use_case
+        table = transaction.table
+        reference = self.get_transaction_with_id(transaction.id(), use_case, table)
         if reference == None:
             super().insert_entry(self._transaction_collection, transaction.to_serializable_dict())
         else:
@@ -41,10 +43,10 @@ class Repository(MongoRepositoryBase):
         result = super().get_entries(self._transaction_collection, projection={'_id': False}, selection={"use_case": use_case})
         return [Transaction.from_serializable_dict(row) for row in list(result)]

-    def get_transaction_with_id(self, unique_id: str) -> Transaction:
-        result = list(super().get_entries(self._transaction_collection, projection={'_id': False}, selection={"UniqueID": unique_id}))
-        if len(result) == 1:
+    def get_transaction_with_id(self, unique_id: str, use_case:str, table:str) -> Transaction:
+        result = list(super().get_entries(self._transaction_collection, projection={'_id': False}, selection={"id": unique_id, "use_case": use_case, "table": table}))
+        if len(result) >= 1:
             return Transaction.from_serializable_dict(result[0])
         return None
...
@@ -8,9 +8,11 @@ import json
 import hashlib
 import logging
 import requests
+requests.packages.urllib3.disable_warnings()
 from typing import Dict
 LOGGER = logging.getLogger(__name__)
 class MessageHandler:
@@ -30,7 +32,7 @@ class MessageHandler:
         self._rest_fetcher = rest_fetcher
     def handle_generic(self, body):
-        LOGGER.info(f"Received message: {body}")
         result = None
         message = None
@@ -39,11 +41,13 @@ class MessageHandler:
         except (ValueError, TypeError):
             result = self.MSG_NOT_JSON
             LOGGER.warning(result)
+            LOGGER.info(f"Received message: {body}")
             return result
         if not 'type' in message:
             result = self.MSG_NO_TYPE
             LOGGER.warning(result)
+            LOGGER.info(f"Received message: {body}")
             return result
         if message['type'] == 'blockchain-transaction':
@@ -51,8 +55,11 @@ class MessageHandler:
             result = self.MSG_TRACE_PROCESSED
         else:
             result = self.MSG_NOT_PROCESSED
+            LOGGER.warning(result)
+            LOGGER.info(f"Received message: {body}")
-        LOGGER.info(result)
+        #LOGGER.info(result) #too much spam
         return result
     def _resolve_path(self, data: Dict, path:str) -> Dict:
...
@@ -132,39 +139,39 @@ class MessageHandler:
         '''
         # check if there is a use-case in the message
-        if "ApplicationType" not in transaction_message.keys():
+        if "ApplicationType" not in transaction_message:
             LOGGER.error("Transaction has no ApplicationType, storing it under use-case 'unknown'.")
             transaction_message["ApplicationType"] = "unknown"
             self._mongo_repo.add_failed_transaction(transaction_message)
             return
-        # check if there is a doctype in the message
-        if "docType" not in transaction_message.keys():
-            LOGGER.error("Transaction has no docType, storing it under docType 'unknown'.")
+        # check if there is a table in the message
+        if "docType" not in transaction_message:
+            LOGGER.error("Transaction has no docType, storing it under table 'unknown'.")
             transaction_message["docType"] = "unknown"
             self._mongo_repo.add_failed_transaction(transaction_message)
             return
-        use_case = transaction_message["ApplicationType"]
-        docType = transaction_message["docType"]
+        transaction_use_case = transaction_message["ApplicationType"]
+        transaction_table = transaction_message["docType"]
         try:
-            tables = self._rest_fetcher.fetch_schema_information(use_case)
+            tables = self._rest_fetcher.fetch_schema_information(transaction_use_case)
         except ValueError as e:
-            LOGGER.error(f"{e}\nStoring it as a failed transaction.")
+            LOGGER.error(f"{e}\nCould not fetch schema, storing it as a failed transaction..")
             self._mongo_repo.add_failed_transaction(transaction_message)
             return
         target_table = None
         # find correct table
         for table in tables:
-            if table["name"] == docType:
+            if table["name"] == transaction_table:
                 target_table = table
                 break
         # abort if table does not exist.
         if target_table == None:
-            LOGGER.error(f"There is no table '{docType}', storing it as a failed transaction.")
+            LOGGER.error(f"There is no table '{transaction_table}', storing it as a failed transaction.")
             self._mongo_repo.add_failed_transaction(transaction_message)
             return
@@ -172,20 +179,19 @@ class MessageHandler:
         try:
             flattened = self._flatten_transaction(transaction_message, mappings)
         except KeyError as e:
-            LOGGER.error(f"Failed while flattening with KeyError: {str(e)}")
+            LOGGER.error(f"Failed while flattening with KeyError: {str(e)}, storing it as a failed transaction.")
             self._mongo_repo.add_failed_transaction(transaction_message)
             return
-        transaction = Transaction(use_case, target_table["name"], flattened)
+        transaction = Transaction(transaction_use_case, target_table["name"], flattened)
         #check for duplicates
         try:
-            reference = self._mongo_repo.get_transaction_with_id(transaction.id())
-            if reference != None:
-                if (reference[0].table == transaction.table) and (reference[0].use_case == transaction.use_case):
-                    LOGGER.error("Found duplicate")
-                    self._mongo_repo.add_duplicated_transaction(transaction)
-                    return
+            references = self._mongo_repo.get_transaction_with_id(transaction.id(), transaction_use_case, transaction_table)
+            if references != None:
+                LOGGER.info("Found duplicate, storing it as a duplicated transaction.")
+                self._mongo_repo.add_duplicated_transaction(transaction)
+                return
         except ValueError as e:
             LOGGER.error(f"{e}, could not insert duplicated node.")
             return
@@ -193,14 +199,14 @@ class MessageHandler:
         try:
             self._mongo_repo.add_transaction(transaction)
         except KeyError as e:
-            LOGGER.error(f"{e}")
-            self._mongo_repo.add_failed_transaction(transaction_message)
+            LOGGER.error(f"{e}, ignored {transaction_message}")
+            # self._mongo_repo.add_failed_transaction(transaction_message)
             return
         msg = {
             "type": "new-trace",
-            "content": transaction.to_serializable_dict(),
+            "content": transaction.to_serializable_dict()
         }
         msg_string = json.dumps(msg)
...
@@ -23,7 +23,8 @@ def delete_all_failed_for_use_case(use_case: str):
     return Response(status=200)

 def all_duplicated_for_use_case(use_case: str):
-    return _repository.all_duplicated_transactions_for_use_case(use_case)
+    transactions = _repository.all_duplicated_transactions_for_use_case(use_case)
+    return [t.to_serializable_dict() for t in transactions]

 def delete_all_duplicated_for_use_case(use_case: str):
     _repository.delete_all_duplicated_transactions(use_case)
...
@@ -18,14 +18,14 @@ class DummyMongoRepo:
     def add_transaction(self, transaction):
         self.added_transactions.append(transaction)

-    def get_transaction_with_id(self, unique_id: str):
+    def get_transaction_with_id(self, unique_id: str, use_case, table):
         result = []
         for trans in self.added_transactions:
             transID = trans.id()
-            if transID == unique_id:
+            if transID == unique_id and trans.use_case == use_case and trans.table == table:
                 result.append(trans)

         if len(result) > 0:
-            return result
+            return result[0]
         return None
@@ -178,6 +178,7 @@ class Test_MessageHandler(unittest.TestCase):
         self.handler.handle_blockchain_transaction(msg['content'])
         self.handler.handle_blockchain_transaction(msg2['content'])
         self.assertEqual(len(self.repo.added_transactions),len(self.repo.duplicated_transactions))
+        self.assertEqual(len(self.repo.added_transactions),1)

     def test_handleBlockchainTransaction_duplicateTraceDifferentTable_bothTransactionsAddedAsUnique(self):
         msg = self._get_valid_message()
@@ -197,5 +198,24 @@ class Test_MessageHandler(unittest.TestCase):
         self.handler.handle_blockchain_transaction(msg2['content'])
         self.assertEqual(len(self.repo.added_transactions),2)

+    def test_handleBlockchainTransaction_multipleTransactions3SameIdDiffUseCaseTable_3AddedUnique2Duplicate(self):
+        #print("Entered Test: 3Unique 2Dupli")
+        msg = self._get_valid_message()
+        msg2 = self._get_valid_message2()
+        msg3 = self._get_valid_message3()
+        msg4 = self._get_valid_message3()
+        msg5 = self._get_valid_message3()
+        msg = eval(msg)
+        msg2 = eval(msg2)
+        msg3 = eval(msg3)
+        msg4 = eval(msg4)
+        msg5 = eval(msg5)
+        self.handler.handle_blockchain_transaction(msg['content'])
+        self.handler.handle_blockchain_transaction(msg2['content'])
+        self.handler.handle_blockchain_transaction(msg3['content'])
+        self.handler.handle_blockchain_transaction(msg4['content'])
+        self.handler.handle_blockchain_transaction(msg5['content'])
+        self.assertEqual(len(self.repo.added_transactions),3)
+        self.assertEqual(len(self.repo.duplicated_transactions),2)
+
 if __name__ == '__main__':
     unittest.main()
\ No newline at end of file
import requests
requests.packages.urllib3.disable_warnings()
from icecream import ic

def httpget(url):
    token = 'eyJ0eXAiOiJKV1QiLCJhbGciOiJIUzI1NiJ9.eyJ1c2VybmFtZSI6InJlZ3VsYXJAaXRlYy5hYXUuYXQiLCJjcmVhdGVkX2F0IjoiMjAyMS0wMy0yNCAxMDoxMzo1MS4wMjkwNDkiLCJ2YWxpZF91bnRpbCI6IjIwMjEtMDMtMjUgMTA6MTM6NTEuMDI5MDQ5In0.V6kYV5Lmb_tUIsF-6AKNB8_lIifmJP_Dm8gHhGa5w_o'
    res = requests.get(url,
        verify=False,
        headers = { "Authorization": f"Bearer {token}"})
    return res

# list tables
res = httpget(url = 'https://articonf1.itec.aau.at:30420/api/use-cases/crowd-journalism/tables')
print("Tables: ", [entry['name'] for entry in res.json()])

# count pushed data
def count_data(json_res, table_identifier='table'):
    tables = {}
    for entry in json_res:
        key = entry[table_identifier]
        if key not in tables:
            tables[key] = 0
        tables[key] += 1
    ic(tables)

res = httpget(url = 'https://articonf1.itec.aau.at:30001/api/use_cases/crowd-journalism/transactions')
count_data(res.json())

res_f = httpget(url = 'https://articonf1.itec.aau.at:30001/api/use_cases/crowd-journalism/transactions-failed')
count_data(res_f.json(), 'docType')

# failed tags: the "tag" is missing, but is called name
# failed purchases: duplicate keys generated from (userid, videoid, ownerid)
# failed classifications: impact is missing

# visualize content
import matplotlib.pyplot as plt

def visualize_video_coordinates():
    geolocations = []
    for entry in res.json():
        if entry['table'] != 'video':
            continue
        loc_ = entry['properties']['geolocation'].split(',')
        if loc_[0] == 'undefined' or loc_[1] == 'undefined':
            continue
        geolocations.append(loc_)

    plt.scatter([float(coor[0]) for coor in geolocations], [float(coor[1]) for coor in geolocations])
    plt.axis('off')
    plt.show()

# visualize_video_coordinates()

def visualize_video_prices():
    price = []
    for entry in res.json():
        if entry['table'] != 'video':
            continue
        price.append(entry['properties']['price'])

    from collections import Counter
    print(Counter(price))

    plt.hist(price, bins=len(set(price)))
    plt.show()

# visualize_video_prices()

def visualize_content_ratings():
    impact = []
    informative = []
    trustiness = []
    for entry in res.json():
        if entry['table'] != 'classification':
            continue
        if entry['properties']['impact'] is not None:
            impact.append(entry['properties']['impact'])
        if entry['properties']['informative'] is not None:
            informative.append(entry['properties']['informative'])
        if entry['properties']['trustiness'] is not None:
            trustiness.append(entry['properties']['trustiness'])

    from collections import Counter
    print(Counter(impact))
    print(Counter(informative))
    print(Counter(trustiness))

    fig, (ax1, ax2, ax3) = plt.subplots(3)
    ax1.hist(impact, bins=len(set(impact)))
    ax1.set_title('impact')
    ax2.hist(informative, bins=len(set(informative)))
    ax2.set_title('informative')
    ax3.hist(trustiness, bins=len(set(trustiness)))
    ax3.set_title('trustiness')
    plt.show()

# visualize_content_ratings()

# counting duplicate entries for 'purchase'
working_purchase_ids = [(entry['properties']['userid'], entry['properties']['videoid'], entry['properties']['ownerid']) for entry in res.json() if entry['table'] == 'purchase']
failed_purchase_ids = [(entry['userid'], entry['videoid'], entry['ownerid']) for entry in res_f.json() if entry['docType'] == 'purchase']

ic(len(working_purchase_ids))
ic(len(failed_purchase_ids))

cnt = 0
for failed_id in failed_purchase_ids:
    if failed_id in working_purchase_ids:
        cnt += 1
ic(cnt)
\ No newline at end of file
import requests
requests.packages.urllib3.disable_warnings()
from icecream import ic

token = 'eyJ0eXAiOiJKV1QiLCJhbGciOiJIUzI1NiJ9.eyJ1c2VybmFtZSI6InJlZ3VsYXJAaXRlYy5hYXUuYXQiLCJjcmVhdGVkX2F0IjoiMjAyMS0wMy0yNCAxMDoxMzo1MS4wMjkwNDkiLCJ2YWxpZF91bnRpbCI6IjIwMjEtMDMtMjUgMTA6MTM6NTEuMDI5MDQ5In0.V6kYV5Lmb_tUIsF-6AKNB8_lIifmJP_Dm8gHhGa5w_o'

def httpget(url):
    res = requests.get(url,
        verify=False,
        headers = { "Authorization": f"Bearer {token}"})
    return res

res_f = httpget(url = 'https://articonf1.itec.aau.at:30001/api/use_cases/crowd-journalism/transactions-failed')

failed_purchases = []
for entry in res_f.json():
    if entry['docType'] == 'purchase':
        failed_purchases.append(entry)

print(len(failed_purchases))

# upload again
def httppost_gateway(content_):
    url = 'https://articonf1.itec.aau.at:30401/api/trace'
    res = requests.post(url,
        verify=False,
        headers = { "Authorization": f"Bearer {token}"},
        json=content_)
    return res

for purchase in failed_purchases:
    res = httppost_gateway(purchase)
    print(res)
\ No newline at end of file
@@ -18,7 +18,7 @@ urllib3.disable_warnings(urllib3.exceptions.InsecureRequestWarning)
 def send_transaction_to_rest_gateway(transaction: dict):
     # token from Rest Gateway to authorize
-    JWT_TOKEN = 'eyJ0eXAiOiJKV1QiLCJhbGciOiJIUzI1NiJ9.eyJ1c2VybmFtZSI6InJlZ3VsYXJAaXRlYy5hYXUuYXQiLCJjcmVhdGVkX2F0IjoiMjAyMS0wMi0wOCAxMzo0NzoxOC40NzUxMjEiLCJ2YWxpZF91bnRpbCI6IjIwMjEtMDItMDkgMTM6NDc6MTguNDc1MTIxIn0.DWY9c0X2XQJDz0Ef35-k1IVY6GWf00ogaVOCeX8Irlo'
+    JWT_TOKEN = 'eyJ0eXAiOiJKV1QiLCJhbGciOiJIUzI1NiJ9.eyJ1c2VybmFtZSI6InJlZ3VsYXJAaXRlYy5hYXUuYXQiLCJjcmVhdGVkX2F0IjoiMjAyMS0wMy0xNiAxMzoxNDoyMS42MDc1NjciLCJ2YWxpZF91bnRpbCI6IjIwMjEtMDMtMTcgMTM6MTQ6MjEuNjA3NTY3In0.ZGObriEDWYo1BgiYN3pQSosS7UuNrq10GSCSjmRHSAw'

     res = requests.post(
         url = 'https://articonf1.itec.aau.at:30401/api/trace',
@@ -47,7 +47,6 @@ if __name__ == '__main__':
         transaction['ApplicationType'] = 'reddit'
         transaction['docType'] = 'reddit'
-
         for key, value in obj_dict.items():
             transaction[key] = value
@@ -56,6 +55,7 @@ if __name__ == '__main__':
         if (summ % 1000 == 0 ):
             print ("Uploaded " + str(summ) + " transactions.")
+        if summ >= 1:
+            break

     print ("TOTAL Uploaded " + str(summ) + " transactions.")
\ No newline at end of file