UNI-KLU / SMART

Commit 31f6aadc, authored Sep 20, 2021 by Alexander Lercher
Changes for prediction pipeline

Parent: f2b72d8f
Showing 9 changed files with 20 additions and 18 deletions (+20 -18)
src/data-hub/proactive-community-detection-microservice/app/db/repository.py          +1 -1
src/data-hub/proactive-community-detection-microservice/app/run_node_fetching.py      +3 -2
src/data-hub/proactive-community-detection-microservice/app/run_prediction.py         +1 -1
src/data-hub/role-stage-discovery-microservice/app/run_clustering.py                  +1 -1
src/data-hub/role-stage-discovery-microservice/app/run_node_fetching.py               +1 -1
src/data-hub/role-stage-discovery-microservice/app/run_time_slicing.py                +4 -2
src/data-hub/semantic-linking-microservice/app/dummy_upload/bank_app/dummy_upload.py  +4 -2
src/data-hub/semantic-linking-microservice/app/dummy_upload/smart_energy/dummy_upload.py  +4 -4
src/data-hub/semantic-linking-microservice/app/messaging/MessageHandler.py            +1 -4
src/data-hub/proactive-community-detection-microservice/app/db/repository.py

@@ -30,7 +30,7 @@ class Repository(MongoRepositoryBase):
         assert confirm, 'WONT DELETE WHOLE DB WITHOUT CONFIRMATION'
         for collection_ in [self._use_case_collection, self._layer_collection, self._layer_pair_collection,
-                            self._clusters_collection, self._time_slice_collection]:
+                            self._clusters_collection, self._time_slice_collection, self._prediction_result_collection]:
             super().drop_collection(collection_)
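DROP now clears the new prediction-result collection along with the existing ones, and it still refuses to run without an explicit confirmation flag. A minimal usage sketch of that guard (the same call appears, commented out, in run_node_fetching.py below):

from db.repository import Repository

# Destructive reset of every collection owned by this microservice;
# the assert above raises unless confirm=True is passed explicitly.
Repository().DROP(confirm=True)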
src/data-hub/proactive-community-detection-microservice/app/run_node_fetching.py

@@ -13,6 +13,7 @@ from db.repository import Repository
 if __name__ == "__main__":
     '''Fetches all required data from business-logic and role-stage-discovery.'''
-    Repository().DROP(confirm=True)
-    use_cases = ['vialog-enum', 'car-sharing-official', 'smart-energy', 'crowd-journalism-enum']+['community-prediction-youtube-n', 'community-prediction-taxi']
+    # Repository().DROP(confirm=True)
+    use_cases = ['smart-energy']
+    # ['vialog-enum', 'car-sharing-official', 'smart-energy', 'crowd-journalism-enum']+['community-prediction-youtube-n', 'community-prediction-taxi']
     fetching.fetch(selected_use_cases=use_cases, selected_use_case_tables=None)
\ No newline at end of file
src/data-hub/proactive-community-detection-microservice/app/run_prediction.py

@@ -46,7 +46,7 @@ def _run_cleanup(use_cases: List[str] = None):
 if __name__ == '__main__':
-    use_cases = ['vialog-enum', 'car-sharing-official', 'smart-energy', 'crowd-journalism-enum']
+    use_cases = ['smart-energy'] #['vialog-enum', 'car-sharing-official', 'smart-energy', 'crowd-journalism-enum']
     # use_cases = ['community-prediction-youtube-n', 'community-prediction-taxi']
     _run_data_preparation(use_cases)
src/data-hub/role-stage-discovery-microservice/app/run_clustering.py

@@ -69,4 +69,4 @@ def store_generic_clusters(clusters: List[Cluster], layer):
 if __name__ == "__main__":
     # please dont delete all clusters anymore @10.11.2020
     # repo.delete_all_clusters()
-    run_generic_clustering(selected_use_cases=[], selected_use_case_tables=[], selected_layer_names=[])
+    run_generic_clustering(selected_use_cases=['smart-energy'], selected_use_case_tables=None, selected_layer_names=None)
src/data-hub/role-stage-discovery-microservice/app/run_node_fetching.py

@@ -11,4 +11,4 @@ urllib3.disable_warnings(urllib3.exceptions.InsecureRequestWarning)
 import processing.fetching.fetching as f
 if __name__ == "__main__":
-    f.fetch_nodes_from_semantic_linking(selected_use_cases=[], selected_use_case_tables=[])
+    f.fetch_nodes_from_semantic_linking(selected_use_cases=['smart-energy'], selected_use_case_tables=None)
\ No newline at end of file
src/data-hub/role-stage-discovery-microservice/app/run_time_slicing.py

@@ -16,12 +16,14 @@ TimeSliceKey = Tuple[int, int]
 TIME_PROPERTY_NAMES = [
     # vialog-enum
     'created',
+    'moderationDate',
+    'timestamp',
     # car-sharing-official
     'available',
     'date',
     'startDate',
     'moment',
-    # smart-energy
+    # smart-energy / bank-app
     'Timestamp',
     # crowd-journalism-enum
     'creationTimestamp',

@@ -156,6 +158,6 @@ def run_time_slicing(selected_use_cases: List[str] = None, selected_use_case_tab
 if __name__ == "__main__":
-    use_case = 'community-prediction-youtube-n'
+    use_case = 'smart-energy'
     repo.delete_time_slices(use_case)
     run_time_slicing(selected_use_cases=[use_case])
\ No newline at end of file
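TIME_PROPERTY_NAMES lists, per use case, the property names that carry timestamps, and the hunk header shows TimeSliceKey = Tuple[int, int]. A hedged sketch of how a slicer might use these, assuming both the helper names and the (year, week) reading of the key, none of which is code from this file:

from datetime import datetime, timezone
from typing import Optional, Tuple

TimeSliceKey = Tuple[int, int]

def find_time_property(properties: dict, names=('created', 'Timestamp', 'creationTimestamp')) -> Optional[str]:
    # Hypothetical helper: return the first configured timestamp property present on a node.
    for name in names:
        if name in properties:
            return name
    return None

def to_slice_key(unix_timestamp: float) -> TimeSliceKey:
    # One plausible reading of Tuple[int, int]: (ISO year, ISO week number).
    dt = datetime.fromtimestamp(unix_timestamp, tz=timezone.utc)
    year, week, _ = dt.isocalendar()
    return (year, week)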
src/data-hub/semantic-linking-microservice/app/dummy_upload/bank_app/dummy_upload.py

@@ -9,10 +9,12 @@ for modules_path in modules_paths:
     sys.path.insert(1, modules_path)
 from messaging.MessageHandler import MessageHandler
+from db.repository import Repository
 # file to read the data from
 CSV_FILE = r'dummy_upload/bank_app/bank_data.csv'
-handler = MessageHandler()
+repo = Repository()
+handler = MessageHandler(repo)
 def upload_transaction(transaction):

@@ -31,7 +33,7 @@ def upload_transaction(transaction):
     handler.handle_new_trace(t)
-type_mapping = {'House Rent': 1, 'Payback Loan': 2, 'Initial Credit': 3, 'Emergency Help': 4, 'Friendly Help': 5}
+type_mapping = {'House Rent': 0, 'Payback Loan': 1, 'Initial Credit': 2, 'Emergency Help': 3, 'Friendly Help': 4}
 if __name__ == '__main__':
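The uploader now constructs the Repository itself and hands it to MessageHandler instead of letting the handler create one internally. The constructor is not part of this diff; a minimal sketch of the assumed wiring, consistent with the self._repository.add_layer_nodes(nodes) call shown in MessageHandler.py below:

class MessageHandler:
    def __init__(self, repository):
        # Assumed: keep the injected repository; handle_new_trace later calls
        # self._repository.add_layer_nodes(nodes) on it.
        self._repository = repository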
src/data-hub/semantic-linking-microservice/app/dummy_upload/smart_energy/dummy_upload.py

@@ -30,9 +30,9 @@ def upload_transaction(transaction):
         'id': uid,
         'properties': transaction,
     }
-    # handler.handle_new_trace(t)
-    processed_transactions.append(t)
+    handler.handle_new_trace(t)
+    # processed_transactions.append(t)
 def store_transactions_for_mirsat():

@@ -67,7 +67,7 @@ if __name__ == '__main__':
     for row in reader:
         transaction = {}
         transaction['ApplicationType'] = 'smart-energy'
-        transaction['docType'] = 'smart-energy-paper'
+        transaction['docType'] = 'smart-energy'
         for idx in range(len(row)):
             transaction[titles[idx]] = row[idx]

@@ -82,4 +82,4 @@ if __name__ == '__main__':
     upload_transaction(transaction)
-store_transactions_for_mirsat()
+# store_transactions_for_mirsat()
\ No newline at end of file
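With handle_new_trace enabled per CSV row, each trace has to carry the fields that MessageHandler checks for (use_case, id, properties, table); the hunk above only shows the 'id' and 'properties' keys. A hedged sketch of the full payload builder, with the remaining keys and their values assumed:

def build_trace(uid: str, transaction: dict) -> dict:
    # 'id' and 'properties' are taken from the hunk above; 'use_case' and 'table'
    # are assumptions based on the required-field check in MessageHandler.handle_new_trace.
    return {
        'use_case': 'smart-energy',
        'table': 'smart-energy',
        'id': uid,
        'properties': transaction,
    }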
src/data-hub/semantic-linking-microservice/app/messaging/MessageHandler.py

@@ -68,7 +68,7 @@ class MessageHandler:
         return layers
     def handle_new_trace(self, content: Dict):
-        LOGGER.info("new trace!")
+        # LOGGER.info(f"Received message: {str(content)}")
         if "use_case" not in content or "id" not in content or "properties" not in content or "table" not in content:
             LOGGER.error(f"Missing fields in trace, required fields: (use_case, id, properties, table), given fields: ({content.keys()})")
             return

@@ -88,7 +88,6 @@ class MessageHandler:
             LOGGER.warning(f"No layers available for '{use_case}'.'{table}', ignoring trace.")
             return
-        LOGGER.info(f"{len(layers)} layers available")
         nodes = []
         for layer in layers:

@@ -104,12 +103,10 @@ class MessageHandler:
             nodes.append(node)
         if len(nodes) > 0:
-            LOGGER.info(f"{len(layers)} layers available")
             self._repository.add_layer_nodes(nodes)
         else:
             LOGGER.error(f"did NOT add nodes...")
-        LOGGER.info("done")
     def handle_new_traces_available(self):
         # get all traces and call the Processor
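Taken together, the three hunks cut per-trace log output down to warnings and errors. The required-field check itself is unchanged; expressed as a standalone helper for illustration (the helper name is hypothetical, not part of the class):

REQUIRED_TRACE_FIELDS = ("use_case", "id", "properties", "table")

def missing_trace_fields(content: dict) -> list:
    # Same rule handle_new_trace applies before processing a trace:
    # every required field must be present in the message body.
    return [field for field in REQUIRED_TRACE_FIELDS if field not in content]

A trace missing any of these is rejected with the "Missing fields in trace" error shown in the first hunk.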