UNI-KLU / SMART / Commits

Commit b243899f authored Jun 23, 2021 by Bogdan Mihai (ARTICONF student)
Cleaned up pre deploy
parent 4f908524
Showing 5 changed files with 21 additions and 14 deletions (+21 / -14)
developers.py (src/participation-hub/federated-learning-microservice/app/routes/developers.py): +5 -1
swagger.yml (tools/federated-training/app/configs/swagger.yml): +2 -2
swagger_local.yml (tools/federated-training/app/configs/swagger_local.yml): +1 -1
developers.py (tools/federated-training/app/routes/developers.py): +0 -1
owners.py (tools/federated-training/app/routes/owners.py): +13 -9
src/participation-hub/federated-learning-microservice/app/routes/developers.py

@@ -27,13 +27,17 @@ def upload_and_train(use_case: str, developer_id: int):
     #FORWARD TO GPU SERVER WITH IP AND PORT
+    #data = {'use_case' : use_case,
+    #        'developer_id' : developer_id}
     url = f'https://{network_constants.FEDERATED_TRAINING_HOSTNAME}:{network_constants.FEDERATED_TRAINING_REST_PORT}/api/Developers/use_cases/{use_case}/developer_id/{developer_id}/upload_and_train:'
+    #url= 'gpu3.itec.aau.at/home/itec/bogdan/Articonf/smart/tools/federated-training/app/routes/developers'
     response = requests.post(
         url,
         verify=False,
         proxies={"http": None, "https": None},
-        files=request.files
+        files=request.files,
+        #data = data
     )
     return response
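This hunk keeps the participation-hub route as a thin proxy: it forwards the incoming multipart upload (request.files) to the federated-training service, with TLS verification disabled and any configured proxies bypassed. As a rough, hypothetical illustration of how a client could exercise such an endpoint (the base URL placeholder and use-case values below are assumptions, not values from this repository; the form-field name "dataset_file1" is borrowed from the commented-out code in tools/federated-training/app/routes/developers.py):

# Illustrative client sketch only -- not part of the commit.
import requests

BASE_URL = "https://<participation-hub-host>:<port>"    # placeholder, replace with a real deployment
use_case, developer_id = "demo-use-case", 1              # hypothetical values

with open("training_data.csv", "rb") as dataset:
    response = requests.post(
        f"{BASE_URL}/api/Developers/use_cases/{use_case}"
        f"/developer_id/{developer_id}/upload_and_train",
        files={"dataset_file1": dataset},         # sent on as request.files by the proxy route
        verify=False,                             # mirrors the route's verify=False
        proxies={"http": None, "https": None},    # mirrors the route's proxy bypass
    )
print(response.status_code, response.text)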
tools/federated-training/app/configs/swagger.yml

 swagger: "2.0"
 info:
-  title: Federated Learning microservice
-  description: This is the documentation for the federated learning microservice.
+  title: Federated Training microservice
+  description: This is the documentation for the federated Training microservice.
   version: "1.0.0"
 consumes:
tools/federated-training/app/configs/swagger_local.yml

 swagger: "2.0"
 info:
-  title: Federated Learning microservice
+  title: Federated Training microservice
   description: This is the documentation for the federated learning microservice.
   version: "1.0.0"
tools/federated-training/app/routes/developers.py

@@ -34,7 +34,6 @@ def upload_and_train(use_case: str, developer_id: int):
     #file_dict = request.files
     #db_File_True = file_dict["dataset_file1"]
     #db_File_False = file_dict["dataset_file2"]
-    #TODO IMPLEMENT HERE
     #THEN start processing
     last_train_metrics = main_proc.start_processing(use_case, developer_id)
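For context, the route above hands the uploaded data to main_proc.start_processing(use_case, developer_id) and keeps the returned value as last_train_metrics. The real implementation lives in each use case's main_proc.py and is not part of this commit; a purely hypothetical stub with the same call signature, useful only for exercising the route in isolation, could look like:

# Hypothetical stand-in for main_proc.start_processing -- not from the repository.
def start_processing(use_case: str, developer_id: int) -> dict:
    # Pretend one training round ran and report dummy metrics.
    return {"use_case": use_case, "developer_id": developer_id,
            "loss": 0.0, "accuracy": 0.0}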
tools/federated-training/app/routes/owners.py

@@ -12,7 +12,7 @@ def last(use_case: str):
         bottom = df.tail(1)
         bottom = bottom.to_json(orient="records")
         print(bottom[1:-1])
-        metricsJson = bottom[1:-1] #remove the list [ { a: "1"} ... ]
+        metricsJson = bottom[1:-1] #remove the paranthesis of the list to make a dict [ { a: "1" , ... } ]
         return Response(status=200, response=metricsJson)
     except Exception as e:
         print(e)
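The rewritten comment documents what the [1:-1] slice does: to_json(orient="records") on a one-row DataFrame returns a JSON array string, and dropping the first and last character strips the surrounding brackets so only the single JSON object remains. A standalone sketch with made-up ledger columns:

# Standalone illustration of the slicing above; column names and values are invented.
import pandas as pd

df = pd.DataFrame({"loss": [0.42, 0.31], "accuracy": [0.88, 0.91]})

bottom = df.tail(1)                          # last ledger row
bottom = bottom.to_json(orient="records")    # '[{"loss":0.31,"accuracy":0.91}]'
metricsJson = bottom[1:-1]                   # '{"loss":0.31,"accuracy":0.91}'
print(metricsJson)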
@@ -22,6 +22,7 @@ def last(use_case: str):
 def upload_and_train(use_case: str):
     use_case_path = './processing/' + use_case + '/'
+    default_path = 'processing/default/'
     #Remove old files
     try:
         if os.path.exists(use_case_path):
@@ -31,20 +32,23 @@ def upload_and_train(use_case: str):
     except OSError as error:
         print(error)
         return Response(status=400, response="Error occured when deleteing the old use_case directory")
-    #Start a new implementation of the model.
+    #TODO: get the python files and create them locally in the {use_case} folder
+    #Start a new implementation of the model.
     try:
         os.mkdir(use_case_path)
         #COPY DEFAULT FILES
-        default_path = 'processing/default/'
+        use_case_path += '/'
         shutil.copyfile(default_path + 'main_proc.py', use_case_path + 'main_proc.py')
         shutil.copyfile(default_path + '__init__.py', use_case_path + '__init__.py')
         shutil.copyfile(default_path + 'checkpoint_manager.py', use_case_path + 'checkpoint_manager.py')
         shutil.copyfile(default_path + 'federated_algorithm.py', use_case_path + 'federated_algorithm.py')
-        #TODO: get the python files and create them locally in the {use_case} folder
         # copy dataset_file1 into use_case_path+'db/'
+        # copy preprocessing into use_case_path
+        # copy global_hyperparams into use_case_path
+        # copy model into use_case_path
         #COPY flask files
         #use flask? request.files.getlist('filename')[0] ???
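The block above bootstraps a new use-case folder by copying four default modules, one shutil.copyfile call per file. A compact, equivalent sketch (not code from the repository) that loops over the same file names:

# Hedged sketch of the default-file bootstrap above -- not from the repository.
import os
import shutil

DEFAULT_FILES = ["main_proc.py", "__init__.py",
                 "checkpoint_manager.py", "federated_algorithm.py"]

def bootstrap_use_case(use_case_path: str, default_path: str = "processing/default/") -> None:
    os.makedirs(use_case_path, exist_ok=True)
    for name in DEFAULT_FILES:
        shutil.copyfile(os.path.join(default_path, name),
                        os.path.join(use_case_path, name))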
@@ -53,9 +57,9 @@ def upload_and_train(use_case: str):
         return Response(status=400, response="Error occured when creating/copying the use_case files")
-    #after the files are copied, start training
-    #TODO: check if training is working
+    #TODO: after the files are copied, start training
+    # should write to ledger.csv the last train metrics after 2-3 mins
     use_case_path = 'processing/' + use_case + '/'
     sys.path.append(use_case_path)
     import main_proc
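The final lines extend sys.path with the use-case folder and then import main_proc, so whichever main_proc module Python finds first stays cached for the lifetime of the process. A common alternative, sketched here under the assumption that every use-case folder contains a main_proc.py, is to load the file explicitly with importlib so each use case gets its own module object:

# Sketch of an importlib-based alternative to sys.path.append + import main_proc.
# Assumes processing/<use_case>/main_proc.py exists; not code from the repository.
import importlib.util
import os

def load_main_proc(use_case: str):
    path = os.path.join("processing", use_case, "main_proc.py")
    spec = importlib.util.spec_from_file_location(f"main_proc_{use_case}", path)
    module = importlib.util.module_from_spec(spec)
    spec.loader.exec_module(module)
    return module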