pre forward files fix

parent 59c1cd55
......@@ -5,6 +5,8 @@ import requests
import pandas as pd
import sys
import network_constants
import time
from os.path import dirname, abspath
modules_path = './'
if os.path.exists(modules_path):
......@@ -13,7 +15,7 @@ if os.path.exists(modules_path):
def last(use_case: str):
#FORWARD TO GPU SERVER WITH IP AND PORT
url = f'https://{network_constants.FEDERATED_TRAINING_HOSTNAME}:{network_constants.FEDERATED_TRAINING_REST_PORT}/api/Developers/use_case/{use_case}/last_train'
url = f'http://{network_constants.FEDERATED_TRAINING_HOSTNAME}:{network_constants.FEDERATED_TRAINING_REST_PORT}/api/Developers/use_case/{use_case}/last_train'
response = requests.get(
url,
......@@ -30,17 +32,37 @@ def upload_and_train(use_case: str, developer_id: int):
#data = {'use_case' : use_case,
# 'developer_id' : developer_id}
url = f'https://{network_constants.FEDERATED_TRAINING_HOSTNAME}:{network_constants.FEDERATED_TRAINING_REST_PORT}/api/Developers/use_cases/{use_case}/developer_id/{developer_id}/upload_and_train'
#url= 'gpu3.itec.aau.at/home/itec/bogdan/Articonf/smart/tools/federated-training/app/routes/developers'
response = requests.post(
url,
verify = False,
proxies = { "http":None, "https":None },
files= request.files,
#data = data
)
try:
use_case_path = 'processing/'+use_case+'/'
app_path = dirname(dirname(abspath(__file__)))
file_dict = request.files
db_File_True = file_dict["dataset_file1"]
db_File_Fake = file_dict["dataset_file2"]
true_csv_path = os.path.join(app_path+"/"+use_case_path+"db/", "True.csv")
fake_csv_path = os.path.join(app_path+"/"+use_case_path+"db/", "Fake.csv")
db_File_True.save(true_csv_path)
db_File_Fake.save(fake_csv_path)
time.sleep(2) #wait for the files to be copied
forwarded_files = {
"dataset_file1": open(true_csv_path,"rb"),
"dataset_file2": open(fake_csv_path,"rb")}
url = f'http://{network_constants.FEDERATED_TRAINING_HOSTNAME}:{network_constants.FEDERATED_TRAINING_REST_PORT}/api/Developers/use_cases/{use_case}/developer_id/{developer_id}/upload_and_train'
response = requests.post(
url,
verify = False,
proxies = { "http":None, "https":None },
files= forwarded_files
#data = data
)
return json.loads(response.text)
return json.loads(response.text)
except Exception as e:
return json.loads(str(e))
#upload_and_train("text_processing",1)
......
......@@ -12,3 +12,4 @@ Trainer_id,Model_id,Dataset_id,Accuracy,Loss
1,1624021190,1624021190,0.75,nan
1,1624284673,1624284673,0.5,nan
0,1624550528,1624550528,0.75,nan
2,1624872086,1624872086,0.5,nan
import json
import os
from flask import Response, request
import requests
import pandas as pd
import sys
from os.path import dirname, abspath
import time
modules_path = './'
if os.path.exists(modules_path):
sys.path.insert(1, modules_path)
......@@ -34,11 +36,12 @@ def upload_and_train(use_case: str, developer_id: int):
app_path = dirname(dirname(abspath(__file__)))
file_dict = request.files
db_File_True = file_dict["dataset_file1"]
db_File_False = file_dict["dataset_file2"]
db_File_Fake = file_dict["dataset_file2"]
true_csv_path = os.path.join(app_path+"/"+use_case_path+"db/", "True.csv")
false_csv_path = os.path.join(app_path+"/"+use_case_path+"db/", "False.csv")
fake_csv_path = os.path.join(app_path+"/"+use_case_path+"db/", "Fake.csv")
db_File_True.save(true_csv_path)
db_File_False.save(false_csv_path)
db_File_Fake.save(fake_csv_path)
time.sleep(2) #wait for the files to be copied
#THEN start processing
last_train_metrics = main_proc.start_processing(use_case,developer_id)
print("## Last train metrics")
......
Markdown is supported
0% or
You are about to add 0 people to the discussion. Proceed with caution.
Finish editing this message first!
Please register or to comment