Commit 6481f6e3 authored by Bogdan's avatar Bogdan

Merge branch 'federated-learning' into feature/federated-learning

parents 3928da1c 2791e7cd
......@@ -35,4 +35,4 @@ else:
# start app
if __name__ == '__main__':
app.run(host='0.0.0.0', port=5000, debug=False, ssl_context=context) #<-- IF running locally, comment out ssl_context
app.run(host='0.0.0.0', port=5000, debug=False, ssl_context=context) #<-- IF running locally, comment out ssl_context and change port to 5000 from 30422
#pandas==1.2.4
astroid==2.4.2
attrs==19.3.0
autopep8==1.5.4
......@@ -22,7 +23,6 @@ lazy-object-proxy==1.4.3
MarkupSafe==1.1.1
mccabe==0.6.1
openapi-spec-validator==0.2.9
pandas==1.2.4
prance==0.19.0
pycodestyle==2.6.0
pycparser==2.20
......@@ -35,10 +35,7 @@ rope==0.17.0
semver==2.10.2
six==1.15.0
swagger-ui-bundle==0.0.8
tensorflow==2.3.0
tensorflow-federated==0.17.0
toml==0.10.1
typed-ast==1.4.1
urllib3==1.25.10
Werkzeug==1.0.1
wrapt==1.12.1
......
connexion==2.7.0
coverage==5.3.1
Flask==1.1.2
importlib-metadata==1.7.0
jsonschema==3.2.0
pandas==1.2.4
prance==0.19.0
pymongo==3.11.0
PyYAML==5.3.1
requests==2.24.0
swagger-ui-bundle==0.0.8
urllib3==1.25.10
......@@ -2,7 +2,7 @@ import json
import os
from flask import Response, request
import requests
import pandas as pd
#import pandas as pd
import sys
import network_constants
import time
......
......@@ -4,12 +4,12 @@ import sys
import shutil
from flask import Response, request
import requests
import pandas as pd
#import pandas as pd
import network_constants
def last(use_case: str):
#FORWARD TO GPU SERVER WITH IP AND PORT
url = f'http://{network_constants.FEDERATED_TRAINING_HOSTNAME}:{network_constants.FEDERATED_TRAINING_REST_PORT}/api/Owners/use_case/{use_case}/last_train:'
url = f'http://{network_constants.FEDERATED_TRAINING_HOSTNAME}:{network_constants.FEDERATED_TRAINING_REST_PORT}/api/Owners/use_case/{use_case}/last_train'
response = requests.get(
url,
verify = False,
......@@ -24,7 +24,7 @@ def upload_and_train(use_case: str):
#TODO FORWARD FILES, for some reason the files are received in this microservice (Federated-LEARNING), but after forwarding they are empty in Federated-TRAINING
file_dict = request.files
url = f'http://{network_constants.FEDERATED_TRAINING_HOSTNAME}:{network_constants.FEDERATED_TRAINING_REST_PORT}/api/Owners/use_cases/{use_case}/upload_and_train:'
url = f'http://{network_constants.FEDERATED_TRAINING_HOSTNAME}:{network_constants.FEDERATED_TRAINING_REST_PORT}/api/Owners/use_cases/{use_case}/upload_and_train'
response = requests.post(
url,
verify = False,
......
......@@ -9,9 +9,9 @@ import json
def check_article(use_case: str,data_entry: str):
#FORWARD TO GPU SERVER WITH IP AND PORT
url = f'http://{network_constants.FEDERATED_TRAINING_HOSTNAME}:{network_constants.FEDERATED_TRAINING_REST_PORT}/api/Users/use_case/{use_case}/data_entry/{data_entry}/check_article:'
url = f'http://{network_constants.FEDERATED_TRAINING_HOSTNAME}:{network_constants.FEDERATED_TRAINING_REST_PORT}/api/Users/use_case/{use_case}/data_entry/{data_entry}/check_article'
#url = "google.com"#API ENDPOINT WITH IP AND PORT OF GPU SERVER
response = requests.get(
response = requests.post(
url,
verify = False,
proxies = { "http":None, "https":None }
......
......@@ -37,6 +37,7 @@ app = connexion.App(__name__, specification_dir='configs/')
app.add_api('routes.yml')
# start app
if __name__ == '__main__':
app.run(host='0.0.0.0', port=30424, debug=False)
......@@ -18,7 +18,8 @@ def model_fn():
metrics=[tf.keras.metrics.SparseCategoricalAccuracy()])
def federated_computation_new(train_dataset,test_dataset):
#print("###TEEEST")
#tff.backends.native.set_local_python_execution_context()
if(globals.INPUT_SPEC == None):
#should never reach this place because INPUT_SPEC is instantiated inside get_preprocessed_train_test_data.
#however, if in the future, processed data is provided without the preprocessing function it will be None -> therefore assign it here
......
......@@ -7,17 +7,20 @@ import tensorflow as tf
import tensorflow_federated as tff
def model_fn():
keras_model = get_simple_LSTM_model()
keras_model = get_simple_LSTM_model()
return tff.learning.from_keras_model(
keras_model,
input_spec=globals.INPUT_SPEC,
loss=tf.keras.losses.SparseCategoricalCrossentropy(from_logits=True),
metrics=[tf.keras.metrics.SparseCategoricalAccuracy()])
return tff.learning.from_keras_model(
keras_model,
input_spec=globals.INPUT_SPEC,
loss=tf.keras.losses.BinaryCrossentropy(from_logits=True),
metrics=[tf.keras.metrics.BinaryAccuracy()])
#loss=tf.keras.losses.SparseCategoricalCrossentropy(from_logits=True),
#metrics=[tf.keras.metrics.SparseCategoricalAccuracy()])
def federated_computation_new(train_dataset,test_dataset):
tff.backends.native.set_local_execution_context
#TFF must be imported here, to guarantee that the context is initialised
if(globals.INPUT_SPEC == None):
#should never reach this place because INPUT_SPEC is instantiated inside get_preprocessed_train_test_data.
......
......@@ -6,7 +6,7 @@ def initialize(use_case,trainer_id = 0,dataset_id = 0):
global VOCAB_SIZE #tells how many words it memorises. each word is stored as an int.
VOCAB_SIZE = 6000
global NUM_CLIENTS #number of clients in the federated dataset
NUM_CLIENTS = 4
NUM_CLIENTS = 10
global SHUFFLE_BUFFER
SHUFFLE_BUFFER = 5000 #used in preprocessing
global BATCH_SIZE
......
......@@ -21,3 +21,20 @@ Trainer_id,Model_id,Dataset_id,Accuracy,Loss
3,1633518451,1633518451,0.25,nan
4,1633525163,1633525163,0.75,nan
4,1633528455,1633528455,0.5,nan
3,1634034576,1634034576,0.75,nan
0,1634036764,1634036764,0.75,nan
0,1634553320,1634553320,0.5,nan
3,1634567728,1634567728,0.75,nan
3,1634568735,1634568735,0.5,nan
3,1634569432,1634569432,0.5,nan
3,1634570407,1634570407,0.75,nan
3,1635859162,1635859162,0.0,0.3336388
3,1635859677,1635859677,0.75,0.7262713
3,1635860773,1635860773,0.5,0.7085196
3,1635945006,1635945006,0.5,0.7163227
3,1635945114,1635945114,0.75,0.7200562
3,1635945197,1635945197,0.75,0.5879684
3,1635945506,1635945506,0.5,0.70786154
3,1635945786,1635945786,0.5,0.7161836
3,1635945886,1635945886,0.6,0.7275145
3,1635946190,1635946190,0.8,0.53487456
......@@ -20,15 +20,16 @@ def start_processing(use_case, developer_id:int = 0):
timestamp = int(time.time())
globals.DATASET_ID = timestamp
written_row = save_to_file_CSV(use_case,globals.TRAINER_ID,timestamp,globals.DATASET_ID,trained_metrics['sparse_categorical_accuracy'],trained_metrics['loss'])
written_row = save_to_file_CSV(use_case,globals.TRAINER_ID,timestamp,globals.DATASET_ID,trained_metrics['binary_accuracy'],trained_metrics['loss'])
return written_row
def start_prediction(use_case, developer_id:int = -1):
#TODO
globals.initialize(use_case,developer_id)
raw_input_prediction_data = "Test sentence. And another sentence which is going to be used as a mean for checking if this article is true or not. Also Santa is real"
raw_input_prediction_data = "Donald Trump Sends Out Embarrassing New Year’s Eve Message. This is Disturbing,'Donald Trump just couldn t wish all Americans a Happy New Year and leave it at that. Instead, he had to give a shout out to his enemies, haters and the very dishonest fake news media. The former reality show star had just one job to do and he couldn t do it. As our Country rapidly grows stronger and smarter, I want to wish all of my friends, supporters, enemies, haters, and even the very dishonest Fake News Media, a Happy and Healthy New Year, President Angry Pants tweeted. 2018 will be a great year for America! As our Country rapidly grows stronger and smarter, I want to wish all of my friends, supporters, enemies, haters, and even the very dishonest Fake News Media, a Happy and Healthy New Year. 2018 will be a great year for America! Donald J. Trump (@realDonaldTrump) December 31, 2017Trump s tweet went down about as welll as you d expect.What kind of president sends a New Year s greeting like this despicable, petty, infantile gibberish? Only Trump! His lack of decency won t even allow him to rise above the gutter long enough to wish the American citizens a happy new year! Bishop Talbert Swan (@TalbertSwan) December 31, 2017no one likes you Calvin (@calvinstowell) December 31, 2017Your impeachment would make 2018 a great year for America, but I ll also accept regaining control of Congress. Miranda Yaver (@mirandayaver) December 31, 2017Do you hear yourself talk? When you have to include that many people that hate you you have to wonder? Why do the they all hate me? Alan Sandoval (@AlanSandoval13) December 31, 2017Who uses the word Haters in a New Years wish?? Marlene (@marlene399) December 31, 2017You can t just say happy new year? Koren pollitt (@Korencarpenter) December 31, 2017Here s Trump s New Year s Eve tweet from 2016.Happy New Year to all, including to my many enemies and those who have fought me and lost so badly they just don t know what to do. 
Love! Donald J. Trump (@realDonaldTrump) December 31, 2016This is nothing new for Trump. He s been doing this for years.Trump has directed messages to his enemies and haters for New Year s, Easter, Thanksgiving, and the anniversary of 9/11. pic.twitter.com/4FPAe2KypA Daniel Dale (@ddale8) December 31, 2017Trump s holiday tweets are clearly not presidential.How long did he work at Hallmark before becoming President? Steven Goodine (@SGoodine) December 31, 2017He s always been like this . . . the only difference is that in the last few years, his filter has been breaking down. Roy Schulze (@thbthttt) December 31, 2017Who, apart from a teenager uses the term haters? Wendy (@WendyWhistles) December 31, 2017he s a fucking 5 year old Who Knows (@rainyday80) December 31, 2017So, to all the people who voted for this a hole thinking he would change once he got into power, you were wrong! 70-year-old men don t change and now he s a year older.Photo by Andrew Burton/Getty Images."
input_prediction_data= preprocess_single_train_data(raw_input_prediction_data)
input_prediction_data = preprocess_single_train_data(raw_input_prediction_data)
prediction_result = make_prediction(input_prediction_data)
res = prediction_result[0]
if (res[0]>=0.50):
......
astroid==2.4.2
absl-py==0.15.0
astunparse==1.6.3
attrs==19.3.0
autopep8==1.5.4
certifi==2020.6.20
cffi==1.14.2
chardet==3.0.4
click==7.1.2
clickclick==1.2.2
colorama==0.4.3
cachetools==3.1.1
certifi==2021.10.8
charset-normalizer==2.0.7
click==8.0.3
clickclick==20.10.2
connexion==2.7.0
coverage==5.3.1
cryptography==3.1
dm-tree==0.1.6
Flask==1.1.2
idna==2.10
importlib-metadata==1.7.0
inflection==0.5.0
isort==4.3.21
itsdangerous==1.1.0
Jinja2==2.11.2
flatbuffers==1.12
gast==0.4.0
google-auth==2.3.3
google-auth-oauthlib==0.4.6
google-pasta==0.2.0
grpcio==1.34.1
h5py==3.1.0
idna==3.3
inflection==0.5.1
isodate==0.6.0
itsdangerous==2.0.1
jax==0.2.24
jaxlib==0.1.73
Jinja2==3.0.2
joblib==1.1.0
jsonschema==3.2.0
lazy-object-proxy==1.4.3
MarkupSafe==1.1.1
mccabe==0.6.1
numpy==1.18.5
keras-nightly==2.5.0.dev2021032900
Keras-Preprocessing==1.1.2
Markdown==3.3.4
MarkupSafe==2.0.1
mpmath==1.2.1
numpy==1.19.5
oauthlib==3.1.1
openapi-schema-validator==0.1.5
openapi-spec-validator==0.2.9
opt-einsum==3.3.0
pandas==1.2.4
prance==0.19.0
pycodestyle==2.6.0
pycparser==2.20
pylint==2.5.3
pymongo==3.11.0
pyrsistent==0.16.0
PyYAML==5.3.1
requests==2.24.0
portpicker==1.3.9
protobuf==3.19.1
pyasn1==0.4.8
pyasn1-modules==0.2.8
pyrsistent==0.18.0
python-dateutil==2.8.2
pytz==2021.3
PyYAML==6.0
requests==2.26.0
requests-oauthlib==1.3.0
retrying==1.3.3
rope==0.17.0
semver==2.10.2
rsa==4.7.2
scikit-learn==1.0.1
scipy==1.7.1
semantic-version==2.8.5
six==1.15.0
sklearn==0.0
swagger-ui-bundle==0.0.8
tensorflow==2.3.0
tensorflow-federated==0.17.0
toml==0.10.1
typed-ast==1.4.1
urllib3==1.25.10
Werkzeug==1.0.1
tensorboard==2.7.0
tensorboard-data-server==0.6.1
tensorboard-plugin-wit==1.8.0
tensorflow==2.5.2
tensorflow-estimator==2.5.0
tensorflow-federated==0.19.0
tensorflow-model-optimization==0.5.0
tensorflow-privacy==0.5.2
termcolor==1.1.0
threadpoolctl==3.0.0
tqdm==4.28.1
typing-extensions==3.7.4.3
urllib3==1.26.7
Werkzeug==2.0.2
wrapt==1.12.1
zipp==3.1.0
absl-py==0.9.0
astroid==2.4.2
astunparse==1.6.3
attrs==19.3.0
autopep8==1.5.4
beautifulsoup4==4.9.3
blinker==1.4
brotlipy==0.7.0
bs4==0.0.1
cachetools==3.1.1
certifi==2020.6.20
cffi @ file:///tmp/build/80754af9/cffi_1598370769933/work
chardet==3.0.4
click==7.1.2
clickclick==1.2.2
colorama==0.4.3
connexion==2.7.0
coverage==5.3.1
cryptography @ file:///tmp/build/80754af9/cryptography_1598892038851/work
cycler==0.10.0
dm-tree==0.1.6
Flask==1.1.2
gast==0.3.3
google-auth @ file:///tmp/build/80754af9/google-auth_1598987460909/work
google-auth-oauthlib==0.4.1
google-pasta==0.2.0
grpcio==1.29.0
h5py @ file:///tmp/build/80754af9/h5py_1593454122442/work
idna @ file:///tmp/build/80754af9/idna_1593446292537/work
importlib-metadata @ file:///tmp/build/80754af9/importlib-metadata_1593446406207/work
inflection==0.5.0
isodate==0.6.0
isort==4.3.21
itsdangerous==1.1.0
jedi==0.17.0
Jinja2==2.11.2
joblib==1.0.1
jsonschema==3.2.0
Keras==2.4.3
Keras-Preprocessing==1.1.2
kiwisolver==1.3.1
lazy-object-proxy==1.4.3
Markdown @ file:///tmp/build/80754af9/markdown_1597433240441/work
MarkupSafe==1.1.1
matplotlib==3.4.1
mccabe==0.6.1
mkl-fft==1.3.0
mkl-random==1.2.2
mkl-service==2.4.0
mpmath==1.2.1
nest-asyncio==1.5.1
nltk==3.6.2
numpy==1.18.5
oauthlib==3.1.0
olefile==0.46
openapi-schema-validator==0.1.5
openapi-spec-validator==0.2.9
opt-einsum==3.1.0
pandas==1.2.4
pickleshare==0.7.5
Pillow==8.3.2
portpicker==1.3.1
prance==0.19.0
prompt-toolkit==3.0.20
protobuf==3.12.4
ptyprocess==0.7.0
pyasn1==0.4.8
pyasn1-modules==0.2.7
pycodestyle==2.6.0
pycparser==2.20
Pygments==2.10.0
PyJWT==1.7.1
pylint==2.5.3
pymongo==3.11.0
pyOpenSSL==19.1.0
pyparsing==2.4.7
pyrsistent==0.16.0
PySocks==1.7.1
python-dateutil @ file:///home/ktietz/src/ci/python-dateutil_1611928101742/work
pytz==2021.1
PyYAML==5.3.1
pyzmq==20.0.0
regex==2021.4.4
requests @ file:///tmp/build/80754af9/requests_1592841827918/work
requests-oauthlib==1.3.0
retrying==1.3.3
rope==0.17.0
rsa @ file:///tmp/build/80754af9/rsa_1596998415516/work
scikit-learn==0.24.2
scipy==1.4.1
seaborn==0.11.1
semantic-version==2.8.5
semver==2.10.2
six==1.15.0
sklearn==0.0
soupsieve==2.2.1
swagger-ui-bundle==0.0.8
tensorboard==2.4.1
tensorboard-plugin-wit==1.6.0.post2
tensorflow==2.3.0
tensorflow-addons==0.11.2
tensorflow-estimator==2.3.0
tensorflow-federated==0.17.0
tensorflow-model-optimization==0.4.1
tensorflow-privacy==0.5.2
termcolor==1.1.0
threadpoolctl==2.1.0
toml==0.10.1
torch==1.6.0
torchvision==0.7.0
tornado==6.1
tqdm==4.60.0
traitlets==5.1.0
typed-ast==1.4.1
typeguard==2.12.0
urllib3==1.21.1
wcwidth==0.2.5
Werkzeug==1.0.1
wordcloud==1.8.1
wrapt==1.12.1
zipp==3.1.0
......@@ -6,6 +6,7 @@ import pandas as pd
import sys
from os.path import dirname, abspath
import time
import threading
modules_path = './'
if os.path.exists(modules_path):
sys.path.insert(1, modules_path)
......@@ -25,12 +26,12 @@ def last(use_case: str):
def upload_and_train(use_case: str, developer_id: int):
use_case_path = 'processing/'+use_case+'/'
sys.path.append(use_case_path)
import main_proc
#COPY THE NEW DB TO THE FOLDER
#TODO IMPLEMENT HERE
app_path = dirname(dirname(abspath(__file__)))
......@@ -41,7 +42,7 @@ def upload_and_train(use_case: str, developer_id: int):
fake_csv_path = os.path.join(app_path+"/"+use_case_path+"db/", "Fake.csv")
db_File_True.save(true_csv_path)
db_File_Fake.save(fake_csv_path)
time.sleep(5) #wait for the files to be copied before proceeding with the processing (they are copied in a separate thread, i think?)
time.sleep(10) #wait for the files to be copied before proceeding with the processing (they are copied in a separate thread, i think?)
#THEN start processing
last_train_metrics = main_proc.start_processing(use_case,developer_id)
print("## Last train metrics")
......@@ -50,7 +51,9 @@ def upload_and_train(use_case: str, developer_id: int):
#0,1623160388,0,0.25,nan
metricsJson = trainMetricsToJSON(last_train_metrics)
#START A THREAD to process after the response is sent
thread1 = threading.Thread(target=reload_connection,args=[])
thread1.start()
return Response(status=200, response=metricsJson)
def trainMetricsToJSON(last_train_metrics : list):
......@@ -62,6 +65,11 @@ def trainMetricsToJSON(last_train_metrics : list):
metricsDict["Loss"] = last_train_metrics[4]
return json.dumps(metricsDict)
#upload_and_train("text_processing",1)
#upload_and_train("text_processing",3)
#last("text_processing")
def reload_connection():
    """Terminate the current server process after a short delay.

    Intended to run on a background thread started right before the HTTP
    response is returned (see upload_and_train): the delay lets Flask flush
    the 200 response to the client before the process dies. An external
    while-true wrapper script restarts the service, so exiting here acts as
    a crude "reload" that picks up freshly trained artifacts.
    """
    print("In the thread")
    time.sleep(5)  # give the in-flight HTTP response time to be delivered
    print("Closing Server")
    os._exit(0)  # hard exit, no cleanup/atexit — the supervisor loop restarts us
python: can't open file 'processing/main_processing.py': [Errno 2] No such file or directory
/home/itec/bogdan/Articonf/smart/tools/federated-training/app/venv/bin/python3 /home/itec/bogdan/Articonf/smart/tools/federated-training/app/main.py
\ No newline at end of file
# Supervisor loop: run the federated-training app and restart it whenever it
# exits. The app terminates itself on purpose after retraining (os._exit(0)
# in reload_connection), so this loop is what brings it back up with the new
# model. The 3-second pause avoids a tight respawn loop on crash.
while true
do
/home/itec/bogdan/Articonf/smart/tools/federated-training/app/venv/bin/python3 /home/itec/bogdan/Articonf/smart/tools/federated-training/app/main.py
sleep 3
done
\ No newline at end of file
Markdown is supported
0% or
You are about to add 0 people to the discussion. Proceed with caution.
Finish editing this message first!
Please register or to comment