Prediction implementation finished

parent fec9c13c
@@ -262,11 +262,6 @@ def save_state_to_file(state):
    ckpt_manager.save_checkpoint(state, time_stamp)
    return time_stamp

def load_state_from_file(model_filename):
    iterative_process = tff.learning.build_federated_averaging_process(model_fn, client_optimizer_fn=lambda: tf.keras.optimizers.SGD(lr=0.5))
    state = iterative_process.initialize()
    ckpt_manager = FileCheckpointManager("processing/text_processing/models", prefix="ckpt_")
    restored_state = ckpt_manager.load_latest_checkpoint(state)
    return restored_state[0]
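For context, a minimal sketch of the checkpoint round trip after this commit, reusing only names from this diff (save_state_to_file above, load_latest_state added later in federated_algorithm.py):

# Hypothetical round trip; both helpers are defined in this diff.
iterative_process = tff.learning.build_federated_averaging_process(model_fn, client_optimizer_fn=lambda: tf.keras.optimizers.SGD(lr=0.5))
state = iterative_process.initialize()   # fresh template state
time_stamp = save_state_to_file(state)   # writes ckpt_<time_stamp> under processing/text_processing/models
restored_state = load_latest_state()     # rebuilds a template and loads the newest checkpoint into it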
#import processing.text_processing.global_hyperparams as globals
import global_hyperparams as globals
from model import get_simple_LSTM_model
from checkpoint_manager import FileCheckpointManager
import pandas as pd
import pickle
from sklearn.model_selection import train_test_split
from tensorflow.keras.models import Sequential
from tensorflow.keras.layers import LSTM, Dense, Dropout, Embedding
@@ -49,6 +50,14 @@ def federated_computation_new(train_dataset,test_dataset):
    test_metrics = evaluation(state.model, test_dataset)
    print('Test evaluation metrics={}'.format(test_metrics))
    ####################################################################################################################################
    import pickle
    ######
    # Persist the final server state so make_prediction can reload it later.
    with open("processing/" + globals.USE_CASE + "/last_model", 'wb') as f:
        pickle.dump(state, f)
    return state, metrics
###############################################################################################
@@ -80,3 +89,18 @@ def federated_computation_continue(train_dataset,test_dataset,restored_state):
    return state, metrics
###############################################################################################
def load_latest_state():
    iterative_process = tff.learning.build_federated_averaging_process(model_fn, client_optimizer_fn=lambda: tf.keras.optimizers.SGD(lr=0.5))
    state = iterative_process.initialize()
    ckpt_manager = FileCheckpointManager("processing/text_processing/models", prefix="ckpt_")
    restored_state = ckpt_manager.load_latest_checkpoint(state)
    return restored_state[0]

def make_prediction(input_prediction_data):
    # Reload the pickled server state saved by federated_computation_new.
    with open("processing/" + globals.USE_CASE + "/last_model", 'rb') as f:
        state = pickle.load(f)
    # Copy the trained federated weights into a plain Keras model for inference.
    model_for_inference = get_simple_LSTM_model()
    state.model.assign_weights_to(model_for_inference)
    predictions = model_for_inference.predict_on_batch(input_prediction_data)
    return predictions
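A usage sketch for the new prediction path, assuming globals.initialize has been called and that preprocess_single_train_data (added later in this diff) returns a padded batch tensor:

# Hypothetical call chain; every name comes from this commit.
globals.initialize("text_processing")
batch = preprocess_single_train_data("Some article text to classify.")
probs = make_prediction(batch)  # predict_on_batch output, one row per input
print(probs[0])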
@@ -18,7 +18,7 @@ def initialize(use_case,trainer_id = 0,dataset_id = 0):
    global LSTM_OUT  # output size of the LSTM layer
    LSTM_OUT = 100
    global EPOCHS  # number of epochs the model will be trained
    EPOCHS = 5   # old value, removed by this commit
    EPOCHS = 15  # new value
    global TRAINER_ID  # ID of the trainer entity
    TRAINER_ID = trainer_id  # 0 = owner of the use_case
    global DATASET_ID  # ID of the dataset used
......
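For orientation, a minimal sketch of how these module-level hyperparameters are consumed elsewhere, assuming the initialize signature shown in the hunk header above:

import global_hyperparams as globals

globals.initialize("text_processing", trainer_id=0, dataset_id=0)
print(globals.EPOCHS)    # 15 after this commit (was 5)
print(globals.LSTM_OUT)  # 100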
@@ -2,3 +2,7 @@ Trainer_id,Model_id,Dataset_id,Accuracy,Loss
0,1623766462,1623766462,0.5,nan
0,1623768325,1623768325,0.25,nan
1,1623768510,1623768510,0.75,nan
0,1623857759,1623857759,0.25,nan
0,1623932014,1623932014,0.0,nan
0,1623934119,1623934119,0.25,nan
0,1623935435,1623935435,0.25,nan
import os
import json
#from processing.text_processing.federated_algorithm import federated_computation_continue
#from processing.text_processing.version_handler import save_state_to_file
@@ -9,9 +10,9 @@ print(os.getcwd())
#from processing.text_processing.preprocessing import get_preprocessed_train_test_data
import global_hyperparams as globals
# Old imports, replaced by this commit:
from preprocessing import get_preprocessed_train_test_data
from federated_algorithm import federated_computation_new, federated_computation_continue#, save_state_to_file, load_state_from_file
from checkpoint_manager import save_to_file_CSV, save_state_to_file, load_state_from_file
# New imports:
from preprocessing import get_preprocessed_train_test_data, preprocess_single_train_data
from federated_algorithm import federated_computation_new, federated_computation_continue, make_prediction
from checkpoint_manager import save_to_file_CSV, save_state_to_file
@@ -48,4 +49,23 @@ def start_processing(use_case, developer_id:int = 0):
    globals.DATASET_ID = timestamp
    written_row = save_to_file_CSV(globals.TRAINER_ID, timestamp, globals.DATASET_ID, trained_metrics['sparse_categorical_accuracy'], trained_metrics['loss'])
    return written_row
\ No newline at end of file
    return written_row
def start_prediction(use_case, developer_id: int = -1):
    globals.initialize(use_case, developer_id)
    # The first sample input is immediately overwritten by the longer article below.
    raw_input_prediction_data = "Test sentence. And another sentence which is going to be used as a mean for checking if this article is true or not. Also Santa is real"
    raw_input_prediction_data = "Donald Trump Sends Out Embarrassing New Year’s Eve Message. This is Disturbing,'Donald Trump just couldn t wish all Americans a Happy New Year and leave it at that. Instead, he had to give a shout out to his enemies, haters and the very dishonest fake news media. The former reality show star had just one job to do and he couldn t do it. As our Country rapidly grows stronger and smarter, I want to wish all of my friends, supporters, enemies, haters, and even the very dishonest Fake News Media, a Happy and Healthy New Year, President Angry Pants tweeted. 2018 will be a great year for America! As our Country rapidly grows stronger and smarter, I want to wish all of my friends, supporters, enemies, haters, and even the very dishonest Fake News Media, a Happy and Healthy New Year. 2018 will be a great year for America! Donald J. Trump (@realDonaldTrump) December 31, 2017Trump s tweet went down about as welll as you d expect.What kind of president sends a New Year s greeting like this despicable, petty, infantile gibberish? Only Trump! His lack of decency won t even allow him to rise above the gutter long enough to wish the American citizens a happy new year! Bishop Talbert Swan (@TalbertSwan) December 31, 2017no one likes you Calvin (@calvinstowell) December 31, 2017Your impeachment would make 2018 a great year for America, but I ll also accept regaining control of Congress. Miranda Yaver (@mirandayaver) December 31, 2017Do you hear yourself talk? When you have to include that many people that hate you you have to wonder? Why do the they all hate me? Alan Sandoval (@AlanSandoval13) December 31, 2017Who uses the word Haters in a New Years wish?? Marlene (@marlene399) December 31, 2017You can t just say happy new year? Koren pollitt (@Korencarpenter) December 31, 2017Here s Trump s New Year s Eve tweet from 2016.Happy New Year to all, including to my many enemies and those who have fought me and lost so badly they just don t know what to do. Love! Donald J. Trump (@realDonaldTrump) December 31, 2016This is nothing new for Trump. He s been doing this for years.Trump has directed messages to his enemies and haters for New Year s, Easter, Thanksgiving, and the anniversary of 9/11. pic.twitter.com/4FPAe2KypA Daniel Dale (@ddale8) December 31, 2017Trump s holiday tweets are clearly not presidential.How long did he work at Hallmark before becoming President? Steven Goodine (@SGoodine) December 31, 2017He s always been like this . . . the only difference is that in the last few years, his filter has been breaking down. Roy Schulze (@thbthttt) December 31, 2017Who, apart from a teenager uses the term haters? Wendy (@WendyWhistles) December 31, 2017he s a fucking 5 year old Who Knows (@rainyday80) December 31, 2017So, to all the people who voted for this a hole thinking he would change once he got into power, you were wrong! 70-year-old men don t change and now he s a year older.Photo by Andrew Burton/Getty Images."
    #from preprocessing import preprocess_single_train_data
    input_prediction_data = preprocess_single_train_data(raw_input_prediction_data)
    prediction_result = make_prediction(input_prediction_data)
    res = prediction_result[0]
    if res[0] >= 0.50:
        return json.dumps({"result": "True"})
    else:
        return json.dumps({"result": "False"})
#start_processing("text_processing")
start_prediction("text_processing")
\ No newline at end of file
@@ -109,5 +109,22 @@ def get_preprocessed_train_test_data() -> tuple:
    print("DONE PREPROCESSING")
    return train_dataset, test_dataset

def preprocess_single_train_data(input_data):
    # input_data = "Test sentence. And another sentence which is going to be used as a mean for checking if this article is true or not. Also Santa is real"  # debug override, disabled so the argument is actually used
    input_data_list = []
    input_data_list.append(input_data)
    df_text = input_data_list
    # NOTE: this fits a brand-new tokenizer on the single input, so the word
    # indices will not match the vocabulary the model was trained with.
    tokenizer = Tokenizer(oov_token="<OOV>", num_words=6000)
    tokenizer.fit_on_texts(df_text)
    sequences_train = tokenizer.texts_to_sequences(df_text)
    padded_train = pad_sequences(sequences_train, padding='post', maxlen=globals.MAX_LENGTH)
    padded_train = tf.convert_to_tensor(padded_train)  # with or without [0]?
    return padded_train

#globals.initialize("text_processing")
#preprocess_single_train_data("test")
\ No newline at end of file
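The NOTE in preprocess_single_train_data points at a real pitfall: refitting a Tokenizer on the prediction input yields word indices unrelated to the training vocabulary. A minimal sketch of one common fix, persisting the training tokenizer with pickle; the path and helper names are hypothetical, not part of this commit:

import pickle

TOKENIZER_PATH = "processing/text_processing/tokenizer.pickle"  # hypothetical location

def save_tokenizer(tokenizer):
    # Call once after fitting on the training corpus.
    with open(TOKENIZER_PATH, 'wb') as f:
        pickle.dump(tokenizer, f)

def load_tokenizer():
    # Call at prediction time instead of fitting a new Tokenizer.
    with open(TOKENIZER_PATH, 'rb') as f:
        return pickle.load(f)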
@@ -52,5 +52,5 @@ def trainMetricsToJSON(last_train_metrics : list):
    metricsDict["Loss"] = last_train_metrics[4]
    return json.dumps(metricsDict)
upload_and_train("text_processing", 1)   # old: ran on import, removed by this commit
#upload_and_train("text_processing", 1)  # new: disabled by default
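For illustration, the list indices used by trainMetricsToJSON appear to mirror the CSV header shown earlier in this diff (Trainer_id,Model_id,Dataset_id,Accuracy,Loss); a hypothetical call under that assumption:

# Assumed row ordering: [Trainer_id, Model_id, Dataset_id, Accuracy, Loss]
row = [0, 1623935435, 1623935435, 0.25, float('nan')]
print(trainMetricsToJSON(row))  # JSON string whose "Loss" field is NaN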