Updated function for prediction

parent fbc26ebf
...@@ -97,8 +97,12 @@ def load_latest_state(): ...@@ -97,8 +97,12 @@ def load_latest_state():
return restored_state[0] return restored_state[0]
def make_prediction(input_prediction_data): def make_prediction(input_prediction_data):
try:
with open("processing/"+globals.USE_CASE+"/last_model",'rb') as f: with open("processing/"+globals.USE_CASE+"/last_model",'rb') as f:
state = pickle.load(f) state = pickle.load(f)
except Exception as e:
print(e)
return None
model_for_inference = get_simple_LSTM_model() model_for_inference = get_simple_LSTM_model()
state.model.assign_weights_to(model_for_inference) state.model.assign_weights_to(model_for_inference)
......
...@@ -68,4 +68,4 @@ def start_prediction(use_case, developer_id:int = -1): ...@@ -68,4 +68,4 @@ def start_prediction(use_case, developer_id:int = -1):
#start_processing("text_processing") #start_processing("text_processing")
start_prediction("text_processing") #start_prediction("text_processing")
\ No newline at end of file \ No newline at end of file
from flask import Response, request from flask import Response, request
import sys
def check_article(use_case: str):
    """Run a prediction for *use_case* and wrap the result in an HTTP response.

    The processing code is shipped per use case under ``processing/<use_case>/``;
    that directory is appended to ``sys.path`` so its ``main_proc`` module can be
    imported dynamically and its ``start_prediction`` delegated to.

    :param use_case: name of the use case whose trained model should be used.
    :returns: flask ``Response`` — 200 with the prediction result, or 404 when
        ``start_prediction`` returns ``None`` (no trained model available yet).
    """
    #body = request.STRING
    use_case_path = 'processing/' + use_case + '/'
    # Make the use-case-specific processing package importable.
    sys.path.append(use_case_path)
    import main_proc  # deferred import: module only exists under use_case_path
    result = main_proc.start_prediction(use_case)
    # `is None` (identity), not `== None` (equality) — PEP 8 idiom; also avoids
    # surprises if the result type overrides __eq__.
    if result is None:
        return Response(status=404, response="Server doesn't have a trained model. Training of the model should be finished before attempting a prediction.")
    return Response(status=200, response=result)

#check_article("text_processing")
Markdown is supported
0% or
You are about to add 0 people to the discussion. Proceed with caution.
Finish editing this message first!
Please register or sign in to comment