Commit 935c80f7 authored by Alexander Lercher's avatar Alexander Lercher

Updated clustering to store json locally

Cluster results might get too large for the database alone, so they will also be stored as JSON files on disk.
parent fec1e504
......@@ -17,8 +17,12 @@ repo = Repository()
def run_generic_clustering():
'''Runs the clustering for all layers found in the repository.'''
all_layers:List[Layer] = repo.get_layers()
# all_layers = [l for l in all_layers if l.layer_name in ['!Heating_Consumption_Layer', 'Position_Layer', '!Solar_Production_Layer']]
for layer in all_layers:
# if layer.use_case != 'smart-energy':
# continue
print(f"Clustering {layer.use_case}//{layer.use_case_table}//{layer.layer_name}.")
if layer.properties is None or len(layer.properties) == 0:
......@@ -27,7 +31,7 @@ def run_generic_clustering():
try:
clusters = run_clustering_for_layer(layer)
store_generic_clusters(clusters)
store_generic_clusters(clusters, layer.layer_name)
except Exception as e:
print(str(e))
......@@ -46,10 +50,21 @@ def run_clustering_for_layer(layer: Layer) -> List[Cluster]:
for key, cluster_result in res.items()]
def store_generic_clusters(clusters: List[Cluster]):
    '''Stores all given clusters in the repository via a single add_clusters call.'''
    repo.add_clusters(clusters)
def store_generic_clusters(clusters: List[Cluster], layer):
    '''
    Stores the clusters in the repository and additionally dumps them to a
    local JSON file, as cluster results might get too large to keep only in
    the database.

    :param clusters: The clusters to store
    :param layer: The layer name the clusters belong to; used as the JSON file name
    '''
    # Best-effort local backup: a failed file dump must not prevent the
    # repository inserts below, so errors are reported but not re-raised.
    try:
        cluster_dicts = [c.to_serializable_dict(for_db=True) for c in clusters]
        with open(f'{layer}.json', 'w') as file:
            file.write(json.dumps(cluster_dicts))
    except Exception as e:
        print(f"Failed to write clusters to '{layer}.json': {e}")

    # Insert clusters one by one so a single failing (e.g. oversized) cluster
    # does not abort the storage of the remaining ones.
    for cluster in clusters:
        try:
            repo.add_cluster(cluster)
        except Exception as e:
            print(f"Failed to store cluster in repository: {e}")
if __name__ == "__main__":
    # Deletion of existing clusters is intentionally disabled so repeated
    # runs keep previously stored results; uncomment for a clean slate.
    # repo.delete_all_clusters()
    run_generic_clustering()
Markdown is supported
0% or
You are about to add 0 people to the discussion. Proceed with caution.
Finish editing this message first!
Please register or sign in to comment