Commit d2898192 authored by Spiros Koulouzis

get IP of containers

parent c214b464
version: '3'
services:
  # Platform - SWITCH
  switch_monitoring_server:
    image: salmant/ul_monitoring_server_container_image
    ports:
      - 8080:8080 # Optional, for debugging - WEB Interface
      - 4242:4242
      - 4245:4245
      - 7199:7199
      - 7000:7000
      - 7001:7001
      - 9160:9160
      - 9042:9042
      - 8012:8012
      - 61621:61621
  switch_monitoring_agent:
    image: beia/monitoring_agent
    environment:
      - MONITORING_SERVER=switch_monitoring_server
      - MONITORING_PREFIX=eu.switch.beia # USER CONFIGURABLE!!!
    depends_on:
      - switch_monitoring_server
  switch_alarm_trigger:
    image: salmant/ul_alarm_trigger_container_image
    environment:
      - AlarmTriggerYMLURL=http://194.249.1.72:5000/AlarmTrigger.yml # SIDE GENERATED VALUE!!!
      - MONITORING_SERVER=switch_monitoring_server
      - JSONAlertURL=https://gurujsonrpc.appspot.com/guru # SIDE GENERATED VALUE!!! - Points to another component of ASAP
      - JSONAlertURLSIDEGUI=https://gurujsonrpc.appspot.com/guru # SIDE GENERATED VALUE!!!
    ports:
      - 18080:8080 # Optional, for debugging - Web interface to alarm trigger
    depends_on:
      - switch_monitoring_server
  # APPLICATION - BEIA, MOG, WT ...
......@@ -40,6 +40,7 @@ import sys
from results_collector import ResultsCollector

def install_prerequisites(vm):
    try:
        print "Installing ansible prerequisites in: %s" % (vm.ip)
......
2017-10-24 17:02:29,283 - rpc_server - INFO - Awaiting RPC requests
2017-10-24 17:02:39,292 - rpc_server - INFO - Threads successfully closed
......@@ -20,11 +20,28 @@ __author__ = 'Yang Hu'
import paramiko, os
from vm_info import VmInfo
import json
import logging
import linecache
import sys
# create logger
logger = logging.getLogger('docker_swarm')
logger.setLevel(logging.DEBUG)
# create console handler and set level to debug
ch = logging.StreamHandler()
ch.setLevel(logging.DEBUG)
# create formatter
formatter = logging.Formatter('%(asctime)s - %(name)s - %(levelname)s - %(message)s')
# add formatter to ch
ch.setFormatter(formatter)
# add ch to logger
logger.addHandler(ch)
def docker_check(vm, compose_name):
    try:
        print "%s: ====== Start Check Docker Services ======" % (vm.ip)
        logger.info("Starting docker info services on: " + vm.ip)
        paramiko.util.log_to_file("deployment.log")
        ssh = paramiko.SSHClient()
        ssh.set_missing_host_key_policy(paramiko.AutoAddPolicy())
......@@ -46,9 +63,12 @@ def docker_check(vm, compose_name):
        stdin, stdout, stderr = ssh.exec_command(cmd)
        stack_ps_resp = stdout.readlines()
        services_info = []
        nodes_hostname = set()
        for i in stack_ps_resp:
            if i.encode():
                json_str = json.loads(i.encode().strip('\n'))
                if json_str['node'] not in nodes_hostname:
                    nodes_hostname.add(json_str['node'])
                services_info.append(json_str)
        json_response['services_info'] = services_info
        stack_format = '\'{"ID":"{{.ID}}","name":"{{.Name}}","mode":"{{.Mode}}","replicas":"{{.Replicas}}","image":"{{.Image}}"}\''
......@@ -62,9 +82,34 @@ def docker_check(vm, compose_name):
                stack_info.append(json_str)
        json_response['stack_info'] = stack_info
        print "%s: =========== Check Finished ==============" % (vm.ip)
        cmd = 'sudo docker node inspect '
        for hostname in nodes_hostname:
            cmd += hostname + ' '  # space-separate the node hostnames
        logger.info(cmd)
        stdin, stdout, stderr = ssh.exec_command(cmd)
        inspect_resp = stdout.readlines()
        nodes_info = []
        #for i in inspect_resp:
        #    if i.encode():
        #        json_str = json.loads(i.encode().strip('\n'))
        #        nodes_info.append(json_str)
        #json_response['nodes_info'] = nodes_info
        logger.info("Finished docker info services on: " + vm.ip)
    except Exception as e:
        print '%s: %s' % (vm.ip, e)
        exc_type, exc_obj, tb = sys.exc_info()
        f = tb.tb_frame
        lineno = tb.tb_lineno
        filename = f.f_code.co_filename
        linecache.checkcache(filename)
        line = linecache.getline(filename, lineno, f.f_globals)
        print 'EXCEPTION IN ({}, LINE {} "{}"): {}'.format(filename, lineno, line.strip(), exc_obj)
        logger.error(vm.ip + " " + str(e) + " line:" + str(lineno))
        return "ERROR:" + vm.ip + " " + str(e)
    ssh.close()
    return json_response
......
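The commented-out block above reads the `docker node inspect` output line by line, but the command prints one JSON array covering all requested nodes, which is likely why that block did not work as-is. A minimal sketch of how the node addresses could be recovered, in line with the commit message ("get IP of containers"); `parse_node_ips` is a hypothetical helper, and it assumes a Docker release whose node-inspect output populates `Status.Addr`:

import json

def parse_node_ips(inspect_resp):
    # `docker node inspect h1 h2 ...` emits a single JSON array for all
    # nodes, so join the lines and parse the output as a whole.
    nodes = json.loads(''.join(inspect_resp))
    ips = {}
    for node in nodes:
        hostname = node['Description']['Hostname']
        ips[hostname] = node['Status']['Addr']  # the node's advertised IP
    return ips

The result could then be attached to the reply as json_response['nodes_info'] = parse_node_ips(inspect_resp).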
......@@ -19,11 +19,25 @@ __author__ = 'Yang Hu'
import paramiko, os
from vm_info import VmInfo
import logging
# create logger
logger = logging.getLogger('docker_swarm')
logger.setLevel(logging.DEBUG)
# create console handler and set level to debug
ch = logging.StreamHandler()
ch.setLevel(logging.DEBUG)
# create formatter
formatter = logging.Formatter('%(asctime)s - %(name)s - %(levelname)s - %(message)s')
# add formatter to ch
ch.setFormatter(formatter)
# add ch to logger
logger.addHandler(ch)
def deploy_compose(vm, compose_file, compose_name):
    try:
        print "%s: ====== Start Docker Compose Deploying ======" % (vm.ip)
        logger.info("Starting docker compose deployment on: " + vm.ip)
        paramiko.util.log_to_file("deployment.log")
        ssh = paramiko.SSHClient()
        ssh.set_missing_host_key_policy(paramiko.AutoAddPolicy())
......@@ -33,9 +47,9 @@ def deploy_compose(vm, compose_file, compose_name):
        sftp.put(compose_file, "docker-compose.yml")
        stdin, stdout, stderr = ssh.exec_command("sudo docker stack deploy --compose-file /tmp/docker-compose.yml %s" % (compose_name))
        stdout.read()
        print "%s: ======= Deployment of Compose Finished =========" % (vm.ip)
        logger.info("Finished docker compose deployment on: " + vm.ip)
    except Exception as e:
        print '%s: %s' % (vm.ip, e)
        logger.error(vm.ip + " " + str(e))
        return "ERROR:" + vm.ip + " " + str(e)
    ssh.close()
    return "SUCCESS"
......
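A hypothetical call, for orientation; the VmInfo constructor arguments (ip, user, key path) are an assumption based on the attributes the function reads:

# push the compose file shown above to a swarm manager, deploy as stack 'switch'
vm = VmInfo('10.0.0.1', 'ubuntu', '/path/to/key.pem')  # assumed constructor
print deploy_compose(vm, 'docker-compose.yml', 'switch')  # "SUCCESS" or "ERROR:..."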
......@@ -19,20 +19,34 @@ __author__ = 'Yang Hu'
import paramiko, os
from vm_info import VmInfo
import logging
# create logger
logger = logging.getLogger('docker_swarm')
logger.setLevel(logging.DEBUG)
# create console handler and set level to debug
ch = logging.StreamHandler()
ch.setLevel(logging.DEBUG)
# create formatter
formatter = logging.Formatter('%(asctime)s - %(name)s - %(levelname)s - %(message)s')
# add formatter to ch
ch.setFormatter(formatter)
# add ch to logger
logger.addHandler(ch)
def scale_service(vm, application_name, service_name, service_num):
    try:
        print "%s: ====== Start Docker Service Scaling ======" % (vm.ip)
        logger.info("Starting docker service scaling on: " + vm.ip)
        paramiko.util.log_to_file("deployment.log")
        ssh = paramiko.SSHClient()
        ssh.set_missing_host_key_policy(paramiko.AutoAddPolicy())
        ssh.connect(vm.ip, username=vm.user, key_filename=vm.key)
        stdin, stdout, stderr = ssh.exec_command("sudo docker service scale %s_%s=%s" % (application_name, service_name, service_num))
        stdout.read()
        print "%s: ======= Service Scaling Finished =========" % (vm.ip)
        logger.info("Finished docker service scaling on: " + vm.ip)
    except Exception as e:
        print '%s: %s' % (vm.ip, e)
        logger.error(vm.ip + " " + str(e))
        return "ERROR:" + vm.ip + " " + str(e)
    ssh.close()
    return "SUCCESS"
......
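Note that `docker stack deploy` prefixes every service with the stack name, so the target is addressed as <application_name>_<service_name>. A hypothetical call against the SWITCH stack above (VmInfo constructor assumed as before):

# runs: sudo docker service scale switch_switch_monitoring_agent=3
vm = VmInfo('10.0.0.1', 'ubuntu', '/path/to/key.pem')  # assumed constructor
print scale_service(vm, 'switch', 'switch_monitoring_agent', 3)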
......@@ -19,9 +19,25 @@ __author__ = 'Yang Hu'
import paramiko, os
from vm_info import VmInfo
import logging
# create logger
logger = logging.getLogger('docker_swarm')
logger.setLevel(logging.DEBUG)
# create console handler and set level to debug
ch = logging.StreamHandler()
ch.setLevel(logging.DEBUG)
# create formatter
formatter = logging.Formatter('%(asctime)s - %(name)s - %(levelname)s - %(message)s')
# add formatter to ch
ch.setFormatter(formatter)
# add ch to logger
logger.addHandler(ch)
def install_manager(vm):
    try:
        print "%s: ====== Start Swarm Manager Installing ======" % (vm.ip)
        logger.info("Starting swarm manager installation on: " + vm.ip)
        paramiko.util.log_to_file("deployment.log")
        ssh = paramiko.SSHClient()
        ssh.set_missing_host_key_policy(paramiko.AutoAddPolicy())
......@@ -42,16 +58,16 @@ def install_manager(vm):
        stdin, stdout, stderr = ssh.exec_command("sudo docker swarm join-token worker")
        retstr = stdout.readlines()
        ret = retstr[2].encode()
        print "%s: ========= Swarm Manager Installed =========" % (vm.ip)
        logger.info("Finished swarm manager installation on: " + vm.ip)
    except Exception as e:
        print '%s: %s' % (vm.ip, e)
        logger.error(vm.ip + " " + str(e))
        return "ERROR:" + vm.ip + " " + str(e)
    ssh.close()
    return ret
def install_worker(join_cmd, vm):
    try:
        print "%s: ====== Start Swarm Worker Installing ======" % (vm.ip)
        logger.info("Starting swarm worker installation on: " + vm.ip)
        paramiko.util.log_to_file("deployment.log")
        ssh = paramiko.SSHClient()
        ssh.set_missing_host_key_policy(paramiko.AutoAddPolicy())
......@@ -63,9 +79,9 @@ def install_worker(join_cmd, vm):
        stdout.read()
        stdin, stdout, stderr = ssh.exec_command("sudo %s" % (join_cmd))
        stdout.read()
        print "%s: ========= Swarm Worker Installed =========" % (vm.ip)
        logger.info("Finished swarm worker installation on: " + vm.ip)
    except Exception as e:
        print '%s: %s' % (vm.ip, e)
        logger.error(vm.ip + " " + str(e))
        return "ERROR:" + vm.ip + " " + str(e)
    ssh.close()
    return "SUCCESS"
......
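install_manager returns the `docker swarm join ...` line (the third line of the join-token output), which install_worker then executes on each worker. A sketch of the intended wiring, with the VmInfo constructor again assumed:

manager = VmInfo('10.0.0.1', 'ubuntu', '/path/to/key.pem')
workers = [VmInfo('10.0.0.2', 'ubuntu', '/path/to/key.pem'),
           VmInfo('10.0.0.3', 'ubuntu', '/path/to/key.pem')]

join_cmd = install_manager(manager)  # e.g. "docker swarm join --token <token> 10.0.0.1:2377"
if not join_cmd.startswith("ERROR:"):
    for worker in workers:
        install_worker(join_cmd, worker)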
......@@ -16,6 +16,7 @@ import sys, argparse
from threading import Thread
from time import sleep
import os.path
import logging
if len(sys.argv) > 1:
......@@ -23,6 +24,19 @@ if len(sys.argv) > 1:
else:
    rabbitmq_host = '127.0.0.1'
logger = logging.getLogger('rpc_server')
logger.setLevel(logging.DEBUG)
ch = logging.StreamHandler()
ch.setLevel(logging.DEBUG)
formatter = logging.Formatter('%(asctime)s - %(name)s - %(levelname)s - %(message)s')
ch.setFormatter(formatter)
logger.addHandler(ch)
fh = logging.FileHandler('deployer.log')
fh.setLevel(logging.DEBUG)
fh.setFormatter(formatter)
logger.addHandler(fh)
connection = pika.BlockingConnection(pika.ConnectionParameters(host=rabbitmq_host))
channel = connection.channel()
......@@ -160,7 +174,8 @@ def on_request(ch, method, props, body):
response["parameters"].append(par)
response = json.dumps(response)
print "Response: %s " % response
logger.info("Response: " + response)
ch.basic_publish(exchange='',
routing_key=props.reply_to,
......@@ -176,7 +191,7 @@ channel.basic_consume(on_request, queue='deployer_queue')
thread = Thread(target = threaded_function, args = (1, ))
thread.start()
print(" [x] Awaiting RPC requests")
logger.info("Awaiting RPC requests")
......@@ -186,4 +201,4 @@ except KeyboardInterrupt:
    #thread.stop()
    done = True
    thread.join()
    print "threads successfully closed"
    logger.info("Threads successfully closed")
......@@ -81,10 +81,10 @@ def handle_delivery(message):
    return response

def test_local():
    home = expanduser("~")
    transformer = DockerComposeTransformer(home + "/workspace/DRIP/docs/input_tosca_files/MOG_cardif.yml")
    transformer = DockerComposeTransformer(home + "/workspace/DRIP/docs/input_tosca_files/BEIA_cardif.yml")
    compose = transformer.getnerate_compose()
    print yaml.dump(compose)
    response = {}
    current_milli_time = lambda: int(round(time.time() * 1000))
    response["creationDate"] = current_milli_time()
......@@ -98,12 +98,12 @@ def test_local():
    print response

if __name__ == "__main__":
    # test_local()
    print sys.argv
    channel = init_chanel(sys.argv)
    global queue_name
    queue_name = sys.argv[2]
    start(channel)
    test_local()
    # print sys.argv
    # channel = init_chanel(sys.argv)
    # global queue_name
    # queue_name = sys.argv[2]
    # start(channel)
    # try:
......
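The same twelve-line logger setup is repeated verbatim in each deployer module; it could be factored into a shared helper. A sketch, assuming a new (hypothetical) module logging_util.py:

# logging_util.py (hypothetical shared module)
import logging

def get_logger(name):
    # console logger with the format used across the deployer modules
    logger = logging.getLogger(name)
    logger.setLevel(logging.DEBUG)
    if not logger.handlers:  # avoid adding duplicate handlers on repeated calls
        ch = logging.StreamHandler()
        ch.setLevel(logging.DEBUG)
        ch.setFormatter(logging.Formatter('%(asctime)s - %(name)s - %(levelname)s - %(message)s'))
        logger.addHandler(ch)
    return logger

Each module would then start with logger = get_logger('docker_swarm').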