UvA / CONF · Commits

Commit b330368e, authored Feb 05, 2020 by Spiros Koulouzis

return add k8s attributes

Parent: 07b2988e
Showing 11 changed files with 164 additions and 83 deletions (+164 / -83)
TOSCA/application_example_updated.yaml                           +1   -1
TOSCA/types/nodes.yaml                                           +8   -7
ansible_playbooks/example_ansible_output.out                     +1   -0
deployer/__main__.py                                             +22  -15
deployer/service/ansible_service.py                              +11  -1
deployer/service/k8s_service.py                                  +79  -53
deployer/service/tosca.py                                        +28  -2
deployer/test/test_deployer.py                                   +11  -0
...er/src/main/java/nl/uva/sne/drip/service/DRIPService.java     +2   -2
planner/__main__.py                                              +0   -1
...n/java/nl/uva/sne/drip/provisioner/CloudStormService.java     +1   -1
TOSCA/application_example_updated.yaml

@@ -21,7 +21,7 @@ topology_template:
       type: tosca.nodes.ARTICONF.Container.Application.Docker
       properties:
         ports:
-          - "27017:27017"
+          - "30001:27017"
         volumes:
           - db-data:/data/db
         environment:
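The only change here is the host-side port, which moves from 27017 into the 30000-32767 range. A minimal sketch of why, assuming the mapping used by create_service_definition in deployer/service/k8s_service.py (below): the first half of the Docker "host:container" port string becomes the Kubernetes nodePort, and the cluster rejects nodePorts outside 30000-32767, which is exactly the failure recorded in ansible_playbooks/example_ansible_output.out.

# Sketch of the port mapping applied by create_service_definition; the range check
# mirrors the Kubernetes validation error seen in the example Ansible output.
ports = "30001:27017".split(':')
node_port = int(ports[0])       # exposed NodePort on the cluster
container_port = int(ports[1])  # port inside the container
assert 30000 <= node_port <= 32767, "nodePort must be in the 30000-32767 range"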
TOSCA/types/nodes.yaml

@@ -65,20 +65,21 @@ node_types:
         - greater_or_equal: 1

   tosca.nodes.ARTICONF.Orchestrator.Kubernetes:
     derived_from: tosca.nodes.ARTICONF.Orchestrator
     description: Kubernetes orchestrator
     attributes:
-      api_key:
-        type: string
+      tokens:
+        type: list
+        entry_schema:
+          description: simple contact information
+          type: tosca.datatypes.Credential
         required: false
-      join_token:
-        type: string
-        required: false
-      discovery_token_ca_cert_hash:
+        description: list of tokens to access the kubernetes dashboard and other kubernetes resources
+      dashboard_url:
+        type: string
+        required: false
+        description: the dashboard access url
     interfaces:
       Kubernetes:
         type: tosca.interfaces.ARTICONF.Kubernetes
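The schema change replaces the flat api_key / join_token / discovery_token_ca_cert_hash strings with a tokens list of tosca.datatypes.Credential entries plus a dashboard_url string. As a rough sketch, the attribute values the deployer is expected to fill in look like the dictionary below; the field names token_type, token and user follow add_tokens in deployer/service/tosca.py, while the concrete values are placeholders.

# Hypothetical attribute values for a tosca.nodes.ARTICONF.Orchestrator.Kubernetes node
# after deployment (shapes follow add_tokens/add_dashboard_url in deployer/service/tosca.py).
kubernetes_attributes = {
    'tokens': [
        {'token_type': 'k8s_token', 'token': '<join-token>', 'user': 'join_token'},
        {'token_type': 'k8s_token', 'token': '<ca-cert-hash>', 'user': 'discovery_token_ca_cert_hash'},
        {'token_type': 'k8s_token', 'token': '<service-account-token>', 'user': 'dashboard_token'},
    ],
    'dashboard_url': 'https://<master-public-ip>:<nodePort>',
}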
ansible_playbooks/example_ansible_output.out (new file, mode 100644)
\nPLAY [k8-master] ***************************************************************\n\nTASK [Gathering Facts] *********************************************************\nok: [54.93.250.84]\n\nTASK [install pip modules] *****************************************************\nchanged: [54.93.250.84]\n\nTASK [copy task src: /home/alogo/workspace/DRIP/deployer/k8s/dashboard.yaml dest: /tmp/dashboard.yaml] ***\nok: [54.93.250.84]\n\nTASK [create_dashboard] ********************************************************\nchanged: [54.93.250.84]\n\nTASK [create_admin_dashboard] **************************************************\nok: [54.93.250.84]\n\nTASK [create_admin_cluster_role_binding] ***************************************\nok: [54.93.250.84]\n\nTASK [get token] ***************************************************************\nchanged: [54.93.250.84]\n\nTASK [print token] *************************************************************\nok: [54.93.250.84] => {\n "dashboard_token": {\n "changed": true, \n "cmd": "kubectl describe secret $(kubectl get secret | grep admin-user | awk \'{print $1}\')", \n "delta": "0:00:00.149191", \n "end": "2020-02-05 11:12:17.981963", \n "failed": false, \n "rc": 0, \n "start": "2020-02-05 11:12:17.832772", \n "stderr": "", \n "stderr_lines": [], \n "stdout": "Name: admin-user-token-dpmgw\\nNamespace: default\\nLabels: <none>\\nAnnotations: kubernetes.io/service-account.name: admin-user\\n kubernetes.io/service-account.uid: bc04be40-3782-422a-bca2-032be8596fb0\\n\\nType: kubernetes.io/service-account-token\\n\\nData\\n====\\ntoken: eyJhbGciOiJSUzI1NiIsImtpZCI6InduMVZmamRqODRTZXg1eDktVTRCTnlLQ1FHQm85RDgzSU1mYnZHQVRPeWsifQ.eyJpc3MiOiJrdWJlcm5ldGVzL3NlcnZpY2VhY2NvdW50Iiwia3ViZXJuZXRlcy5pby9zZXJ2aWNlYWNjb3VudC9uYW1lc3BhY2UiOiJkZWZhdWx0Iiwia3ViZXJuZXRlcy5pby9zZXJ2aWNlYWNjb3VudC9zZWNyZXQubmFtZSI6ImFkbWluLXVzZXItdG9rZW4tZHBtZ3ciLCJrdWJlcm5ldGVzLmlvL3NlcnZpY2VhY2NvdW50L3NlcnZpY2UtYWNjb3VudC5uYW1lIjoiYWRtaW4tdXNlciIsImt1YmVybmV0ZXMuaW8vc2VydmljZWFjY291bnQvc2VydmljZS1hY2NvdW50LnVpZCI6ImJjMDRiZTQwLTM3ODItNDIyYS1iY2EyLTAzMmJlODU5NmZiMCIsInN1YiI6InN5c3RlbTpzZXJ2aWNlYWNjb3VudDpkZWZhdWx0OmFkbWluLXVzZXIifQ.W1xXAuMcJe1zbs4gaqtEbwer8Wc20Wm1hrPtJdCB6LTSWL7UFNl8Xjq5dXPrfOr8TsoDwvBpoLc6HEXb7MDMVX5TC_2lXEy0t3pHEu3uOsvm7Y5sRMaKOmEcw4jpsAz5UuVWO0MP3qSDXdf28Wq-zVf57tRKOr8WcSLv23oKVt_eeqQgroqA6E2PThFXvTnyGzXAPn7wmpWcAY7MIa-pntJEorwRsTRAwt8o0KcmiNMOKkJiRz3vp-Lq3lITT6lXazfXwG4nWGlLmFA8tosBdH2EXqai3d_0LJfQ2Or6Vn0Tqti6Z47xOJlJOiy1GM810dc0cZ1J-tuHbVdJt08Eig\\nca.crt: 1025 bytes\\nnamespace: 7 bytes", \n "stdout_lines": [\n "Name: admin-user-token-dpmgw", \n "Namespace: default", \n "Labels: <none>", \n "Annotations: kubernetes.io/service-account.name: admin-user", \n " kubernetes.io/service-account.uid: bc04be40-3782-422a-bca2-032be8596fb0", \n "", \n "Type: kubernetes.io/service-account-token", \n "", \n "Data", \n "====", \n "token: 
eyJhbGciOiJSUzI1NiIsImtpZCI6InduMVZmamRqODRTZXg1eDktVTRCTnlLQ1FHQm85RDgzSU1mYnZHQVRPeWsifQ.eyJpc3MiOiJrdWJlcm5ldGVzL3NlcnZpY2VhY2NvdW50Iiwia3ViZXJuZXRlcy5pby9zZXJ2aWNlYWNjb3VudC9uYW1lc3BhY2UiOiJkZWZhdWx0Iiwia3ViZXJuZXRlcy5pby9zZXJ2aWNlYWNjb3VudC9zZWNyZXQubmFtZSI6ImFkbWluLXVzZXItdG9rZW4tZHBtZ3ciLCJrdWJlcm5ldGVzLmlvL3NlcnZpY2VhY2NvdW50L3NlcnZpY2UtYWNjb3VudC5uYW1lIjoiYWRtaW4tdXNlciIsImt1YmVybmV0ZXMuaW8vc2VydmljZWFjY291bnQvc2VydmljZS1hY2NvdW50LnVpZCI6ImJjMDRiZTQwLTM3ODItNDIyYS1iY2EyLTAzMmJlODU5NmZiMCIsInN1YiI6InN5c3RlbTpzZXJ2aWNlYWNjb3VudDpkZWZhdWx0OmFkbWluLXVzZXIifQ.W1xXAuMcJe1zbs4gaqtEbwer8Wc20Wm1hrPtJdCB6LTSWL7UFNl8Xjq5dXPrfOr8TsoDwvBpoLc6HEXb7MDMVX5TC_2lXEy0t3pHEu3uOsvm7Y5sRMaKOmEcw4jpsAz5UuVWO0MP3qSDXdf28Wq-zVf57tRKOr8WcSLv23oKVt_eeqQgroqA6E2PThFXvTnyGzXAPn7wmpWcAY7MIa-pntJEorwRsTRAwt8o0KcmiNMOKkJiRz3vp-Lq3lITT6lXazfXwG4nWGlLmFA8tosBdH2EXqai3d_0LJfQ2Or6Vn0Tqti6Z47xOJlJOiy1GM810dc0cZ1J-tuHbVdJt08Eig", \n "ca.crt: 1025 bytes", \n "namespace: 7 bytes"\n ]\n }\n}\n\nTASK [Create a Service object0] ************************************************\nfatal: [54.93.250.84]: FAILED! => {"changed": false, "error": 422, "msg": "Failed to patch object: {\\"kind\\":\\"Status\\",\\"apiVersion\\":\\"v1\\",\\"metadata\\":{},\\"status\\":\\"Failure\\",\\"message\\":\\"Service \\\\\\"logspout\\\\\\" is invalid: spec.ports[0].nodePort: Invalid value: 8000: provided port is not in the valid range. The range of valid ports is 30000-32767\\",\\"reason\\":\\"Invalid\\",\\"details\\":{\\"name\\":\\"logspout\\",\\"kind\\":\\"Service\\",\\"causes\\":[{\\"reason\\":\\"FieldValueInvalid\\",\\"message\\":\\"Invalid value: 8000: provided port is not in the valid range. The range of valid ports is 30000-32767\\",\\"field\\":\\"spec.ports[0].nodePort\\"}]},\\"code\\":422}\\n", "reason": "Unprocessable Entity", "status": 422}\n\nPLAY RECAP *********************************************************************\n54.93.250.84 : ok=8 changed=3 unreachable=0 failed=1 skipped=0 rescued=0 ignored=0 \n\n
deployer/__main__.py

@@ -67,25 +67,35 @@ def handle_delivery(message):
     parsed_json_message = json.loads(message)
     owner = parsed_json_message['owner']
     tosca_file_name = 'tosca_template'
-    tosca_template_json = parsed_json_message['toscaTemplate']
-    interfaces = tosca.get_interfaces(tosca_template_json)
+    tosca_template_dict = parsed_json_message['toscaTemplate']
+    tosca_interfaces = tosca.get_interfaces(tosca_template_dict)
     tmp_path = tempfile.mkdtemp()
-    vms = tosca.get_vms(tosca_template_json)
+    vms = tosca.get_vms(tosca_template_dict)
     inventory_path = ansible_service.write_inventory_file(tmp_path, vms)
-    paths = ansible_service.write_playbooks_from_tosca_interface(interfaces, tmp_path)
-    # for playbook_path in paths:
-    #     out,err = ansible_service.run(inventory_path,playbook_path)
-    #     api_key, join_token, discovery_token_ca_cert_hash = ansible_service.parse_tokens(out.decode("utf-8"))
-    ansible_playbook_path = k8s_service.write_ansible_k8s_files(tosca_template_json, tmp_path)
+    paths = ansible_service.write_playbooks_from_tosca_interface(tosca_interfaces, tmp_path)
+    tokens = {}
+    for playbook_path in paths:
+        out, err = ansible_service.run(inventory_path, playbook_path)
+        api_key, join_token, discovery_token_ca_cert_hash = ansible_service.parse_api_tokens(out.decode("utf-8"))
+        if api_key:
+            tokens['api_key'] = api_key
+        if join_token:
+            tokens['join_token'] = join_token
+        if discovery_token_ca_cert_hash:
+            tokens['discovery_token_ca_cert_hash'] = discovery_token_ca_cert_hash
+    ansible_playbook_path = k8s_service.write_ansible_k8s_files(tosca_template_dict, tmp_path)
     out, err = ansible_service.run(inventory_path, ansible_playbook_path)
+    dashboard_token = ansible_service.parse_dashboard_tokens(out.decode("utf-8"))
+    tokens['dashboard_token'] = dashboard_token
+    tosca_template_dict = tosca.add_tokens(tokens, tosca_template_dict)
     template_dict = {}
     logger.info("template ----: \n" + yaml.dump(template_dict))
+    tosca_template_dict = tosca.add_dashboard_url(k8s_service.get_dashboard_url(vms), tosca_template_dict)
-    response = {'toscaTemplate': template_dict}
+    response = {'toscaTemplate': tosca_template_dict}
     output_current_milli_time = int(round(time.time() * 1000))
     response["creationDate"] = output_current_milli_time
     response["parameters"] = []

@@ -108,11 +118,8 @@ if __name__ == "__main__":
         vms = tosca.get_vms(tosca_template_json)
         ansible_service.run(interfaces, vms)
         k8s_service.write_ansible_k8s_files(tosca_template_json)
     else:
         logger.info("Input args: " + sys.argv[0] + ' ' + sys.argv[1] + ' ' + sys.argv[2])
     channel = init_chanel(sys.argv)
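For reference, a minimal sketch of the response envelope that handle_delivery now returns: the updated template (with the tokens and dashboard_url attributes added) goes back under toscaTemplate, together with a creationDate in epoch milliseconds and an empty parameters list. The template value below is a placeholder.

import time

# Placeholder for the dict produced by tosca.add_tokens / tosca.add_dashboard_url.
tosca_template_dict = {'topology_template': {'node_templates': {}}}

response = {'toscaTemplate': tosca_template_dict}
response["creationDate"] = int(round(time.time() * 1000))  # epoch milliseconds
response["parameters"] = []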
deployer/service/ansible_service.py

@@ -48,6 +48,7 @@ def write_inventory_file(tmp_path, vms):
         os.chmod(ansible_ssh_private_key_file_path, S_IREAD)
         if ansible_ssh_user is None:
             ansible_ssh_user = vms[vm_name]['properties']['user_name']
     k8s_hosts_path = tmp_path + "/k8s_hosts"
     with open(k8s_hosts_path, "w") as k8s_hosts_file:

@@ -105,7 +106,16 @@ def run(inventory_path, playbook_path):
     return output, err


-def parse_tokens(out):
+def parse_dashboard_tokens(out):
+    token = None
+    if 'admin-user-token' in out:
+        m = re.search('\"token: (.+?)\"', out)
+        if m:
+            token = m.group(1).strip()
+    return token
+
+
+def parse_api_tokens(out):
     api_key = None
     join_token = None
     discovery_token_ca_cert_hash = None
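A minimal sketch of what the new parse_dashboard_tokens does: it looks for the admin-user-token secret in the registered Ansible debug output and pulls the value that follows "token:". The sample string below is a heavily shortened stand-in for the real playbook output (see ansible_playbooks/example_ansible_output.out).

import re

# Shortened stand-in for the Ansible debug output containing the admin-user secret.
sample_out = '"stdout_lines": ["Name: admin-user-token-dpmgw", "token: eyJhbGciOiJSUzI1NiIs..."]'

token = None
if 'admin-user-token' in sample_out:
    m = re.search('\"token: (.+?)\"', sample_out)  # same pattern as parse_dashboard_tokens
    if m:
        token = m.group(1).strip()
print(token)  # -> eyJhbGciOiJSUzI1NiIs...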
deployer/service/k8s_service.py

@@ -6,15 +6,19 @@ import yaml
 yaml.Dumper.ignore_aliases = lambda *args: True


-def get_template_dictionary(file_name):
+def get_templates_directory_path(file_name):
     template_path = "./k8s/"
     template_file_path = template_path + file_name
     if not os.path.exists(template_file_path):
         template_path = "../k8s/"
         template_file_path = template_path + file_name
+    return os.path.abspath(template_file_path)
+
+
+def get_yaml_data(file_name):
+    template_file_path = get_templates_directory_path(file_name)
     with open(template_file_path, 'r') as stream:
         data = yaml.safe_load(stream)
     return data

@@ -29,33 +33,19 @@ def get_dockers(tosca_template_json):

 def create_service_definition(docker_name, docker):
-    k8s_service = get_template_dictionary('template-service.yaml')
+    k8s_service = get_yaml_data('template-service.yaml')
     k8s_service['metadata']['labels']['app'] = docker_name
     k8s_service['metadata']['name'] = docker_name
     docker_ports = docker[docker_name]['properties']['ports'][0].split(':')
     k8s_service['spec']['ports'][0]['port'] = int(docker_ports[1])
     k8s_service['spec']['ports'][0]['nodePort'] = int(docker_ports[0])
     k8s_service['spec']['selector']['app'] = docker_name
-    # k8s_service = {'apiVersion': 'v1', 'kind': 'Service'}
-    # labels = {'app': docker_name}
-    # metadata = {'labels': labels, 'name': docker_name, 'namespace': 'application'}
-    # k8s_service['metadata'] = metadata
-    # spec = {'type': 'NodePort'}
-    # port = {'port': docker_ports[1], 'nodePort': docker_ports[0]}
-    # ports = [port]
-    # spec['ports'] = ports
-    # app = {'app': docker_name}
-    # spec['selector'] = app
-    # k8s_service['spec'] = spec
-    # # print(yaml.safe_dump(k8s_service))
     return k8s_service


 def create_deployment_definition(docker_name, docker):
     docker_ports = docker[docker_name]['properties']['ports'][0].split(':')
-    deployment = get_template_dictionary('template-deployment.yaml')
+    deployment = get_yaml_data('template-deployment.yaml')
     deployment['metadata']['labels']['app'] = docker_name
     deployment['metadata']['name'] = docker_name
     deployment['spec']['selector']['matchLabels']['app'] = docker_name

@@ -69,29 +59,6 @@ def create_deployment_definition(docker_name, docker):
         k8s_env = {'name': env, 'value': docker[docker_name]['properties']['environment'][env]}
         env_list.append(k8s_env)
     deployment['spec']['template']['spec']['containers'][0]['env'] = env_list
-    # labels = {'app': docker_name}
-    # metadata = {'labels': labels, 'name': docker_name, 'namespace': 'application'}
-    # deployment = {'apiVersion': 'apps/v1', 'kind': 'Deployment', 'metadata': metadata}
-    #
-    # match_labels = {'app': docker_name}
-    # selector = {'matchLabels': match_labels}
-    # spec = {'selector': selector, 'replicas': 1}
-    # labels = {'app': docker_name}
-    # metadata = {'labels': labels}
-    # template = {'metadata': metadata}
-    #
-    # containers = []
-    # container = {'image': docker[docker_name]['artifacts']['image']['file'], 'name': docker_name}
-    # docker_ports = docker[docker_name]['properties']['ports'][0].split(':')
-    # ports = {'containerPort': docker_ports[1]}
-    # container['ports'] = ports
-    # containers.append(container)
-    # template_spec = {'containers': containers}
-    # template['spec'] = template_spec
-    #
-    # spec['template'] = template
-    #
-    # deployment['spec'] = spec
     return deployment

@@ -142,18 +109,46 @@ def create_namespace_task():
     return task


-def create_dashboard_task():
-    task = {'name': 'create dashboard'}
-    k8s = {'src': '/tmp/recommended.yaml', 'state': 'present'}
+def create_dashboard_task(def_src):
+    task = {'name': 'create_dashboard'}
+    k8s = {'state': 'present', 'src': def_src}
     task['k8s'] = k8s
     return task


 def create_download_dashboard_task():
     task = {'name': 'Download dashboard'}
     get_url = {'url': 'https://raw.githubusercontent.com/kubernetes/dashboard/v2.0.0-rc3/aio/deploy/recommended.yaml', 'dest': '/tmp/recommended.yaml'}
     task['get_url'] = get_url


+def create_admin_dashboard_task():
+    admin_service_account_def = get_yaml_data("admin_service_account.yaml")
+    task = {'name': 'create_admin_dashboard'}
+    k8s = {'state': 'present', 'definition': admin_service_account_def}
+    task['k8s'] = k8s
+    return task
+
+
+def create_admin_cluster_role_binding_task():
+    admin_cluster_role_binding_def = get_yaml_data("admin_cluster_role_binding.yaml")
+    task = {'name': 'create_admin_cluster_role_binding'}
+    k8s = {'state': 'present', 'definition': admin_cluster_role_binding_def}
+    task['k8s'] = k8s
+    return task
+
+
+def create_copy_task(src, dest):
+    copy = {'src': src, 'dest': dest}
+    task = {'name': 'copy task src: ' + src + ' dest: ' + dest, 'copy': copy}
+    return task
+
+
+def create_get_admin_token_task():
+    task = {'name': 'get token',
+            'shell': 'kubectl describe secret $(kubectl get secret | grep admin-user | awk \'{print $1}\')',
+            'register': 'dashboard_token'}
+    return task
+
+
+def create_print_admin_token_task():
+    var = {'var': 'dashboard_token'}
+    task = {'name': 'print token', 'debug': var}
+    return task

@@ -170,11 +165,25 @@ def write_ansible_k8s_files(tosca_template_json, tmp_path):
     # namespace_task = create_namespace_task()
     # tasks.append(namespace_task)
     download_dashboard_task = create_download_dashboard_task()
     tasks.append(download_dashboard_task)
+    def_src = '/tmp/dashboard.yaml'
+    copy_task = create_copy_task(get_templates_directory_path('dashboard.yaml'), def_src)
+    tasks.append(copy_task)
-    dashboard_task = create_dashboard_task()
+    dashboard_task = create_dashboard_task(def_src)
     tasks.append(dashboard_task)
+    dashboard_admin_task = create_admin_dashboard_task()
+    tasks.append(dashboard_admin_task)
+    admin_cluster_role_binding_task = create_admin_cluster_role_binding_task()
+    tasks.append(admin_cluster_role_binding_task)
+    get_admin_token_task = create_get_admin_token_task()
+    tasks.append(get_admin_token_task)
+    print_admin_token_task = create_print_admin_token_task()
+    tasks.append(print_admin_token_task)
     for services_def in services:
         task = create_service_task(i, services_def)
         i += 1

@@ -189,10 +198,27 @@ def write_ansible_k8s_files(tosca_template_json, tmp_path):
     ansible_playbook = []
     plays = {'hosts': 'k8-master', 'tasks': tasks}
     ansible_playbook.append(plays)
-    print(yaml.safe_dump(ansible_playbook))
+    # print(yaml.safe_dump(ansible_playbook))
     ansible_playbook_path = tmp_path + '/' + 'k8s_playbook.yml'
     with open(ansible_playbook_path, 'w') as file:
         documents = yaml.dump(ansible_playbook, file)
     return ansible_playbook_path


+def get_dashboard_url(vms):
+    dashboard_tasks_path = get_templates_directory_path('dashboard.yaml')
+    with open(dashboard_tasks_path, 'r') as stream:
+        tasks = yaml.load_all(stream)
+        for task in tasks:
+            if task['kind'] == 'Service' and 'name' in task['metadata'] and task['metadata']['name'] and task['metadata']['name'] == 'kubernetes-dashboard':
+                dashboard_port = task['port']['ports'][0]['nodePort']
+    for vm_name in vms:
+        attributes = vms[vm_name]['attributes']
+        role = attributes['role']
+        if role == 'master':
+            k8_master = attributes['public_ip']
+    url = 'https://' + k8_master + ':' + str(dashboard_port)
+    return url
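The new get_dashboard_url combines the nodePort declared for the kubernetes-dashboard Service in the dashboard.yaml template with the public IP of the VM whose role is master. A minimal sketch with hand-written inputs (the real code reads the port from dashboard.yaml via get_templates_directory_path; the port and the worker IP below are placeholders):

# Hand-written stand-ins: real values come from k8s/dashboard.yaml and the TOSCA VM nodes.
dashboard_port = 30443
vms = {
    'k8s-master-vm': {'attributes': {'role': 'master', 'public_ip': '54.93.250.84'}},
    'k8s-worker-vm': {'attributes': {'role': 'worker', 'public_ip': '54.93.250.85'}},
}

k8_master = None
for vm_name in vms:  # same master lookup as get_dashboard_url
    attributes = vms[vm_name]['attributes']
    if attributes['role'] == 'master':
        k8_master = attributes['public_ip']

url = 'https://' + k8_master + ':' + str(dashboard_port)
print(url)  # -> https://54.93.250.84:30443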
deployer/service/tosca.py

@@ -3,8 +3,8 @@ import logging
 logger = logging.getLogger(__name__)


-def get_interfaces(tosca_template_json):
-    node_templates = tosca_template_json['topology_template']['node_templates']
+def get_interfaces(tosca_template_dict):
+    node_templates = tosca_template_dict['topology_template']['node_templates']
     for node_name in node_templates:
         if node_templates[node_name]['type'] == 'tosca.nodes.ARTICONF.Orchestrator.Kubernetes':
             logger.info("Returning interfaces from tosca_template: " + str(node_templates[node_name]['interfaces']))

@@ -19,3 +19,29 @@ def get_vms(tosca_template_json):
         vms[node_name] = node_templates[node_name]
     logger.info("Returning VMs from tosca_template: " + str(vms))
     return vms
+
+
+def add_tokens(tokens, tosca_template_dict):
+    node_templates = tosca_template_dict['topology_template']['node_templates']
+    for node_name in node_templates:
+        if node_templates[node_name]['type'] == 'tosca.nodes.ARTICONF.Orchestrator.Kubernetes':
+            creds = []
+            for token_name in tokens:
+                cred = {'token_type': 'k8s_token', 'token': tokens[token_name], 'user': token_name}
+                creds.append(cred)
+            if 'attributes' not in node_templates[node_name]:
+                node_templates[node_name]['attributes'] = {}
+            attributes = node_templates[node_name]['attributes']
+            attributes['tokens'] = creds
+    return tosca_template_dict
+
+
+def add_dashboard_url(dashboard_url, tosca_template_dict):
+    node_templates = tosca_template_dict['topology_template']['node_templates']
+    for node_name in node_templates:
+        if node_templates[node_name]['type'] == 'tosca.nodes.ARTICONF.Orchestrator.Kubernetes':
+            if 'attributes' not in node_templates[node_name]:
+                node_templates[node_name]['attributes'] = {}
+            attributes = node_templates[node_name]['attributes']
+            attributes['dashboard_url'] = dashboard_url
+    return tosca_template_dict
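A minimal usage sketch of the two new helpers on a stripped-down template (node name and values are placeholders; the import mirrors deployer/test/test_deployer.py):

from service import tosca

# Stripped-down template containing only the Kubernetes orchestrator node.
tosca_template_dict = {
    'topology_template': {
        'node_templates': {
            'kubernetes': {'type': 'tosca.nodes.ARTICONF.Orchestrator.Kubernetes'}
        }
    }
}

tokens = {'join_token': '<join-token>', 'dashboard_token': '<service-account-token>'}
tosca_template_dict = tosca.add_tokens(tokens, tosca_template_dict)
tosca_template_dict = tosca.add_dashboard_url('https://<master-public-ip>:<nodePort>', tosca_template_dict)

# The node now carries 'tokens' (a list of k8s_token credentials) and 'dashboard_url'.
print(tosca_template_dict['topology_template']['node_templates']['kubernetes']['attributes'])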
deployer/test/test_deployer.py

@@ -12,6 +12,17 @@ from service import k8s_service, tosca, ansible_service

 class TestDeployer(unittest.TestCase):

+    def test_parse_token(self):
+        tosca_path = "../../ansible_playbooks/"
+        example_ansible_output_file_path = tosca_path + '/example_ansible_output.out'
+        if not os.path.exists(example_ansible_output_file_path):
+            tosca_path = "../ansible_playbooks/"
+            example_ansible_output_file_path = tosca_path + '/example_ansible_output.out'
+        with open(example_ansible_output_file_path, 'r') as file:
+            out = file.read()
+        token = ansible_service.parse_dashboard_tokens(out)
+
     def test(self):
         logger = logging.getLogger(__name__)
         tosca_path = "../../TOSCA/"
manager/src/main/java/nl/uva/sne/drip/service/DRIPService.java

@@ -62,9 +62,9 @@ public class DRIPService {
         caller.setRequestQeueName(requestQeueName);
         Message plannerResponse = caller.call(message);
-        ToscaTemplate plannedToscaTemplate = plannerResponse.getToscaTemplate();
+        ToscaTemplate updatedToscaTemplate = plannerResponse.getToscaTemplate();
         caller.close();
-        return toscaTemplateService.save(plannedToscaTemplate);
+        return toscaTemplateService.save(updatedToscaTemplate);
     }
planner/__main__.py

@@ -106,7 +106,6 @@ def handle_delivery(message):
     response = {'toscaTemplate': template_dict}
     output_current_milli_time = int(round(time.time() * 1000))
     response["creationDate"] = output_current_milli_time
     response["parameters"] = []
     if queue_name == "planner_queue":
         logger.info("Planning")
     logger.info("Returning plan")
provisioner/src/main/java/nl/uva/sne/drip/provisioner/CloudStormService.java

@@ -241,7 +241,7 @@ class CloudStormService {
         CloudCredentialDB cloudStormCredentials = new CloudCredentialDB();
         cloudStormCredentials.setCloudCreds(cloudStormCredentialList);
         objectMapper.writeValue(new File(credentialsTempInputDirPath + File.separator + "cred.yml"), cloudStormCredentials);
-        Logger.getLogger(CloudStormService.class.getName()).log(Level.INFO, "Wrote cloudStorm credentials at : " + credentialsTempInputDirPath + File.separator + "cred.yml");
+        Logger.getLogger(CloudStormService.class.getName()).log(Level.INFO, "Wrote cloudStorm credentials at : {0}{1}cred.yml", new Object[]{credentialsTempInputDirPath, File.separator});
     }

     private CredentialInfo getCloudStormCredentialInfo(Credential toscaCredentials, String tmpPath) throws FileNotFoundException, IOException {