#!/usr/bin/env python

"""
Synnefo ci utils module
"""

import os
import re
import sys
import time
import logging
import fabric.api as fabric
import subprocess
import tempfile
from ConfigParser import ConfigParser, DuplicateSectionError

from kamaki.cli import config as kamaki_config
from kamaki.clients.astakos import AstakosClient
from kamaki.clients.cyclades import CycladesClient
from kamaki.clients.image import ImageClient
from kamaki.clients.compute import ComputeClient

DEFAULT_CONFIG_FILE = "new_config"
# UUIDs of the owners of system images
DEFAULT_SYSTEM_IMAGES_UUID = [
    "25ecced9-bf53-4145-91ee-cf47377e9fb2",  # production (okeanos.grnet.gr)
    "04cbe33f-29b7-4ef1-94fb-015929e5fc06",  # testing (okeanos.io)
]

def _run(cmd, verbose):
    """Run fabric with verbose level"""
    if verbose:
        args = ('running',)
    else:
        args = ('running', 'stdout',)
    with fabric.hide(*args):  # Used * or ** magic. pylint: disable-msg=W0142
        return fabric.run(cmd)


def _put(local, remote):
    """Run fabric put command without output"""
    with fabric.quiet():
        fabric.put(local, remote)


def _red(msg):
    """Red color"""
    #return "\x1b[31m" + str(msg) + "\x1b[0m"
    return str(msg)


def _yellow(msg):
    """Yellow color"""
    #return "\x1b[33m" + str(msg) + "\x1b[0m"
    return str(msg)


def _green(msg):
    """Green color"""
    #return "\x1b[32m" + str(msg) + "\x1b[0m"
    return str(msg)


def _check_fabric(fun):
    """Check if fabric env has been set"""
    def wrapper(self, *args, **kwargs):
        """wrapper function"""
        if not self.fabric_installed:
            self.setup_fabric()
        return fun(self, *args, **kwargs)
    return wrapper


def _check_kamaki(fun):
    """Check if kamaki has been initialized"""
    def wrapper(self, *args, **kwargs):
        """wrapper function"""
        if not self.kamaki_installed:
            self.setup_kamaki()
        return fun(self, *args, **kwargs)
    return wrapper


class _MyFormatter(logging.Formatter):
    """Logging Formatter"""
    def format(self, record):
        format_orig = self._fmt
        if record.levelno == logging.DEBUG:
            self._fmt = " %(msg)s"
        elif record.levelno == logging.INFO:
            self._fmt = "%(msg)s"
        elif record.levelno == logging.WARNING:
            self._fmt = _yellow("[W] %(msg)s")
        elif record.levelno == logging.ERROR:
            self._fmt = _red("[E] %(msg)s")
        result = logging.Formatter.format(self, record)
        self._fmt = format_orig
        return result


# Too few public methods. pylint: disable-msg=R0903
class _InfoFilter(logging.Filter):
    """Logging Filter that allows DEBUG and INFO messages only"""
    def filter(self, rec):
        """The filter"""
        return rec.levelno in (logging.DEBUG, logging.INFO)

# Too many instance attributes. pylint: disable-msg=R0902
class SynnefoCI(object):
    """SynnefoCI python class"""

    def __init__(self, config_file=None, build_id=None, cloud=None):
        """Initialize SynnefoCI python class

        Setup logger, local_dir, config and kamaki
        """
        # Setup logger
        self.logger = logging.getLogger('synnefo-ci')
        self.logger.setLevel(logging.DEBUG)

        handler1 = logging.StreamHandler(sys.stdout)
        handler1.setLevel(logging.DEBUG)
        handler1.addFilter(_InfoFilter())
        handler1.setFormatter(_MyFormatter())
        handler2 = logging.StreamHandler(sys.stderr)
        handler2.setLevel(logging.WARNING)
        handler2.setFormatter(_MyFormatter())

        self.logger.addHandler(handler1)
        self.logger.addHandler(handler2)

        # Get our local dir
        self.ci_dir = os.path.dirname(os.path.abspath(__file__))
        self.repo_dir = os.path.dirname(self.ci_dir)

        # Read config file
        if config_file is None:
            config_file = DEFAULT_CONFIG_FILE
        if not os.path.isabs(config_file):
            config_file = os.path.join(self.ci_dir, config_file)
        self.config = ConfigParser()
        self.config.optionxform = str
        self.config.read(config_file)

        # Read temporary_config file
        temp_config = self.config.get('Global', 'temporary_config')
        self.temp_config = ConfigParser()
        self.temp_config.optionxform = str
        self.temp_config.read(temp_config)
        self.build_id = build_id
        self.logger.info("Will use \"%s\" as build id" % _green(self.build_id))

        # Set kamaki cloud
        if cloud is not None:
            self.kamaki_cloud = cloud
        elif self.config.has_option("Deployment", "kamaki_cloud"):
            kamaki_cloud = self.config.get("Deployment", "kamaki_cloud")
            if kamaki_cloud == "":
                self.kamaki_cloud = None
            else:
                self.kamaki_cloud = kamaki_cloud
        else:
            self.kamaki_cloud = None

        # Initialize variables
        self.fabric_installed = False
        self.kamaki_installed = False
        self.cyclades_client = None
        self.compute_client = None
        self.image_client = None

    def setup_kamaki(self):
        """Initialize kamaki

        Setup cyclades_client, image_client and compute_client
        """

        config = kamaki_config.Config()
        if self.kamaki_cloud is None:
            self.kamaki_cloud = config.get_global("default_cloud")

        self.logger.info("Setup kamaki client, using cloud '%s'.." %
                         self.kamaki_cloud)
        auth_url = config.get_cloud(self.kamaki_cloud, "url")
        self.logger.debug("Authentication URL is %s" % _green(auth_url))
        token = config.get_cloud(self.kamaki_cloud, "token")
        #self.logger.debug("Token is %s" % _green(token))

        astakos_client = AstakosClient(auth_url, token)

        cyclades_url = \
            astakos_client.get_service_endpoints('compute')['publicURL']
        self.logger.debug("Cyclades API url is %s" % _green(cyclades_url))
        self.cyclades_client = CycladesClient(cyclades_url, token)
        self.cyclades_client.CONNECTION_RETRY_LIMIT = 2

        image_url = \
            astakos_client.get_service_endpoints('image')['publicURL']
        self.logger.debug("Images API url is %s" % _green(image_url))
        self.image_client = ImageClient(image_url, token)
        self.image_client.CONNECTION_RETRY_LIMIT = 2

        compute_url = \
            astakos_client.get_service_endpoints('compute')['publicURL']
        self.logger.debug("Compute API url is %s" % _green(compute_url))
        self.compute_client = ComputeClient(compute_url, token)
        self.compute_client.CONNECTION_RETRY_LIMIT = 2

    def _wait_transition(self, server_id, current_status, new_status):
        """Wait for server to go from current_status to new_status"""
        self.logger.debug("Waiting for server to become %s" % new_status)
        timeout = self.config.getint('Global', 'build_timeout')
        sleep_time = 5
        while True:
            server = self.cyclades_client.get_server_details(server_id)
            if server['status'] == new_status:
                return server
            elif timeout < 0:
                self.logger.error(
                    "Waiting for server to become %s timed out" % new_status)
                self.destroy_server(False)
                sys.exit(-1)
            elif server['status'] == current_status:
                # Sleep for sleep_time secs and continue
                timeout = timeout - sleep_time
                time.sleep(sleep_time)
            else:
                self.logger.error(
                    "Server failed with status %s" % server['status'])
                self.destroy_server(False)
                sys.exit(-1)

    @_check_kamaki
    def destroy_server(self, wait=True):
        """Destroy slave server"""
        server_id = int(self.read_temp_config('server_id'))
        self.logger.info("Destroying server with id %s" % server_id)
        self.cyclades_client.delete_server(server_id)
        if wait:
            self._wait_transition(server_id, "ACTIVE", "DELETED")

    @_check_kamaki
    def create_server(self, image_id=None, flavor_name=None, ssh_keys=None):
        """Create slave server"""
        self.logger.info("Create a new server..")

        # Find a build_id to use
        if self.build_id is None:
            # If build_id is given use it, else
            # find a unique build_id to use
            ids = self.temp_config.sections()
            if ids:
                max_id = int(max(self.temp_config.sections(), key=int))
                self.build_id = max_id + 1
            else:
                self.build_id = 1
        self.logger.debug("New build id \"%s\" was created"
                          % _green(self.build_id))

        # Find an image to use
        if image_id is None:
            image = self._find_image()
            self.logger.debug("Will use image \"%s\"" % _green(image['name']))
            image_id = image["id"]
        self.logger.debug("Image has id %s" % _green(image_id))
        # Find a flavor to use
        flavor_id = self._find_flavor(flavor_name)
        server = self.cyclades_client.create_server(
            self.config.get('Deployment', 'server_name'),
            flavor_id,
            image_id)
        server_id = server['id']
        self.write_temp_config('server_id', server_id)
        self.logger.debug("Server got id %s" % _green(server_id))
        server_user = server['metadata']['users']
        self.write_temp_config('server_user', server_user)
        self.logger.debug("Server's admin user is %s" % _green(server_user))
        server_passwd = server['adminPass']
        self.write_temp_config('server_passwd', server_passwd)

        server = self._wait_transition(server_id, "BUILD", "ACTIVE")
        self._get_server_ip_and_port(server)
        self._copy_ssh_keys(ssh_keys)

        self.setup_fabric()
        self.logger.info("Setup firewall")
        accept_ssh_from = self.config.get('Global', 'accept_ssh_from')
        if accept_ssh_from != "":
            self.logger.debug("Block ssh except from %s" % accept_ssh_from)
            cmd = """
                local_ip=$(/sbin/ifconfig eth0 | grep 'inet addr:' | \
                    cut -d':' -f2 | cut -d' ' -f1)
                iptables -A INPUT -s localhost -j ACCEPT
                iptables -A INPUT -s $local_ip -j ACCEPT
                iptables -A INPUT -s {0} -p tcp --dport 22 -j ACCEPT
                iptables -A INPUT -p tcp --dport 22 -j DROP
            """.format(accept_ssh_from)
            _run(cmd, False)

    def _find_flavor(self, flavor_name):
        """Given a flavor_name (regular expression) find a flavor id to use"""
        # Get a list of flavor names from config file
        flavor_names = self.config.get('Deployment', 'flavor_name').split(",")
        if flavor_name is not None:
            # If we have a flavor_name to use, add it to our list
            flavor_names.insert(0, flavor_name)

        flavors = self.compute_client.list_flavors()
        for flname in flavor_names:
            sflname = flname.strip()
            self.logger.debug("Try to find a flavor with name \"%s\"" % sflname)
            fls = [f for f in flavors
                   if re.search(sflname, f['name']) is not None]
            if fls:
                self.logger.debug("Will use %s with id %s"
                                  % (fls[0]['name'], fls[0]['id']))
                return fls[0]['id']

        self.logger.error("No matching flavor found.. aborting")
        sys.exit(1)

    def _find_image(self):
        """Find a suitable image to use

        It has to belong to one of the `DEFAULT_SYSTEM_IMAGES_UUID'
        users and contain the word given by `image_name' option.
        """
        image_name = self.config.get('Deployment', 'image_name').lower()
        images = self.image_client.list_public(detail=True)['images']
        # Select images owned by one of the `DEFAULT_SYSTEM_IMAGES_UUID' users
        images = [x for x in images
                  if x['user_id'] in DEFAULT_SYSTEM_IMAGES_UUID]
        # Select images with `image_name' in their names
        images = [x for x in images
                  if x['name'].lower().find(image_name) != -1]
        # Let's select the first one
        return images[0]

    def _get_server_ip_and_port(self, server):
        """Compute server's IPv4 and ssh port number"""
        self.logger.info("Get server connection details..")
        server_ip = server['attachments'][0]['ipv4']
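        # Added note: when running against the okeanos.io (testing) cloud the
        # server is reached through the gate.okeanos.io gateway instead of its
        # own address; the forwarded ssh port is derived from the last two
        # octets of the server's private IPv4 (10000 + 256 * C + D for A.B.C.D).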
        if ".okeanos.io" in self.cyclades_client.base_url:
            tmp1 = int(server_ip.split(".")[2])
            tmp2 = int(server_ip.split(".")[3])
            server_ip = "gate.okeanos.io"
            server_port = 10000 + tmp1 * 256 + tmp2
        else:
            server_port = 22
        self.write_temp_config('server_ip', server_ip)
        self.logger.debug("Server's IPv4 is %s" % _green(server_ip))
        self.write_temp_config('server_port', server_port)
        self.logger.debug("Server's ssh port is %s" % _green(server_port))
        self.logger.debug("Access server using \"ssh -X -p %s %s@%s\"" %
                          (server_port, server['metadata']['users'], server_ip))

    @_check_fabric
    def _copy_ssh_keys(self, ssh_keys):
        """Upload/Install ssh keys to server"""
        self.logger.debug("Check for authentication keys to use")
        if ssh_keys is None:
            ssh_keys = self.config.get("Deployment", "ssh_keys")

        if ssh_keys != "":
            self.logger.debug("Will use %s authentication keys file" % ssh_keys)
            keyfile = '/tmp/%s.pub' % fabric.env.user
            _run('mkdir -p ~/.ssh && chmod 700 ~/.ssh', False)
            if ssh_keys.startswith("http://") or \
                    ssh_keys.startswith("https://") or \
                    ssh_keys.startswith("ftp://"):
                cmd = """
                apt-get update
                apt-get install wget --yes
                wget {0} -O {1} --no-check-certificate
                """.format(ssh_keys, keyfile)
                _run(cmd, False)
            elif os.path.exists(ssh_keys):
                _put(ssh_keys, keyfile)
            else:
                self.logger.debug("No ssh keys found")
            _run('cat %s >> ~/.ssh/authorized_keys' % keyfile, False)
            _run('rm %s' % keyfile, False)
            self.logger.debug("Uploaded ssh authorized keys")
        else:
            self.logger.debug("No ssh keys found")

    def write_temp_config(self, option, value):
        """Write changes back to config file"""
        # If build_id section doesn't exist create a new one
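        # Added note: the recursive write_temp_config("created", ...) call
        # below hits the DuplicateSectionError branch on re-entry, so the
        # section is created exactly once and stamped with its creation time.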
        try:
            self.temp_config.add_section(str(self.build_id))
            creation_time = time.strftime("%a, %d %b %Y %X", time.localtime())
            self.write_temp_config("created", creation_time)
        except DuplicateSectionError:
            pass
        self.temp_config.set(str(self.build_id), option, str(value))
        curr_time = time.strftime("%a, %d %b %Y %X", time.localtime())
        self.temp_config.set(str(self.build_id), "modified", curr_time)
        temp_conf_file = self.config.get('Global', 'temporary_config')
        with open(temp_conf_file, 'wb') as tcf:
            self.temp_config.write(tcf)

    def read_temp_config(self, option):
        """Read from temporary_config file"""
        # If build_id is None use the latest one
        if self.build_id is None:
            ids = self.temp_config.sections()
            if ids:
                self.build_id = int(ids[-1])
            else:
                self.logger.error("No sections in temporary config file")
                sys.exit(1)
            self.logger.debug("Will use \"%s\" as build id"
                              % _green(self.build_id))
        # Read specified option
        return self.temp_config.get(str(self.build_id), option)

    def setup_fabric(self):
        """Setup fabric environment"""
        self.logger.info("Setup fabric parameters..")
        fabric.env.user = self.read_temp_config('server_user')
        fabric.env.host_string = self.read_temp_config('server_ip')
        fabric.env.port = int(self.read_temp_config('server_port'))
        fabric.env.password = self.read_temp_config('server_passwd')
        fabric.env.connection_attempts = 10
        fabric.env.shell = "/bin/bash -c"
        fabric.env.disable_known_hosts = True
        fabric.env.output_prefix = None

    def _check_hash_sum(self, localfile, remotefile):
        """Check hash sums of two files"""
        self.logger.debug("Check hash sum for local file %s" % localfile)
        hash1 = os.popen("sha256sum %s" % localfile).read().split(' ')[0]
        self.logger.debug("Local file has sha256 hash %s" % hash1)
        self.logger.debug("Check hash sum for remote file %s" % remotefile)
        hash2 = _run("sha256sum %s" % remotefile, False)
        hash2 = hash2.split(' ')[0]
        self.logger.debug("Remote file has sha256 hash %s" % hash2)
        if hash1 != hash2:
            self.logger.error("Hashes differ.. aborting")
            sys.exit(-1)

    @_check_fabric
    def clone_repo(self, local_repo=False):
        """Clone Synnefo repo from slave server"""
        self.logger.info("Configure repositories on remote server..")
        self.logger.debug("Setup apt. Install curl, git and firefox")
        cmd = """
        echo 'APT::Install-Suggests "false";' >> /etc/apt/apt.conf
        apt-get update
        apt-get install curl git iceweasel --yes
        echo -e "\n\ndeb {0}" >> /etc/apt/sources.list
        curl https://dev.grnet.gr/files/apt-grnetdev.pub | apt-key add -
        apt-get update
        git config --global user.name {1}
        git config --global user.email {2}
        """.format(self.config.get('Global', 'apt_repo'),
                   self.config.get('Global', 'git_config_name'),
                   self.config.get('Global', 'git_config_mail'))
        _run(cmd, False)

        # Find synnefo_repo and synnefo_branch to use
        synnefo_repo = self.config.get('Global', 'synnefo_repo')
        synnefo_branch = self.config.get("Global", "synnefo_branch")
        if synnefo_branch == "":
            synnefo_branch = \
                subprocess.Popen(
                    ["git", "rev-parse", "--abbrev-ref", "HEAD"],
                    stdout=subprocess.PIPE).communicate()[0].strip()
            if synnefo_branch == "HEAD":
                synnefo_branch = \
                    subprocess.Popen(
                        ["git", "rev-parse", "--short", "HEAD"],
                        stdout=subprocess.PIPE).communicate()[0].strip()
        self.logger.info("Will use branch %s" % synnefo_branch)

        if local_repo or synnefo_branch == "":
            # Use local_repo
            self.logger.debug("Push local repo to server")
            # First create the remote repo
            _run("git init synnefo", False)
            # Then push our local repo over ssh.
            # We have to pass some arguments to the ssh command,
            # namely to disable host key checking.
            (temp_ssh_file_handle, temp_ssh_file) = tempfile.mkstemp()
            os.close(temp_ssh_file_handle)
            cmd = """
            echo 'exec ssh -o "StrictHostKeyChecking no" \
                           -o "UserKnownHostsFile /dev/null" \
                           -q "$@"' > {4}
            chmod u+x {4}
            export GIT_SSH="{4}"
            echo "{0}" | git push --mirror ssh://{1}@{2}:{3}/~/synnefo
            rm -f {4}
            """.format(fabric.env.password,
                       fabric.env.user,
                       fabric.env.host_string,
                       fabric.env.port,
                       temp_ssh_file)
            os.system(cmd)
        else:
            # Clone Synnefo from remote repo.
            # Currently cloning synnefo can fail unexpectedly
            cloned = False
            for i in range(10):
                self.logger.debug("Clone synnefo from %s" % synnefo_repo)
                try:
                    _run("git clone %s synnefo" % synnefo_repo, False)
                    cloned = True
                    break
                except BaseException:
                    self.logger.warning(
                        "Cloning synnefo failed.. retrying %s" % i)
            if not cloned:
                self.logger.error("Cannot clone Synnefo repo.")
                sys.exit(-1)

        # Checkout the desired synnefo_branch
        self.logger.debug("Checkout \"%s\" branch/commit" % synnefo_branch)
        cmd = """
        cd synnefo
        for branch in `git branch -a | grep remotes | \
                grep -v HEAD | grep -v master`; do
            git branch --track ${branch##*/} $branch
        done
        git checkout %s
        """ % (synnefo_branch)
        _run(cmd, False)

    @_check_fabric
    def build_synnefo(self):
        """Build Synnefo packages"""
        self.logger.info("Build Synnefo packages..")
        self.logger.debug("Install development packages")
        cmd = """
        apt-get update
        apt-get install zlib1g-dev dpkg-dev debhelper git-buildpackage \
                python-dev python-all python-pip --yes
        pip install devflow
        """
        _run(cmd, False)

        if self.config.get('Global', 'patch_pydist') == "True":
            self.logger.debug("Patch pydist.py module")
            cmd = r"""
            sed -r -i 's/(\(\?P<name>\[A-Za-z\]\[A-Za-z0-9_\.)/\1\\\-/' \
                /usr/share/python/debpython/pydist.py
            """
            _run(cmd, False)

        # Build synnefo packages
        self.logger.debug("Build synnefo packages")
        cmd = """
        devflow-autopkg snapshot -b ~/synnefo_build-area --no-sign
        """
        with fabric.cd("synnefo"):
            _run(cmd, True)

        # Install snf-deploy package
        self.logger.debug("Install snf-deploy package")
        cmd = """
        dpkg -i snf-deploy*.deb
        apt-get -f install --yes
        """
        with fabric.cd("synnefo_build-area"):
            with fabric.settings(warn_only=True):
                _run(cmd, True)

        # Setup synnefo packages for snf-deploy
        self.logger.debug("Copy synnefo debs to snf-deploy packages dir")
        cmd = """
        cp ~/synnefo_build-area/*.deb /var/lib/snf-deploy/packages/
        """
        _run(cmd, False)

    @_check_fabric
    def build_documentation(self):
        """Build Synnefo documentation"""
        self.logger.info("Build Synnefo documentation..")
        _run("pip install -U Sphinx", False)
        with fabric.cd("synnefo"):
            _run("devflow-update-version; "
                 "./ci/make_docs.sh synnefo_documentation", False)

    def fetch_documentation(self, dest=None):
        """Fetch Synnefo documentation"""
        self.logger.info("Fetch Synnefo documentation..")
        if dest is None:
            dest = "synnefo_documentation"
        dest = os.path.abspath(dest)
        if not os.path.exists(dest):
            os.makedirs(dest)
        self.fetch_compressed("synnefo/synnefo_documentation", dest)
        self.logger.info("Downloaded documentation to %s" %
                         _green(dest))

    @_check_fabric
    def deploy_synnefo(self, schema=None):
        """Deploy Synnefo using snf-deploy"""
        self.logger.info("Deploy Synnefo..")
        if schema is None:
            schema = self.config.get('Global', 'schema')
        self.logger.debug("Will use %s schema" % schema)

        schema_dir = os.path.join(self.ci_dir, "schemas/%s" % schema)
        if not (os.path.exists(schema_dir) and os.path.isdir(schema_dir)):
            raise ValueError("Unknown schema: %s" % schema)

        self.logger.debug("Upload schema files to server")
        _put(os.path.join(schema_dir, "*"), "/etc/snf-deploy/")

        self.logger.debug("Change password in nodes.conf file")
        cmd = """
        sed -i 's/^password =.*/password = {0}/' /etc/snf-deploy/nodes.conf
        """.format(fabric.env.password)
        _run(cmd, False)

        self.logger.debug("Run snf-deploy")
        cmd = """
        snf-deploy --disable-colors --autoconf all
        """
        _run(cmd, True)

    @_check_fabric
    def unit_test(self):
        """Run Synnefo unit test suite"""
        self.logger.info("Run Synnefo unit test suite")
        component = self.config.get('Unit Tests', 'component')

        self.logger.debug("Install needed packages")
        cmd = """
        pip install mock
        pip install factory_boy
        """
        _run(cmd, False)

        self.logger.debug("Upload tests.sh file")
        unit_tests_file = os.path.join(self.ci_dir, "tests.sh")
        _put(unit_tests_file, ".")

        self.logger.debug("Run unit tests")
        cmd = """
        bash tests.sh {0}
        """.format(component)
        _run(cmd, True)

    @_check_fabric
    def run_burnin(self):
        """Run burnin functional test suite"""
        self.logger.info("Run Burnin functional test suite")
        cmd = """
        auth_url=$(grep -e '^url =' .kamakirc | cut -d' ' -f3)
        token=$(grep -e '^token =' .kamakirc | cut -d' ' -f3)
        images_user=$(kamaki image list -l | grep owner | \
                      cut -d':' -f2 | tr -d ' ')
        snf-burnin --auth-url=$auth_url --token=$token \
            --force-flavor=2 --image-id=all \
            --system-images-user=$images_user \
            {0}
        log_folder=$(ls -1d /var/log/burnin/* | tail -n1)
        for i in $(ls $log_folder/*/details*); do
            echo -e "\\n\\n"
            echo -e "***** $i\\n"
            cat $i
        done
        """.format(self.config.get('Burnin', 'cmd_options'))
        _run(cmd, True)

    @_check_fabric
    def fetch_compressed(self, src, dest=None):
        """Create a tarball and fetch it locally"""
        self.logger.debug("Creating tarball of %s" % src)
        basename = os.path.basename(src)
        tar_file = basename + ".tgz"
        cmd = "tar czf %s %s" % (tar_file, src)
        _run(cmd, False)
        if not os.path.exists(dest):
            os.makedirs(dest)

        tmp_dir = tempfile.mkdtemp()
        fabric.get(tar_file, tmp_dir)

        dest_file = os.path.join(tmp_dir, tar_file)
        self._check_hash_sum(dest_file, tar_file)
        self.logger.debug("Untar packages file %s" % dest_file)
        cmd = """
        cd %s
        tar xzf %s
        cp -r %s/* %s
        rm -r %s
        """ % (tmp_dir, tar_file, src, dest, tmp_dir)
        os.system(cmd)
        self.logger.info("Downloaded %s to %s" %
                         (src, _green(dest)))

    @_check_fabric
    def fetch_packages(self, dest=None):
        """Fetch Synnefo packages"""
        if dest is None:
            dest = self.config.get('Global', 'pkgs_dir')
        dest = os.path.abspath(dest)
        if not os.path.exists(dest):
            os.makedirs(dest)
        self.fetch_compressed("synnefo_build-area", dest)
        self.logger.info("Downloaded debian packages to %s" %
                         _green(dest))