Commit 3437b725 authored by sguazt

Forgot to add OpenStack module to the repo

parent de0cef40
__author__ = "Davide Monfrecola, Stefano Garione, Giorgio Gambino, Luca Banzato"
__copyright__ = "Copyright (C) 2019"
__credits__ = ["Andrea Lombardo", "Irene Lovotti"]
__license__ = "GPL v3"
__version__ = "0.10.0"
__maintainer__ = "Luca Banzato"
__email__ = "20005492@studenti.uniupo.it"
__status__ = "Prototype"
"""
OpenStack Agent Actions Implementation
"""
__author__ = "Davide Monfrecola, Stefano Garione, Giorgio Gambino, Luca Banzato"
__copyright__ = "Copyright (C) 2019"
__credits__ = ["Andrea Lombardo", "Irene Lovotti"]
__license__ = "GPL v3"
__version__ = "0.10.0"
__maintainer__ = "Luca Banzato"
__email__ = "20005492@studenti.uniupo.it"
__status__ = "Prototype"
import logging
import time
import traceback
class OpenStackAgentActions:
def clone_instance(self, instance_id):
"""
Clone an instance if it hasn't been already cloned
Args:
self (MetaManager): The platform manager object
instance_id (str): The id of the instance to clone
"""
if OpenStackAgentActions.is_clonable(self, instance_id):
logging.debug("Cloning instance " + instance_id + "...")
try:
instance = self.os_client.ex_get_node_details(instance_id)
# Clone only if the instance is running
if self.conf.demo_reservation_id is None or self.conf.demo_reservation_id == "":
logging.error("No reservation id available for cloning " + instance_id + ". Clonation aborted!")
elif instance.state == "running":
instance_clone = self.os_client.create_node(name=instance.name + "-clone-" + str(int(time.time())),
image=self.os_client.get_image(instance.extra["imageId"]),
size=self.os_client.ex_get_size(instance.extra["flavorId"]),
ex_keyname=instance.extra["key_name"],
ex_security_groups=self.os_client.ex_get_node_security_groups(instance),
ex_scheduler_hints={"reservation": self.conf.demo_reservation_id})
if instance_clone is None:
logging.error("An error has occurred while cloning the instance " + instance_id + "!")
else:
self.cloned_instances.append(instance_id)
logging.debug("Instance " + instance_id + " cloned successfully!")
else:
logging.debug("The instance" + instance_id + " is not in a running state (currently: " + instance.state + "). Clonation aborted!")
except Exception as e:
logging.error("An error has occurred while cloning the instance " + instance_id + ": " + str(e))
traceback.print_exc()
else:
logging.debug("The " + instance_id + " has already been cloned!")
def is_clonable(self, instance_id):
"""
Check whether the VM corresponding to the provided instance_id
can still be cloned (i.e., it has not been cloned yet)
Args:
self (MetaManager): The platform manager object
instance_id (str): An instance id
Returns:
bool: True if the instance can still be cloned (i.e., it has not been cloned yet), False otherwise
"""
return instance_id not in self.cloned_instances
def alarm(self, instance_id):
"""
Trigger an alarm (reported in logs/cloudtui-fts.log)
Args:
self (MetaManager): The platform manager object
instance_id (str): An instance id
"""
logging.debug("Alarm triggered for instance " + instance_id)
"""
OpenStack configuration manager
"""
__author__ = "Davide Monfrecola, Stefano Garione, Giorgio Gambino, Luca Banzato"
__copyright__ = "Copyright (C) 2019"
__credits__ = ["Andrea Lombardo", "Irene Lovotti"]
__license__ = "GPL v3"
__version__ = "0.10.0"
__maintainer__ = "Luca Banzato"
__email__ = "20005492@studenti.uniupo.it"
__status__ = "Prototype"
import logging
from core.metaconfmanager import MetaConfManager
class OpenStackConfManager(MetaConfManager):
def __init__(self):
"""
Init method (object initialization)
"""
super().__init__("openstack_libcloud")
def read_login_data(self):
"""
Read login data from settings.cfg
"""
# URL regex taken from http://www.noah.org/wiki/RegEx_Python#URL_regex_pattern
self.os_auth_url = self.get_parameter("openstack", "os_auth_url",
return_type=str,
regex="http[s]?://(?:[a-zA-Z]|[0-9]|[$-_@.&+]|[!*\\(\\),]|(?:%[0-9a-fA-F][0-9a-fA-F]))+")
self.os_username = self.get_parameter("openstack", "os_username", return_type=str)
self.os_password = self.get_parameter("openstack", "os_password", return_type=str)
self.os_project_name = self.get_parameter("openstack", "os_project_name", return_type=str)
self.os_project_id = self.get_parameter("openstack", "os_project_id", return_type=str)
self.os_region = self.get_parameter("openstack", "os_region", return_type=str)
self.os_telemetry_metering = self.get_parameter("openstack", "os_telemetry_metering", return_type=str)
logging.debug("OpenStack login data read")
def read_platform_options(self):
"""
Read platform-specific options from settings.cfg
"""
self.demo_reservation_id = self.get_parameter("re_demo", "demo_reservation_id", return_type=str)
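For reference, the section and key names used above map to entries in settings.cfg. A possible layout is sketched below; all values are placeholders, and the INI-style format is an assumption based on the .cfg extension and the (section, key) pairs passed to get_parameter (the actual file handling is done by MetaConfManager in the EasyCloud core).

[openstack]
os_auth_url = https://keystone.example.org:5000/v3
os_username = demo
os_password = secret
os_project_name = demo-project
os_project_id = 0123456789abcdef0123456789abcdef
os_region = RegionOne
# either 'gnocchi' (default) or 'ceilometerclient'
os_telemetry_metering = gnocchi

[re_demo]
demo_reservation_id = r-00000000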
[manifest]
# Name of the platform
platform_name = OpenStack (Libcloud)
# Package name (name of the directory in which module files are stored)
# Do not edit unless you know what you are doing
package_name = openstack_libcloud
# Name of the manager class of this module
# Do not edit unless you know what you are doing
manager_name = OpenStack
# Library dependencies
dependencies = apache-libcloud:git+https://github.com/subwave07/libcloud.git@trunk,
python-ceilometerclient, gnocchiclient, python-blazarclient, pytz, tzlocal
# Author
author = Luca Banzato
# Email
email = 20005492@studenti.uniupo.it
# Version
module_version = 0.10.0
# Release date
release_date = February 09, 2019
# License
license = GPL v3
"""
OpenStack monitor implementation (using Ceilometer/Gnocchi API)
"""
__author__ = "Davide Monfrecola, Stefano Garione, Giorgio Gambino, Luca Banzato"
__copyright__ = "Copyright (C) 2019"
__credits__ = ["Andrea Lombardo", "Irene Lovotti"]
__license__ = "GPL v3"
__version__ = "0.10.0"
__maintainer__ = "Luca Banzato"
__email__ = "20005492@studenti.uniupo.it"
__status__ = "Prototype"
import datetime
import logging
import pytz
from core.metamonitor import MetaMonitor
#from keystoneauth1 import exceptions as ks_exception
from keystoneauth1 import loading as ks_loading
#from keystoneauth1 import session as ks_session
#from keystoneclient.v3 import client as ks_client_v3
from tzlocal import get_localzone
class OpenStackMonitor(MetaMonitor):
def __init__(self, conf, commands_queue, measurements_queue):
"""
Init method
Args:
conf (MetaConfManager): a configuration manager holding all the settings
for the Monitor
commands_queue (Queue): message queue for communicating with the main
thread and receiving commands regarding the metrics
to observe
measurements_queue (Queue): message queue for sending measurements to
the platform RuleEngine
"""
super().__init__(conf, commands_queue, measurements_queue)
if conf.os_telemetry_metering == 'ceilometerclient':
# pass the base-class message builder so that drivers emit uniform samples
self.client_ = OpenStackCeilometerClientDriver(conf, self._build_message)
else: # == 'gnocchi' [default]
self.client_ = OpenStackGnocchiClientDriver(conf, self._build_message)
self._bind_generic_metric_to_getter(name="cpu_load",
function=self.client_.get_cpu_load_measures)
self._bind_generic_metric_to_getter(name="memory_free",
function=self.client_.get_memory_free_measures)
self._bind_generic_metric_to_getter(name="memory_used",
function=self.client_.get_memory_used_measures)
logging.debug("[OPENSTACK CLOUD MONITOR GETTERS]: " + str(self._metrics_getters))
def connect(self):
"""
Connect to the telemetry service (Ceilometer or Gnocchi) and initialize its client object
"""
self.client_.connect()
def _get_metric_values(self, instance_id, metric, granularity, limit):
"""
Telemetry measurements getter: it delegates to the configured driver, which converts values into generic samples through _build_message.
Args:
instance_id (str): The id of the instance to fetch the measurements
metric (str): The *specific* metric name to get the values
granularity (int): The granularity of the measurements fetched, expressed in seconds
limit (int): The maximum number of measurements returned
Returns:
dict: A structure containing all the measurements for a metric
"""
logging.debug("Fetching metric \"" + metric + "\" with granularity=" + str(granularity) + " and limit=" + str(limit))
samples = []
try:
samples = self.client_.get_metric_values(instance_id=instance_id,
metric=metric,
granularity=granularity,
limit=limit)
logging.debug("MEASUREMENTS: " + str(samples))
except Exception as exception:
samples.append(self._error_sample(exception))
logging.error("There was an error while fetching measurements:\n" +
str(type(exception)) + " - " + str(exception) +
". Please note that this is normal if you recently created a new instance.")
return samples
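The monitor is wired together as follows: the constructor picks the telemetry driver according to os_telemetry_metering and binds the generic metric names (cpu_load, memory_free, memory_used) to the driver's getters; connect() then authenticates against Keystone. A minimal wiring sketch (not part of the module), assuming OpenStackConfManager and OpenStackMonitor are importable from this package and that the two queues are normally provided by the EasyCloud core:

from queue import Queue

conf = OpenStackConfManager()            # reads the [openstack] section of settings.cfg
commands_queue = Queue()                 # metric commands coming from the main thread
measurements_queue = Queue()             # samples consumed by the platform RuleEngine
monitor = OpenStackMonitor(conf, commands_queue, measurements_queue)
monitor.connect()                        # builds the Ceilometer or Gnocchi client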
class OpenStackCeilometerClientDriver():
"""
OpenStack python-ceilometerclient driver.
Note:
python-ceilometerclient is no longer supported and has been removed from OpenStack since the
Queens release.
References:
- https://docs.openstack.org/ceilometer/latest/
- https://docs.openstack.org/python-ceilometerclient/latest/api.html
- https://docs.openstack.org/ceilometer/pike/webapi/v2.html
"""
def __init__(self, conf, message_builder):
"""
Init method
Args:
conf (MetaConfManager): a configuration manager holding all the settings
for the Monitor
message_builder (function): function used for formatting metric values
"""
# Lazy import: load python-ceilometerclient only when this driver is actually selected
global ceilometerclient
import ceilometerclient.client
self.conf_ = conf
self.message_builder_ = message_builder
self.client_ = None
def connect(self):
"""
Connect to Ceilometer and initialize its client object
"""
loader = ks_loading.get_plugin_loader('password')
auth = loader.load_from_options(auth_url=self.conf_.os_auth_url,
username=self.conf_.os_username,
password=self.conf_.os_password,
project_id=self.conf_.os_project_id,
user_domain_name="default")
#sess = ks_session.Session(auth=_auth, verify=False)
self.client_ = ceilometerclient.client.get_client("2", # v2 API
#session=sess)
auth=auth,
region_name=self.conf_.os_region)
# For the complete metrics list available for each instance, please use the
# res_viewer.py script available in the module main directory, after
# filling all the required authentication parameters
# You can also look here:
# https://docs.openstack.org/ceilometer/latest/admin/telemetry-measurements.html
def get_cpu_load_measures(self, instance_id, granularity, limit):
"""
Returns samples for the CPU load metric.
Args:
instance_id (str): The id of the instance to fetch the measurements
granularity (int): The granularity of the measurements fetched, expressed in seconds
limit (int): The maximum number of measurements returned
Returns:
dict: A structure containing all the measurements regarding CPU Load
"""
(start_time, end_time) = get_time_window(granularity, limit)
return self.get_measures(instance_id=instance_id,
metric="cpu_util",
start_time=start_time,
end_time=end_time,
limit=limit)
def get_memory_free_measures(self, instance_id, granularity, limit):
"""
Returns samples for the memory free metric.
Args:
instance_id (str): The id of the instance to fetch the measurements
granularity (int): The granularity of the measurements fetched, expressed in seconds
limit (int): The maximum number of measurements returned
Returns:
dict: A structure containing all the measurements regarding Memory Free
"""
# Note: Ceilometer has no 'memory free' meter, so it is derived as total memory ("memory") minus used memory ("memory.usage")
(start_time, end_time) = get_time_window(granularity, limit)
mem_tots = self.get_measures(instance_id=instance_id,
metric="memory",
start_time=start_time,
end_time=end_time,
limit=limit)
mem_uses = self.get_measures(instance_id=instance_id,
metric="memory.usage",
start_time=start_time,
end_time=end_time,
limit=limit,)
# Trim the longer sample list to the length of the shorter
# FIXME: we should also consider the value of timestamp
# For instance, mem_uses may stop with older timestamps than mem_tots.
if len(mem_tots) > len(mem_uses):
mem_tots = mem_tots[-len(mem_uses):]
elif len(mem_tots) < len(mem_uses):
mem_uses = mem_uses[-len(mem_tots):]
samples = []
for i in range(0, len(mem_tots)):
samples.append(self.message_builder_(timestamp=(mem_tots[i].timestamp+mem_uses[i].timestamp)/2, # take the avg timestamp
value=mem_tots[i].value-mem_uses[i].value,
unit=mem_tots[i].unit))
return samples
def get_memory_used_measures(self, instance_id, granularity, limit):
"""
Returns samples for the memory used metric.
Args:
instance_id (str): The id of the instance to fetch the measurements
granularity (int): The granularity of the measurements fetched, expressed in seconds
limit (int): The maximum number of measurements returned
Returns:
dict: A structure containing all the measurements regarding Memory Used
"""
(start_time, end_time) = get_time_window(granularity, limit)
return self.get_measures(instance_id=instance_id,
metric="memory.usage",
start_time=start_time,
end_time=end_time,
limit=limit)
def get_measures(self, instance_id, metric, start_time, end_time, limit):
"""
Returns samples related to the given metric.
Args:
instance_id (str): The id of the instance to fetch the measurements
metric (str): The *specific* metric name to get the values
start_time (datetime): The starting time of the first sample
end_time (datetime): The ending time of the last sample
limit (int): The maximum number of measurements returned
Returns:
dict: A structure containing all the measurements for a metric
"""
logging.debug("Ceilometer -- Fetching metric \"" + metric + "\" with granularity=" + str(granularity) + " and limit=" + str(limit))
logging.debug("Instance ID: " + str(instance_id))
# logging.debug("FETCHING METRIC " + metric + ": " + str(instance_resources))
query = [
dict(field='resource_id', op='eq', value=instance_id),
dict(field='timestamp', op='ge', value=start_time.strftime("%Y-%m-%dT%H:%M:%S")),
dict(field='timestamp', op='le', value=end_time.strftime("%Y-%m-%dT%H:%M:%S"))
]
random_measurements_values = self.client_.samples.list(meter_name=metric,
q=query,
limit=limit)
# Sort all the data (sometimes data can be unordered)
random_measurements_values.sort(key=lambda x: x.timestamp)
# Extract the most recent limit-values
measurements_values = random_measurements_values[-limit:]
# Add the measurements to the list that will be returned
samples = []
for value in measurements_values:
samples.append(self.message_builder_(timestamp=value.timestamp,
value=value.volume,
unit=value.unit))
return samples
class OpenStackGnocchiClientDriver():
"""
OpenStack Gnocchi driver.
References:
- https://github.com/gnocchixyz/python-gnocchiclient
- https://gnocchi.osci.io/index.html
"""
def __init__(self, conf, message_builder):
"""
Init method
Args:
conf (MetaConfManager): a configuration manager holding all the settings
for the Monitor
message_builder (function): function used for formatting metric values
"""
# Lazy import: load gnocchiclient only when this driver is actually selected
global gnocchiclient
import gnocchiclient.client
self.conf_ = conf
self.message_builder_ = message_builder
self.client_ = None
def connect(self):
"""
Connect to Gnocchi and initialize its client object
"""
loader = ks_loading.get_plugin_loader('password')
auth = loader.load_from_options(auth_url=self.conf_.os_auth_url,
username=self.conf_.os_username,
password=self.conf_.os_password,
project_id=self.conf_.os_project_id,
user_domain_name="default")
self.client_ = gnocchiclient.client.Client("1",
adapter_options={
"region_name": self.conf_.os_region},
session_options={"auth": auth})
# For the complete metrics list available for each instance, please use the
# res_viewer.py script available in the module main directory, after
# filling all the required authentication parameters
# You can also look here:
# https://docs.openstack.org/ceilometer/latest/admin/telemetry-measurements.html
def get_cpu_load_measures(self, instance_id, granularity, limit):
"""
Returns samples for the CPU load metric.
Args:
instance_id (str): The id of the instance to fetch the measurements
granularity (int): The granularity of the measurements fetched, expressed in seconds
limit (int): The maximum number of measurements returned
Returns:
dict: A structure containing all the measurements regarding CPU Load
"""
(start_time, end_time) = get_time_window(granularity, limit)
return self.get_measures(instance_id=instance_id,
metric="load@load",
start_time=start_time,
end_time=end_time,
limit=limit)
def get_memory_free_measures(self, instance_id, granularity, limit):
"""
Returns samples for the memory free metric.
Args:
instance_id (str): The id of the instance to fetch the measurements
granularity (int): The granularity of the measurements fetched, expressed in seconds
limit (int): The maximum number of measurements returned
Returns:
dict: A structure containing all the measurements regarding Memory Free
"""
(start_time, end_time) = get_time_window(granularity, limit)
return self.get_measures(instance_id=instance_id,
metric="memory@memory.free",
start_time=start_time,
end_time=end_time,
limit=limit)
def get_memory_used_measures(self, instance_id, granularity, limit):
"""
Returns samples for the memory used metric.
Args:
instance_id (str): The id of the instance to fetch the measurements
granularity (int): The granularity of the measurements fetched, expressed in seconds
limit (int): The maximum number of measurements returned
Returns:
dict: A structure containing all the measurements regarding Memory Used
"""
(start_time, end_time) = get_time_window(granularity, limit)
return self.get_measures(instance_id=instance_id,
metric="memory@memory.used",
start_time=start_time,
end_time=end_time,
limit=limit)
def get_measures(self, instance_id, metric, start_time, end_time, limit):
"""
Returns samples related to the given metric.
Args:
instance_id (str): The id of the instance to fetch the measurements
metric (str): The *specific* metric name to get the values
start_time (datetime): The starting time of the first sample
end_time (datetime): The ending time of the last sample
limit (int): The maximum number of measurements returned
Returns:
dict: A structure containing all the measurements for a metric
"""
logging.debug("Gnocchi -- Fetching metric \"" + metric + "\" with granularity=" + str(granularity) + " and limit=" + str(limit))
logging.debug("Instance ID: " + str(instance_id))
# logging.debug("FETCHING METRIC " + metric + ": " + str(instance_resources))
samples = []
instance_resources = self.client_.resource.get("generic", instance_id)
logging.debug("Instance ID: " + str(instance_id))
# logging.debug("FETCHING METRIC " + metric + ": " + str(instance_resources))
random_measurements_values = self.client_.metric.get_measures(instance_resources["metrics"][metric],
start=start_time_utc,
end=end_time_utc,
granularity=granularity)
# Sort all the data (sometimes data can be unordered)
random_measurements_values.sort(key=lambda x: x[0])
# Extract the most recent limit-values
measurements_values = random_measurements_values[-limit:]
metric_unit = self.client_.metric.get(instance_resources["metrics"][metric])["unit"]
# Add the measurements to the list that will be returned
for value in measurements_values:
samples.append(self.message_builder_(timestamp=value[0],
value=value[2],
unit=metric_unit))
return samples
def get_time_window(granularity, limit):
# Define time interval for retrieving metrics
now = datetime.datetime.now()
start_time_local = now - datetime.timedelta(seconds=(granularity * limit) + granularity * 2)
end_time_local = now
local_tz = get_localzone() # Detect current timezone
# Make the local times timezone-aware
# (is_dst=None makes pytz raise an error for ambiguous or non-existent times)
start_time_local = local_tz.localize(start_time_local, is_dst=None)
end_time_local = local_tz.localize(end_time_local, is_dst=None)
# Convert to UTC
start_time_utc = start_time_local.astimezone(pytz.utc)
end_time_utc = end_time_local.astimezone(pytz.utc)
return (start_time_utc, end_time_utc)
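As a quick illustration of the window computed above (illustrative values, not taken from the module): with granularity=300 seconds and limit=10 samples, the window spans 300*10 + 2*300 = 3600 seconds, i.e. the last hour, expressed in UTC.

start_utc, end_utc = get_time_window(granularity=300, limit=10)
print(end_utc - start_utc)   # 1:00:00, barring a DST transition inside the window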
"""
EasyCloud OpenStack Resources ID Viewer
"""
from gnocchiclient.v1 import client
from keystoneauth1 import loading
__author__ = "Davide Monfrecola, Stefano Garione, Giorgio Gambino, Luca Banzato"
__copyright__ = "Copyright (C) 2019"
__credits__ = ["Andrea Lombardo", "Irene Lovotti"]
__license__ = "GPL v3"
__version__ = "0.10.0"
__maintainer__ = "Luca Banzato"
__email__ = "20005492@studenti.uniupo.it"
__status__ = "Prototype"
############################
# To be filled by the user #
############################
AUTH_URL = ""
USERNAME = ""
PASSWORD = ""
PROJECT_ID = ""
REGION_NAME = ""
############################
print("\nOpenStack Resources ID Viewer\n")
# Authentication
_loader = loading.get_plugin_loader('password')
_auth = _loader.load_from_options(auth_url=AUTH_URL,
username=USERNAME,
password=PASSWORD,
project_id=PROJECT_ID,
user_domain_name="default")
gnocchi_client = client.Client(adapter_options={"region_name": REGION_NAME},
session_options={"auth": _auth})
# Instance ID request
instance_id = str(input("Insert the instance id: "))
try:
# Get resources for a specified instance (can raise an exception)
resources = gnocchi_client.resource.get("generic", instance_id)
# Print metrics if available
if "metrics" in resources:
metrics = resources["metrics"]
i = 0
print("\nMetrics available for instance " + instance_id + ":\n")
for key, value in metrics.items():
print(("ID: %-25s - Name: %-25s" % (value, key)))
print("")
else:
print("No metrics available for instance " + instance_id)
except Exception as e:
print("An error has occourred while fetching metrics details: " + str(e))