Skip to content
Snippets Groups Projects
Commit b6b169ef authored by Robotka István Adrián's avatar Robotka István Adrián
Browse files

Quickstart

parent e9895987
No related branches found
No related tags found
No related merge requests found
.*
venv
data
README*
...@@ -20,3 +20,6 @@ indent_size = 2 ...@@ -20,3 +20,6 @@ indent_size = 2
[*.sh] [*.sh]
max_line_length = 80 max_line_length = 80
indent_size = 2 indent_size = 2
[.gitlab-ci.yml]
max_line_length = none
/.idea/ /.idea/
/venv/ /venv/
/data/ data
image: "python:3.7" image: registry.kszk.bme.hu/baseimg/python-tools
stages: stages:
- Static Analysis - Static Analysis
- Docker build - Docker build
variables: variables:
CONTAINER_IMAGE: "registry.kszk.bme.hu/kszk/monitoring/generator:$CI_COMMIT_REF_NAME" CONTAINER_IMAGE: "registry.kszk.bme.hu/kszk/monitoring/pupak:$CI_COMMIT_REF_NAME"
before_script: before_script:
- python --version - python --version
...@@ -30,3 +30,5 @@ docker build: ...@@ -30,3 +30,5 @@ docker build:
script: script:
- echo "{\"auths\":{\"registry.kszk.bme.hu\":{\"username\":\"$CI_REG_USER\",\"password\":\"$CI_REG_PASS\"}}}" > /kaniko/.docker/services.json - echo "{\"auths\":{\"registry.kszk.bme.hu\":{\"username\":\"$CI_REG_USER\",\"password\":\"$CI_REG_PASS\"}}}" > /kaniko/.docker/services.json
- /kaniko/executor --context $CI_PROJECT_DIR --dockerfile $CI_PROJECT_DIR/Dockerfile --destination $CONTAINER_IMAGE - /kaniko/executor --context $CI_PROJECT_DIR --dockerfile $CI_PROJECT_DIR/Dockerfile --destination $CONTAINER_IMAGE
only:
- master
FROM python:slim

# git is needed at runtime: the generator shells out to
# `git rev-parse HEAD` inside the data repo for generation metadata.
# Clean the apt lists in the same layer to keep the image small.
RUN apt-get update \
 && apt-get -y install --no-install-recommends git \
 && rm -rf /var/lib/apt/lists/*

WORKDIR /usr/src/app

# Install dependencies before copying the source so this layer is
# cached across code-only changes.
COPY requirements.txt ./
RUN pip install --no-cache-dir -r requirements.txt

COPY . .

CMD [ "python", "./pupak.py", "/data" ]
# Pupák Config generator # Pupák config generator
Generates config files from custom data scheme and jinja2 templates. Generates config files from custom data scheme and jinja2 templates.
Uses yaml as data source. Uses yaml as data source.
......
build.py 0 → 100644
"""Fill YAML-jinja2 templates, prepare the final data-model"""
import logging
import yaml
from jinja2 import Template
from constants import APP_LOG_TAG
from rawconfig import RawConfig
class ConfigBuilder:
    """Fill YAML-jinja2 templates and prepare the final data-model."""

    def __init__(self, cfg: RawConfig):
        """Take over service definitions and preloaded templates from *cfg*.

        :param cfg: a RawConfig that already scanned the data directory
        """
        self.logger = logging.getLogger(APP_LOG_TAG)
        # NOTE: the original also declared these as class-level mutable
        # attributes; those shared defaults were dead (always overwritten
        # here) and invited cross-instance aliasing bugs, so they are gone.
        self.service_definitions = cfg.service_definitions
        self.templates = cfg.templates

    def build(self):
        """Render every scraping job's template and parse the result.

        For each service/job pair the job's jinja2 template is rendered
        with the service's data plus the job's 'targets', and the parsed
        YAML is stored back on the job under 'output_yaml'.
        """
        for service in self.service_definitions:
            for job in service['scraping']:
                template = Template(job['template'])
                # shallow copy so the extra 'targets' key does not leak
                # into the shared service dict
                data = service.copy()
                data['targets'] = job['targets']
                rendered = template.render(data)
                # store generated prometheus job config
                job['output_yaml'] = yaml.safe_load(rendered)
import yaml
from validation import validate_config
from transformation import setup_host_fullnames
from transformation import setup_static_dns
from transformation import setup_node_exporters
from transformation import setup_bind9_exporters
from transformation import setup_ping_exporters
from transformation import setup_wmi_exporters
from transformation import setup_exporters
from transformation import setup_autogen_warning
class config():
    """Legacy loader: read, validate and transform the YAML data file."""

    def __init__(self, data_root):
        """Load the YAML file at *data_root* into raw_config."""
        # 'with' closes the file handle — the original leaked it via
        # yaml.safe_load(open(data_root))
        with open(data_root) as stream:
            self.raw_config = yaml.safe_load(stream)
        print("YAML data file has loaded: " + data_root)

    def validate(self):
        """Validate raw_config against the schema and keep the result."""
        self.validated_config = validate_config(self)

    def transformate(self):
        """Run every data-model transformation, in dependency order."""
        setup_host_fullnames(self)  # should be the first extender
        setup_static_dns(self)
        setup_node_exporters(self)
        setup_bind9_exporters(self)
        setup_ping_exporters(self)
        setup_wmi_exporters(self)
        self.validated_config['snmp_exporters'] = setup_exporters(self, 'snmp')
        setup_autogen_warning(self)
"""Global constants"""
APP_LOG_TAG = 'pupak'
GENERATOR_OUTPUT_FOLDER = 'generated/'
OUTPUT_TEMPLATE_FOLDER = 'output-templates/'
SERVICE_PREFIX = 'services/'
TEMPLATE_PREFIX = 'service-templates/'
"""Setup custom log format"""
import logging
from constants import APP_LOG_TAG
def setup_logging():
    """Configure the application logger with a DEBUG console handler.

    Safe to call more than once: a handler is only attached when the
    logger has none yet, so repeated calls no longer produce duplicated
    log lines (the original stacked one handler per call).
    """
    logger = logging.getLogger(APP_LOG_TAG)
    logger.setLevel(logging.DEBUG)
    if logger.handlers:
        # already configured — avoid duplicate handlers
        return
    # console handler: everything from DEBUG up
    console = logging.StreamHandler()
    console.setLevel(logging.DEBUG)
    # custom log format: "[LEVEL]\tmessage"
    formatter = logging.Formatter('[%(levelname)s]\t%(message)s')
    console.setFormatter(formatter)
    logger.addHandler(console)
#!/usr/bin/env python3 """Generates output from templates and some data"""
import logging
import os import os
import sys import socket
from config import config from datetime import datetime
from pathlib import Path
from schema import Schema, And, Optional
import jinja2 import jinja2
import yaml import yaml
from build import ConfigBuilder
from constants import APP_LOG_TAG, OUTPUT_TEMPLATE_FOLDER, GENERATOR_OUTPUT_FOLDER
def generate_config_files(data, template_folder, out_folder):
template_loader = jinja2.FileSystemLoader(searchpath=template_folder)
template_env = jinja2.Environment(loader=template_loader)
print(data)
def autogen_warning():
    """Make a warning message that cannot be ignored.

    Returns 30 identical comment lines so the banner is impossible to
    miss even when a generated file is truncated or skimmed.
    """
    # string multiplication replaces the original += loop;
    # the result is byte-identical
    return "# !!! AUTOGENERATED ; DO NOT EDIT\n" * 30
class Generator:
    """Generates output from templates and some data."""

    def __init__(self, data_folder: Path, cfg: ConfigBuilder):
        """Remember the data folder and the built configuration.

        :param data_folder: root of the data repository
        :param cfg: a ConfigBuilder whose build() has been run
        """
        self.logger = logging.getLogger(APP_LOG_TAG)
        self.data_folder = data_folder
        self.config = cfg

    def ignite(self):
        """Generate the output config files from the built data-model."""
        self.logger.debug('Generate output config files')
        data = {
            'autogen_warning': autogen_warning(),
            'generation_info': self.generation_info(),
        }
        self.collect_scrape_configs(data)
        self.generate_files(data)

    def generation_info(self):
        """Make some generation meta info (time, host, data-repo commit)."""
        # 'with' closes the pipe — the original leaked the popen stream
        with os.popen('git -C ' + str(self.data_folder.absolute()) + ' rev-parse HEAD') as stream:
            git_hash = stream.read()
        output = "# This file is generated. Some meta:"
        output += "\n# Time: " + datetime.now().isoformat()
        output += "\n# Host: " + socket.gethostname()
        output += "\n# Commit: " + git_hash
        return output

    def collect_scrape_configs(self, output):
        """Dump every job's generated config into output['scrape_configs']."""
        scrape_jobs = [
            job['output_yaml']
            for service in self.config.service_definitions
            for job in service['scraping']
        ]
        output['scrape_configs'] = yaml.dump(
            {'scrape_configs': scrape_jobs},
            explicit_start=False,
            default_flow_style=False
        )

    def generate_files(self, data):
        """Render every output template into the generated/ folder."""
        base = str(self.data_folder.absolute())
        template_folder = base + '/' + OUTPUT_TEMPLATE_FOLDER
        out_folder = base + '/' + GENERATOR_OUTPUT_FOLDER
        template_loader = jinja2.FileSystemLoader(searchpath=template_folder)
        template_env = jinja2.Environment(loader=template_loader)
        for filename in os.listdir(template_folder):
            rendered = template_env.get_template(filename).render(data)
            # drop the .j2 suffix from the output file name
            out_name = (out_folder + '/' + filename).replace(".j2", "")
            # 'with' guarantees the handle is closed even if write fails
            with open(out_name, "w+") as out_file:
                out_file.write(rendered)
pupak.py 0 → 100644
#!/usr/bin/env python3
"""Entry-point file"""
import logging
import sys
from pathlib import Path
from build import ConfigBuilder
from constants import APP_LOG_TAG
from customlog import setup_logging
from generator import Generator
from rawconfig import RawConfig
if __name__ == "__main__":
# Bootstrapping
setup_logging()
logger = logging.getLogger(APP_LOG_TAG)
logger.info("Starting Pupák generator script.")
if len(sys.argv) < 1 + 1:
logger.error("Not enough CLI args.")
logger.error("Usage: data_folder")
exit(1)
data_folder = Path(sys.argv[1])
# Read service YAML files and preload templates
raw = RawConfig(data_folder)
# Fill YAML-jinja2 templates,
# prepare the final data-model
builder = ConfigBuilder(raw)
builder.build()
# print("Data scheme validation:")
# cfg.validate()
# print("Data scheme is VALID.")
generator = Generator(data_folder, builder)
generator.ignite()
"""Read service YAML files and preload templates"""
import logging
from pathlib import Path
import yaml
from constants import APP_LOG_TAG, SERVICE_PREFIX, TEMPLATE_PREFIX
def is_service_file(file_identifier: str) -> bool:
    """Tell whether *file_identifier* names a service definition file."""
    prefix = SERVICE_PREFIX
    return file_identifier[:len(prefix)] == prefix
def is_template_file(file_identifier: str) -> bool:
    """Tell whether *file_identifier* names a service template file."""
    prefix = TEMPLATE_PREFIX
    return file_identifier[:len(prefix)] == prefix
class RawConfig:
    """Read service YAML files and preload templates."""

    def __init__(self, path: Path):
        """Scan *path* recursively for service and template YAML files.

        :param path: root of the data directory
        """
        self.logger = logging.getLogger(APP_LOG_TAG)
        # Instance-level containers. The original declared these as
        # class-level attributes and mutated them in place, so every
        # RawConfig instance shared — and kept accumulating into — the
        # same list/dict.
        self.service_definitions = []
        self.templates = {}
        # YAML files in the data directory, recursively
        base_path_len = len(str(path.absolute())) + 1
        self.preload_service_files(path.rglob('*.yaml'), base_path_len)
        self.preload_template_files(path.rglob('*.yaml.j2'), base_path_len)
        self.fill_templates()

    def preload_service_files(self, path_glob, base_path_len: int):
        """Read all service YAML files found by *path_glob*."""
        for yaml_path in path_glob:
            # identifier = path relative to the data root, extension stripped
            file_identifier = str(yaml_path.absolute())[base_path_len:-len(".yaml")]
            if is_service_file(file_identifier):
                self.service_definitions.append(self.read_yaml_file(yaml_path))

    def preload_template_files(self, path_glob, base_path_len: int):
        """Read all template files found by *path_glob* as raw text."""
        for yaml_path in path_glob:
            file_identifier = str(yaml_path.absolute())[base_path_len:-len(".yaml.j2")]
            if is_template_file(file_identifier):
                # read_text() closes the handle — the original leaked it
                self.templates[file_identifier] = yaml_path.read_text()

    def read_yaml_file(self, yaml_path: Path):
        """Parse a single YAML file; log and return None on parse errors."""
        self.logger.debug("Load YAML file: " + str(yaml_path.absolute()))
        with yaml_path.open('r') as stream:
            try:
                return yaml.safe_load(stream)
            except yaml.YAMLError as exc:
                self.logger.error("Cannot load YAML file.")
                self.logger.error(exc)

    def fill_templates(self):
        """Fill a service definition template field with its content."""
        for service in self.service_definitions:
            for job in service['scraping']:
                template_identifier = TEMPLATE_PREFIX + job['template']
                job['template'] = self.templates[template_identifier]
def setup_host_fullnames(config):
    """Default each host's 'fullname' to its 'name' when it is missing."""
    for host in config.validated_config['hosts']:
        host.setdefault('fullname', host['name'])
# Mine hosts out of the services like this:
# 'static_dns': [{
# 'fullname': 'avian.sch.bme.hu',
# 'address': '10.0.209.160'
# }]
# Note: only first exporter's address!
def setup_static_dns(config):
    """Collect one DNS entry per host, taken from its first exporter.

    Hosts without any exporter are skipped; only the first address of
    the first exporter is used.
    """
    entries = []
    for host in config.validated_config['hosts']:
        exporters = host['exporters']
        if not exporters:
            continue
        first_exporter = next(iter(exporters.values()))
        entries.append({
            'fullname': host['fullname'],
            'address': first_exporter['addresses'][0]  # TODO support multiple addresses
        })
    config.validated_config['static_dns'] = entries
def setup_exporters(config, type):
    """Build the exporter entry list for one exporter *type*.

    Each entry carries the host fullname as 'target' plus its metric
    labels: 'facility' always, followed by any host-level
    'metrics_labels'. Hosts without this exporter type are skipped.
    """
    # NOTE(review): parameter name 'type' shadows the builtin; kept
    # unchanged for caller compatibility.
    exporters = []
    for host in config.validated_config['hosts']:
        if type not in host['exporters']:
            continue
        labels = [{'name': 'facility', 'value': host['facility']}]
        for label in host.get('metrics_labels', []):
            label_key = next(iter(label))
            labels.append({'name': label_key, 'value': label[label_key]})
        exporters.append({
            'target': host['fullname'],
            'metrics_labels': labels
        })
    return exporters
def search_config_by_host_fullname(fullname, config):
    """Return the host dict whose 'fullname' equals *fullname*, or None."""
    return next(
        (host for host in config.validated_config['hosts']
         if host['fullname'] == fullname),
        None,
    )
def setup_node_exporters(config):
    """Register node exporters and suffix each target with its node port."""
    exporters = setup_exporters(config, 'node')
    config.validated_config['node_exporters'] = exporters
    for entry in exporters:
        host = search_config_by_host_fullname(entry['target'], config)
        port = host['exporters']['node']['port']
        entry['target'] = entry['target'] + ':' + port
def setup_bind9_exporters(config):
    """Register bind9 exporters and suffix each target with its bind9 port."""
    exporters = setup_exporters(config, 'bind9')
    config.validated_config['bind9_exporters'] = exporters
    for entry in exporters:
        host = search_config_by_host_fullname(entry['target'], config)
        port = host['exporters']['bind9']['port']
        entry['target'] = entry['target'] + ':' + port
def setup_wmi_exporters(config):
    """Register wmi exporters and suffix each target with its wmi port."""
    exporters = setup_exporters(config, 'wmi')
    config.validated_config['wmi_exporters'] = exporters
    for entry in exporters:
        host = search_config_by_host_fullname(entry['target'], config)
        port = host['exporters']['wmi']['port']
        entry['target'] = entry['target'] + ':' + port
def setup_ping_exporters(config):
    """Register ping exporters; ping targets carry no port suffix.

    Unlike the node/bind9/wmi variants, ping targets are probed by
    hostname only, so no port is appended.
    """
    # (removed dead commented-out code that referenced a nonexistent
    # 'services' variable)
    config.validated_config['ping_exporters'] = setup_exporters(config, 'ping')
def setup_autogen_warning(config):
    """Store a 30-line autogenerated-file banner in the data-model."""
    # string multiplication replaces the original += loop;
    # the result is byte-identical
    config.validated_config['autogen_warning'] = (
        "# !!! AUTOGENERATED ; DO NOT EDIT\n" * 30
    )
"""Rules to validate schema"""
from schema import Schema, And, Optional from schema import Schema, And, Optional
......
0% Loading or .
You are about to add 0 people to the discussion. Proceed with caution.
Please register or to comment