diff --git a/.dockerignore b/.dockerignore
new file mode 100644
index 0000000000000000000000000000000000000000..d4d4b6570131de918670c05ecd96d087c4b5a8e4
--- /dev/null
+++ b/.dockerignore
@@ -0,0 +1,4 @@
+.*
+venv
+data
+README*
diff --git a/.editorconfig b/.editorconfig
index b20e68a3ec30a15b06900ea736eec439d96bfc00..1ac5567866daaae7834b943674da1aa1b1e321a7 100644
--- a/.editorconfig
+++ b/.editorconfig
@@ -20,3 +20,6 @@ indent_size = 2
 [*.sh]
 max_line_length = 80
 indent_size = 2
+
+[.gitlab-ci.yml]
+max_line_length = off
diff --git a/.gitignore b/.gitignore
index 0ed0785c50ccda2f5f1c882e3dcdeac1ff5ac754..abee4873414bb250d76fc922c41c2fa969a838e6 100644
--- a/.gitignore
+++ b/.gitignore
@@ -1,3 +1,3 @@
 /.idea/
 /venv/
-/data/
+data
diff --git a/.gitlab-ci.yml b/.gitlab-ci.yml
index 03911e03a4d683f6a2d98e03022eda4945d5796f..71fe51b4f0840870388d5ce1fb3d2f56c9db9ed7 100644
--- a/.gitlab-ci.yml
+++ b/.gitlab-ci.yml
@@ -1,11 +1,11 @@
-image: "python:3.7"
+image: registry.kszk.bme.hu/baseimg/python-tools
 
 stages:
   - Static Analysis
   - Docker build
 
 variables:
-  CONTAINER_IMAGE: "registry.kszk.bme.hu/kszk/monitoring/generator:$CI_COMMIT_REF_NAME"
+  CONTAINER_IMAGE: "registry.kszk.bme.hu/kszk/monitoring/pupak:$CI_COMMIT_REF_NAME"
 
 before_script:
   - python --version
@@ -30,3 +30,5 @@ docker build:
   script:
     - echo "{\"auths\":{\"registry.kszk.bme.hu\":{\"username\":\"$CI_REG_USER\",\"password\":\"$CI_REG_PASS\"}}}" > /kaniko/.docker/services.json
     - /kaniko/executor --context $CI_PROJECT_DIR --dockerfile $CI_PROJECT_DIR/Dockerfile --destination $CONTAINER_IMAGE
+  only:
+  - master
diff --git a/Dockerfile b/Dockerfile
new file mode 100644
index 0000000000000000000000000000000000000000..7c72e2f9e47c51093c0ba387d124ec8861f2cefc
--- /dev/null
+++ b/Dockerfile
@@ -0,0 +1,14 @@
+FROM python:slim
+
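+# git is needed at runtime: generator.py shells out to `git rev-parse HEAD`
+# to stamp the generated files with the data repo's commit hash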
+RUN apt-get update && apt-get -y install --no-install-recommends git && rm -rf /var/lib/apt/lists/*
+
+WORKDIR /usr/src/app
+
+COPY requirements.txt ./
+RUN pip install --no-cache-dir -r requirements.txt
+
+COPY . .
+
+CMD [ "python", "./pupak.py", "/data" ]
diff --git a/README.md b/README.md
index 75f1162b1ab13375676b11987f9696df15acdfec..b5e6edd4f89520f608cecb6f7c89d0b67f8db184 100644
--- a/README.md
+++ b/README.md
@@ -1,4 +1,4 @@
-# Pupák Config generator
+# Pupák config generator
 Generates config files from custom data scheme and jinja2 templates.
 Uses yaml as data source.
 
diff --git a/build.py b/build.py
new file mode 100644
index 0000000000000000000000000000000000000000..165149be280015d73c9442ecb5e74693d3cbf911
--- /dev/null
+++ b/build.py
@@ -0,0 +1,31 @@
+"""Fill YAML-jinja2 templates, prepare the final data-model"""
+import logging
+
+import yaml
+from jinja2 import Template
+
+from constants import APP_LOG_TAG
+from rawconfig import RawConfig
+
+
+class ConfigBuilder:
+    """Fill YAML-jinja2 templates, prepare the final data-model"""
+
+    def __init__(self, cfg: RawConfig):
+        """Instantiate ConfigBuilder"""
+        self.logger = logging.getLogger(APP_LOG_TAG)
+        self.service_definitions = cfg.service_definitions
+        self.templates = cfg.templates
+
+    def build(self):
+        """Fill YAML-jinja2 templates, prepare the final data-model"""
+        for service in self.service_definitions:
+            for job in service['scraping']:
+                template = Template(job['template'])
+                data = service.copy()
+                data['targets'] = job['targets']
+                output = template.render(data)
+                # store generated prometheus job config
+                job['output_yaml'] = yaml.safe_load(output)
diff --git a/config.py b/config.py
deleted file mode 100644
index d9be3ee55238312a659a2ed046ac6a465fe0cd7c..0000000000000000000000000000000000000000
--- a/config.py
+++ /dev/null
@@ -1,29 +0,0 @@
-import yaml
-from validation import validate_config
-from transformation import setup_host_fullnames
-from transformation import setup_static_dns
-from transformation import setup_node_exporters
-from transformation import setup_bind9_exporters
-from transformation import setup_ping_exporters
-from transformation import setup_wmi_exporters
-from transformation import setup_exporters
-from transformation import setup_autogen_warning
-
-
-class config():
-    def __init__(self, data_root):
-        self.raw_config = yaml.safe_load(open(data_root))
-        print("YAML data file has loaded: " + data_root)
-
-    def validate(self):
-        self.validated_config = validate_config(self)
-
-    def transformate(self):
-        setup_host_fullnames(self)  # should be the first extender
-        setup_static_dns(self)
-        setup_node_exporters(self)
-        setup_bind9_exporters(self)
-        setup_ping_exporters(self)
-        setup_wmi_exporters(self)
-        self.validated_config['snmp_exporters'] = setup_exporters(self, 'snmp')
-        setup_autogen_warning(self)
diff --git a/constants.py b/constants.py
new file mode 100644
index 0000000000000000000000000000000000000000..7115878c5c43f25d9c810ad47fa21ffbe65c3920
--- /dev/null
+++ b/constants.py
@@ -0,0 +1,6 @@
+"""Global constants"""
+APP_LOG_TAG = 'pupak'
+GENERATOR_OUTPUT_FOLDER = 'generated/'
+OUTPUT_TEMPLATE_FOLDER = 'output-templates/'
+SERVICE_PREFIX = 'services/'
+TEMPLATE_PREFIX = 'service-templates/'
diff --git a/customlog.py b/customlog.py
new file mode 100644
index 0000000000000000000000000000000000000000..ec100c54e85f9d8a08249e34df671c746fc49ea2
--- /dev/null
+++ b/customlog.py
@@ -0,0 +1,22 @@
+"""Setup custom log format"""
+import logging
+
+from constants import APP_LOG_TAG
+
+
+def setup_logging():
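+    """Attach a DEBUG-level console handler with a custom format"""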
+    # create the shared application logger
+    logger = logging.getLogger(APP_LOG_TAG)
+    logger.setLevel(logging.DEBUG)
+
+    # create a console handler that emits everything down to DEBUG
+    console = logging.StreamHandler()
+    console.setLevel(logging.DEBUG)
+
+    # create a formatter and attach it to the handler
+    formatter = logging.Formatter('[%(levelname)s]\t%(message)s')
+    console.setFormatter(formatter)
+
+    # add the handler to the logger
+    logger.addHandler(console)
diff --git a/generator.py b/generator.py
index 884a5bf763ccafd31921dc15c99f7f3066beada4..bd431ce7ca3014d80a7628f3ac34c6d3f6af5fde 100644
--- a/generator.py
+++ b/generator.py
@@ -1,48 +1,82 @@
-#!/usr/bin/env python3
+"""Generates output from templates and some data"""
+import logging
 import os
-import sys
-from config import config
+import socket
+from datetime import datetime
+from pathlib import Path
 
-from schema import Schema, And, Optional
 import jinja2
 import yaml
 
+from build import ConfigBuilder
+from constants import APP_LOG_TAG, OUTPUT_TEMPLATE_FOLDER, GENERATOR_OUTPUT_FOLDER
 
-def generate_config_files(data, template_folder, out_folder):
-    template_loader = jinja2.FileSystemLoader(searchpath=template_folder)
-    template_env = jinja2.Environment(loader=template_loader)
-    print(data)
 
-    for filename in os.listdir(template_folder):
-        output = template_env.get_template(filename).render(data)
+def autogen_warning():
+    """Make a warning message that cannot be ignored"""
+    output = ""
+    for _ in range(30):
+        output += "# !!! AUTOGENERATED ; DO NOT EDIT\n"
+    return output
 
-        out_name = out_folder + '/' + filename
-        out_name = out_name.replace(".j2", "")
-        f = open(out_name, "w+")
-        f.write(output)
-        f.close()
 
+class Generator:
+    """Generates output from templates and some data"""
 
-if __name__ == "__main__":
-    print("Starting generator script.")
+    def __init__(self, data_folder: Path, cfg: ConfigBuilder):
+        self.logger = logging.getLogger(APP_LOG_TAG)
+        self.data_folder = data_folder
+        self.config = cfg
 
-    if len(sys.argv) < 3 + 1:
-        print("Usage: data_file template_folder out_folder")
-        exit(1)
+    def ignite(self):
+        """Generate"""
+        self.logger.debug('Generate output config files')
+        data = {
+            'autogen_warning': autogen_warning(),
+            'generation_info': self.generation_info(),
+        }
+        self.collect_scrape_configs(data)
+        self.generate_files(data)
 
-    config_filename = sys.argv[1]
-    template_folder = sys.argv[2]
-    out_folder = sys.argv[3]
+    def generation_info(self):
+        """Make some generation meta info"""
+        stream = os.popen('git -C ' + str(self.data_folder.absolute()) + ' rev-parse HEAD')
+        git_hash = stream.read().strip()
 
-    cfg = config(config_filename)
-    print("YAML data file has loaded: " + config_filename)
+        output = "# This file is generated. Some meta:"
+        output += "\n# Time: " + datetime.now().isoformat()
+        output += "\n# Host: " + socket.gethostname()
+        output += "\n# Commit: " + git_hash
+        return output
 
-    print("Data scheme validation:")
-    cfg.validate()
-    print("Data scheme is VALID.")
+    def collect_scrape_configs(self, output):
+        """Build data model to the generation"""
+        data = []
+        for service in self.config.service_definitions:
+            for job in service['scraping']:
+                data.append(job['output_yaml'])
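+        # dump all collected jobs under a single scrape_configs key for Prometheus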
+        output['scrape_configs'] = yaml.dump(
+            {'scrape_configs': data},
+            explicit_start=False,
+            default_flow_style=False
+        )
 
-    cfg.transformate()
-    print("Successful data scheme extension.")
+    def generate_files(self, data):
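+        """Render each output template into the generated folder"""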
+        base = str(self.data_folder.absolute())
+        template_folder = base + '/' + OUTPUT_TEMPLATE_FOLDER
+        out_folder = base + '/' + GENERATOR_OUTPUT_FOLDER
 
-    generate_config_files(cfg.validated_config, template_folder, out_folder)
-    print("Config files has been generated.")
+        template_loader = jinja2.FileSystemLoader(searchpath=template_folder)
+        template_env = jinja2.Environment(loader=template_loader)
+
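+        # render every output template; the generated file keeps the
+        # template's name minus its .j2 suffix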
+        for filename in os.listdir(template_folder):
+            output = template_env.get_template(filename).render(data)
+            out_name = out_folder + '/' + filename
+            out_name = out_name.replace(".j2", "")
+            with open(out_name, "w") as file:
+                file.write(output)
diff --git a/pupak.py b/pupak.py
new file mode 100644
index 0000000000000000000000000000000000000000..1eecf60d700f23ae56e9088748745321184dd957
--- /dev/null
+++ b/pupak.py
@@ -0,0 +1,39 @@
+#!/usr/bin/env python3
+"""Entry-point file"""
+import logging
+import sys
+from pathlib import Path
+
+from build import ConfigBuilder
+from constants import APP_LOG_TAG
+from customlog import setup_logging
+from generator import Generator
+from rawconfig import RawConfig
+
+if __name__ == "__main__":
+    # Bootstrapping
+    setup_logging()
+    logger = logging.getLogger(APP_LOG_TAG)
+    logger.info("Starting Pupák generator script.")
+
+    if len(sys.argv) < 2:
+        logger.error("Not enough CLI args.")
+        logger.error("Usage: pupak.py <data_folder>")
+        sys.exit(1)
+
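+    # the data folder is expected to be a git checkout holding services/,
+    # service-templates/ and output-templates/ (see constants.py)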
+    data_folder = Path(sys.argv[1])
+
+    # Read service YAML files and preload templates
+    raw = RawConfig(data_folder)
+
+    # Fill YAML-jinja2 templates,
+    # prepare the final data-model
+    builder = ConfigBuilder(raw)
+    builder.build()
+
+    # print("Data scheme validation:")
+    # cfg.validate()
+    # print("Data scheme is VALID.")
+
+    generator = Generator(data_folder, builder)
+    generator.ignite()
diff --git a/rawconfig.py b/rawconfig.py
new file mode 100644
index 0000000000000000000000000000000000000000..346f896855bc86cb64ab2b693ee03ad9e2a0e979
--- /dev/null
+++ b/rawconfig.py
@@ -0,0 +1,67 @@
+"""Read service YAML files and preload templates"""
+import logging
+from pathlib import Path
+
+import yaml
+
+from constants import APP_LOG_TAG, SERVICE_PREFIX, TEMPLATE_PREFIX
+
+
+def is_service_file(file_identifier: str) -> bool:
+    """Hmm, is it a service definition file or not"""
+    return file_identifier.startswith(SERVICE_PREFIX)
+
+
+def is_template_file(file_identifier: str) -> bool:
+    """Hmm, is it a service template file or not"""
+    return file_identifier.startswith(TEMPLATE_PREFIX)
+
+
+class RawConfig:
+    """Read service YAML files and preload templates"""
+
+    def __init__(self, path: Path):
+        """Read service YAML files and preload templates"""
+        self.logger = logging.getLogger(APP_LOG_TAG)
+        # per-instance state, filled by the preload steps below
+        self.service_definitions = []
+        self.templates = {}
+        # collect YAML files from the data directory recursively
+        base_path_len = len(str(path.absolute())) + 1
+        self.preload_service_files(path.rglob('*.yaml'), base_path_len)
+        self.preload_template_files(path.rglob('*.yaml.j2'), base_path_len)
+        self.fill_templates()
+
+    def preload_service_files(self, path_glob, base_path_len: int):
+        """Read all (service)YAML files in tha data directory recursively"""
+        for yaml_path in path_glob:
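+            # identifier = path relative to the data folder, extension stripped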
+            file_identifier = str(yaml_path.absolute())[base_path_len:-len(".yaml")]
+            if is_service_file(file_identifier):
+                self.service_definitions.append(self.read_yaml_file(yaml_path))
+
+    def preload_template_files(self, path_glob, base_path_len: int):
+        """Read all (template)YAML files in tha data directory recursively"""
+        for yaml_path in path_glob:
+            file_identifier = str(yaml_path.absolute())[base_path_len:-len(".yaml.j2")]
+            if is_template_file(file_identifier):
+                with yaml_path.open('r') as stream:
+                    self.templates[file_identifier] = stream.read()
+
+    def read_yaml_file(self, yaml_path: Path):
+        """Reads a YAML file"""
+        self.logger.debug("Load YAML file: " + str(yaml_path.absolute()))
+        with yaml_path.open('r') as stream:
+            try:
+                return yaml.safe_load(stream)
+            except yaml.YAMLError as exc:
+                self.logger.error("Cannot load YAML file.")
+                self.logger.error(exc)
+
+    def fill_templates(self):
+        """Fill a service definition template field with its content"""
+        for service in self.service_definitions:
+            for job in service['scraping']:
+                template_identifier = TEMPLATE_PREFIX + job['template']
+                job['template'] = self.templates[template_identifier]
diff --git a/transformation.py b/transformation.py
deleted file mode 100644
index 9111d8deb8a18de4ad0f07d32b480716934b92c9..0000000000000000000000000000000000000000
--- a/transformation.py
+++ /dev/null
@@ -1,93 +0,0 @@
-def setup_host_fullnames(config):
-    for host in config.validated_config['hosts']:
-        if 'fullname' not in host:
-            host['fullname'] = host['name']
-
-
-# Mine hosts out of the services like this:
-# 'static_dns': [{
-#   'fullname': 'avian.sch.bme.hu',
-#   'address': '10.0.209.160'
-# }]
-# Note: only first exporter's address!
-def setup_static_dns(config):
-    config.validated_config['static_dns'] = []
-    for host in config.validated_config['hosts']:
-        if len(host['exporters']) == 0:
-            continue
-        key = next(iter(host['exporters']))
-        exporter = host['exporters'][key]
-        config.validated_config['static_dns'].append({
-            'fullname': host['fullname'],
-            'address': exporter['addresses'][0]  # TODO support multiple addresses
-        })
-
-
-def setup_exporters(config, type):
-    exporters = []
-    for host in config.validated_config['hosts']:
-        if type not in host['exporters']:
-            continue
-
-        metrics_labels = [{
-            'name': 'facility',
-            'value': host['facility']
-        },
-            # {
-            #    'name': 'owner',
-            #    'value': host['owner']
-            # }
-        ]
-
-        if 'metrics_labels' in host:
-            for label in host['metrics_labels']:
-                key = next(iter(label))
-                metrics_labels.append({
-                    'name': key,
-                    'value': label[key]
-                })
-
-        exporters.append({
-            'target': host['fullname'],
-            'metrics_labels': metrics_labels
-        })
-    return exporters
-
-
-def search_config_by_host_fullname(fullname, config):
-    for host in config.validated_config['hosts']:
-        if host['fullname'] == fullname:
-            return host
-
-
-def setup_node_exporters(config):
-    config.validated_config['node_exporters'] = setup_exporters(config, 'node')
-    for exporter in config.validated_config['node_exporters']:
-        host = search_config_by_host_fullname(exporter['target'], config)
-        exporter['target'] += ':' + host['exporters']['node']['port']
-
-
-def setup_bind9_exporters(config):
-    config.validated_config['bind9_exporters'] = setup_exporters(config, 'bind9')
-    for exporter in config.validated_config['bind9_exporters']:
-        host = search_config_by_host_fullname(exporter['target'], config)
-        exporter['target'] += ':' + host['exporters']['bind9']['port']
-
-
-def setup_wmi_exporters(config):
-    config.validated_config['wmi_exporters'] = setup_exporters(config, 'wmi')
-    for exporter in config.validated_config['wmi_exporters']:
-        host = search_config_by_host_fullname(exporter['target'], config)
-        exporter['target'] += ':' + host['exporters']['wmi']['port']
-
-
-def setup_ping_exporters(config):
-    config.validated_config['ping_exporters'] = setup_exporters(config, 'ping')
-    # for exporter in services.validated_config['ping_exporters']:
-    #     host = search_config_by_host_fullname(exporter['target'])
-
-
-def setup_autogen_warning(config):
-    config.validated_config['autogen_warning'] = ""
-    for x in range(30):
-        config.validated_config['autogen_warning'] += "# !!! AUTOGENERATED ; DO NOT EDIT\n"
diff --git a/validation.py b/validation.py
index e0d87714e63c21feb0cc9e33682be54a4c3fac15..757c38000c6560d4b59cd32453ac3641adb61525 100644
--- a/validation.py
+++ b/validation.py
@@ -1,3 +1,4 @@
+"""Rules to validate schema"""
 from schema import Schema, And, Optional