graylog updates, test.debyl.io, scripts for reference

This commit is contained in:
Bastian de Byl
2026-01-13 16:08:38 -05:00
parent 364047558c
commit 34b45853e2
12 changed files with 1136 additions and 3 deletions

View File

@@ -9,3 +9,5 @@
# SSL certificates are now handled automatically by Caddy
# - role: ssl # REMOVED - Caddy handles all certificate management
- role: github-actions
- role: graylog-config
tags: graylog-config

View File

@@ -0,0 +1,170 @@
---
# Graylog API configuration
graylog_api_url: "https://logs.debyl.io/api"
# graylog_api_token: defined in vault

# Default index set for new streams (Default Stream index set)
graylog_default_index_set: "6955a9d3cc3f442e78805871"

# Stream definitions.  Every rule uses Graylog rule type 1 (EXACT match);
# inverted: false means the field must equal the value for a match.
graylog_streams:
  - title: "debyltech-api"
    description: "Lambda API events from debyltech-api service"
    rules:
      - field: "service"
        value: "debyltech-api"
        type: 1  # EXACT match
        inverted: false
  - title: "caddy-access"
    description: "Web traffic access logs from Caddy"
    rules:
      - field: "source"
        value: "caddy"
        type: 1
        inverted: false
      - field: "log_type"
        value: "access"
        type: 1
        inverted: false
  - title: "caddy-fulfillr"
    description: "Fulfillr-specific web traffic"
    rules:
      - field: "source"
        value: "caddy"
        type: 1
        inverted: false
      - field: "tag"
        value: "caddy.fulfillr"
        type: 1
        inverted: false
  - title: "ssh-security"
    description: "SSH access and security logs"
    rules:
      - field: "source"
        value: "sshd"
        type: 1
        inverted: false
  - title: "container-logs"
    description: "Container stdout/stderr from Podman"
    rules:
      - field: "source"
        value: "podman"
        type: 1
        inverted: false

# Pipeline definitions.  Stages run in ascending stage order; "EITHER"
# lets a message proceed when any rule in the stage matches.
graylog_pipelines:
  - title: "GeoIP Enrichment"
    description: "Add geographic information to access logs"
    stages:
      - stage: 0
        match: "EITHER"
        rules:
          - "geoip_caddy_access"
  - title: "Debyltech Event Classification"
    description: "Categorize debyltech-api events"
    stages:
      - stage: 0
        match: "EITHER"
        rules:
          - "classify_order_events"
          - "classify_review_events"
          - "classify_backinstock_events"
          - "classify_shipping_events"
          - "classify_product_events"
      # The fallback runs in stage 1 so it only fires for messages that no
      # stage-0 classifier tagged with an event_category.
      - stage: 1
        match: "EITHER"
        rules:
          - "classify_default_events"

# Pipeline rule definitions (Graylog pipeline rule language).
# Rule titles here must match the names referenced in graylog_pipelines.
graylog_pipeline_rules:
  - title: "geoip_caddy_access"
    description: "GeoIP lookup for Caddy access logs"
    source: |
      rule "GeoIP for Caddy Access"
      when
        has_field("request_remote_ip")
      then
        let ip = to_string($message.request_remote_ip);
        let geo = lookup("geoip-lookup", ip);
        set_field("geo_country", geo["country"].iso_code);
        set_field("geo_city", geo["city"].names.en);
        set_field("geo_coordinates", geo["coordinates"]);
      end
  - title: "classify_order_events"
    description: "Classify order events"
    source: |
      rule "Classify order events"
      when
        has_field("event") AND contains(to_string($message.event), "order")
      then
        set_field("event_category", "order");
      end
  - title: "classify_review_events"
    description: "Classify review events"
    source: |
      rule "Classify review events"
      when
        has_field("event") AND contains(to_string($message.event), "review")
      then
        set_field("event_category", "review");
      end
  - title: "classify_backinstock_events"
    description: "Classify back-in-stock events"
    source: |
      rule "Classify back-in-stock events"
      when
        has_field("event") AND contains(to_string($message.event), "backinstock")
      then
        set_field("event_category", "backinstock");
      end
  - title: "classify_shipping_events"
    description: "Classify shipping events"
    source: |
      rule "Classify shipping events"
      when
        has_field("event") AND contains(to_string($message.event), "shipping")
      then
        set_field("event_category", "shipping");
      end
  - title: "classify_product_events"
    description: "Classify product events"
    source: |
      rule "Classify product events"
      when
        has_field("event") AND contains(to_string($message.event), "product")
      then
        set_field("event_category", "product");
      end
  - title: "classify_default_events"
    description: "Default category for unclassified events"
    source: |
      rule "Classify default events"
      when
        has_field("event") AND NOT has_field("event_category")
      then
        set_field("event_category", "other");
      end

# Pipeline to stream connections.  Titles are resolved to IDs at run time
# by the pipelines.yml tasks.
graylog_pipeline_connections:
  - pipeline: "GeoIP Enrichment"
    streams:
      - "caddy-access"
      - "caddy-fulfillr"
  - pipeline: "Debyltech Event Classification"
    streams:
      - "debyltech-api"

View File

@@ -0,0 +1,187 @@
---
# Graylog lookup table management via the REST API.
# Creates the data adapter, cache, and lookup table that back the
# "geoip-lookup" table used by the GeoIP pipeline rule.
#
# Authentication: the Graylog API token is sent as the basic-auth username
# with the literal password "token" (Graylog's token-auth convention).

# =============================================================================
# Data Adapters
# =============================================================================
- name: get existing data adapters
  ansible.builtin.uri:
    url: "{{ graylog_api_url }}/system/lookup/adapters"
    method: GET
    user: "{{ graylog_api_token }}"
    password: token
    force_basic_auth: true
    headers:
      X-Requested-By: ansible
      Accept: application/json
    status_code: 200
  register: existing_adapters
  tags:
    - graylog-config
    - lookup-tables

- name: build list of existing adapter names
  ansible.builtin.set_fact:
    existing_adapter_names: "{{ existing_adapters.json.data_adapters | default([]) | map(attribute='name') | list }}"
  tags:
    - graylog-config
    - lookup-tables

- name: create GeoIP data adapter
  ansible.builtin.uri:
    url: "{{ graylog_api_url }}/system/lookup/adapters"
    method: POST
    user: "{{ graylog_api_token }}"
    password: token
    force_basic_auth: true
    headers:
      X-Requested-By: ansible
      Content-Type: application/json
    body_format: json
    body:
      name: "geoip-adapter"
      title: "GeoIP MaxMind Adapter"
      description: "MaxMind GeoLite2-City database adapter"
      config:
        type: "maxmind_geoip"
        # Database path inside the Graylog container/host.
        path: "/usr/share/graylog/geoip/GeoLite2-City.mmdb"
        database_type: "MAXMIND_CITY"
        # Re-check the mmdb file for updates once a day.
        check_interval: 86400
        check_interval_unit: "SECONDS"
    status_code: [200, 201]
  when: "'geoip-adapter' not in existing_adapter_names"
  register: created_adapter
  tags:
    - graylog-config
    - lookup-tables

# =============================================================================
# Caches
# =============================================================================
- name: get existing caches
  ansible.builtin.uri:
    url: "{{ graylog_api_url }}/system/lookup/caches"
    method: GET
    user: "{{ graylog_api_token }}"
    password: token
    force_basic_auth: true
    headers:
      X-Requested-By: ansible
      Accept: application/json
    status_code: 200
  register: existing_caches
  tags:
    - graylog-config
    - lookup-tables

- name: build list of existing cache names
  ansible.builtin.set_fact:
    existing_cache_names: "{{ existing_caches.json.caches | default([]) | map(attribute='name') | list }}"
  tags:
    - graylog-config
    - lookup-tables

- name: create GeoIP cache
  ansible.builtin.uri:
    url: "{{ graylog_api_url }}/system/lookup/caches"
    method: POST
    user: "{{ graylog_api_token }}"
    password: token
    force_basic_auth: true
    headers:
      X-Requested-By: ansible
      Content-Type: application/json
    body_format: json
    body:
      name: "geoip-cache"
      title: "GeoIP Cache"
      description: "Cache for GeoIP lookups"
      config:
        type: "guava_cache"
        max_size: 10000
        # Entries expire one hour after last access; write-expiry disabled (0).
        expire_after_access: 3600
        expire_after_access_unit: "SECONDS"
        expire_after_write: 0
        expire_after_write_unit: "SECONDS"
    status_code: [200, 201]
  when: "'geoip-cache' not in existing_cache_names"
  register: created_cache
  tags:
    - graylog-config
    - lookup-tables

# =============================================================================
# Lookup Tables
# =============================================================================
# Re-fetch adapters and caches so freshly created objects get IDs resolved.
- name: refresh adapters list
  ansible.builtin.uri:
    url: "{{ graylog_api_url }}/system/lookup/adapters"
    method: GET
    user: "{{ graylog_api_token }}"
    password: token
    force_basic_auth: true
    headers:
      X-Requested-By: ansible
      Accept: application/json
    status_code: 200
  register: all_adapters
  tags:
    - graylog-config
    - lookup-tables

- name: refresh caches list
  ansible.builtin.uri:
    url: "{{ graylog_api_url }}/system/lookup/caches"
    method: GET
    user: "{{ graylog_api_token }}"
    password: token
    force_basic_auth: true
    headers:
      X-Requested-By: ansible
      Accept: application/json
    status_code: 200
  register: all_caches
  tags:
    - graylog-config
    - lookup-tables

- name: build adapter and cache ID maps
  ansible.builtin.set_fact:
    adapter_id_map: "{{ all_adapters.json.data_adapters | default([]) | items2dict(key_name='name', value_name='id') }}"
    cache_id_map: "{{ all_caches.json.caches | default([]) | items2dict(key_name='name', value_name='id') }}"
  tags:
    - graylog-config
    - lookup-tables

- name: get existing lookup tables
  ansible.builtin.uri:
    url: "{{ graylog_api_url }}/system/lookup/tables"
    method: GET
    user: "{{ graylog_api_token }}"
    password: token
    force_basic_auth: true
    headers:
      X-Requested-By: ansible
      Accept: application/json
    status_code: 200
  register: existing_tables
  tags:
    - graylog-config
    - lookup-tables

- name: build list of existing table names
  ansible.builtin.set_fact:
    existing_table_names: "{{ existing_tables.json.lookup_tables | default([]) | map(attribute='name') | list }}"
  tags:
    - graylog-config
    - lookup-tables

- name: create GeoIP lookup table
  ansible.builtin.uri:
    url: "{{ graylog_api_url }}/system/lookup/tables"
    method: POST
    user: "{{ graylog_api_token }}"
    password: token
    force_basic_auth: true
    headers:
      X-Requested-By: ansible
      Content-Type: application/json
    body_format: json
    body:
      name: "geoip-lookup"
      title: "GeoIP Lookup Table"
      description: "Lookup table for GeoIP resolution"
      cache_id: "{{ cache_id_map['geoip-cache'] }}"
      data_adapter_id: "{{ adapter_id_map['geoip-adapter'] }}"
      default_single_value: ""
      default_single_value_type: "NULL"
      default_multi_value: ""
      default_multi_value_type: "NULL"
    status_code: [200, 201]
  # Only create once both backing objects exist and the table is absent.
  when:
    - "'geoip-lookup' not in existing_table_names"
    - "'geoip-adapter' in adapter_id_map"
    - "'geoip-cache' in cache_id_map"
  tags:
    - graylog-config
    - lookup-tables

View File

@@ -0,0 +1,15 @@
---
# Graylog configuration via the REST API.
# Ordering matters: streams.yml sets stream_id_map, which pipelines.yml
# consumes when wiring pipelines to streams.
- name: include lookup table configuration
  ansible.builtin.include_tasks: lookup_tables.yml
  tags:
    - graylog-config
    - lookup-tables

- name: include stream configuration
  ansible.builtin.include_tasks: streams.yml
  tags:
    - graylog-config
    - streams

- name: include pipeline configuration
  ansible.builtin.include_tasks: pipelines.yml
  tags:
    - graylog-config
    - pipelines

View File

@@ -0,0 +1,188 @@
---
# Graylog pipeline management via the REST API.
# Idempotent: rules and pipelines are looked up by title before creation.
#
# NOTE(review): this file consumes stream_id_map, which is set by
# streams.yml — main.yml must include streams before pipelines.

# =============================================================================
# Pipeline Rules
# =============================================================================
- name: get existing pipeline rules
  ansible.builtin.uri:
    url: "{{ graylog_api_url }}/system/pipelines/rule"
    method: GET
    user: "{{ graylog_api_token }}"
    password: token
    force_basic_auth: true
    headers:
      X-Requested-By: ansible
      Accept: application/json
    status_code: 200
  register: existing_rules
  tags:
    - graylog-config
    - pipelines

- name: build list of existing rule titles
  ansible.builtin.set_fact:
    # default([]) guards against an empty body, matching the defensive
    # style already used in lookup_tables.yml.
    existing_rule_titles: "{{ existing_rules.json | default([]) | map(attribute='title') | list }}"
    existing_rule_map: "{{ existing_rules.json | default([]) | items2dict(key_name='title', value_name='id') }}"
  tags:
    - graylog-config
    - pipelines

- name: create pipeline rules
  ansible.builtin.uri:
    url: "{{ graylog_api_url }}/system/pipelines/rule"
    method: POST
    user: "{{ graylog_api_token }}"
    password: token
    force_basic_auth: true
    headers:
      X-Requested-By: ansible
      Content-Type: application/json
    body_format: json
    body:
      title: "{{ item.title }}"
      description: "{{ item.description | default('') }}"
      source: "{{ item.source }}"
    status_code: [200, 201]
  loop: "{{ graylog_pipeline_rules }}"
  loop_control:
    label: "{{ item.title }}"
  when: item.title not in existing_rule_titles
  register: created_rules
  tags:
    - graylog-config
    - pipelines

# Re-fetch so freshly created rules are included in the ID map.
- name: refresh rule list after creation
  ansible.builtin.uri:
    url: "{{ graylog_api_url }}/system/pipelines/rule"
    method: GET
    user: "{{ graylog_api_token }}"
    password: token
    force_basic_auth: true
    headers:
      X-Requested-By: ansible
      Accept: application/json
    status_code: 200
  register: all_rules
  tags:
    - graylog-config
    - pipelines

- name: build rule ID lookup
  ansible.builtin.set_fact:
    rule_id_map: "{{ all_rules.json | default([]) | items2dict(key_name='title', value_name='id') }}"
  tags:
    - graylog-config
    - pipelines

# =============================================================================
# Pipelines
# =============================================================================
- name: get existing pipelines
  ansible.builtin.uri:
    url: "{{ graylog_api_url }}/system/pipelines/pipeline"
    method: GET
    user: "{{ graylog_api_token }}"
    password: token
    force_basic_auth: true
    headers:
      X-Requested-By: ansible
      Accept: application/json
    status_code: 200
  register: existing_pipelines
  tags:
    - graylog-config
    - pipelines

- name: build list of existing pipeline titles
  ansible.builtin.set_fact:
    existing_pipeline_titles: "{{ existing_pipelines.json | default([]) | map(attribute='title') | list }}"
    existing_pipeline_map: "{{ existing_pipelines.json | default([]) | items2dict(key_name='title', value_name='id') }}"
  tags:
    - graylog-config
    - pipelines

# Render the pipeline DSL source for each pipeline from its stage/rule
# definition (see templates/pipeline_source.j2).
- name: build pipeline source for each pipeline
  ansible.builtin.set_fact:
    pipeline_sources: "{{ pipeline_sources | default({}) | combine({item.title: lookup('template', 'pipeline_source.j2')}) }}"
  loop: "{{ graylog_pipelines }}"
  loop_control:
    label: "{{ item.title }}"
  vars:
    pipeline: "{{ item }}"
  tags:
    - graylog-config
    - pipelines

- name: create pipelines
  ansible.builtin.uri:
    url: "{{ graylog_api_url }}/system/pipelines/pipeline"
    method: POST
    user: "{{ graylog_api_token }}"
    password: token
    force_basic_auth: true
    headers:
      X-Requested-By: ansible
      Content-Type: application/json
    body_format: json
    body:
      title: "{{ item.title }}"
      description: "{{ item.description | default('') }}"
      source: "{{ pipeline_sources[item.title] }}"
    status_code: [200, 201]
  loop: "{{ graylog_pipelines }}"
  loop_control:
    label: "{{ item.title }}"
  when: item.title not in existing_pipeline_titles
  register: created_pipelines
  tags:
    - graylog-config
    - pipelines

- name: refresh pipeline list after creation
  ansible.builtin.uri:
    url: "{{ graylog_api_url }}/system/pipelines/pipeline"
    method: GET
    user: "{{ graylog_api_token }}"
    password: token
    force_basic_auth: true
    headers:
      X-Requested-By: ansible
      Accept: application/json
    status_code: 200
  register: all_pipelines
  tags:
    - graylog-config
    - pipelines

- name: build pipeline ID lookup
  ansible.builtin.set_fact:
    pipeline_id_map: "{{ all_pipelines.json | default([]) | items2dict(key_name='title', value_name='id') }}"
  tags:
    - graylog-config
    - pipelines

# =============================================================================
# Pipeline to Stream Connections
# =============================================================================
- name: get current pipeline connections
  ansible.builtin.uri:
    url: "{{ graylog_api_url }}/system/pipelines/connections"
    method: GET
    user: "{{ graylog_api_token }}"
    password: token
    force_basic_auth: true
    headers:
      X-Requested-By: ansible
      Accept: application/json
    status_code: 200
  register: current_connections
  tags:
    - graylog-config
    - pipelines

- name: connect pipelines to streams
  ansible.builtin.uri:
    url: "{{ graylog_api_url }}/system/pipelines/connections/to_stream"
    method: POST
    user: "{{ graylog_api_token }}"
    password: token
    force_basic_auth: true
    headers:
      X-Requested-By: ansible
      Content-Type: application/json
    body_format: json
    body:
      stream_id: "{{ stream_id_map[item.1] }}"
      # NOTE(review): to_stream replaces the stream's whole pipeline set per
      # call, so a stream listed under two pipelines would keep only the
      # last — no stream currently appears twice in
      # graylog_pipeline_connections, but verify before adding overlaps.
      pipeline_ids:
        - "{{ pipeline_id_map[item.0.pipeline] }}"
    status_code: [200, 201]
  loop: "{{ graylog_pipeline_connections | subelements('streams') }}"
  loop_control:
    label: "{{ item.0.pipeline }} -> {{ item.1 }}"
  when:
    - item.0.pipeline in pipeline_id_map
    - item.1 in stream_id_map
  # Best effort: a failed connection should not abort the rest of the play.
  ignore_errors: true
  tags:
    - graylog-config
    - pipelines

View File

@@ -0,0 +1,127 @@
---
# Graylog stream management via the REST API.
# Idempotent: streams are detected by title, rules by field+value,
# before anything is created.
- name: get existing streams
  ansible.builtin.uri:
    url: "{{ graylog_api_url }}/streams"
    method: GET
    user: "{{ graylog_api_token }}"
    password: token
    force_basic_auth: true
    headers:
      X-Requested-By: ansible
      Accept: application/json
    status_code: 200
  register: existing_streams
  tags:
    - graylog-config
    - streams

- name: build list of existing stream titles
  ansible.builtin.set_fact:
    # default([]) guards against a response without a "streams" key,
    # matching the defensive style used in lookup_tables.yml.
    existing_stream_titles: "{{ existing_streams.json.streams | default([]) | map(attribute='title') | list }}"
    existing_stream_map: "{{ existing_streams.json.streams | default([]) | items2dict(key_name='title', value_name='id') }}"
  tags:
    - graylog-config
    - streams

- name: create streams
  ansible.builtin.uri:
    url: "{{ graylog_api_url }}/streams"
    method: POST
    user: "{{ graylog_api_token }}"
    password: token
    force_basic_auth: true
    headers:
      X-Requested-By: ansible
      Content-Type: application/json
    body_format: json
    body:
      title: "{{ item.title }}"
      description: "{{ item.description | default('') }}"
      index_set_id: "{{ item.index_set_id | default(graylog_default_index_set) }}"
      remove_matches_from_default_stream: "{{ item.remove_from_default | default(true) }}"
    status_code: [200, 201]
  loop: "{{ graylog_streams }}"
  loop_control:
    label: "{{ item.title }}"
  when: item.title not in existing_stream_titles
  register: created_streams
  tags:
    - graylog-config
    - streams

# Re-fetch so freshly created streams are included in the ID map
# (pipelines.yml also consumes stream_id_map).
- name: refresh stream list after creation
  ansible.builtin.uri:
    url: "{{ graylog_api_url }}/streams"
    method: GET
    user: "{{ graylog_api_token }}"
    password: token
    force_basic_auth: true
    headers:
      X-Requested-By: ansible
      Accept: application/json
    status_code: 200
  register: all_streams
  tags:
    - graylog-config
    - streams

- name: build stream ID lookup
  ansible.builtin.set_fact:
    stream_id_map: "{{ all_streams.json.streams | default([]) | items2dict(key_name='title', value_name='id') }}"
  tags:
    - graylog-config
    - streams

- name: get existing rules for each stream
  ansible.builtin.uri:
    url: "{{ graylog_api_url }}/streams/{{ stream_id_map[item.title] }}/rules"
    method: GET
    user: "{{ graylog_api_token }}"
    password: token
    force_basic_auth: true
    headers:
      X-Requested-By: ansible
      Accept: application/json
    status_code: 200
  loop: "{{ graylog_streams }}"
  loop_control:
    label: "{{ item.title }}"
  when: item.title in stream_id_map
  register: stream_rules
  tags:
    - graylog-config
    - streams

- name: create stream rules
  ansible.builtin.uri:
    url: "{{ graylog_api_url }}/streams/{{ stream_id_map[item.0.title] }}/rules"
    method: POST
    user: "{{ graylog_api_token }}"
    password: token
    force_basic_auth: true
    headers:
      X-Requested-By: ansible
      Content-Type: application/json
    body_format: json
    body:
      field: "{{ item.1.field }}"
      value: "{{ item.1.value }}"
      # NOTE(review): templated values arrive as strings unless native
      # Jinja typing is enabled; Graylog appears to accept either — confirm.
      type: "{{ item.1.type | default(1) }}"
      inverted: "{{ item.1.inverted | default(false) }}"
      description: "{{ item.1.description | default('') }}"
    status_code: [200, 201]
  loop: "{{ graylog_streams | subelements('rules', skip_missing=True) }}"
  loop_control:
    label: "{{ item.0.title }} - {{ item.1.field }}:{{ item.1.value }}"
  when:
    - item.0.title in stream_id_map
    # Skip rules that already exist: pick this stream's GET result out of
    # stream_rules.results, then look for a rule with the same field+value.
    - stream_rules.results | selectattr('item.title', 'equalto', item.0.title) | map(attribute='json.stream_rules') | first | default([]) | selectattr('field', 'equalto', item.1.field) | selectattr('value', 'equalto', item.1.value) | list | length == 0
  tags:
    - graylog-config
    - streams

- name: start streams
  ansible.builtin.uri:
    url: "{{ graylog_api_url }}/streams/{{ stream_id_map[item.title] }}/resume"
    method: POST
    user: "{{ graylog_api_token }}"
    password: token
    force_basic_auth: true
    headers:
      X-Requested-By: ansible
    status_code: [200, 204]
  loop: "{{ graylog_streams }}"
  loop_control:
    label: "{{ item.title }}"
  when: item.title in stream_id_map
  # Best effort: resuming an already-running stream should not abort the play.
  ignore_errors: true
  tags:
    - graylog-config
    - streams

View File

@@ -0,0 +1,8 @@
{# Renders Graylog pipeline DSL from the `pipeline` var supplied by
   pipelines.yml: a title plus stages, each stage carrying a number, a
   match mode (default EITHER), and a list of rule names.  Ansible's
   templar trims the newline after this comment (trim_blocks=True). #}
pipeline "{{ pipeline.title }}"
{% for stage in pipeline.stages %}
stage {{ stage.stage }} match {{ stage.match | default('EITHER') }}
{% for rule in stage.rules %}
rule "{{ rule }}"
{% endfor %}
{% endfor %}
end

View File

@@ -25,6 +25,7 @@
# Legacy volume mounts removed - Caddy manages certificates automatically
# Mount static site directories
- "/usr/local/share/fulfillr-site:/usr/local/share/fulfillr-site:ro"
- "/usr/local/share/test-site:/srv/test-site:ro"
env:
CADDY_ADMIN: "0.0.0.0:2019"
restart_policy: always

View File

@@ -27,6 +27,17 @@
- caddy
- ssl
# Docroot for test.debyl.io (mounted read-only into the Caddy container).
- name: create test-site directory
  become: true
  ansible.builtin.file:
    path: /usr/local/share/test-site
    state: directory
    # Owned by the connecting user — presumably so site content can be
    # deployed without privilege escalation; confirm against the deploy flow.
    owner: "{{ ansible_user }}"
    group: "{{ ansible_user }}"
    mode: '0755'
  tags:
    - caddy
- name: deploy caddyfile
become: true
ansible.builtin.template:

View File

@@ -327,22 +327,45 @@
reverse_proxy localhost:9054
}
# Serve static files with SPA fallback
handle {
root * /usr/local/share/fulfillr-site
try_files {path} {path}/ /index.html
file_server
}
header {
Strict-Transport-Security "max-age=31536000; includeSubDomains"
X-Content-Type-Options "nosniff"
Referrer-Policy "same-origin"
}
log {
output file /var/log/caddy/fulfillr.log
format json
}
}
# ============================================================================
# TEST/STAGING SITES
# ============================================================================
# Test Site - test.debyl.io (Public static site hosting, no caching)
test.debyl.io {
	import common_headers
	root * /srv/test-site
	# SPA-style fallback: unmatched paths serve /index.html
	try_files {path} {path}/ /index.html
	file_server
	# Disable all caching for test sites
	header Cache-Control "no-store, no-cache, must-revalidate, max-age=0"
	header Pragma "no-cache"
	log {
		output file /var/log/caddy/test.log
		format json
		level {{ caddy_log_level }}
	}
}

Binary file not shown.