0
0
Fork 0
mirror of https://github.com/netdata/netdata.git synced 2025-05-03 08:50:02 +00:00

Implementation to add logs integrations ()

This commit is contained in:
Fotis Voutsas 2024-10-16 14:37:25 +03:00 committed by GitHub
parent 16fb64f485
commit dfcf47e4cc
No known key found for this signature in database
GPG key ID: B5690EEEBB952194
7 changed files with 343 additions and 19 deletions

View file

@ -25,6 +25,9 @@ def cleanup():
for element in Path("integrations/cloud-notifications").glob('**/*/'):
if "integrations" in str(element) and not "metadata.yaml" in str(element):
shutil.rmtree(element)
for element in Path("integrations/logs").glob('**/*/'):
if "integrations" in str(element) and "metadata.yaml" not in str(element):
shutil.rmtree(element)
for element in Path("integrations/cloud-authentication").glob('**/*/'):
if "integrations" in str(element) and not "metadata.yaml" in str(element):
shutil.rmtree(element)
@ -76,6 +79,7 @@ def add_custom_edit_url(markdown_string, meta_yaml_link, sidebar_label_string, m
"""
output = ""
path_to_md_file = ""
if mode == 'default':
path_to_md_file = f'{meta_yaml_link.replace("/metadata.yaml", "")}/integrations/{clean_string(sidebar_label_string)}'
@ -86,6 +90,9 @@ def add_custom_edit_url(markdown_string, meta_yaml_link, sidebar_label_string, m
elif mode == 'agent-notification':
path_to_md_file = meta_yaml_link.replace("metadata.yaml", "README")
elif mode == 'logs':
path_to_md_file = meta_yaml_link.replace("metadata.yaml", "README")
elif mode == 'cloud-authentication':
path_to_md_file = meta_yaml_link.replace("metadata.yaml", f'integrations/{clean_string(sidebar_label_string)}')
@ -293,6 +300,34 @@ endmeta-->
except Exception as e:
print("Exception in notification md construction", e, integration['id'])
elif mode == 'logs':
try:
# initiate the variables for the logs integration
meta_yaml = integration['edit_link'].replace("blob", "edit")
sidebar_label = integration['meta']['name']
learn_rel_path = generate_category_from_name(integration['meta']['categories'][0].split("."), categories)
# build the markdown string
md = \
f"""<!--startmeta
meta_yaml: "{meta_yaml}"
sidebar_label: "{sidebar_label}"
learn_status: "Published"
learn_rel_path: "{learn_rel_path.replace("logs", "Logs")}"
message: "DO NOT EDIT THIS FILE DIRECTLY, IT IS GENERATED BY THE LOGS' metadata.yaml FILE"
endmeta-->
{create_overview(integration, integration['meta']['icon_filename'])}"""
if integration['setup']:
md += f"""
{integration['setup']}
"""
except Exception as e:
print("Exception in logs md construction", e, integration['id'])
# AUTHENTICATIONS
elif mode == 'authentication':
if True:
@ -417,7 +452,28 @@ def write_to_file(path, md, meta_yaml, sidebar_label, community, mode='default')
except FileNotFoundError as e:
print("Exception in writing to file", e)
elif mode == 'logs':
name = clean_string(integration['meta']['name'])
if not Path(f'{path}/integrations').exists():
Path(f'{path}/integrations').mkdir()
# proper_edit_name = meta_yaml.replace(
# "metadata.yaml", f'integrations/{clean_string(sidebar_label)}.md\"')
md = add_custom_edit_url(md, meta_yaml, sidebar_label, mode='logs')
finalpath = f'{path}/integrations/{name}.md'
try:
clean_and_write(
md,
Path(finalpath)
)
except FileNotFoundError as e:
print("Exception in writing to file", e)
elif mode == 'authentication':
name = clean_string(integration['meta']['name'])
@ -503,6 +559,13 @@ for integration in integrations:
path = build_path(meta_yaml)
write_to_file(path, md, meta_yaml, sidebar_label, community, mode='cloud-notification')
elif integration['integration_type'] == "logs":
meta_yaml, sidebar_label, learn_rel_path, md, community = build_readme_from_integration(
integration, mode='logs')
path = build_path(meta_yaml)
write_to_file(path, md, meta_yaml, sidebar_label, community, mode='logs')
elif integration['integration_type'] == "authentication":
meta_yaml, sidebar_label, learn_rel_path, md, community = build_readme_from_integration(

View file

@ -47,6 +47,10 @@ CLOUD_NOTIFICATION_SOURCES = [
(AGENT_REPO, INTEGRATIONS_PATH / 'cloud-notifications' / 'metadata.yaml', False),
]
LOGS_SOURCES = [
(AGENT_REPO, INTEGRATIONS_PATH / 'logs' / 'metadata.yaml', False),
]
AUTHENTICATION_SOURCES = [
(AGENT_REPO, INTEGRATIONS_PATH / 'cloud-authentication' / 'metadata.yaml', False),
]
@ -77,6 +81,11 @@ CLOUD_NOTIFICATION_RENDER_KEYS = [
'troubleshooting',
]
LOGS_RENDER_KEYS = [
'overview',
'setup',
]
AUTHENTICATION_RENDER_KEYS = [
'overview',
'setup',
@ -139,6 +148,11 @@ CLOUD_NOTIFICATION_VALIDATOR = Draft7Validator(
registry=registry,
)
LOGS_VALIDATOR = Draft7Validator(
{'$ref': './logs.json#'},
registry=registry,
)
AUTHENTICATION_VALIDATOR = Draft7Validator(
{'$ref': './authentication.json#'},
registry=registry,
@ -399,6 +413,19 @@ def _load_agent_notification_file(file, repo):
return ret
def load_agent_notifications():
    """Collect agent notification integrations from all configured sources."""
    results = []
    for repo, path, match in AGENT_NOTIFICATION_SOURCES:
        if not path.exists():
            continue
        if match and path.is_dir():
            # Glob-style source: load every metadata file under the directory.
            for meta_file in path.glob(METADATA_PATTERN):
                results.extend(_load_agent_notification_file(meta_file, repo))
        elif not match and path.is_file():
            # Single-file source.
            results.extend(_load_agent_notification_file(path, repo))
    return results
def _load_cloud_notification_file(file, repo):
debug(f'Loading {file}.')
data = load_yaml(file)
@ -432,19 +459,6 @@ def _load_cloud_notification_file(file, repo):
return ret
def load_agent_notifications():
    """Collect agent notification integrations from all configured sources."""
    loaded = []
    for repo, path, match in AGENT_NOTIFICATION_SOURCES:
        exists = path.exists()
        if match and exists and path.is_dir():
            # Directory source: pick up every matching metadata file.
            for meta_file in path.glob(METADATA_PATTERN):
                loaded.extend(_load_agent_notification_file(meta_file, repo))
        elif not match and exists and path.is_file():
            loaded.extend(_load_agent_notification_file(path, repo))
    return loaded
def load_cloud_notifications():
ret = []
@ -458,6 +472,52 @@ def load_cloud_notifications():
return ret
def _load_logs_file(file, repo):
    """Load one logs metadata file and tag its entries.

    Returns a list of entry dicts (possibly empty when the file is missing,
    empty, or fails schema validation).
    """
    debug(f'Loading {file}.')
    data = load_yaml(file)
    if not data:
        return []
    try:
        LOGS_VALIDATOR.validate(data)
    except ValidationError:
        warn(f'Failed to validate {file} against the schema.', file)
        return []
    # A mapping carrying an 'id' key is a single entry; otherwise the file
    # holds a list of entries.
    entries = [data] if 'id' in data else list(data)
    for idx, entry in enumerate(entries):
        entry['integration_type'] = 'logs'
        entry['_src_path'] = file
        entry['_repo'] = repo
        entry['_index'] = idx
    return entries
def load_logs():
    """Collect logs integrations from all configured sources."""
    collected = []
    for repo, path, match in LOGS_SOURCES:
        if not path.exists():
            continue
        if match and path.is_dir():
            # Directory source: load every metadata file it contains.
            for meta_file in path.glob(METADATA_PATTERN):
                collected.extend(_load_logs_file(meta_file, repo))
        elif not match and path.is_file():
            # Single metadata file.
            collected.extend(_load_logs_file(path, repo))
    return collected
def _load_authentication_file(file, repo):
debug(f'Loading {file}.')
data = load_yaml(file)
@ -818,6 +878,48 @@ def render_cloud_notifications(categories, notifications, ids):
return notifications, clean_notifications, ids
def render_logs(categories, logs, ids):
    """Render the markdown sections for every logs integration.

    Sorts and dedupes the entries, renders each key in LOGS_RENDER_KEYS
    twice (normal and "clean" plain-text variants), and strips the
    loader-internal bookkeeping keys. Returns the rendered integrations,
    their clean twins, and the updated id registry.
    """
    debug('Sorting logs.')
    sort_integrations(logs)
    debug('Checking log ids.')
    logs, ids = dedupe_integrations(logs, ids)

    clean_logs = []
    for entry in logs:
        entry['edit_link'] = make_edit_link(entry)
        clean_entry = deepcopy(entry)
        for key in LOGS_RENDER_KEYS:
            # Missing sections render as empty strings.
            data = ''
            clean_data = ''
            if key in entry:
                tmpl = get_jinja_env().get_template(f'{key}.md')
                data = tmpl.render(entry=entry, clean=False)
                clean_data = tmpl.render(entry=entry, clean=True)
                # Second pass: expand per-integration template variables.
                if 'variables' in entry['meta']:
                    data = get_jinja_env().from_string(data).render(
                        variables=entry['meta']['variables'], clean=False)
                    clean_data = get_jinja_env().from_string(clean_data).render(
                        variables=entry['meta']['variables'], clean=True)
            entry[key] = data
            clean_entry[key] = clean_data
        # Drop loader-internal bookkeeping before emitting.
        for internal in ('_src_path', '_repo', '_index'):
            del entry[internal], clean_entry[internal]
        clean_logs.append(clean_entry)
    return logs, clean_logs, ids
def render_authentications(categories, authentications, ids):
debug('Sorting authentications.')
@ -885,21 +987,21 @@ def main():
exporters = load_exporters()
agent_notifications = load_agent_notifications()
cloud_notifications = load_cloud_notifications()
logs = load_logs()
authentications = load_authentications()
collectors, clean_collectors, ids = render_collectors(categories, collectors, dict())
deploy, clean_deploy, ids = render_deploy(distros, categories, deploy, ids)
exporters, clean_exporters, ids = render_exporters(categories, exporters, ids)
agent_notifications, clean_agent_notifications, ids = render_agent_notifications(categories, agent_notifications,
ids)
cloud_notifications, clean_cloud_notifications, ids = render_cloud_notifications(categories, cloud_notifications,
ids)
agent_notifications, clean_agent_notifications, ids = render_agent_notifications(categories, agent_notifications,ids)
cloud_notifications, clean_cloud_notifications, ids = render_cloud_notifications(categories, cloud_notifications,ids)
logs, clean_logs, ids = render_logs(categories, logs,ids)
authentications, clean_authentications, ids = render_authentications(categories, authentications, ids)
integrations = collectors + deploy + exporters + agent_notifications + cloud_notifications + authentications
integrations = collectors + deploy + exporters + agent_notifications + cloud_notifications + logs + authentications
render_integrations(categories, integrations)
clean_integrations = clean_collectors + clean_deploy + clean_exporters + clean_agent_notifications + clean_cloud_notifications + clean_authentications
clean_integrations = clean_collectors + clean_deploy + clean_exporters + clean_agent_notifications + clean_cloud_notifications + clean_logs + clean_authentications
render_json(categories, clean_integrations)

View file

@ -0,0 +1,38 @@
# yamllint disable rule:line-length
---
- id: "logs-systemd-journal"
meta:
name: "Systemd Journal Logs"
link: "https://github.com/netdata/netdata/blob/master/src/collectors/systemd-journal.plugin/README.md"
categories:
- logs
icon_filename: "netdata.png"
keywords:
- systemd
- journal
- logs
overview:
description: |
The `systemd` journal plugin by Netdata makes viewing, exploring and analyzing `systemd` journal logs simple and efficient.
It automatically discovers available journal sources, allows advanced filtering, offers interactive visual representations and supports exploring the logs of both individual servers and the logs on infrastructure wide journal centralization servers.
The plugin automatically detects the available journal sources, based on the journal files available in `/var/log/journal` (persistent logs) and `/run/log/journal` (volatile logs).
visualization:
description: |
You can start exploring `systemd` journal logs on the "Logs" tab of the Netdata UI.
key_features:
description: |
- Works on both **individual servers** and **journal centralization servers**.
- Supports `persistent` and `volatile` journals.
- Supports `system`, `user`, `namespaces` and `remote` journals.
- Allows filtering on **any journal field** or **field value**, for any time-frame.
- Allows **full text search** (`grep`) on all journal fields, for any time-frame.
- Provides a **histogram** for log entries over time, with a breakdown per field-value, for any field and any time-frame.
- Works directly on journal files, without any other third-party components.
- Supports coloring log entries, the same way `journalctl` does.
- In PLAY mode provides the same experience as `journalctl -f`, showing new log entries immediately after they are received.
setup:
prerequisites:
description: |
- A Netdata Cloud account

View file

@ -0,0 +1,97 @@
{
"$schema": "http://json-schema.org/draft-07/schema#",
"title": "Netdata Logs integrations metadata.",
"oneOf": [
{
"$ref": "#/$defs/entry"
},
{
"type": "array",
"minLength": 1,
"items": {
"$ref": "#/$defs/entry"
}
}
],
"$defs": {
"entry": {
"type": "object",
"description": "Data for a single logs integration.",
"properties": {
"id": {
"$ref": "./shared.json#/$defs/id"
},
"meta": {
"$ref": "./shared.json#/$defs/instance"
},
"keywords": {
"$ref": "./shared.json#/$defs/keywords"
},
"overview": {
"type": "object",
"properties": {
"description": {
"type": "string",
"description": "General description of what the integration does."
},
"visualization": {
"type": "object",
"properties": {
"description": {
"type": "string",
"description": "How the user can access the data provided by the integration"
}
},
"required": [
"description"
]
},
"key_features": {
"type": "object",
"properties": {
"description": {
"type": "string",
"description": "The key features of the integration."
}
},
"required": [
"description"
]
}
},
"required": [
"description",
"visualization",
"key_features"
]
},
"setup": {
"type": "object",
"properties": {
"prerequisites": {
"type": "object",
"properties": {
"description": {
"type": "string",
"description": "Prerequisites of getting the integration working. For Log Functions only a Netdata account should be needed."
}
},
"required": [
"description"
]
}
},
"required": [
"prerequisites"
]
}
},
"required": [
"id",
"meta",
"keywords",
"overview"
]
}
}
}

View file

@ -6,4 +6,6 @@
[% include 'overview/notification.md' %]
[% elif entry.integration_type == 'authentication' %]
[% include 'overview/authentication.md' %]
[% elif entry.integration_type == 'logs' %]
[% include 'overview/logs.md' %]
[% endif %]

View file

@ -0,0 +1,11 @@
# [[ entry.meta.name ]]
[[ entry.overview.description ]]
## Visualization
[[ entry.overview.visualization.description ]]
## Key features
[[ entry.overview.key_features.description ]]

View file

@ -1,4 +1,14 @@
## Setup
[% if entry.integration_type == 'logs' %]
## Prerequisites
[[ entry.setup.prerequisites.description ]]
## Configuration
There is no configuration needed for this integration.
[% else %]
[% if entry.setup.description %]
[[ entry.setup.description ]]
@ -106,3 +116,4 @@ There are no configuration examples.
[% endif %]
[% endif %]
[% endif %]