diff --git a/config/_default/menus/main.en.yaml b/config/_default/menus/main.en.yaml index 8441f33bfaf67..d5b39f2527684 100644 --- a/config/_default/menus/main.en.yaml +++ b/config/_default/menus/main.en.yaml @@ -5131,876 +5131,356 @@ menu: identifier: observability_pipelines parent: log_management_heading weight: 10000 + - name: Use Cases + url: observability_pipelines/use_cases/ + parent: observability_pipelines + identifier: observability_pipelines_use_cases + weight: 1 - name: Set Up Pipelines url: observability_pipelines/set_up_pipelines/ parent: observability_pipelines identifier: observability_pipelines_set_up_pipelines - weight: 1 - - name: Log Volume Control - url: observability_pipelines/set_up_pipelines/log_volume_control/ - parent: observability_pipelines_set_up_pipelines - identifier: observability_pipelines_log_volume_control - weight: 101 - - name: Amazon Data Firehose - url: observability_pipelines/set_up_pipelines/log_volume_control/amazon_data_firehose/ - parent: observability_pipelines_log_volume_control - identifier: observability_pipelines_log_volume_control_amazon_data_firehose - weight: 1011 - - name: Amazon S3 - url: observability_pipelines/set_up_pipelines/log_volume_control/amazon_s3/ - parent: observability_pipelines_log_volume_control - identifier: observability_pipelines_log_volume_control_amazon_s3 - weight: 1012 - - name: Datadog Agent - url: observability_pipelines/set_up_pipelines/log_volume_control/datadog_agent/ - parent: observability_pipelines_log_volume_control - identifier: observability_pipelines_log_volume_control_datadog_agent - weight: 1013 - - name: Fluent - url: observability_pipelines/set_up_pipelines/log_volume_control/fluent/ - parent: observability_pipelines_log_volume_control - identifier: observability_pipelines_log_volume_control_fluent - weight: 1014 - - name: Google Pub/Sub - url: observability_pipelines/set_up_pipelines/log_volume_control/google_pubsub/ - parent: observability_pipelines_log_volume_control - identifier: observability_pipelines_log_volume_control_google_pubsub - weight: 1015 - - name: HTTP Client - url: observability_pipelines/set_up_pipelines/log_volume_control/http_client/ - parent: observability_pipelines_log_volume_control - identifier: observability_pipelines_log_volume_control_http_client - weight: 1016 - - name: HTTP Server - url: observability_pipelines/set_up_pipelines/log_volume_control/http_server/ - parent: observability_pipelines_log_volume_control - identifier: observability_pipelines_log_volume_control_http_server - weight: 1017 - - name: Kafka - url: observability_pipelines/set_up_pipelines/log_volume_control/kafka/ - parent: observability_pipelines_log_volume_control - identifier: observability_pipelines_log_volume_control_kafka - weight: 1018 - - name: Logstash - url: observability_pipelines/set_up_pipelines/log_volume_control/logstash/ - parent: observability_pipelines_log_volume_control - identifier: observability_pipelines_log_volume_control_logstash - weight: 1019 - - name: Socket (TCP or UDP) - url: observability_pipelines/set_up_pipelines/log_volume_control/socket/ - parent: observability_pipelines_log_volume_control - identifier: observability_pipelines_log_volume_control_socket - weight: 1020 - - name: Splunk HTTP Event Collector - url: observability_pipelines/set_up_pipelines/log_volume_control/splunk_hec/ - parent: observability_pipelines_log_volume_control - identifier: observability_pipelines_log_volume_control_splunk_hec - weight: 1021 - - name: Splunk Forwarders (TCP) - url: 
observability_pipelines/set_up_pipelines/log_volume_control/splunk_tcp/ - parent: observability_pipelines_log_volume_control - identifier: observability_pipelines_log_volume_control_splunk_tcp - weight: 1022 - - name: Sumo Logic Hosted Collector - url: observability_pipelines/set_up_pipelines/log_volume_control/sumo_logic_hosted_collector/ - parent: observability_pipelines_log_volume_control - identifier: observability_pipelines_log_volume_control_sumo_logic_hosted_collector - weight: 1023 - - name: Syslog - url: observability_pipelines/set_up_pipelines/log_volume_control/syslog/ - parent: observability_pipelines_log_volume_control - identifier: observability_pipelines_log_volume_control_syslog - weight: 1024 - - name: Dual Ship Logs - url: observability_pipelines/set_up_pipelines/dual_ship_logs/ - parent: observability_pipelines_set_up_pipelines - identifier: observability_pipelines_dual_ship_logs - weight: 102 - - name: Amazon Data Firehose - url: observability_pipelines/set_up_pipelines/dual_ship_logs/amazon_data_firehose/ - parent: observability_pipelines_dual_ship_logs - identifier: observability_pipelines_dual_ship_logs_amazon_data_firehose - weight: 2011 - - name: Amazon S3 - url: observability_pipelines/set_up_pipelines/dual_ship_logs/amazon_s3/ - parent: observability_pipelines_dual_ship_logs - identifier: observability_pipelines_dual_ship_logs_amazon_s3 - weight: 2012 - - name: Datadog Agent - url: observability_pipelines/set_up_pipelines/dual_ship_logs/datadog_agent/ - parent: observability_pipelines_dual_ship_logs - identifier: observability_pipelines_dual_ship_logs_datadog_agent - weight: 2013 - - name: Fluent - url: observability_pipelines/set_up_pipelines/dual_ship_logs/fluent/ - parent: observability_pipelines_dual_ship_logs - identifier: observability_pipelines_dual_ship_logs_fluent - weight: 2014 - - name: Google Pub/Sub - url: observability_pipelines/set_up_pipelines/dual_ship_logs/google_pubsub/ - parent: observability_pipelines_dual_ship_logs - identifier: observability_pipelines_dual_ship_logs_google_pubsub - weight: 2015 - - name: HTTP Client - url: observability_pipelines/set_up_pipelines/dual_ship_logs/http_client/ - parent: observability_pipelines_dual_ship_logs - identifier: observability_pipelines_dual_ship_logs_http_client - weight: 2016 - - name: HTTP Server - url: observability_pipelines/set_up_pipelines/dual_ship_logs/http_server/ - parent: observability_pipelines_dual_ship_logs - identifier: observability_pipelines_dual_ship_logs_http_server - weight: 2017 - - name: Kafka - url: observability_pipelines/set_up_pipelines/dual_ship_logs/kafka/ - parent: observability_pipelines_dual_ship_logs - identifier: observability_pipelines_dual_ship_logs_kafka - weight: 2018 - - name: Logstash - url: observability_pipelines/set_up_pipelines/dual_ship_logs/logstash/ - parent: observability_pipelines_dual_ship_logs - identifier: observability_pipelines_dual_ship_logs_logstash - weight: 2019 - - name: Socket (TCP or UDP) - url: observability_pipelines/set_up_pipelines/dual_ship_logs/socket/ - parent: observability_pipelines_dual_ship_logs - identifier: observability_pipelines_dual_ship_logs_socket - weight: 2020 - - name: Splunk HTTP Event Collector - url: observability_pipelines/set_up_pipelines/dual_ship_logs/splunk_hec/ - parent: observability_pipelines_dual_ship_logs - identifier: observability_pipelines_dual_ship_logs_splunk_hec - weight: 2021 - - name: Splunk Forwarders (TCP) - url: observability_pipelines/set_up_pipelines/dual_ship_logs/splunk_tcp/ - parent: 
observability_pipelines_dual_ship_logs - identifier: observability_pipelines_dual_ship_logs_splunk_tcp - weight: 2022 - - name: Sumo Logic Hosted Collector - url: observability_pipelines/set_up_pipelines/dual_ship_logs/sumo_logic_hosted_collector/ - parent: observability_pipelines_dual_ship_logs - identifier: observability_pipelines_dual_ship_logs_sumo_logic_hosted_collector - weight: 2023 - - name: Syslog - url: observability_pipelines/set_up_pipelines/dual_ship_logs/syslog/ - parent: observability_pipelines_dual_ship_logs - identifier: observability_pipelines_dual_ship_logs_syslog - weight: 2024 - - name: Archive Logs - url: observability_pipelines/set_up_pipelines/archive_logs/ - parent: observability_pipelines_set_up_pipelines - identifier: observability_pipelines_archive_logs - weight: 103 - - name: Amazon Data Firehose - url: observability_pipelines/set_up_pipelines/archive_logs/amazon_data_firehose/ - parent: observability_pipelines_archive_logs - identifier: observability_pipelines_archive_logs_amazon_data_firehose - weight: 3011 - - name: Amazon S3 - url: observability_pipelines/set_up_pipelines/archive_logs/amazon_s3/ - parent: observability_pipelines_archive_logs - identifier: observability_pipelines_archive_logs_amazon_s3 - weight: 3012 - - name: Datadog Agent - url: observability_pipelines/set_up_pipelines/archive_logs/datadog_agent/ - parent: observability_pipelines_archive_logs - identifier: observability_pipelines_archive_logs_datadog_agent - weight: 3013 - - name: Fluent - url: observability_pipelines/set_up_pipelines/archive_logs/fluent/ - parent: observability_pipelines_archive_logs - identifier: observability_pipelines_archive_logs_fluent - weight: 3014 - - name: Google Pub/Sub - url: observability_pipelines/set_up_pipelines/archive_logs/google_pubsub/ - parent: observability_pipelines_archive_logs - identifier: observability_pipelines_archive_logs_google_pub_sub - weight: 3015 - - name: HTTP Client - url: observability_pipelines/set_up_pipelines/archive_logs/http_client/ - parent: observability_pipelines_archive_logs - identifier: observability_pipelines_archive_logs_http_client - weight: 3016 - - name: HTTP Server - url: observability_pipelines/set_up_pipelines/archive_logs/http_server/ - parent: observability_pipelines_archive_logs - identifier: observability_pipelines_archive_logs_http_server - weight: 3017 - - name: Kafka - url: observability_pipelines/set_up_pipelines/archive_logs/kafka/ - parent: observability_pipelines_archive_logs - identifier: observability_pipelines_archive_logs_kafka - weight: 3018 - - name: Logstash - url: observability_pipelines/set_up_pipelines/archive_logs/logstash/ - parent: observability_pipelines_archive_logs - identifier: observability_pipelines_archive_logs_logstash - weight: 3019 - - name: Socket (TCP or UDP) - url: observability_pipelines/set_up_pipelines/archive_logs/socket/ - parent: observability_pipelines_archive_logs - identifier: observability_pipelines_archive_logs_socket - weight: 3020 - - name: Splunk HTTP Event Collector - url: observability_pipelines/set_up_pipelines/archive_logs/splunk_hec/ - parent: observability_pipelines_archive_logs - identifier: observability_pipelines_archive_logs_splunk_hec - weight: 3021 - - name: Splunk Forwarders (TCP) - url: observability_pipelines/set_up_pipelines/archive_logs/splunk_tcp/ - parent: observability_pipelines_archive_logs - identifier: observability_pipelines_archive_logs_splunk_tcp - weight: 3022 - - name: Sumo Logic Hosted Collector - url: 
observability_pipelines/set_up_pipelines/archive_logs/sumo_logic_hosted_collector/ - parent: observability_pipelines_archive_logs - identifier: observability_pipelines_archive_logs_sumo_logic_hosted_collector - weight: 3023 - - name: Syslog - url: observability_pipelines/set_up_pipelines/archive_logs/syslog/ - parent: observability_pipelines_archive_logs - identifier: observability_pipelines_archive_logs_syslog - weight: 3024 - - name: Split Logs - url: observability_pipelines/set_up_pipelines/split_logs/ - parent: observability_pipelines_set_up_pipelines - identifier: observability_pipelines_split_logs - weight: 104 - - name: Amazon Data Firehose - url: observability_pipelines/set_up_pipelines/split_logs/amazon_data_firehose/ - parent: observability_pipelines_split_logs - identifier: observability_pipelines_split_logs_amazon_data_firehose - weight: 4011 - - name: Amazon S3 - url: observability_pipelines/set_up_pipelines/split_logs/amazon_s3/ - parent: observability_pipelines_split_logs - identifier: observability_pipelines_split_logs_amazon_s3 - weight: 4012 - - name: Datadog Agent - url: observability_pipelines/set_up_pipelines/split_logs/datadog_agent/ - parent: observability_pipelines_split_logs - identifier: observability_pipelines_split_logs_datadog_agent - weight: 4013 - - name: Fluent - url: observability_pipelines/set_up_pipelines/split_logs/fluent/ - parent: observability_pipelines_split_logs - identifier: observability_pipelines_split_logs_fluent - weight: 4014 - - name: Google Pub/Sub - url: observability_pipelines/set_up_pipelines/split_logs/google_pubsub/ - parent: observability_pipelines_split_logs - identifier: observability_pipelines_split_logs_google_pubsub - weight: 4015 - - name: HTTP Client - url: observability_pipelines/set_up_pipelines/split_logs/http_client/ - parent: observability_pipelines_split_logs - identifier: observability_pipelines_split_logs_http_client - weight: 4016 - - name: HTTP Server - url: observability_pipelines/set_up_pipelines/split_logs/http_server/ - parent: observability_pipelines_split_logs - identifier: observability_pipelines_split_logs_http_server - weight: 4017 - - name: Kafka - url: observability_pipelines/set_up_pipelines/split_logs/kafka/ - parent: observability_pipelines_split_logs - identifier: observability_pipelines_split_logs_kafka - weight: 4018 - - name: Logstash - url: observability_pipelines/set_up_pipelines/split_logs/logstash/ - parent: observability_pipelines_split_logs - identifier: observability_pipelines_split_logs_logstash - weight: 4019 - - name: Socket (TCP or UDP) - url: observability_pipelines/set_up_pipelines/split_logs/socket/ - parent: observability_pipelines_split_logs - identifier: observability_pipelines_split_logs_socket - weight: 4020 - - name: Splunk HTTP Event Collector - url: observability_pipelines/set_up_pipelines/split_logs/splunk_hec/ - parent: observability_pipelines_split_logs - identifier: observability_pipelines_split_logs_splunk_hec - weight: 4021 - - name: Splunk Forwarders (TCP) - url: observability_pipelines/set_up_pipelines/split_logs/splunk_tcp/ - parent: observability_pipelines_split_logs - identifier: observability_pipelines_split_logs_splunk_tcp - weight: 4022 - - name: Sumo Logic Hosted Collector - url: observability_pipelines/set_up_pipelines/split_logs/sumo_logic_hosted_collector/ - parent: observability_pipelines_split_logs - identifier: observability_pipelines_split_logs_sumo_logic_hosted_collector - weight: 4023 - - name: Syslog - url: 
observability_pipelines/set_up_pipelines/split_logs/syslog/ - parent: observability_pipelines_split_logs - identifier: observability_pipelines_split_logs_syslog - weight: 4024 - - name: Sensitive Data Redaction - url: observability_pipelines/set_up_pipelines/sensitive_data_redaction/ - parent: observability_pipelines_set_up_pipelines - identifier: observability_pipelines_sensitive_data_redaction - weight: 105 - - name: Amazon Data Firehose - url: observability_pipelines/set_up_pipelines/sensitive_data_redaction/amazon_data_firehose/ - parent: observability_pipelines_sensitive_data_redaction - identifier: observability_pipelines_sensitive_data_redaction_amazon_data_firehose - weight: 5011 - - name: Amazon S3 - url: observability_pipelines/set_up_pipelines/sensitive_data_redaction/amazon_s3/ - parent: observability_pipelines_sensitive_data_redaction - identifier: observability_pipelines_sensitive_data_redaction_amazon_s3 - weight: 5012 - - name: Datadog Agent - url: observability_pipelines/set_up_pipelines/sensitive_data_redaction/datadog_agent/ - parent: observability_pipelines_sensitive_data_redaction - identifier: observability_pipelines_sensitive_data_redaction_datadog_agent - weight: 5013 - - name: Fluent - url: observability_pipelines/set_up_pipelines/sensitive_data_redaction/fluent/ - parent: observability_pipelines_sensitive_data_redaction - identifier: observability_pipelines_sensitive_data_redaction_fluent - weight: 5014 - - name: Google Pub/Sub - url: observability_pipelines/set_up_pipelines/sensitive_data_redaction/google_pubsub/ - parent: observability_pipelines_sensitive_data_redaction - identifier: observability_pipelines_sensitive_data_redaction_google_pubsub - weight: 5015 - - name: HTTP Client - url: observability_pipelines/set_up_pipelines/sensitive_data_redaction/http_client/ - parent: observability_pipelines_sensitive_data_redaction - identifier: observability_pipelines_sensitive_data_redaction_http_client - weight: 5016 - - name: HTTP Server - url: observability_pipelines/set_up_pipelines/sensitive_data_redaction/http_server/ - parent: observability_pipelines_sensitive_data_redaction - identifier: observability_pipelines_sensitive_data_redaction_http_server - weight: 5017 - - name: Kafka - url: observability_pipelines/set_up_pipelines/sensitive_data_redaction/kafka/ - parent: observability_pipelines_sensitive_data_redaction - identifier: observability_pipelines_sensitive_data_redaction_kafka - weight: 5018 - - name: Logstash - url: observability_pipelines/set_up_pipelines/sensitive_data_redaction/logstash/ - parent: observability_pipelines_sensitive_data_redaction - identifier: observability_pipelines_sensitive_data_redaction_logstash - weight: 5019 - - name: Socket (TCP or UDP) - url: observability_pipelines/set_up_pipelines/sensitive_data_redaction/socket/ - parent: observability_pipelines_sensitive_data_redaction - identifier: observability_pipelines_sensitive_data_redaction_socket - weight: 5020 - - name: Splunk HTTP Event Collector - url: observability_pipelines/set_up_pipelines/sensitive_data_redaction/splunk_hec/ - parent: observability_pipelines_sensitive_data_redaction - identifier: observability_pipelines_sensitive_data_redaction_splunk_hec - weight: 5021 - - name: Splunk Forwarders (TCP) - url: observability_pipelines/set_up_pipelines/sensitive_data_redaction/splunk_tcp/ - parent: observability_pipelines_sensitive_data_redaction - identifier: observability_pipelines_sensitive_data_redaction_splunk_tcp - weight: 5022 - - name: Sumo Logic Hosted Collector - 
url: observability_pipelines/set_up_pipelines/sensitive_data_redaction/sumo_logic_hosted_collector/ - parent: observability_pipelines_sensitive_data_redaction - identifier: observability_pipelines_sensitive_data_redaction_sumo_logic_hosted_collector - weight: 5023 - - name: Syslog - url: observability_pipelines/set_up_pipelines/sensitive_data_redaction/syslog/ - parent: observability_pipelines_sensitive_data_redaction - identifier: observability_pipelines_sensitive_data_redaction_syslog - weight: 5024 - - name: Log Enrichment - url: observability_pipelines/set_up_pipelines/log_enrichment/ - parent: observability_pipelines_set_up_pipelines - identifier: observability_pipelines_log_enrichment - weight: 106 - - name: Amazon Data Firehose - url: observability_pipelines/set_up_pipelines/log_enrichment/amazon_data_firehose/ - parent: observability_pipelines_log_enrichment - identifier: observability_pipelines_log_enrichment_amazon_data_firehose - weight: 6011 - - name: Amazon S3 - url: observability_pipelines/set_up_pipelines/log_enrichment/amazon_s3/ - parent: observability_pipelines_log_enrichment - identifier: observability_pipelines_log_enrichment_amazon_s3 - weight: 6012 - - name: Datadog Agent - url: observability_pipelines/set_up_pipelines/log_enrichment/datadog_agent/ - parent: observability_pipelines_log_enrichment - identifier: observability_pipelines_log_enrichment_datadog_agent - weight: 6013 - - name: Fluent - url: observability_pipelines/set_up_pipelines/log_enrichment/fluent/ - parent: observability_pipelines_log_enrichment - identifier: observability_pipelines_log_enrichment_fluent - weight: 6014 - - name: Google Pub/Sub - url: observability_pipelines/set_up_pipelines/log_enrichment/google_pubsub/ - parent: observability_pipelines_log_enrichment - identifier: observability_pipelines_log_enrichment_google_pubsub - weight: 6015 - - name: HTTP Client - url: observability_pipelines/set_up_pipelines/log_enrichment/http_client/ - parent: observability_pipelines_log_enrichment - identifier: observability_pipelines_log_enrichment_http_client - weight: 6016 - - name: HTTP Server - url: observability_pipelines/set_up_pipelines/log_enrichment/http_server/ - parent: observability_pipelines_log_enrichment - identifier: observability_pipelines_log_enrichment_http_server - weight: 6017 - - name: Kafka - url: observability_pipelines/set_up_pipelines/log_enrichment/kafka/ - parent: observability_pipelines_log_enrichment - identifier: observability_pipelines_log_enrichment_kafka - weight: 6018 - - name: Logstash - url: observability_pipelines/set_up_pipelines/log_enrichment/logstash/ - parent: observability_pipelines_log_enrichment - identifier: observability_pipelines_log_enrichment_logstash - weight: 6019 - - name: Socket (TCP or UDP) - url: observability_pipelines/set_up_pipelines/log_enrichment/socket/ - parent: observability_pipelines_log_enrichment - identifier: observability_pipelines_log_enrichment_socket - weight: 6020 - - name: Splunk HTTP Event Collector - url: observability_pipelines/set_up_pipelines/log_enrichment/splunk_hec/ - parent: observability_pipelines_log_enrichment - identifier: observability_pipelines_log_enrichment_splunk_hec - weight: 6021 - - name: Splunk Forwarders (TCP) - url: observability_pipelines/set_up_pipelines/log_enrichment/splunk_tcp/ - parent: observability_pipelines_log_enrichment - identifier: observability_pipelines_log_enrichment_splunk_tcp - weight: 6022 - - name: Sumo Logic Hosted Collector - url: 
observability_pipelines/set_up_pipelines/log_enrichment/sumo_logic_hosted_collector/ - parent: observability_pipelines_log_enrichment - identifier: observability_pipelines_log_enrichment_sumo_logic_hosted_collector - weight: 6023 - - name: Syslog - url: observability_pipelines/set_up_pipelines/log_enrichment/syslog/ - parent: observability_pipelines_log_enrichment - identifier: observability_pipelines_log_enrichment_syslog - weight: 6024 - - name: Generate Metrics - identifier: observability_pipelines_generate_metrics - url: /observability_pipelines/set_up_pipelines/generate_metrics/ - parent: observability_pipelines_set_up_pipelines - weight: 107 - - name: Amazon Data Firehose - url: observability_pipelines/set_up_pipelines/generate_metrics/amazon_data_firehose/ - parent: observability_pipelines_generate_metrics - identifier: observability_pipelines_generate_metrics_amazon_data_firehose - weight: 7011 - - name: Amazon S3 - url: observability_pipelines/set_up_pipelines/generate_metrics/amazon_s3/ - parent: observability_pipelines_generate_metrics - identifier: observability_pipelines_generate_metrics_amazon_s3 - weight: 7012 - - name: Datadog Agent - url: observability_pipelines/set_up_pipelines/generate_metrics/datadog_agent/ - parent: observability_pipelines_generate_metrics - identifier: observability_pipelines_generate_metrics_datadog_agent - weight: 7013 - - name: Fluent - url: observability_pipelines/set_up_pipelines/generate_metrics/fluent/ - parent: observability_pipelines_generate_metrics - identifier: observability_pipelines_generate_metrics_fluent - weight: 7014 - - name: Google Pub/Sub - url: observability_pipelines/set_up_pipelines/generate_metrics/google_pubsub/ - parent: observability_pipelines_generate_metrics - identifier: observability_pipelines_generate_metrics_google_pubsub - weight: 7015 - - name: HTTP Client - url: observability_pipelines/set_up_pipelines/generate_metrics/http_client/ - parent: observability_pipelines_generate_metrics - identifier: observability_pipelines_generate_metrics_http_client - weight: 7016 - - name: HTTP Server - url: observability_pipelines/set_up_pipelines/generate_metrics/http_server/ - parent: observability_pipelines_generate_metrics - identifier: observability_pipelines_generate_metrics_http_server - weight: 7017 - - name: Kafka - url: observability_pipelines/set_up_pipelines/generate_metrics/kafka/ - parent: observability_pipelines_generate_metrics - identifier: observability_pipelines_generate_metrics_kafka - weight: 7018 - - name: Logstash - url: observability_pipelines/set_up_pipelines/generate_metrics/logstash/ - parent: observability_pipelines_generate_metrics - identifier: observability_pipelines_generate_metrics_logstash - weight: 7019 - - name: Socket (TCP or UDP) - url: observability_pipelines/set_up_pipelines/generate_metrics/socket/ - parent: observability_pipelines_generate_metrics - identifier: observability_pipelines_generate_metrics_socket - weight: 7020 - - name: Splunk HTTP Event Collector - url: observability_pipelines/set_up_pipelines/generate_metrics/splunk_hec/ - parent: observability_pipelines_generate_metrics - identifier: observability_pipelines_generate_metrics_splunk_hec - weight: 7021 - - name: Splunk Forwarders (TCP) - url: observability_pipelines/set_up_pipelines/generate_metrics/splunk_tcp/ - parent: observability_pipelines_generate_metrics - identifier: observability_pipelines_generate_metrics_splunk_tcp - weight: 7022 - - name: Sumo Logic Hosted Collector - url: 
observability_pipelines/set_up_pipelines/generate_metrics/sumo_logic_hosted_collector/ - parent: observability_pipelines_generate_metrics - identifier: observability_pipelines_generate_metrics_sumo_logic_hosted_collector - weight: 7023 - - name: Syslog - url: observability_pipelines/set_up_pipelines/generate_metrics/syslog/ - parent: observability_pipelines_generate_metrics - identifier: observability_pipelines_generate_metrics_syslog - weight: 7024 + weight: 2 - name: Run Multiple Pipelines on a Host identifier: observability_run_multiple_pipelines_on_a_host url: /observability_pipelines/set_up_pipelines/run_multiple_pipelines_on_a_host/ parent: observability_pipelines_set_up_pipelines - weight: 108 + weight: 201 - name: Install the Worker url: observability_pipelines/install_the_worker/ identifier: observability_pipelines_install_the_worker parent: observability_pipelines - weight: 2 + weight: 3 - name: Commands url: observability_pipelines/install_the_worker/worker_commands/ identifier: install_the_worker_commands parent: observability_pipelines_install_the_worker - weight: 201 + weight: 301 - name: ECS Fargate url: observability_pipelines/install_the_worker/set_up_the_worker_in_ecs_fargate/ identifier: install_the_worker_ecs_fargate parent: observability_pipelines_install_the_worker - weight: 202 + weight: 302 - name: Update Existing Pipelines url: observability_pipelines/update_existing_pipelines/ parent: observability_pipelines identifier: observability_pipelines_update_existing_pipelines - weight: 3 + weight: 4 - name: Live Capture identifier: observability_pipelines/live_capture/ url: /observability_pipelines/live_capture/ parent: observability_pipelines - weight: 4 + weight: 5 - name: Advanced Configurations url: observability_pipelines/advanced_configurations/ parent: observability_pipelines identifier: observability_pipelines_advanced_configurations - weight: 5 + weight: 6 - name: Monitoring url: /observability_pipelines/monitoring/ identifier: observability_pipelines_monitoring parent: observability_pipelines - weight: 6 + weight: 7 - name: Metrics url: /observability_pipelines/monitoring/metrics/ identifier: observability_pipelines_metrics parent: observability_pipelines_monitoring - weight: 601 + weight: 701 - name: Performance identifier: observability_pipelines_performance url: observability_pipelines/performance/ parent: observability_pipelines - weight: 7 + weight: 8 - name: Sources url: observability_pipelines/sources/ parent: observability_pipelines identifier: observability_pipelines_sources - weight: 8 + weight: 9 - name: Amazon Data Firehose url: observability_pipelines/sources/amazon_data_firehose/ parent: observability_pipelines_sources identifier: observability_pipelines_sources_amazon_data_firehose - weight: 801 + weight: 901 - name: Amazon S3 url: observability_pipelines/sources/amazon_s3/ parent: observability_pipelines_sources identifier: observability_pipelines_sources_amazon_s3 - weight: 802 + weight: 902 - name: Datadog Agent url: observability_pipelines/sources/datadog_agent/ parent: observability_pipelines_sources identifier: observability_pipelines_sources_datadog_agent - weight: 803 + weight: 903 - name: Fluent url: observability_pipelines/sources/fluent/ parent: observability_pipelines_sources identifier: observability_pipelines_sources_fluent - weight: 804 + weight: 904 - name: Google Pub/Sub url: observability_pipelines/sources/google_pubsub/ parent: observability_pipelines_sources identifier: observability_pipelines_sources_google_pubsub - weight: 
805 + weight: 905 - name: HTTP Client url: observability_pipelines/sources/http_client/ parent: observability_pipelines_sources identifier: observability_pipelines_sources_http_client - weight: 806 + weight: 906 - name: HTTP Server url: observability_pipelines/sources/http_server/ parent: observability_pipelines_sources identifier: observability_pipelines_sources_http_server - weight: 807 + weight: 907 - name: Lambda Forwarder url: observability_pipelines/sources/lambda_forwarder/ parent: observability_pipelines_sources identifier: observability_pipelines_sources_lambda_forwarder - weight: 808 + weight: 908 - name: Kafka url: observability_pipelines/sources/kafka/ parent: observability_pipelines_sources identifier: observability_pipelines_sources_kafka - weight: 809 + weight: 909 - name: Logstash url: observability_pipelines/sources/logstash/ parent: observability_pipelines_sources identifier: observability_pipelines_sources_logstash - weight: 810 + weight: 910 - name: Socket url: observability_pipelines/sources/socket/ parent: observability_pipelines_sources identifier: observability_pipelines_sources_socket - weight: 811 + weight: 911 - name: Splunk HEC url: observability_pipelines/sources/splunk_hec/ parent: observability_pipelines_sources identifier: observability_pipelines_sources_splunk_hec - weight: 812 + weight: 912 - name: Splunk TCP url: observability_pipelines/sources/splunk_tcp/ parent: observability_pipelines_sources identifier: observability_pipelines_sources_splunk_tcp - weight: 813 + weight: 913 - name: Sumo Logic Hosted Collector url: observability_pipelines/sources/sumo_logic/ parent: observability_pipelines_sources identifier: observability_pipelines_sources_sumo_logic - weight: 814 + weight: 914 - name: Syslog url: observability_pipelines/sources/syslog/ parent: observability_pipelines_sources identifier: observability_pipelines_sources_syslog - weight: 815 + weight: 915 - name: Processors url: observability_pipelines/processors/ parent: observability_pipelines identifier: observability_pipelines_processors - weight: 9 + weight: 10 - name: Add Environment Variables url: observability_pipelines/processors/add_environment_variables parent: observability_pipelines_processors identifier: observability_pipelines_processors_add_environment_variables - weight: 901 + weight: 1001 - name: Add hostname url: observability_pipelines/processors/add_hostname parent: observability_pipelines_processors identifier: observability_pipelines_processors_add_hostname - weight: 902 + weight: 1002 - name: Custom Processor url: observability_pipelines/processors/custom_processor parent: observability_pipelines_processors identifier: observability_pipelines_processors_custom_processor - weight: 903 + weight: 1003 - name: Deduplicate url: observability_pipelines/processors/dedupe parent: observability_pipelines_processors identifier: observability_pipelines_processors_dedupe - weight: 904 + weight: 1004 - name: Edit fields url: observability_pipelines/processors/edit_fields parent: observability_pipelines_processors identifier: observability_pipelines_processors_edit_fields - weight: 905 + weight: 1005 - name: Enrichment Table url: observability_pipelines/processors/enrichment_table parent: observability_pipelines_processors identifier: observability_pipelines_processors_enrichment_table - weight: 906 + weight: 1006 - name: Filter url: observability_pipelines/processors/filter parent: observability_pipelines_processors identifier: observability_pipelines_processors_filter - weight: 907 + weight: 
1007 - name: Generate Metrics url: observability_pipelines/processors/generate_metrics parent: observability_pipelines_processors identifier: observability_pipelines_processors_generate_metrics - weight: 908 + weight: 1008 - name: Grok Parser url: observability_pipelines/processors/grok_parser parent: observability_pipelines_processors identifier: observability_pipelines_processors_grok_parser - weight: 909 + weight: 1009 - name: Parse JSON url: observability_pipelines/processors/parse_json parent: observability_pipelines_processors identifier: observability_pipelines_processors_parse_json - weight: 910 + weight: 1010 - name: Parse XML url: observability_pipelines/processors/parse_xml parent: observability_pipelines_processors identifier: observability_pipelines_processors_parse_xml - weight: 911 + weight: 1011 - name: Quota url: observability_pipelines/processors/quota parent: observability_pipelines_processors identifier: observability_pipelines_processors_quota - weight: 912 + weight: 1012 - name: Reduce url: observability_pipelines/processors/reduce parent: observability_pipelines_processors identifier: observability_pipelines_processors_reduce - weight: 913 + weight: 1013 - name: Remap to OCSF url: observability_pipelines/processors/remap_ocsf parent: observability_pipelines_processors identifier: observability_pipelines_processors_remap_ocsf - weight: 914 + weight: 1014 - name: Sample url: observability_pipelines/processors/sample parent: observability_pipelines_processors identifier: observability_pipelines_processors_sample - weight: 915 + weight: 1015 - name: Sensitive Data Scanner url: observability_pipelines/processors/sensitive_data_scanner parent: observability_pipelines_processors identifier: observability_pipelines_processors_sensitive_data_scanner - weight: 916 + weight: 1016 - name: Split Array url: observability_pipelines/processors/split_array parent: observability_pipelines_processors identifier: observability_pipelines_processors_split_array - weight: 917 + weight: 1017 - name: Tags Processor url: observability_pipelines/processors/tags_processor parent: observability_pipelines_processors identifier: observability_pipelines_processors_tags_processor - weight: 918 + weight: 1018 - name: Throttle url: observability_pipelines/processors/throttle parent: observability_pipelines_processors identifier: observability_pipelines_processors_throttle - weight: 919 + weight: 1019 - name: Destinations url: observability_pipelines/destinations/ parent: observability_pipelines identifier: observability_pipelines_destinations - weight: 10 + weight: 11 - name: Amazon OpenSearch url: observability_pipelines/destinations/amazon_opensearch/ parent: observability_pipelines_destinations identifier: observability_pipelines_amazon_opensearch - weight: 1001 + weight: 1101 - name: Amazon S3 identifier: observability_pipelines_destinations_amazon_s3 url: observability_pipelines/destinations/amazon_s3/ parent: observability_pipelines_destinations - weight: 1002 + weight: 1102 - name: Amazon Security Lake identifier: observability_pipelines_destinations_amazon_security_lake url: observability_pipelines/destinations/amazon_security_lake/ parent: observability_pipelines_destinations - weight: 1003 + weight: 1103 - name: Azure Storage identifier: observability_pipelines_azure_storage url: observability_pipelines/destinations/azure_storage/ parent: observability_pipelines_destinations - weight: 1004 + weight: 1104 - name: CrowdStrike NG-SIEM identifier: observability_pipelines_crowdstrike_ng_siem url: 
observability_pipelines/destinations/crowdstrike_ng_siem/ parent: observability_pipelines_destinations - weight: 1005 + weight: 1105 - name: Datadog Logs url: observability_pipelines/destinations/datadog_logs/ parent: observability_pipelines_destinations identifier: observability_pipelines_datadog_logs - weight: 1006 + weight: 1106 - name: Elasticsearch url: observability_pipelines/destinations/elasticsearch/ parent: observability_pipelines_destinations identifier: observability_pipelines_elasticsearch - weight: 1007 + weight: 1107 - name: Google Chronicle url: observability_pipelines/destinations/google_chronicle parent: observability_pipelines_destinations identifier: observability_pipelines_google_chronicle - weight: 1008 + weight: 1108 - name: Google Cloud Storage identifier: observability_pipelines_google_cloud_storage url: /observability_pipelines/destinations/google_cloud_storage/ parent: observability_pipelines_destinations - weight: 1009 + weight: 1109 - name: Microsoft Sentinel identifier: observability_pipelines_microsoft_sentinel url: /observability_pipelines/destinations/microsoft_sentinel/ parent: observability_pipelines_destinations - weight: 1010 + weight: 1110 - name: New Relic identifier: observability_pipelines_new_relic url: /observability_pipelines/destinations/new_relic/ parent: observability_pipelines_destinations - weight: 1011 + weight: 1111 - name: OpenSearch url: observability_pipelines/destinations/opensearch parent: observability_pipelines_destinations identifier: observability_pipelines_opensearch - weight: 1012 + weight: 1112 - name: SentinelOne url: observability_pipelines/destinations/sentinelone parent: observability_pipelines_destinations identifier: observability_pipelines_sentinelone - weight: 1013 + weight: 1113 - name: Socket url: observability_pipelines/destinations/socket parent: observability_pipelines_destinations identifier: observability_pipelines_socket - weight: 1014 + weight: 1114 - name: Splunk HEC url: observability_pipelines/destinations/splunk_hec parent: observability_pipelines_destinations identifier: observability_pipelines_splunk_hec - weight: 1015 + weight: 1115 - name: Sumo Logic Hosted Collector url: observability_pipelines/destinations/sumo_logic_hosted_collector parent: observability_pipelines_destinations identifier: observability_pipelines_sumo_logic_hosted_collector - weight: 1016 + weight: 1116 - name: Syslog url: observability_pipelines/destinations/syslog parent: observability_pipelines_destinations identifier: observability_pipelines_syslog - weight: 1017 + weight: 1117 - name: Environment Variables url: observability_pipelines/environment_variables/ parent: observability_pipelines identifier: observability_pipelines_environment_variables - weight: 11 + weight: 12 - name: Best Practices for Scaling Observability Pipelines url: observability_pipelines/best_practices_for_scaling_observability_pipelines/ parent: observability_pipelines identifier: observability_pipelines_best_practices_for_scaling_observability_pipelines - weight: 12 + weight: 13 - name: Guides url: observability_pipelines/guide/ parent: observability_pipelines identifier: observability_pipelines_guide - weight: 13 + weight: 14 - name: Troubleshooting url: observability_pipelines/troubleshooting/ identifier: observability_pipelines_troubleshooting parent: observability_pipelines - weight: 14 + weight: 15 - name: Log Management url: logs/ pre: log diff --git a/content/en/observability_pipelines/_index.md b/content/en/observability_pipelines/_index.md index 
18b3be0e1a454..ae4db02e6ae71 100644
--- a/content/en/observability_pipelines/_index.md
+++ b/content/en/observability_pipelines/_index.md
@@ -67,72 +67,25 @@ The Observability Pipelines Worker is the software that runs in your infrastruct
 The Observability Pipelines UI provides a control plane to manage your Observability Pipelines Workers. You build and edit pipelines and deploy pipeline changes to your Workers from there. You can also enable out-of-the-box monitors for your pipelines so that you can evaluate their health.
 
-## Get started
-
-To set up a pipeline:
-
-1. Navigate to [Observability Pipelines][1].
-1. Select a template:
-   - [Log volume control][2]
-   - [Dual ship logs][3]
-   - [Split logs][4]
-   - [Archive logs to Datadog Archives][5]
-   - [Sensitive data redaction][6]
-   - [Log Enrichment][7]
-   - [Generate Metrics][8]
-1. Select and set up your [source][9].
-1. Select and set up your [destinations][10].
-1. Set up your [processors][11].
-1. [Install the Observability Pipelines Worker][14].
-1. Enable monitors for your pipeline.
-
-See [Set Up Pipelines][12] for more information.
-
-See [Advanced Configurations][13] for bootstrapping options and for details on setting up the Worker with Kubernetes.
-
 ## Explore Observability Pipelines
 
 ### Build pipelines with out-of-the-box templates
 
 {{< img src="observability_pipelines/templates_20241003.png" alt="The Observability Pipelines UI showing the six templates" style="width:100%;" >}}
 
-The templates are built for the following use cases:
-
-#### Log Volume Control
-
-Raw logs are noisy, and only some logs are useful for further search and analysis during investigations. Use the Log Volume Control template to determine which logs to send to your indexed solution, such as a SIEM or log management solution. This helps you to increase the value of your indexed logs and also remain within your planned budget.
-
-#### Dual Ship Logs
-
-As your organization grows, your observability needs for different use cases, such as security, archiving, and log management, also change. This could mean having to trial different archiving, SIEM, and log management solutions. However, managing log pipelines to different solutions can be complicated. Use the Dual Ship Logs template to centrally aggregate, process, and send copies of your logs to different destinations.
-
-#### Archive Logs
-
-Use the Archive Logs template to store logs in a cloud storage solution (Amazon S3, Google Cloud Storage, or Azure Storage). The archived logs are stored in a Datadog-rehydratable format, so that they can be rehydrated in Datadog as needed. This is useful when:
-
-- You have a high volume of noisy logs, but might need to index them in Datadog Log Management ad hoc for an investigation.
-- You are migrating to Datadog Log Management and want to have historical logs after completing the migration.
-- You have a retention policy to fulfill compliance requirements but don't necessarily need to index those logs.
+Build pipelines with out-of-the-box templates for the following [use cases][6]:
-
-#### Split Logs
-
-When you have logs from different services and applications, you might need to send them to different downstream services for querying, analysis, and alerting. For example, you might want to send security logs to a SIEM solution and DevOps logs to Datadog. Use the Split Logs template to preprocess your logs separately for each destination before sending them downstream.
-
-#### Sensitive Data Redaction
-
-Use the Sensitive Data Redaction template to detect and redact sensitive information on premises. The Observability Pipelines sensitive data scanner processor provides 70 out-of-the-box scanning rules, but you can also create your own custom scanning rules using regular expressions. The OOTB rules recognize standard patterns such as credit card numbers, email addresses, IP addresses, API and SSH keys, and access tokens.
-
-#### Log Enrichment
-
-Your organization's different services, systems, and applications all generate logs containing layers of information and in different formats. This can make it difficult to extract the data you need when searching and analyzing the data for an investigation. Use the Log Enrichment template to standardize your logs and enrich them with information, such as data from a reference table.
-
-#### Generate Metrics
-
-Some log sources, such as firewalls and network appliances, generate a large volume of log events that contain log data that don't need to be stored. Often, you just want to see a summary of the logs and compare it to historical data. Log-based metrics are also a cost-efficient way to summarize log data from your entire ingest stream. Use the Generate Metrics template to generate a count metric of logs that match a query or a distribution metric of a numeric value contained in the logs, such as a request duration.
+- [Archive Logs][7]
+- [Dual Ship Logs][8]
+- [Generate Metrics][9]
+- [Log Enrichment][10]
+- [Log Volume Control][11]
+- [Sensitive Data Redaction][12]
+- [Split Logs][13]
 
 ### Build pipelines in the Observability Pipelines UI
 
-{{% observability_pipelines/use_case_images/generate_metrics %}}
+{{< img src="observability_pipelines/dual_ship_pipeline.png" alt="Pipeline with one source connected to two processor groups and two destinations" style="width:100%;" >}}
 
 Build your pipelines in the Observability Pipelines UI. After you select one of the out-of-the-box templates, the onboarding workflow walks you through setting up your source, processors, and destinations. The installation page provides instructions on how to install the Worker in your environment (Docker, Kubernetes, Linux, or CloudFormation).
 
@@ -144,21 +97,28 @@ After you create your pipeline, enable out-of-the box monitors to get alerted wh
 - The Observability Pipelines Worker has high CPU usage or memory usage.
 - There are spikes in data dropped by a component.
 
+## Get started
+
+You must enable [Remote Configuration][1] to use Observability Pipelines.
+
+See [Set Up Pipelines][2] to set up a pipeline in the UI. You can also set up pipelines using the [Observability Pipelines API][3] or [Terraform][4].
+
+See [Advanced Configurations][5] for bootstrapping options and for details on setting up the Worker with Kubernetes.
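+
+To see how these pieces fit together, the sketch below shows the conceptual shape of a pipeline: one source, a group of processors, and one or more destinations. The field names are illustrative only, not the API or Terraform schema; see the [Observability Pipelines API][3] reference for the exact payload:
+
+```yaml
+# Conceptual sketch only; not a valid API payload or Terraform configuration.
+source:
+  type: datadog_agent          # where logs come from
+processors:
+  - type: filter               # keep only the logs you care about
+    include: "env:prod"
+  - type: quota                # enforce a volume budget
+destinations:
+  - type: datadog_logs         # where processed logs are sent
+  - type: amazon_s3            # optionally, an archive copy
+```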
+
 ## Further Reading
 
 {{< partial name="whats-next/whats-next.html" >}}
 
-[1]: https://app.datadoghq.com/observability-pipelines
-[2]: /observability_pipelines/log_volume_control/
-[3]: /observability_pipelines/dual_ship_logs/
-[4]: /observability_pipelines/split_logs/
-[5]: /observability_pipelines/archive_logs/
-[6]: /observability_pipelines/sensitive_data_redaction/
-[7]: /observability_pipelines/log_enrichment/
-[8]: /observability_pipelines/set_up_pipelines/generate_metrics/
-[9]: /observability_pipelines/sources/
-[10]: /observability_pipelines/destinations/
-[11]: /observability_pipelines/processors/
-[12]: /observability_pipelines/set_up_pipelines/
-[13]: /observability_pipelines/advanced_configurations/
-[14]: /observability_pipelines/install_the_worker/
\ No newline at end of file
+[1]: /agent/remote_config/#setup
+[2]: /observability_pipelines/set_up_pipelines/
+[3]: /observability_pipelines/set_up_pipelines/?tab=api#set-up-a-pipeline
+[4]: /observability_pipelines/set_up_pipelines/?tab=terraform#set-up-a-pipeline
+[5]: /observability_pipelines/advanced_configurations/
+[6]: /observability_pipelines/use_cases/
+[7]: /observability_pipelines/use_cases/#archive-logs
+[8]: /observability_pipelines/use_cases/#dual-ship-logs
+[9]: /observability_pipelines/use_cases/#generate-metrics
+[10]: /observability_pipelines/use_cases/#log-enrichment
+[11]: /observability_pipelines/use_cases/#log-volume-control
+[12]: /observability_pipelines/use_cases/#sensitive-data-redaction
+[13]: /observability_pipelines/use_cases/#split-logs
diff --git a/content/en/observability_pipelines/destinations/_index.md b/content/en/observability_pipelines/destinations/_index.md
index a3c6ee5ad00de..3a3b4a80f633c 100644
--- a/content/en/observability_pipelines/destinations/_index.md
+++ b/content/en/observability_pipelines/destinations/_index.md
@@ -9,16 +9,9 @@ further_reading:
 
 ## Overview
 
-Use the Observability Pipelines Worker to send your processed logs to different destinations.
+Use the Observability Pipelines Worker to send your processed logs to different destinations. Most Observability Pipelines destinations send events in batches to the downstream integration. See [Event batching](#event-batching) for more information. Some Observability Pipelines destinations also have fields that support template syntax so you can set those fields based on specific log fields. See [Template syntax](#template-syntax) for more information.
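+
+For example, in a destination field that supports template syntax, you can reference a log field with `{{ <field_name> }}`. As a hypothetical sketch, setting an object prefix to the following routes each log into a directory named after the value of its `service` field:
+
+```
+logs/{{ service }}/
+```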
 
-Select and set up your destinations when you [set up a pipeline][1]. This is step 4 in the pipeline setup process:
-
-1. Navigate to [Observability Pipelines][2].
-1. Select a template.
-1. Select and set up your source.
-1. Select and set up your destinations.
-1. Set up your processors.
-1. Install the Observability Pipelines Worker.
+Select a destination in the left navigation menu to see more information about it.
 
 ## Template syntax
 
diff --git a/content/en/observability_pipelines/install_the_worker/_index.md b/content/en/observability_pipelines/install_the_worker/_index.md
index 172d832bcbc8a..fd37307f95799 100644
--- a/content/en/observability_pipelines/install_the_worker/_index.md
+++ b/content/en/observability_pipelines/install_the_worker/_index.md
@@ -12,13 +12,7 @@
 
 ## Overview
 
-The Observability Pipelines Worker is software that runs in your environment to centrally aggregate, process, and route your logs. You install and configure the Worker as part of the pipeline setup process. These are the general steps if you are setting up a pipeline in the UI:
-
-1. Select a log [source][2].
-1. Select [destinations][3] to which you want to send your logs.
-1. Select and configure [processors][4] to transform your logs.
-1. [Install the Worker](#install-the-worker).
-1. Deploy the pipeline.
+The Observability Pipelines Worker is software that runs in your environment to centrally aggregate, process, and route your logs.
 
 **Note**: If you are using a proxy, see the `proxy` option in [Bootstrap options][1].
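+
+As a minimal sketch, the proxy is configured in the Worker's bootstrap file. The file path and key names below are assumptions for illustration; see [Bootstrap options][1] for the authoritative list of options:
+
+```yaml
+# Hypothetical bootstrap file sketch (for example, /etc/observability-pipelines-worker/bootstrap.yaml).
+proxy:
+  enabled: true
+  https: "https://proxy.example.com:3128"  # route the Worker's egress through this proxy
+  no_proxy:
+    - "localhost"
+    - "127.0.0.1"
+```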
diff --git a/content/en/observability_pipelines/processors/_index.md b/content/en/observability_pipelines/processors/_index.md
index 788ffe0d77785..b420aa91e6a08 100644
--- a/content/en/observability_pipelines/processors/_index.md
+++ b/content/en/observability_pipelines/processors/_index.md
@@ -11,17 +11,11 @@ further_reading:
 
 The processors outlined in this documentation are specific to on-premises logging environments. To parse, structure, and enrich cloud-based logs, see the Log Management documentation.
 
-Use Observability Pipelines' processors to parse, structure, and enrich your logs. All processors are available for all templates. Set up your processors in the Observability Pipelines UI after you have selected a template, source, and destinations. This is step 5 in the pipeline setup process:
+Use Observability Pipelines' processors to parse, structure, and enrich your logs. When you create a pipeline in the UI, pre-selected processors are added to your processor group based on the selected template. You can add more processors and delete existing ones to fit your processing needs.
 
-1. Navigate to [Observability Pipelines][1].
-1. Select a template.
-1. Select and set up your source.
-1. Select and set up your destinations.
-1. Set up your processors.
-1. Install the Observability Pipelines Worker.
-1. Enable monitors for your pipeline.
+Processors in a group are executed from top to bottom. Their order is important because every log is checked by each processor, but a processor only acts on the logs that match its filter query. To modify the order of the processors, use the drag handle in the top-left corner of the processor you want to move.
 
-{{% observability_pipelines/processors/intro %}}
+Select a processor in the left navigation menu to see more information about it.
 
 {{% observability_pipelines/processors/filter_syntax %}}
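+
+For example, as a hypothetical sketch, the following filter query scopes a processor to production error logs; logs that do not match are passed down the pipeline unprocessed:
+
+```
+env:prod status:error
+```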
diff --git a/content/en/observability_pipelines/set_up_pipelines/_index.md b/content/en/observability_pipelines/set_up_pipelines/_index.md
index cedd2b67bbb81..72fc7cdd1bfde 100644
--- a/content/en/observability_pipelines/set_up_pipelines/_index.md
+++ b/content/en/observability_pipelines/set_up_pipelines/_index.md
@@ -20,56 +20,75 @@ further_reading:
 
 The pipelines and processors outlined in this documentation are specific to on-premises logging environments. To aggregate, process, and route cloud-based logs, see Log Management Pipelines.
 
-In Observability Pipelines, a pipeline is a sequential path with three types of components: source, processors, and destinations. The Observability Pipeline [source][1] receives logs from your log source (for example, the Datadog Agent). The [processors][2] enrich and transform your data, and the [destination][3] is where your processed logs are sent. For some templates, your logs are sent to more than one destination. For example, if you use the Archive Logs template, your logs are sent to a cloud storage provider and another specified destination.
+In Observability Pipelines, a pipeline is a sequential path with three types of components: source, processors, and destinations. The Observability Pipeline [source][1] receives logs from your log source (for example, the Datadog Agent). The [processors][2] enrich and transform your data, and the [destination][3] is where your processed logs are sent.
+
+{{< img src="observability_pipelines/archive_log_pipeline.png" alt="Pipeline with one source connected to two processor groups and two destinations" style="width:100%;" >}}
 
 ## Set up a pipeline
 
 {{< tabs >}}
 {{% tab "Pipeline UI" %}}
 
-Set up your pipelines and its [sources][1], [processors][2], and [destinations][3] in the Observability Pipelines UI. The general setup steps are:
+Set up your pipeline and its sources, processors, and destinations in the Observability Pipelines UI.
 
 1. Navigate to [Observability Pipelines][13].
 1. Select a template.
-1. Select and set up your source.
-1. Select and set up your destinations.
-1. Set up your processors.
-1. [Install the Observability Pipelines Worker][12].
-1. Enable monitors for your pipeline.
-
-For detailed setup instructions, select a template-specific documentation and then select your source from that page:
-   - [Log volume control][4]
-   - [Dual ship logs][5]
-   - [Split logs][6]
-   - [Archive logs to Datadog Archives][7]
-   - [Sensitive data redaction][8]
-   - [Log Enrichment][9]
-   - [Generate Metrics][10]
+   - [Archive Logs][4]
+   - [Dual Ship Logs][5]
+   - [Generate Metrics][6]
+   - [Log Enrichment][7]
+   - [Log Volume Control][8]
+   - [Sensitive Data Redaction][9]
+   - [Split Logs][10]
+1. Select and set up your [source][1].
+1. Select and set up your [destinations][2].
+1. Set up your [processors][3].
+1. To add another set of processors and destinations to the source, click the plus sign (**+**) to the left of the processor group.
+   - To delete a processor group, you need to delete all destinations linked to that processor group. When the last destination is deleted, the processor group is removed with it.
+1. To add another destination to a processor group, click the plus sign (**+**) to the right of the processor group.
+   - To delete a destination, click the pencil icon at the top right of the destination, and select **Delete destination**. If you delete a destination from a processor group that has multiple destinations, only the deleted destination is removed. If you delete a destination from a processor group that only has one destination, both the destination and the processor group are removed.
+   - **Notes**:
+     - A pipeline must have at least one destination. If a processor group only has one destination, that destination cannot be deleted.
+     - You can add up to three destinations per pipeline.
+     - A specific destination can only be added once. For example, you cannot add multiple Splunk HEC destinations.
+1. Click **Next: Install**.
+1. Select the platform on which you want to install the Worker.
+1. Enter the [environment variables][15] for your sources and destinations, if applicable. See the sketch after these steps for an example.
+1. Follow the instructions on installing the Worker for your platform. The command provided in the UI to install the Worker has the relevant environment variables populated. See [Install the Observability Pipelines Worker][12] for more information.
+   - **Note**: If you are using a proxy, see the `proxy` option in [Bootstrap options][16].
+1. Enable out-of-the-box monitors for your pipeline.
+   1. Navigate to the [Pipelines][13] page and find your pipeline.
+   1. Click **Enable monitors** in the **Monitors** column for your pipeline.
+   1. Click **Start** to set up a monitor for one of the suggested use cases.
+      The new metric monitor page is configured based on the use case you selected. You can update the configuration to further customize it. See the [Metric monitor documentation][14] for more information.
 
 After you have set up your pipeline, see [Update Existing Pipelines][11] if you want to make any changes to it.
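+
+For the environment variables and install steps above, the exact variable names and values are shown in the UI. As a hypothetical sketch (assuming Docker Compose as the platform), running the Worker boils down to supplying your API key, pipeline ID, and site, plus any source or destination secrets; `DD_OP_SOURCE_SPLUNK_HEC_TOKEN` is a made-up placeholder:
+
+```yaml
+# docker-compose.yaml sketch; replace the placeholder values with the ones shown in the UI.
+services:
+  observability-pipelines-worker:
+    image: datadog/observability-pipelines-worker:latest
+    command: ["run"]
+    environment:
+      DD_API_KEY: "<YOUR_DATADOG_API_KEY>"
+      DD_OP_PIPELINE_ID: "<YOUR_PIPELINE_ID>"
+      DD_SITE: "datadoghq.com"
+      DD_OP_SOURCE_SPLUNK_HEC_TOKEN: "<TOKEN>"  # hypothetical source secret
+```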
Creating pipelines using the Datadog API is in Preview. Fill out the form to request access.
-You can use Datadog API to [create a pipeline][1]. After the pipeline has been created, [install the Worker][2] to start sending logs through the pipeline. +You can use the Observability Pipelines API to [create a pipeline][1]. After the pipeline has been created, [install the Worker][2] to start sending logs through the pipeline. -Pipelines created using the API are read-only in the UI. Use the [update a pipeline][3] endpoint to make any changes to an existing pipeline. +**Note**: Pipelines created using the API are read-only in the UI. Use the [update a pipeline][3] endpoint to make any changes to an existing pipeline. [1]: /api/latest/observability-pipelines/#create-a-new-pipeline [2]: /observability_pipelines/install_the_worker/ diff --git a/content/en/observability_pipelines/set_up_pipelines/archive_logs/_index.md b/content/en/observability_pipelines/set_up_pipelines/archive_logs/_index.md index d9e39fb69dccc..24430a5fc8e6b 100644 --- a/content/en/observability_pipelines/set_up_pipelines/archive_logs/_index.md +++ b/content/en/observability_pipelines/set_up_pipelines/archive_logs/_index.md @@ -3,6 +3,10 @@ title: Archive Logs to Datadog Archives disable_toc: false aliases: - /observability_pipelines/archive_logs/ +further_reading: +- link: "/observability_pipelines/set_up_pipelines/" + tag: "Documentation" + text: "Set up a pipeline" --- ## Overview @@ -15,22 +19,9 @@ Use Observability Pipelines to route ingested logs to a cloud storage solution ( {{% observability_pipelines/use_case_images/archive_logs %}} -Select a source to get started: +## Further reading -- [Amazon Data Firehose][12] -- [Amazon S3][11] -- [Datadog Agent][1] -- [Fluentd or Fluent Bit][2] -- [Google Pub/Sub][3] -- [HTTP Client][4] -- [HTTP Server][5] -- [Kafka][13] -- [Logstash][6] -- [Splunk HTTP Event Collector (HEC)][7] -- [Splunk Heavy or Universal Forwarders (TCP)][8] -- [Socket (TCP or UDP)][14] -- [Sumo Logic Hosted Collector][9] -- [rsylsog or syslog-ng][10] +{{< partial name="whats-next/whats-next.html" >}} [1]: /observability_pipelines/archive_logs/datadog_agent [2]: /observability_pipelines/archive_logs/fluent diff --git a/content/en/observability_pipelines/set_up_pipelines/dual_ship_logs/_index.md b/content/en/observability_pipelines/set_up_pipelines/dual_ship_logs/_index.md index 3ef7b3c1d4b57..710b3ee16e7dc 100644 --- a/content/en/observability_pipelines/set_up_pipelines/dual_ship_logs/_index.md +++ b/content/en/observability_pipelines/set_up_pipelines/dual_ship_logs/_index.md @@ -3,6 +3,10 @@ title: Dual Ship Logs disable_toc: false aliases: - /observability_pipelines/dual_ship_logs/ +further_reading: +- link: "/observability_pipelines/set_up_pipelines/" + tag: "Documentation" + text: "Set up a pipeline" --- ## Overview @@ -11,22 +15,9 @@ As your infrastructure and your organization scales, so does your log volume, th {{% observability_pipelines/use_case_images/dual_ship_logs %}} -Select a source to get started: +## Further reading -- [Amazon Data Firehose][12] -- [Amazon S3][11] -- [Datadog Agent][1] -- [Fluentd or Fluent Bit][2] -- [Google Pub/Sub][3] -- [HTTP Client][4] -- [HTTP Server][5] -- [Kafka][13] -- [Logstash][6] -- [Splunk HTTP Event Collector (HEC)][7] -- [Splunk Heavy or Universal Forwarders (TCP)][8] -- [Socket (TCP or UDP)][14] -- [Sumo Logic Hosted Collector][9] -- [rsyslog or syslog-ng][10] +{{< partial name="whats-next/whats-next.html" >}} [1]: /observability_pipelines/dual_ship_logs/datadog_agent [2]: /observability_pipelines/dual_ship_logs/fluent diff --git
a/content/en/observability_pipelines/set_up_pipelines/generate_metrics/_index.md b/content/en/observability_pipelines/set_up_pipelines/generate_metrics/_index.md index 9619bad5af6a6..85c04758a9cbb 100644 --- a/content/en/observability_pipelines/set_up_pipelines/generate_metrics/_index.md +++ b/content/en/observability_pipelines/set_up_pipelines/generate_metrics/_index.md @@ -1,6 +1,10 @@ --- title: Generate Metrics disable_toc: false +further_reading: +- link: "/observability_pipelines/set_up_pipelines/" + tag: "Documentation" + text: "Set up a pipeline" --- ## Overview @@ -15,22 +19,9 @@ Some log sources, such as firewalls and network appliances, generate a large vol {{% observability_pipelines/use_case_images/generate_metrics %}} -Select a source to get started: - -- [Amazon Data Firehose][12] -- [Amazon S3][11] -- [Datadog Agent][1] -- [Fluentd or Fluent Bit][2] -- [Google Pub/Sub][3] -- [HTTP Client][4] -- [HTTP Server][5] -- [Kafka][13] -- [Logstash][6] -- [Socket][14] -- [Splunk HTTP Event Collector (HEC)][7] -- [Splunk Heavy or Universal Forwarders (TCP)][8] -- [Sumo Logic Hosted Collector][9] -- [rsyslog or syslog-ng][10] +## Further reading + +{{< partial name="whats-next/whats-next.html" >}} [1]: /observability_pipelines/set_up_pipelines/generate_metrics/datadog_agent [2]: /observability_pipelines/set_up_pipelines/generate_metrics/fluent diff --git a/content/en/observability_pipelines/set_up_pipelines/log_enrichment/_index.md b/content/en/observability_pipelines/set_up_pipelines/log_enrichment/_index.md index 7ae81b1a011cf..99911f0ef5c56 100644 --- a/content/en/observability_pipelines/set_up_pipelines/log_enrichment/_index.md +++ b/content/en/observability_pipelines/set_up_pipelines/log_enrichment/_index.md @@ -3,6 +3,10 @@ title: Log Enrichment disable_toc: false aliases: - /observability_pipelines/log_enrichment/ +further_reading: +- link: "/observability_pipelines/set_up_pipelines/" + tag: "Documentation" + text: "Set up a pipeline" --- ## Overview @@ -16,22 +20,9 @@ aliases: {{% observability_pipelines/use_case_images/log_enrichment %}} -Select a source to get started: +## Further reading -- [Amazon Data Firehose][12] -- [Amazon S3][11] -- [Datadog Agent][1] -- [Fluentd or Fluent Bit][2] -- [Google Pub/Sub][3] -- [HTTP Client][4] -- [HTTP Server][5] -- [Kafka][13] -- [Logstash][6] -- [Splunk HTTP Event Collector (HEC)][7] -- [Splunk Heavy or Universal Forwarders (TCP)][8] -- [Socket (TCP or UDP)][14] -- [Sumo Logic Hosted Collector][9] -- [rsyslog or syslog-ng][10] +{{< partial name="whats-next/whats-next.html" >}} [1]: /observability_pipelines/log_enrichment/datadog_agent [2]: /observability_pipelines/log_enrichment/fluent diff --git a/content/en/observability_pipelines/set_up_pipelines/log_volume_control/_index.md b/content/en/observability_pipelines/set_up_pipelines/log_volume_control/_index.md index 5ea30f986fd25..6e5f6b02925be 100644 --- a/content/en/observability_pipelines/set_up_pipelines/log_volume_control/_index.md +++ b/content/en/observability_pipelines/set_up_pipelines/log_volume_control/_index.md @@ -7,6 +7,9 @@ further_reading: - link: "/observability_pipelines/guide/strategies_for_reducing_log_volume/" tag: "documentation" text: "Strategies for Reducing Log Volume" +- link: "/observability_pipelines/set_up_pipelines/" + tag: "Documentation" + text: "Set up a pipeline" --- ## Overview @@ -21,22 +24,9 @@ As your infrastructure and applications grow, so does your log volume and the co {{% observability_pipelines/use_case_images/log_volume_control %}} -Select a log 
source to get started: - -- [Amazon Data Firehose][12] -- [Amazon S3][11] -- [Datadog Agent][1] -- [Fluentd or Fluent Bit][2] -- [Google Pub/Sub][3] -- [HTTP Client][4] -- [HTTP Server][5] -- [Kafka][13] -- [Logstash][6] -- [Splunk HTTP Event Collector (HEC)][7] -- [Splunk Heavy or Universal Forwarders (TCP)][8] -- [Socket (TCP or UDP)][14] -- [Sumo Logic Hosted Collector][9] -- [rsyslog or syslog-ng][10] ## Further reading diff --git a/content/en/observability_pipelines/set_up_pipelines/sensitive_data_redaction/_index.md b/content/en/observability_pipelines/set_up_pipelines/sensitive_data_redaction/_index.md index 0a7836bba85ad..54ffe777490a4 100644 --- a/content/en/observability_pipelines/set_up_pipelines/sensitive_data_redaction/_index.md +++ b/content/en/observability_pipelines/set_up_pipelines/sensitive_data_redaction/_index.md @@ -3,6 +3,10 @@ title: Sensitive Data Redaction disable_toc: false aliases: - /observability_pipelines/sensitive_data_redaction/ +further_reading: +- link: "/observability_pipelines/set_up_pipelines/" + tag: "Documentation" + text: "Set up a pipeline" --- ## Overview @@ -13,22 +17,9 @@ Use the Observability Pipelines Worker to identify, tag, and optionally redact o {{% observability_pipelines/use_case_images/sensitive_data_redaction %}} -Select a log source to get started: +## Further reading -- [Amazon Data Firehose][12] -- [Amazon S3][11] -- [Datadog Agent][1] -- [Fluentd or Fluent Bit][2] -- [Google Pub/Sub][3] -- [HTTP Client][4] -- [HTTP Server][5] -- [Kafka][13] -- [Logstash][6] -- [Splunk HTTP Event Collector (HEC)][7] -- [Splunk Heavy or Universal Forwarders (TCP)][8] -- [Socket (TCP or UDP)][14] -- [Sumo Logic Hosted Collector][9] -- [rsyslog or syslog-ng][10] +{{< partial name="whats-next/whats-next.html" >}} [1]: /observability_pipelines/sensitive_data_redaction/datadog_agent [2]: /observability_pipelines/sensitive_data_redaction/fluent diff --git a/content/en/observability_pipelines/set_up_pipelines/split_logs/_index.md b/content/en/observability_pipelines/set_up_pipelines/split_logs/_index.md index 4ab0c18367ba3..2ea08160b74b5 100644 --- a/content/en/observability_pipelines/set_up_pipelines/split_logs/_index.md +++ b/content/en/observability_pipelines/set_up_pipelines/split_logs/_index.md @@ -3,6 +3,10 @@ title: Split Logs disable_toc: false aliases: - /observability_pipelines/split_logs/ +further_reading: +- link: "/observability_pipelines/set_up_pipelines/" + tag: "Documentation" + text: "Set up a pipeline" --- ## Overview @@ -11,22 +15,9 @@ Often, organizations need to send their logs to multiple products for different {{% observability_pipelines/use_case_images/split_logs %}} -Select your log source to get started: +## Further reading -- [Amazon Data Firehose][12] -- [Amazon S3][11] -- [Datadog Agent][1] -- [Fluentd or Fluent Bit][2] -- [Google Pub/Sub][3] -- [HTTP Client][4] -- [HTTP Server][5] -- [Kafka][13] -- [Logstash][6] -- [Splunk HTTP Event Collector (HEC)][7] -- [Splunk Heavy or Universal Forwarders (TCP)][8] -- [Socket (TCP or UDP)][14] -- [Sumo Logic Hosted Collector][9] -- [rsyslog or syslog-ng][10] +{{< partial name="whats-next/whats-next.html" >}} [1]: /observability_pipelines/split_logs/datadog_agent [2]: /observability_pipelines/split_logs/fluent diff --git a/content/en/observability_pipelines/sources/_index.md index 34e2721ea3b8d..b697853f78397 100644 --- 
a/content/en/observability_pipelines/sources/_index.md +++ b/content/en/observability_pipelines/sources/_index.md @@ -15,18 +15,9 @@ further_reading: ## Overview -Use Observability Pipelines' sources to receive logs from your different log sources. +Use Observability Pipelines' sources to receive logs from your different log sources. Sources have different prerequisites and settings. Some sources also need to be configured to send logs to the Observability Pipelines Worker. -Select and set up your source when you build a pipeline in the UI. This is step 3 in the pipeline setup process: - -1. Navigate to [Observability Pipelines][1]. -1. Select a template. -1. Select and set up your source. -1. Select and set up your destinations. -1. Set up your processors. -1. Install the Observability Pipelines Worker. - -Sources have different prerequisites and settings. Some sources also need to be configured to send logs to the Observability Pipelines Worker. +Select a source in the left navigation menu to see more information about it. ## Standard metadata fields diff --git a/content/en/observability_pipelines/sources/amazon_data_firehose.md b/content/en/observability_pipelines/sources/amazon_data_firehose.md index a4981120bccee..972cafc09e41e 100644 --- a/content/en/observability_pipelines/sources/amazon_data_firehose.md +++ b/content/en/observability_pipelines/sources/amazon_data_firehose.md @@ -15,6 +15,10 @@ Select and set up this source when you [set up a pipeline][1]. The information b {{% observability_pipelines/source_settings/amazon_data_firehose %}} +## Set the environment variables + +{{% observability_pipelines/configure_existing_pipelines/source_env_vars/amazon_data_firehose %}} + ## Send logs to the Observability Pipelines Worker over Amazon Data Firehose {{% observability_pipelines/log_source_configuration/amazon_data_firehose %}} diff --git a/content/en/observability_pipelines/sources/amazon_s3.md b/content/en/observability_pipelines/sources/amazon_s3.md index 1d29e9422c375..ed340de050ec6 100644 --- a/content/en/observability_pipelines/sources/amazon_s3.md +++ b/content/en/observability_pipelines/sources/amazon_s3.md @@ -15,6 +15,10 @@ Select and set up this source when you [set up a pipeline][1]. The information b {{% observability_pipelines/source_settings/amazon_s3 %}} +## Set the environment variables + +{{% observability_pipelines/configure_existing_pipelines/source_env_vars/amazon_s3 %}} + ## AWS Authentication {{% observability_pipelines/aws_authentication/instructions %}} diff --git a/content/en/observability_pipelines/sources/datadog_agent.md b/content/en/observability_pipelines/sources/datadog_agent.md index 0cedb49419be2..e8d597a668f68 100644 --- a/content/en/observability_pipelines/sources/datadog_agent.md +++ b/content/en/observability_pipelines/sources/datadog_agent.md @@ -11,8 +11,14 @@ Use Observability Pipelines' Datadog Agent source to receive logs from the Datad ## Set up the source in the pipeline UI +Select and set up this source when you [set up a pipeline][1]. The information below is for the source settings in the pipeline UI. + {{% observability_pipelines/source_settings/datadog_agent %}} +## Set the environment variables + +{{% observability_pipelines/configure_existing_pipelines/source_env_vars/datadog_agent %}} + ## Connect the Datadog Agent to the Observability Pipelines Worker Use the Agent configuration file or the Agent Helm chart values file to connect the Datadog Agent to the Observability Pipelines Worker. 
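To make that last step concrete, here is a minimal sketch of the Agent-side configuration. The `observability_pipelines_worker` section mirrors the documented Agent option, but treat the exact host and port values as assumptions; the shortcode above renders the authoritative instructions for your pipeline.

```yaml
# datadog.yaml (illustrative sketch): forward the Agent's logs to the
# Observability Pipelines Worker instead of sending them directly to
# Datadog. <OPW_HOST> and port 8282 are assumed example values.
observability_pipelines_worker:
  logs:
    enabled: true
    url: "http://<OPW_HOST>:8282"
```

With this in place, the Agent ships its logs to the Worker, which runs the pipeline's processors before forwarding the logs to the configured destinations.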
diff --git a/content/en/observability_pipelines/sources/fluent.md b/content/en/observability_pipelines/sources/fluent.md index 15ad7a6718b8e..028e73ac9f3cd 100644 --- a/content/en/observability_pipelines/sources/fluent.md +++ b/content/en/observability_pipelines/sources/fluent.md @@ -15,6 +15,10 @@ Select and set up this source when you [set up a pipeline][1]. The information b {{% observability_pipelines/source_settings/fluent %}} +## Set the environment variables + +{{% observability_pipelines/configure_existing_pipelines/source_env_vars/fluent %}} + ## Send logs to the Observability Pipelines Worker over Fluent {{% observability_pipelines/log_source_configuration/fluent %}} diff --git a/content/en/observability_pipelines/sources/google_pubsub.md b/content/en/observability_pipelines/sources/google_pubsub.md index 34c263c8cf05e..f696d06028d1a 100644 --- a/content/en/observability_pipelines/sources/google_pubsub.md +++ b/content/en/observability_pipelines/sources/google_pubsub.md @@ -15,4 +15,8 @@ Select and set up this source when you [set up a pipeline][1]. The information b {{% observability_pipelines/source_settings/google_pubsub %}} +## Set the environment variables + +{{% observability_pipelines/configure_existing_pipelines/source_env_vars/google_pubsub %}} + [1]: /observability_pipelines/set_up_pipelines/ \ No newline at end of file diff --git a/content/en/observability_pipelines/sources/http_client.md b/content/en/observability_pipelines/sources/http_client.md index 5bf728e271f81..39ceb16f83913 100644 --- a/content/en/observability_pipelines/sources/http_client.md +++ b/content/en/observability_pipelines/sources/http_client.md @@ -15,4 +15,8 @@ Select and set up this source when you [set up a pipeline][1]. The information b {{% observability_pipelines/source_settings/http_client %}} +## Set the environment variables + +{{% observability_pipelines/configure_existing_pipelines/source_env_vars/http_client %}} + [1]: /observability_pipelines/set_up_pipelines/ diff --git a/content/en/observability_pipelines/sources/http_server.md b/content/en/observability_pipelines/sources/http_server.md index a6bc7b89a27ca..33796d7dfd015 100644 --- a/content/en/observability_pipelines/sources/http_server.md +++ b/content/en/observability_pipelines/sources/http_server.md @@ -17,6 +17,10 @@ Select and set up this source when you [set up a pipeline][1]. The information b {{% observability_pipelines/source_settings/http_server %}} +## Set the environment variables + +{{% observability_pipelines/configure_existing_pipelines/source_env_vars/http_server %}} + ## Send AWS vended logs with the Datadog Lambda Forwarder to Observability Pipelines To send AWS vended logs to Observability Pipelines with the HTTP/S Server source: diff --git a/content/en/observability_pipelines/sources/kafka.md b/content/en/observability_pipelines/sources/kafka.md index cdb2d3e0de4f1..0e3905bd8a15b 100644 --- a/content/en/observability_pipelines/sources/kafka.md +++ b/content/en/observability_pipelines/sources/kafka.md @@ -15,6 +15,10 @@ Select and set up this source when you [set up a pipeline][1]. 
The information b {{% observability_pipelines/source_settings/kafka %}} +## Set the environment variables + +{{% observability_pipelines/configure_existing_pipelines/source_env_vars/kafka %}} + ## librdkafka options These are the available librdkafka options: diff --git a/content/en/observability_pipelines/sources/logstash.md b/content/en/observability_pipelines/sources/logstash.md index c88336244ddd5..2e5cfb7c8e187 100644 --- a/content/en/observability_pipelines/sources/logstash.md +++ b/content/en/observability_pipelines/sources/logstash.md @@ -15,6 +15,10 @@ Select and set up this source when you [set up a pipeline][1]. The information b {{% observability_pipelines/source_settings/logstash %}} +## Set the environment variables + +{{% observability_pipelines/configure_existing_pipelines/source_env_vars/logstash %}} + ## Send logs to the Observability Pipelines Worker over Logstash {{% observability_pipelines/log_source_configuration/logstash %}} diff --git a/content/en/observability_pipelines/sources/socket.md b/content/en/observability_pipelines/sources/socket.md index 5e87172bf5dce..04be9166f9f83 100644 --- a/content/en/observability_pipelines/sources/socket.md +++ b/content/en/observability_pipelines/sources/socket.md @@ -11,6 +11,8 @@ Use Observability Pipelines' Socket source to send logs to the Worker over a soc ## Set up the source in the pipeline UI +Select and set up this source when you [set up a pipeline][1]. The information below is for the source settings in the pipeline UI. + {{% observability_pipelines/source_settings/socket %}} [1]: /observability_pipelines/set_up_pipelines/ \ No newline at end of file diff --git a/content/en/observability_pipelines/sources/splunk_hec.md b/content/en/observability_pipelines/sources/splunk_hec.md index 5914a4233b8af..82e05a2930fa6 100644 --- a/content/en/observability_pipelines/sources/splunk_hec.md +++ b/content/en/observability_pipelines/sources/splunk_hec.md @@ -15,6 +15,10 @@ Select and set up this source when you [set up a pipeline][1]. The information b {{% observability_pipelines/source_settings/splunk_hec %}} +## Set the environment variables + +{{% observability_pipelines/configure_existing_pipelines/source_env_vars/splunk_hec %}} + {{% observability_pipelines/log_source_configuration/splunk_hec %}} [1]: /observability_pipelines/set_up_pipelines/ diff --git a/content/en/observability_pipelines/sources/splunk_tcp.md b/content/en/observability_pipelines/sources/splunk_tcp.md index 3296cc677e45a..285d77bf5bdb5 100644 --- a/content/en/observability_pipelines/sources/splunk_tcp.md +++ b/content/en/observability_pipelines/sources/splunk_tcp.md @@ -15,6 +15,10 @@ Select and set up this source when you [set up a pipeline][1]. The information b {{% observability_pipelines/source_settings/splunk_tcp %}} +## Set the environment variables + +{{% observability_pipelines/configure_existing_pipelines/source_env_vars/splunk_tcp %}} + {{% observability_pipelines/log_source_configuration/splunk_tcp %}} [1]: /observability_pipelines/set_up_pipelines/ diff --git a/content/en/observability_pipelines/sources/sumo_logic.md b/content/en/observability_pipelines/sources/sumo_logic.md index ea5237840b193..0eded7f4e4c09 100644 --- a/content/en/observability_pipelines/sources/sumo_logic.md +++ b/content/en/observability_pipelines/sources/sumo_logic.md @@ -15,6 +15,10 @@ Select and set up this source when you [set up a pipeline][1]. 
The information b {{% observability_pipelines/source_settings/sumo_logic %}} +## Set the environment variables + +{{% observability_pipelines/configure_existing_pipelines/source_env_vars/sumo_logic %}} + {{% observability_pipelines/log_source_configuration/sumo_logic %}} [1]: /observability_pipelines/set_up_pipelines/ \ No newline at end of file diff --git a/content/en/observability_pipelines/sources/syslog.md b/content/en/observability_pipelines/sources/syslog.md index f2238751eacd2..d6ff629b39e87 100644 --- a/content/en/observability_pipelines/sources/syslog.md +++ b/content/en/observability_pipelines/sources/syslog.md @@ -17,6 +17,10 @@ Select and set up this source when you [set up a pipeline][1]. The information b {{% observability_pipelines/source_settings/syslog %}} +## Set the environment variables + +{{% observability_pipelines/configure_existing_pipelines/source_env_vars/syslog %}} + ## Send logs to the Observability Pipelines Worker over syslog {{% observability_pipelines/log_source_configuration/syslog %}} diff --git a/content/en/observability_pipelines/use_cases.md b/content/en/observability_pipelines/use_cases.md new file mode 100644 index 0000000000000..8cbbeded5bcba --- /dev/null +++ b/content/en/observability_pipelines/use_cases.md @@ -0,0 +1,79 @@ +--- +title: Use Cases +disable_toc: false +further_reading: +- link: "/observability_pipelines/set_up_pipelines/#set-up-a-pipeline" + tag: "Documentation" + text: "Set up pipelines" +--- + +## Overview + +When you create a pipeline in the Observability Pipelines UI, select one of the out-of-the-box templates to build and deploy pipelines based on your use case. + +{{< img src="observability_pipelines/templates_20241003.png" alt="The Observability Pipelines UI showing the seven templates" style="width:100%;" >}} + +## Templates + +The templates are built for the following use cases: + +### Archive Logs + +Use the Archive Logs template to store logs in a cloud storage solution (Amazon S3, Google Cloud Storage, or Azure Storage). The archived logs are stored in a Datadog-rehydratable format, so that they can be rehydrated in Datadog as needed. This is useful when: + +- You have a high volume of noisy logs, but might need to index them in Datadog Log Management ad hoc for an investigation. +- You are migrating to Datadog Log Management and want to retain historical logs after completing the migration. +- You have a retention policy to fulfill compliance requirements but don't necessarily need to index those logs. + +### Dual Ship Logs + +As your organization grows, your observability needs for different use cases, such as security, archiving, and log management, also change. This could mean having to trial different archiving, SIEM, and log management solutions. However, managing log pipelines to different solutions can be complicated. Use the Dual Ship Logs template to send your logs to different destinations, so you can evaluate different tools and workflows with minimal disruption to your production environment. + +### Generate Metrics + +Some log sources, such as firewalls and network appliances, generate a large volume of log events that contain data that doesn't need to be stored. Often, you just want to see a summary of the logs and compare it to historical data. Log-based metrics are also a cost-efficient way to summarize log data from your entire ingest stream. 
Use the Generate Metrics template to generate a count metric of logs that match a query or a distribution metric of a numeric value contained in the logs, such as a request duration. The template starts you off with the following processors: + +- **Filter**: Add a query to send only a subset of logs based on your conditions. +- **Grok Parser**: Parse your logs using grok parsing rules that are available for a set of sources or add custom parsing rules. +- **Generate metrics**: Generate metrics for your logs or a subset of them. You can generate the following types of metrics: + | Metric type | Description | Example | + | ------------ | ----------------------------------------------------------------------------------------------------------------------------------------------- | --------------------------------------------------------------------------------------------------- | + | COUNT | Represents the total number of event occurrences in one time interval. This value can be reset to zero, but cannot be decreased. | You want to count the number of logs with `status:error`. | + | GAUGE | Represents a snapshot of events in one time interval. | You want to measure the latest CPU utilization per host for all logs in the production environment. | + | DISTRIBUTION | Represents the global statistical distribution of a set of values calculated across your entire distributed infrastructure in one time interval. | You want to measure the average time it takes for an API call to be made. | + + +### Log Enrichment + +Your organization's different services, systems, and applications all generate logs containing layers of information and in different formats. To manage these logs, you might need to standardize their format and add information to make it easier to search and analyze them. For example, each log source has its own unique format. This can make it difficult to search and analyze during investigations if the logs have not been reformatted and standardized. You could also have additional information, such as customer IDs or IP addresses, that you want to add to your logs. Use the Log Enrichment template and these Observability Pipelines processors to enrich and transform your logs: + +- **Enrichment Table**: Enrich your logs with information from a reference table, which could be a local file or a GeoIP database. +- **Grok Parser**: Parse your logs using grok parsing rules that are available for a set of sources. +- **Add hostname**: Add the name of the host that sent the log so you can use it to find the root cause of an issue. +- **Parse JSON**: Convert fields into JSON objects. + +### Log Volume Control + +Raw logs are noisy, and only some logs are useful for further search and analysis during investigations. Use the Log Volume Control template to determine which logs to send to your indexed solution, such as a SIEM or log management solution. This helps you increase the value of your indexed logs and stay within your planned budget. + +You can use the following processors in the Observability Pipelines Worker to manage your log volume, as sketched after this list: + +- **Filter**: Add a query to send only a subset of logs based on your conditions. +- **Sample**: Define a sampling rate to send only a subset of your logs. +- **Quota**: Enforce daily limits on either the volume of log data or the number of log events. +- **Dedupe**: Drop duplicate copies of your logs, for example, due to retries because of network issues. +- **Remap**: Add, drop, or rename a field in your logs. 
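+To make the chain above concrete, here is a hypothetical sketch of those processors applied in order. This is illustrative pseudo-configuration only; pipelines are built in the UI, and these field names are not the product's actual schema:
+
+```yaml
+# Hypothetical processor chain; field names are illustrative only.
+processors:
+  - type: filter      # keep only production, non-debug logs
+    query: "env:production AND NOT status:debug"
+  - type: sample      # then keep 1 in 10 of the remaining logs
+    rate: 10
+  - type: quota       # cap the volume sent downstream each day
+    limit: 100GB
+    period: daily
+  - type: dedupe      # drop duplicate copies caused by client retries
+    fields: ["host", "message"]
+  - type: remap       # rename a field before forwarding
+    rename:
+      http.status: status_code
+```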
+ +### Sensitive Data Redaction + +Sensitive data, such as credit card numbers, bank routing numbers, and API keys, can be revealed unintentionally in your logs, which can expose your organization to financial and privacy risks. + +Use the Sensitive Data Redaction template to detect and redact sensitive information on premises. The Observability Pipelines sensitive data scanner processor provides 70 out-of-the-box scanning rules, but you can also create your own custom scanning rules using regular expressions, as sketched below. The out-of-the-box rules recognize standard patterns such as credit card numbers, email addresses, IP addresses, API and SSH keys, and access tokens. + +### Split Logs + +When you have logs from different services and applications, you might need to send them to different downstream services for querying, analysis, and alerting. For example, you might want to send security logs to a SIEM solution and DevOps logs to Datadog. Use the Split Logs template to preprocess your logs separately for each destination before sending them downstream. + +## Further reading + +{{< partial name="whats-next/whats-next.html" >}} \ No newline at end of file diff --git a/layouts/shortcodes/observability_pipelines/multiple_processors.md b/layouts/shortcodes/observability_pipelines/multiple_processors.md index 01a70feb9f3b3..8005df6fbfdf8 100644 --- a/layouts/shortcodes/observability_pipelines/multiple_processors.md +++ b/layouts/shortcodes/observability_pipelines/multiple_processors.md @@ -1,3 +1,3 @@ -Click the plus sign (**+**) to the left of the processors to add another set of processors and destinations to the source. See [Add additional destinations](#add-additional-destinations) on adding additional destinations to the processor group. +In the Pipeline UI, click the plus sign (**+**) to the left of the processors to add another set of processors and destinations to the source. See [Add additional destinations](#add-additional-destinations) for details on adding additional destinations to the processor group. To delete a processor group, you need to delete all destinations linked to that processor group. When the last destination is deleted, the processor group is removed with it. \ No newline at end of file diff --git a/static/images/observability_pipelines/archive_log_pipeline.png b/static/images/observability_pipelines/archive_log_pipeline.png new file mode 100644 index 0000000000000..0b87efb324d4d Binary files /dev/null and b/static/images/observability_pipelines/archive_log_pipeline.png differ diff --git a/static/images/observability_pipelines/dual_ship_pipeline.png b/static/images/observability_pipelines/dual_ship_pipeline.png new file mode 100644 index 0000000000000..bbe4ca5aab0ef Binary files /dev/null and b/static/images/observability_pipelines/dual_ship_pipeline.png differ
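Returning to the Sensitive Data Redaction template above, a custom scanning rule pairs a named regular expression with an action. The sketch below is hypothetical; the field layout is illustrative rather than the sensitive data scanner's actual configuration format, though the regex patterns themselves are realistic:

```yaml
# Hypothetical custom scanning rules; the layout is illustrative only.
custom_scanning_rules:
  - name: redact_visa_card_numbers
    # 16-digit Visa-style numbers, with optional spaces or dashes
    pattern: '4\d{3}([ -]?\d{4}){3}'
    action: redact
    replacement: "[REDACTED CARD]"
  - name: tag_internal_api_keys
    # assumed internal key format: "key-" followed by 32 hex characters
    pattern: 'key-[0-9a-f]{32}'
    action: tag
    tag: sensitive_data:api_key
```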