logcollector: log lines may be lost

M V

Oct 17, 2025, 3:42:09 AM
to Wazuh | Mailing List
Hello wazuh'ers,
I've created a custom decoder + rules for processing `mongodb` logs on a VM, but they aren't generating alerts as they should. The error lines from the agent are called out in the log excerpt below.

Steps which work:
1. Rules correctly formulated ✅
2. Rules trigger with correct log format ✅
3. Agent config pushed correctly by server ✅
4. Agent sends other alerts to the server ✅
5. Agent's mongodb log file has appropriate content ✅

Help please.

/1/ mongodb rule (customized)
```
<!--
  -  MongoDB rules
  -  Created by Wazuh, Inc.
  -  Copyright (C) 2015, Wazuh Inc.
  -  This program is a free software; you can redistribute it and/or modify it under the terms of GPLv2.
-->

<!-- ID: 110000 - 110014 -->

<!--
F Fatal
E Error
W Warning
I Informational, for Verbosity Level of 0
D Debug, for All Verbosity Levels > 0
-->

<!-- Modified by Maulik Vaidya
    {
        <whatever_from_mongo.log>,
        "esco.app": "mongo"
        "esco.log": "mongo.log"
        "esco.file": "/var/log/mongodb"
    }
-->
<group name="mongodb,">

    <rule id="110000" level="0">
        <decoded_as>json</decoded_as>
        <field name="esco.app">mongo</field>
        <description>MongoDB: Match only when our tag (esco.app) is present AND is 'mongo'</description>
      </rule>

    <rule id="110001" level="9">
        <if_sid>110000</if_sid>
        <field name="s">F</field>
        <description>MongoDB: Fatal message</description>
        <group>gdpr_IV_35.7.d,</group>
    </rule>

    <rule id="110002" level="5">
        <if_sid>110000</if_sid>
        <field name="s">E</field>
        <description>MongoDB: Error message</description>
    </rule>

    <rule id="110003" level="4">
        <if_sid>110000</if_sid>
        <field name="s">W</field>
        <description>MongoDB: Warning message</description>
    </rule>

    <rule id="110004" level="0">
        <if_sid>110000</if_sid>
        <field name="s">D</field>
        <description>MongoDB: Debug message</description>
    </rule>

    <rule id="110005" level="3">
        <if_sid>110000</if_sid>
        <field name="s">I</field>
        <description>MongoDB: Informational message</description>
    </rule>

    <!--
        {"t":{"$date":"2025-04-12T00:09:25.265-07:00"},"s":"I",  "c":"NETWORK",  "id":22943,   "ctx":"listener","msg":"Connection accepted","attr":{"remote":"127.0.0.1:33692","uuid":{"uuid":{"$uuid":"f2a84222-ba2a-4608-90d1-764faa008f15"}},"connectionId":1989,"connectionCount":29}}
    -->
    <rule id="110006" level="3">
        <if_sid>110005</if_sid>
        <field name="c">NETWORK</field>
        <field name="msg" ignorecase="yes">connection accepted</field>
        <description>MongoDB: Connection from $(attr.remote) accepted.</description>
    </rule>

    <!--
        {"t":{"$date":"2025-04-12T00:06:55.342-07:00"},"s":"I",  "c":"NETWORK",  "id":22944,   "ctx":"conn1975","msg":"Connection ended","attr":{"remote":"127.0.0.1    :54632","uuid":{"uuid":{"$uuid":"eb6b93a8-4129-445d-8f9f-dcd487383386"}},"connectionId":1975,"connectionCount":28}}
    -->
    <rule id="110007" level="3">
        <if_sid>110005</if_sid>
        <field name="c">NETWORK</field>
        <field name="msg" ignorecase="yes">connection ended</field>
        <description>MongoDB: Connection from $(attr.remote) closed.</description>
    </rule>

    <!--
        {"t":{"$date":"2025-04-12T00:10:55.274-07:00"},"s":"I",  "c":"ACCESS",   "id":5286306, "ctx":"conn1998","msg":"Successfully authenticated","attr":{"client":"127.0.0.1:43220","isSpeculative":true,"isClusterMember":false,"mechanism":"SCRAM-SHA-256","user":"moriksan","db":"admin","result":0,"metrics":{"conversation_duration":{"micros":8198,"summary":{"0":{"step":1,"step_total":2,"duration_micros":51},"1":{"step":2,"step_total":2,"duration_micros":25}}}},"extraInfo":{}}}
    -->
    <rule id="110008" level="3">
        <if_sid>110005</if_sid>
        <field name="c">ACCESS</field>
        <field name="msg" ignorecase="yes">Successfully authenticated</field>
        <description>MongoDB: $(attr.user) authenticated successfully to $(attr.db) database.</description>
        <mitre>
            <id>T1078</id>
        </mitre>
        <group>authentication_success,pci_dss_10.2.5,gpg13_7.1,gpg13_7.2,gdpr_IV_32.2,hipaa_164.312.b,nist_800_53_AU.14,nist_800_53_AC.7,tsc_CC6.8,tsc_CC7.2,tsc_CC7.3,</group>
    </rule>

    <!--
        {"t":{"$date":"2024-09-06T09:48:04.391-07:00"},"s":"I",  "c":"CONTROL",  "id":20698,   "ctx":"main","msg":"***** SERVER RESTARTED *****"}
    -->
    <rule id="110009" level="10">
        <if_sid>110005</if_sid>
        <field name="c">CONTROL</field>
        <field name="msg" ignorecase="yes">server restarted</field>
        <description>MongoDB: SERVER RESTARTED!.</description>
        <mitre>
            <id>T1529</id>
        </mitre>
        <group>mongodb_restarted,</group>
    </rule>
   
    <!--
        {"t":{"$date":"2025-04-12T00:15:35.560-07:00"},"s":"I",  "c":"ACCESS",   "id":5286307, "ctx":"conn2028","msg":"Failed to authenticate","attr":{"client":"192.168.0.20:58862","isSpeculative":true,"isClusterMember":false,"mechanism":"SCRAM-SHA-256","user":"first_User","db":"admin","error":"UserNotFound: Could not find user \"first_User\" for db \"admin\"","result":11,"metrics":{"conversation_duration":{"micros":192,"summary":{"0":{"step":1,"step_total":2,"duration_micros":176}}}},"extraInfo":{}},"esco.app":"mongo"}
    -->
    <rule id="110010" level="5">
        <if_sid>110005</if_sid>
        <field name="c">ACCESS</field>
        <field name="msg" ignorecase="yes">failed to authenticate</field>
        <description>MongoDB: $(attr.user) failed authentication while accessing $(attr.db) database!</description>
        <group>authentication_failed,pci_dss_10.2.4,pci_dss_10.2.5,gpg13_7.1,gdpr_IV_35.7.d,gdpr_IV_32.2,hipaa_164.312.b,nist_800_53_AU.14,nist_800_53_AC.7,tsc_CC6.1,tsc_CC6.8,tsc_CC7.2,tsc_CC7.3,</group>
    </rule>
   
    <rule id="110011" level="10" frequency="8">
      <if_matched_sid>110000</if_matched_sid>
      <same_source_ip />
      <description>MongoDB: $(attr.user) failed multiple authentications while accessing $(attr.db) database!</description>
      <mitre>
          <id>T1110</id>
      </mitre>
      <group>authentication_failures,pci_dss_10.2.4,pci_dss_10.2.5,pci_dss_11.4,gpg13_7.1,gdpr_IV_35.7.d,gdpr_IV_32.2,hipaa_164.312.b,nist_800_53_AU.14,nist_800_53_AC.7,nist_800_53_SI.4,tsc_CC6.1,tsc_CC6.8,tsc_CC7.2,tsc_CC7.3,</group>
    </rule>

    <!--
        {"t":{"$date":"2024-08-13T22:41:00.211-07:00"},"s":"I",  "c":"ACCESS",   "id":20436,   "ctx":"conn5795","msg":"Checking authorization failed","attr":{"error":{"code":13,"codeName":"Unauthorized","errmsg":"not authorized on config to execute command { find: \"collections\", filter: { _id: \"dev_liveliteandwell.user_info\", unsplittable: { $ne: true } }, projection: { key: 1, _id: 0 }, maxTimeMS: 60000, lsid: { id: UUID(\"f80de61f-a018-4cdf-b1bc-7ff0c2b5605f\") }, $db: \"config\" }"}}}
    -->
    <rule id="110012" level="7">
        <if_sid>110005</if_sid>
        <field name="c">ACCESS</field>
        <field name="codeName" ignorecase="yes">unauthorized</field>
        <description>MongoDB: $(attr.user) executed commands without the necessary privileges.</description>
        <group>pci_dss_10.2.5,gdpr_IV_35.7.d,gdpr_IV_32.2,</group>
    </rule>

    <!--
        {"t":{"$date":"2024-09-05T19:07:09.645-07:00"},"s":"I",  "c":"NETWORK",  "id":20562,   "ctx":"SignalHandler","msg":"Shutdown: going to close listening sockets"}            
    -->
    <rule id="110013" level="12">
        <if_sid>110005</if_sid>
        <field name="ctx">SignalHandler</field>
        <field name="msg" ignorecase="yes">shutdown</field>
        <description>MongoDB: SHUTTING DOWN $(c) service!</description>
        <mitre>
            <id>T1489</id>
        </mitre>
        <group>mongodb_shutdown,service_shutdown,</group>
    </rule>
   
    <!--
        {"t":{"$date":"2024-09-05T19:07:09.750-07:00"},"s":"I",  "c":"CONTROL",  "id":8423404, "ctx":"SignalHandler","msg":"mongod shutdown complete","attr":{"Summary of time elapsed":{"Statistics":{"Enter terminal shutdown":"0 ms","Step down the replication coordinator for shutdown":"0 ms","Time spent in quiesce mode":"0 ms","Shut down FLE Crud subsystem":"0 ms","Shut down MirrorMaestro":"0 ms","Shut down WaitForMajorityService":"0 ms","Shut down the logical session cache":"4 ms","Shut down the transport layer":"18 ms","Shut down the global connection pool":"0 ms","Shut down the flow control ticket holder":"0 ms","Kill all operations for shutdown":"8 ms","Shut down all tenant migration access blockers on global shutdown":"1 ms","Shut down all open transactions":"0 ms","Acquire the RSTL for shutdown":"0 ms","Shut down the IndexBuildsCoordinator and wait for index builds to finish":"0 ms","Shut down the replica set monitor":"0 ms","Shut down the migration util executor":"0 ms","Shut down the health log":"0 ms","Shut down the TTL monitor":"0 ms","Shut down expired pre-images and documents removers":"0 ms","Shut down the storage engine":"72 ms","Wait for the oplog cap maintainer thread to stop":"0 ms","Shut down full-time data capture":"0 ms","shutdownTask total elapsed time":"110 ms"}}}}
    -->  
    <rule id="110014" level="12">
        <if_sid>110005</if_sid>
        <field name="ctx">SignalHandler</field>
        <field name="msg" ignorecase="yes">mongod shutdown complete</field>
        <description>MongoDB: SHUT DOWN completed in $(shutdownTask total elapsed time)ms !!</description>
        <mitre>
            <id>T1489</id>
        </mitre>
        <group>mongodb_shutdown,service_shutdown,</group>
    </rule>  
</group>
```
/2/ Agent's relevant config
```
  <agent_config profile="mongodb">
    <!-- Mongodb related. Custom labels help with decoder operation -->
    <localfile>
      <location>/var/log/mongodb/mongod.log</location>
      <log_format>json</log_format>
      <label key="esco.app">mongo</label>
      <label key="esco.logfile">mongod.log</label>
      <label key="esco.loglocation">/var/log/mongodb</label>
    </localfile>
  </agent_config>
```
local_internal_options.conf
```
wazuh_command.remote_commands=1
logcollector.remote_commands=1
sca.remote_commands=1
analysisd.decoder_order_size=1024
logcollector.debug=1
```

/3/ Decoder + rules test output
```
../../bin/wazuh-logtest -d
2025-10-17 00:30:54,423 wazuh_logtest[INFO] Starting wazuh-logtest v4.13.1
2025-10-17 00:30:54,423 wazuh_logtest[INFO] Type one log per line

{"t":{"$date":"2025-10-17T00:07:59.205-07:00"},"s":"I",  "c":"WTCHKPT",  "id":22430,   "ctx":"Checkpointer","msg":"WiredTiger message","attr":{"message":{"ts_sec":1760684879,"ts_usec":205859,"thread":"1223319:0x7f2a58f236c0","session_name":"WT_SESSION.checkpoint","category":"WT_VERB_CHECKPOINT_PROGRESS","category_id":7,"verbose_level":"DEBUG_1","verbose_level_id":1,"msg":"saving checkpoint snapshot min: 5911514, snapshot max: 5911514 snapshot count: 0, oldest timestamp: (0, 0) , meta checkpoint timestamp: (0, 0) base write gen: 8402272"}},"esco.app": "mongo","esco.log": "mongo.log","esco.file": "/var/log/mongodb"}
2025-10-17 00:30:55,505 wazuh_logtest[INFO]
2025-10-17 00:30:55,505 wazuh_logtest[DEBUG] Request: {"version": 1, "origin": {"name": "wazuh-logtest", "module": "wazuh-logtest"}, "command": "log_processing", "parameters": {"location": "stdin", "log_format": "syslog", "event": "{\"t\":{\"$date\":\"2025-10-17T00:07:59.205-07:00\"},\"s\":\"I\",  \"c\":\"WTCHKPT\",  \"id\":22430,   \"ctx\":\"Checkpointer\",\"msg\":\"WiredTiger message\",\"attr\":{\"message\":{\"ts_sec\":1760684879,\"ts_usec\":205859,\"thread\":\"1223319:0x7f2a58f236c0\",\"session_name\":\"WT_SESSION.checkpoint\",\"category\":\"WT_VERB_CHECKPOINT_PROGRESS\",\"category_id\":7,\"verbose_level\":\"DEBUG_1\",\"verbose_level_id\":1,\"msg\":\"saving checkpoint snapshot min: 5911514, snapshot max: 5911514 snapshot count: 0, oldest timestamp: (0, 0) , meta checkpoint timestamp: (0, 0) base write gen: 8402272\"}},\"esco.app\": \"mongo\",\"esco.log\": \"mongo.log\",\"esco.file\": \"/var/log/mongodb\"}"}}

2025-10-17 00:30:55,950 wazuh_logtest[DEBUG] Reply: {"error":0,"data":{"messages":["INFO: (7202): Session initialized with token '204f8899'"],"token":"204f8899","output":{"timestamp":"2025-10-17T00:30:55.950-0700","rule":{"level":3,"description":"MongoDB: Informational message","id":"110005","firedtimes":1,"mail":false,"groups":["mongodb"]},"agent":{"id":"000","name":"scanner.esco.ghaar"},"manager":{"name":"scanner.esco.ghaar"},"id":"1760686255.169903","full_log":"{\"t\":{\"$date\":\"2025-10-17T00:07:59.205-07:00\"},\"s\":\"I\",  \"c\":\"WTCHKPT\",  \"id\":22430,   \"ctx\":\"Checkpointer\",\"msg\":\"WiredTiger message\",\"attr\":{\"message\":{\"ts_sec\":1760684879,\"ts_usec\":205859,\"thread\":\"1223319:0x7f2a58f236c0\",\"session_name\":\"WT_SESSION.checkpoint\",\"category\":\"WT_VERB_CHECKPOINT_PROGRESS\",\"category_id\":7,\"verbose_level\":\"DEBUG_1\",\"verbose_level_id\":1,\"msg\":\"saving checkpoint snapshot min: 5911514, snapshot max: 5911514 snapshot count: 0, oldest timestamp: (0, 0) , meta checkpoint timestamp: (0, 0) base write gen: 8402272\"}},\"esco.app\": \"mongo\",\"esco.log\": \"mongo.log\",\"esco.file\": \"/var/log/mongodb\"}","decoder":{"name":"json"},"data":{"id":"22430","t":{"$date":"2025-10-17T00:07:59.205-07:00"},"s":"I","c":"WTCHKPT","ctx":"Checkpointer","msg":"WiredTiger message","attr":{"message":{"ts_sec":"1760684879","ts_usec":"205859","thread":"1223319:0x7f2a58f236c0","session_name":"WT_SESSION.checkpoint","category":"WT_VERB_CHECKPOINT_PROGRESS","category_id":"7","verbose_level":"DEBUG_1","verbose_level_id":"1","msg":"saving checkpoint snapshot min: 5911514, snapshot max: 5911514 snapshot count: 0, oldest timestamp: (0, 0) , meta checkpoint timestamp: (0, 0) base write gen: 8402272"}},"esco":{"app":"mongo","log":"mongo.log","file":"/var/log/mongodb"}},"location":"stdin"},"alert":false,"codemsg":0}}

2025-10-17 00:30:55,951 wazuh_logtest[DEBUG] {
  "messages": [
    "INFO: (7202): Session initialized with token '204f8899'"
  ],
  "token": "204f8899",
  "output": {
    "timestamp": "2025-10-17T00:30:55.950-0700",
    "rule": {
      "level": 3,
      "description": "MongoDB: Informational message",
      "id": "110005",
      "firedtimes": 1,
      "mail": false,
      "groups": [
        "mongodb"
      ]
    },
    "agent": {
      "id": "000",
      "name": "scanner.esco.ghaar"
    },
    "manager": {
      "name": "scanner.esco.ghaar"
    },
    "id": "1760686255.169903",
    "full_log": "{\"t\":{\"$date\":\"2025-10-17T00:07:59.205-07:00\"},\"s\":\"I\",  \"c\":\"WTCHKPT\",  \"id\":22430,   \"ctx\":\"Checkpointer\",\"msg\":\"WiredTiger message\",\"attr\":{\"message\":{\"ts_sec\":1760684879,\"ts_usec\":205859,\"thread\":\"1223319:0x7f2a58f236c0\",\"session_name\":\"WT_SESSION.checkpoint\",\"category\":\"WT_VERB_CHECKPOINT_PROGRESS\",\"category_id\":7,\"verbose_level\":\"DEBUG_1\",\"verbose_level_id\":1,\"msg\":\"saving checkpoint snapshot min: 5911514, snapshot max: 5911514 snapshot count: 0, oldest timestamp: (0, 0) , meta checkpoint timestamp: (0, 0) base write gen: 8402272\"}},\"esco.app\": \"mongo\",\"esco.log\": \"mongo.log\",\"esco.file\": \"/var/log/mongodb\"}",
    "decoder": {
      "name": "json"
    },
    "data": {
      "id": "22430",
      "t": {
        "$date": "2025-10-17T00:07:59.205-07:00"
      },
      "s": "I",
      "c": "WTCHKPT",
      "ctx": "Checkpointer",
      "msg": "WiredTiger message",
      "attr": {
        "message": {
          "ts_sec": "1760684879",
          "ts_usec": "205859",
          "thread": "1223319:0x7f2a58f236c0",
          "session_name": "WT_SESSION.checkpoint",
          "category": "WT_VERB_CHECKPOINT_PROGRESS",
          "category_id": "7",
          "verbose_level": "DEBUG_1",
          "verbose_level_id": "1",
          "msg": "saving checkpoint snapshot min: 5911514, snapshot max: 5911514 snapshot count: 0, oldest timestamp: (0, 0) , meta checkpoint timestamp: (0, 0) base write gen: 8402272"
        }
      },
      "esco": {
        "app": "mongo",
        "log": "mongo.log",
        "file": "/var/log/mongodb"
      }
    },
    "location": "stdin"
  },
  "alert": false,
  "codemsg": 0
}
2025-10-17 00:30:55,951 wazuh_logtest[INFO] **Phase 1: Completed pre-decoding.
2025-10-17 00:30:55,951 wazuh_logtest[INFO] full event: '{"t":{"$date":"2025-10-17T00:07:59.205-07:00"},"s":"I",  "c":"WTCHKPT",  "id":22430,   "ctx":"Checkpointer","msg":"WiredTiger message","attr":{"message":{"ts_sec":1760684879,"ts_usec":205859,"thread":"1223319:0x7f2a58f236c0","session_name":"WT_SESSION.checkpoint","category":"WT_VERB_CHECKPOINT_PROGRESS","category_id":7,"verbose_level":"DEBUG_1","verbose_level_id":1,"msg":"saving checkpoint snapshot min: 5911514, snapshot max: 5911514 snapshot count: 0, oldest timestamp: (0, 0) , meta checkpoint timestamp: (0, 0) base write gen: 8402272"}},"esco.app": "mongo","esco.log": "mongo.log","esco.file": "/var/log/mongodb"}'
2025-10-17 00:30:55,951 wazuh_logtest[INFO]
2025-10-17 00:30:55,951 wazuh_logtest[INFO] **Phase 2: Completed decoding.
2025-10-17 00:30:55,951 wazuh_logtest[INFO] name: 'json'
2025-10-17 00:30:55,951 wazuh_logtest[INFO] attr.message.category: 'WT_VERB_CHECKPOINT_PROGRESS'
2025-10-17 00:30:55,951 wazuh_logtest[INFO] attr.message.category_id: '7'
2025-10-17 00:30:55,951 wazuh_logtest[INFO] attr.message.msg: 'saving checkpoint snapshot min: 5911514, snapshot max: 5911514 snapshot count: 0, oldest timestamp: (0, 0) , meta checkpoint timestamp: (0, 0) base write gen: 8402272'
2025-10-17 00:30:55,951 wazuh_logtest[INFO] attr.message.session_name: 'WT_SESSION.checkpoint'
2025-10-17 00:30:55,951 wazuh_logtest[INFO] attr.message.thread: '1223319:0x7f2a58f236c0'
2025-10-17 00:30:55,951 wazuh_logtest[INFO] attr.message.ts_sec: '1760684879'
2025-10-17 00:30:55,951 wazuh_logtest[INFO] attr.message.ts_usec: '205859'
2025-10-17 00:30:55,951 wazuh_logtest[INFO] attr.message.verbose_level: 'DEBUG_1'
2025-10-17 00:30:55,951 wazuh_logtest[INFO] attr.message.verbose_level_id: '1'
2025-10-17 00:30:55,951 wazuh_logtest[INFO] c: 'WTCHKPT'
2025-10-17 00:30:55,951 wazuh_logtest[INFO] ctx: 'Checkpointer'
2025-10-17 00:30:55,951 wazuh_logtest[INFO] esco.app: 'mongo'
2025-10-17 00:30:55,951 wazuh_logtest[INFO] esco.file: '/var/log/mongodb'
2025-10-17 00:30:55,951 wazuh_logtest[INFO] esco.log: 'mongo.log'
2025-10-17 00:30:55,951 wazuh_logtest[INFO] id: '22430'
2025-10-17 00:30:55,951 wazuh_logtest[INFO] msg: 'WiredTiger message'
2025-10-17 00:30:55,951 wazuh_logtest[INFO] s: 'I'
2025-10-17 00:30:55,951 wazuh_logtest[INFO] t.$date: '2025-10-17T00:07:59.205-07:00'
2025-10-17 00:30:55,951 wazuh_logtest[INFO]
2025-10-17 00:30:55,951 wazuh_logtest[INFO] **Phase 3: Completed filtering (rules).
2025-10-17 00:30:55,951 wazuh_logtest[INFO] id: '110005'
2025-10-17 00:30:55,951 wazuh_logtest[INFO] level: '3'
2025-10-17 00:30:55,951 wazuh_logtest[INFO] description: 'MongoDB: Informational message'
2025-10-17 00:30:55,951 wazuh_logtest[INFO] groups: '['mongodb']'
2025-10-17 00:30:55,952 wazuh_logtest[INFO] firedtimes: '1'
2025-10-17 00:30:55,952 wazuh_logtest[INFO] mail: 'False'

```

The alert for the rule above, which should be triggered, can't be found in alerts.log or alerts.json on the server side.

```
grep -E "110005" ../../logs/alerts/alerts.log
```
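The JSON alerts file comes up empty as well; for reference, the equivalent checks (a sketch, assuming the default alerts.json location next to alerts.log, with jq shown only as an optional alternative):

```
# search the JSON alerts stream for the rule id
grep -F '"id":"110005"' ../../logs/alerts/alerts.json
# or, if jq is installed, filter on rule.id explicitly
jq -c 'select(.rule.id == "110005")' ../../logs/alerts/alerts.json
```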

/4/ Server's alert log for this agent
```
tail -f ../../logs/alerts/alerts.json | grep -e "mongo"
{"timestamp":"2025-10-17T00:21:32.073-0700","rule":{"level":7,"description":"Agent event queue is 90% full.","id":"202","firedtimes":1,"mail":false,"groups":["wazuh","agent_flooding"],"pci_dss":["10.6.1"],"gdpr":["IV_35.7.d"]},"agent":{"id":"027","name":"ag_web-mongodb","ip":"192.168.100.18"},"manager":{"name":"scanner.esco.ghaar"},"id":"1760685692.163093","full_log":"wazuh: Agent buffer: '90%'.","decoder":{"parent":"wazuh","name":"wazuh"},"data":{"level":"90%"},"location":"wazuh-agent"}
{"timestamp":"2025-10-17T00:21:32.107-0700","rule":{"level":9,"description":"Agent event queue is full. Events may be lost.","id":"203","firedtimes":1,"mail":false,"groups":["wazuh","agent_flooding"],"pci_dss":["10.6.1"],"gdpr":["IV_35.7.d"]},"agent":{"id":"027","name":"ag_web-mongodb","ip":"192.168.100.18"},"manager":{"name":"scanner.esco.ghaar"},"id":"1760685692.163326","full_log":"wazuh: Agent buffer: 'full'.","decoder":{"parent":"wazuh","name":"wazuh"},"data":{"level":"full"},"location":"wazuh-agent"}
{"timestamp":"2025-10-17T00:21:34.043-0700","rule":{"level":7,"description":"Agent event queue is 90% full.","id":"202","firedtimes":2,"mail":false,"groups":["wazuh","agent_flooding"],"pci_dss":["10.6.1"],"gdpr":["IV_35.7.d"]},"agent":{"id":"027","name":"ag_web-mongodb","ip":"192.168.100.18"},"manager":{"name":"scanner.esco.ghaar"},"id":"1760685694.163577","full_log":"wazuh: Agent buffer: '90%'.","decoder":{"parent":"wazuh","name":"wazuh"},"data":{"level":"90%"},"location":"wazuh-agent"}
{"timestamp":"2025-10-17T00:21:34.051-0700","rule":{"level":9,"description":"Agent event queue is full. Events may be lost.","id":"203","firedtimes":2,"mail":false,"groups":["wazuh","agent_flooding"],"pci_dss":["10.6.1"],"gdpr":["IV_35.7.d"]},"agent":{"id":"027","name":"ag_web-mongodb","ip":"192.168.100.18"},"manager":{"name":"scanner.esco.ghaar"},"id":"1760685694.163810","full_log":"wazuh: Agent buffer: 'full'.","decoder":{"parent":"wazuh","name":"wazuh"},"data":{"level":"full"},"location":"wazuh-agent"}
```

/5/ Mongodb's log
```
{"t":{"$date":"2025-10-17T00:38:25.267-07:00"},"s":"I",  "c":"NETWORK",  "id":22943,   "ctx":"listener","msg":"Connection accepted","attr":{"remote":"127.0.0.1:42458","isLoadBalanced":false,"uuid":{"uuid":{"$uuid":"0a6d3c4b-9d55-4757-acb8-18b7091b0a5c"}},"connectionId":136107,"connectionCount":27}}
{"t":{"$date":"2025-10-17T00:38:25.267-07:00"},"s":"I",  "c":"NETWORK",  "id":51800,   "ctx":"conn136106","msg":"client metadata","attr":{"remote":"127.0.0.1:42454","client":"conn136106","negotiatedCompressors":[],"doc":{"application":{"name":"mongodb_exporter"},"driver":{"name":"mongo-go-driver","version":"1.17.1"},"os":{"type":"linux","architecture":"amd64"},"platform":"go1.23.4"}}}
{"t":{"$date":"2025-10-17T00:38:25.267-07:00"},"s":"I",  "c":"NETWORK",  "id":51800,   "ctx":"conn136107","msg":"client metadata","attr":{"remote":"127.0.0.1:42458","client":"conn136107","negotiatedCompressors":[],"doc":{"application":{"name":"mongodb_exporter"},"driver":{"name":"mongo-go-driver","version":"1.17.1"},"os":{"type":"linux","architecture":"amd64"},"platform":"go1.23.4"}}}
{"t":{"$date":"2025-10-17T00:38:25.267-07:00"},"s":"I",  "c":"ACCESS",   "id":6788604, "ctx":"conn136107","msg":"Auth metrics report","attr":{"metric":"acquireUser","micros":0}}
{"t":{"$date":"2025-10-17T00:38:25.275-07:00"},"s":"I",  "c":"ACCESS",   "id":5286306, "ctx":"conn136107","msg":"Successfully authenticated","attr":{"client":"127.0.0.1:42458","isSpeculative":true,"isClusterMember":false,"mechanism":"SCRAM-SHA-256","user":"moriksan","db":"admin","result":0,"metrics":{"conversation_duration":{"micros":7604,"summary":{"0":{"step":1,"step_total":2,"duration_micros":62},"1":{"step":2,"step_total":2,"duration_micros":24}}}},"extraInfo":{}}}
{"t":{"$date":"2025-10-17T00:38:25.275-07:00"},"s":"I",  "c":"NETWORK",  "id":6788700, "ctx":"conn136107","msg":"Received first command on ingress connection since session start or auth handshake","attr":{"elapsedMillis":0}}
{"t":{"$date":"2025-10-17T00:38:25.355-07:00"},"s":"I",  "c":"-",        "id":20883,   "ctx":"conn136105","msg":"Interrupted operation as its client disconnected","attr":{"opId":171554817}}
{"t":{"$date":"2025-10-17T00:38:25.355-07:00"},"s":"I",  "c":"NETWORK",  "id":22944,   "ctx":"conn136106","msg":"Connection ended","attr":{"remote":"127.0.0.1:42454","isLoadBalanced":false,"uuid":{"uuid":{"$uuid":"b5492d8a-b877-4ece-ae9b-132c28ddc188"}},"connectionId":136106,"connectionCount":26}}
{"t":{"$date":"2025-10-17T00:38:25.355-07:00"},"s":"I",  "c":"NETWORK",  "id":22944,   "ctx":"conn136107","msg":"Connection ended","attr":{"remote":"127.0.0.1:42458","isLoadBalanced":false,"uuid":{"uuid":{"$uuid":"0a6d3c4b-9d55-4757-acb8-18b7091b0a5c"}},"connectionId":136107,"connectionCount":25}}
{"t":{"$date":"2025-10-17T00:38:25.355-07:00"},"s":"I",  "c":"NETWORK",  "id":22944,   "ctx":"conn136105","msg":"Connection ended","attr":{"remote":"127.0.0.1:42440","isLoadBalanced":false,"uuid":{"uuid":{"$uuid":"f97f35af-985d-4b3c-928c-479b81140449"}},"connectionId":136105,"connectionCount":24}}
```

Wazuh-agent's ossec.log
```
2025/10/17 00:20:22 wazuh-agentd: INFO: Agent is restarting due to shared configuration changes.
2025/10/17 00:20:22 wazuh-logcollector[1546007] debug_op.c:116 at _log_function(): DEBUG: Logging module auto-initialized
2025/10/17 00:20:22 wazuh-logcollector[1546007] main.c:126 at main(): DEBUG: Wazuh home directory: /var/ossec
2025/10/17 00:20:23 wazuh-modulesd: INFO: Shutting down Wazuh modules.
2025/10/17 00:20:23 wazuh-modulesd:syscollector: INFO: Stop received for Syscollector.
2025/10/17 00:20:23 wazuh-modulesd:syscollector: INFO: Module finished.
2025/10/17 00:20:23 wazuh-modulesd:docker-listener: INFO: Module finished.
2025/10/17 00:20:23 wazuh-logcollector[1542764] sig_op.c:49 at HandleSIG(): INFO: (1225): SIGNAL [(15)-(Terminated)] Received. Exit Cleaning...
2025/10/17 00:20:23 wazuh-agentd: INFO: (1225): SIGNAL [(15)-(Terminated)] Received. Exit Cleaning...
2025/10/17 00:20:23 wazuh-execd: INFO: (1314): Shutdown received. Deleting responses.
2025/10/17 00:20:23 wazuh-execd: INFO: (1225): SIGNAL [(15)-(Terminated)] Received. Exit Cleaning...
2025/10/17 00:20:24 wazuh-execd: INFO: Started (pid: 1546080).
2025/10/17 00:20:25 wazuh-agentd: INFO: (1410): Reading authentication keys file.
2025/10/17 00:20:25 wazuh-agentd: INFO: Using notify time: 10 and max time to reconnect: 60
2025/10/17 00:20:25 wazuh-agentd: INFO: Version detected -> Linux |web-mongodb.esco.ghaar |6.8.0-71-generic |#71-Ubuntu SMP PREEMPT_DYNAMIC Tue Jul 22 16:52:38 UTC 2025 |x86_64 [Ubuntu|ubuntu: 24.04.3 LTS (Noble Numbat)] - Wazuh v4.13.1
2025/10/17 00:20:25 wazuh-agentd: INFO: Started (pid: 1546094).
2025/10/17 00:20:25 wazuh-agentd: INFO: Using AES as encryption method.
2025/10/17 00:20:25 wazuh-agentd: INFO: Trying to connect to server ([scanner.esco.ghaar]:1514/udp).
2025/10/17 00:20:25 wazuh-agentd: INFO: (4102): Connected to the server ([scanner.esco.ghaar]:1514/udp).
2025/10/17 00:20:26 wazuh-syscheckd: INFO: Started (pid: 1546108).
2025/10/17 00:20:26 wazuh-syscheckd: INFO: (6003): Monitoring path: '/bin', with options 'size | permissions | owner | group | mtime | inode | hash_md5 | hash_sha1 | hash_sha256 | scheduled'.
2025/10/17 00:20:26 wazuh-syscheckd: INFO: (6003): Monitoring path: '/boot', with options 'size | permissions | owner | group | mtime | inode | hash_md5 | hash_sha1 | hash_sha256 | scheduled'.
2025/10/17 00:20:26 wazuh-syscheckd: INFO: (6003): Monitoring path: '/etc', with options 'size | permissions | owner | group | mtime | inode | hash_md5 | hash_sha1 | hash_sha256 | report_changes | whodata'.
2025/10/17 00:20:26 wazuh-syscheckd: INFO: (6003): Monitoring path: '/root', with options 'size | permissions | owner | group | mtime | inode | hash_md5 | hash_sha1 | hash_sha256 | report_changes | whodata'.
2025/10/17 00:20:26 wazuh-syscheckd: INFO: (6003): Monitoring path: '/sbin', with options 'size | permissions | owner | group | mtime | inode | hash_md5 | hash_sha1 | hash_sha256 | scheduled'.
2025/10/17 00:20:26 wazuh-syscheckd: INFO: (6003): Monitoring path: '/usr/bin', with options 'size | permissions | owner | group | mtime | inode | hash_md5 | hash_sha1 | hash_sha256 | scheduled'.
2025/10/17 00:20:26 wazuh-syscheckd: INFO: (6003): Monitoring path: '/usr/local/etc', with options 'size | permissions | owner | group | mtime | inode | hash_md5 | hash_sha1 | hash_sha256 | report_changes | whodata'.
2025/10/17 00:20:26 wazuh-syscheckd: INFO: (6003): Monitoring path: '/usr/local/share/ca-certificates', with options 'size | permissions | owner | group | mtime | inode | hash_md5 | hash_sha1 | hash_sha256 | report_changes | whodata'.
2025/10/17 00:20:26 wazuh-syscheckd: INFO: (6003): Monitoring path: '/usr/sbin', with options 'size | permissions | owner | group | mtime | inode | hash_md5 | hash_sha1 | hash_sha256 | scheduled'.
2025/10/17 00:20:26 wazuh-syscheckd: INFO: (6206): Ignore 'file' entry '/etc/mtab'
2025/10/17 00:20:26 wazuh-syscheckd: INFO: (6206): Ignore 'file' entry '/etc/hosts.deny'
2025/10/17 00:20:26 wazuh-syscheckd: INFO: (6206): Ignore 'file' entry '/etc/mail/statistics'
2025/10/17 00:20:26 wazuh-syscheckd: INFO: (6206): Ignore 'file' entry '/etc/random-seed'
2025/10/17 00:20:26 wazuh-syscheckd: INFO: (6206): Ignore 'file' entry '/etc/random.seed'
2025/10/17 00:20:26 wazuh-syscheckd: INFO: (6206): Ignore 'file' entry '/etc/adjtime'
2025/10/17 00:20:26 wazuh-syscheckd: INFO: (6206): Ignore 'file' entry '/etc/httpd/logs'
2025/10/17 00:20:26 wazuh-syscheckd: INFO: (6206): Ignore 'file' entry '/etc/utmpx'
2025/10/17 00:20:26 wazuh-syscheckd: INFO: (6206): Ignore 'file' entry '/etc/wtmpx'
2025/10/17 00:20:26 wazuh-syscheckd: INFO: (6206): Ignore 'file' entry '/etc/cups/certs'
2025/10/17 00:20:26 wazuh-syscheckd: INFO: (6206): Ignore 'file' entry '/etc/dumpdates'
2025/10/17 00:20:26 wazuh-syscheckd: INFO: (6206): Ignore 'file' entry '/etc/svc/volatile'
2025/10/17 00:20:26 wazuh-syscheckd: INFO: (6206): Ignore 'file' entry '/var'
2025/10/17 00:20:26 wazuh-syscheckd: INFO: (6207): Ignore 'file' sregex '.log$|.swp$'
2025/10/17 00:20:26 wazuh-syscheckd: INFO: (6207): Ignore 'file' sregex '.vim/'
2025/10/17 00:20:26 wazuh-syscheckd: INFO: (6207): Ignore 'file' sregex '.config/'
2025/10/17 00:20:26 wazuh-syscheckd: INFO: (6207): Ignore 'file' sregex '.cache/'
2025/10/17 00:20:26 wazuh-syscheckd: INFO: (6207): Ignore 'file' sregex 'codeproject.ai-server'
2025/10/17 00:20:26 wazuh-syscheckd: INFO: (6207): Ignore 'file' sregex '/etc/rc0.d/S01multipath-tools'
2025/10/17 00:20:26 wazuh-syscheckd: INFO: (6207): Ignore 'file' sregex '/etc/rc1.d/S01multipath-tools'
2025/10/17 00:20:26 wazuh-syscheckd: INFO: (6207): Ignore 'file' sregex '/etc/rc2.d/S01multipath-tools'
2025/10/17 00:20:26 wazuh-syscheckd: INFO: (6207): Ignore 'file' sregex '/etc/rc3.d/S01multipath-tools'
2025/10/17 00:20:26 wazuh-syscheckd: INFO: (6207): Ignore 'file' sregex '/etc/rc4.d/S01multipath-tools'
2025/10/17 00:20:26 wazuh-syscheckd: INFO: (6207): Ignore 'file' sregex '/etc/rc5.d/S01multipath-tools'
2025/10/17 00:20:26 wazuh-syscheckd: INFO: (6207): Ignore 'file' sregex '/etc/rc6.d/S01multipath-tools'
2025/10/17 00:20:26 wazuh-syscheckd: INFO: (6207): Ignore 'file' sregex '/etc/rc0.d/K01multipath-tools'
2025/10/17 00:20:26 wazuh-syscheckd: INFO: (6207): Ignore 'file' sregex '/etc/rc1.d/K01multipath-tools'
2025/10/17 00:20:26 wazuh-syscheckd: INFO: (6207): Ignore 'file' sregex '/etc/rc6.d/K01multipath-tools'
2025/10/17 00:20:26 wazuh-syscheckd: INFO: (6207): Ignore 'file' sregex '/etc/systemd/system/multi-user.target.wants/snapd.aa-prompt-listener.service'
2025/10/17 00:20:26 wazuh-syscheckd: INFO: (6207): Ignore 'file' sregex '/etc/systemd/system/dbus-org.freedesktop.timesync1.service'
2025/10/17 00:20:26 wazuh-syscheckd: INFO: (6207): Ignore 'file' sregex '/etc/systemd/system/timers.target.wants/fwupd-refresh.timer'
2025/10/17 00:20:26 wazuh-syscheckd: INFO: (6207): Ignore 'file' sregex '/etc/systemd/system/vmtoolsd.service'
2025/10/17 00:20:26 wazuh-syscheckd: INFO: (6207): Ignore 'file' sregex '/etc/systemd/system/open-vm-tools.service.requires/vgauth.service'
2025/10/17 00:20:26 wazuh-syscheckd: INFO: (6207): Ignore 'file' sregex '/etc/systemd/system/sysinit.target.wants/systemd-timesyncd.service'
2025/10/17 00:20:26 wazuh-syscheckd: INFO: (6207): Ignore 'file' sregex '/etc/systemd/system/multi-user.target.wants/open-vm-tools.service'
2025/10/17 00:20:26 wazuh-syscheckd: INFO: (6207): Ignore 'file' sregex '/etc/systemd/system/multi-user.target.wants/snapd.aa-prompt-listener.service'
2025/10/17 00:20:26 wazuh-syscheckd: INFO: (6207): Ignore 'file' sregex '/etc/systemd/system/multi-user.target.wants/snapd.aa-prompt-listener.service'
2025/10/17 00:20:26 wazuh-syscheckd: INFO: (6004): No diff for file: '/etc/ssl/private.key'
2025/10/17 00:20:26 wazuh-syscheckd: INFO: (6046): Internal audit queue size set to '16384'.
2025/10/17 00:20:27 rootcheck: INFO: Starting rootcheck scan.
2025/10/17 00:20:27 wazuh-logcollector[1546124] debug_op.c:116 at _log_function(): DEBUG: Logging module auto-initialized
2025/10/17 00:20:27 wazuh-logcollector[1546124] main.c:126 at main(): DEBUG: Wazuh home directory: /var/ossec
2025/10/17 00:20:27 wazuh-logcollector[1546124] mq_op.c:52 at StartMQWithSpecificOwnerAndPerms(): DEBUG: Connected succesfully to 'queue/sockets/queue' after 0 attempts
2025/10/17 00:20:27 wazuh-logcollector[1546124] mq_op.c:53 at StartMQWithSpecificOwnerAndPerms(): DEBUG: (unix_domain) Maximum send buffer set to: '212992'.
2025/10/17 00:20:27 wazuh-logcollector[1546124] logcollector.c:273 at LogCollectorStart(): DEBUG: Entering LogCollectorStart().
2025/10/17 00:20:27 wazuh-logcollector[1546124] logcollector.c:342 at LogCollectorStart(): INFO: Monitoring output of command(360): df -P
2025/10/17 00:20:27 wazuh-logcollector[1546124] logcollector.c:346 at LogCollectorStart(): DEBUG: Socket target for 'df -P' -> agent
2025/10/17 00:20:27 wazuh-logcollector[1546124] logcollector.c:370 at LogCollectorStart(): INFO: Monitoring full output of command(360): netstat -tulpn | sed 's/\([[:alnum:]]\+\)\ \+[[:digit:]]\+\ \+[[:digit:]]\+\ \+\(.*\):\([[:digit:]]*\)\ \+\([0-9\.\:\*]\+\).\+\ \([[:digit:]]*\/[[:alnum:]\-]*\).*/\1 \2 == \3 == \4 \5/' | sort -k 4 -g | sed 's/ == \(.*\) ==/:\1/' | sed 1,2d
2025/10/17 00:20:27 wazuh-logcollector[1546124] logcollector.c:374 at LogCollectorStart(): DEBUG: Socket target for 'netstat -tulpn | sed 's/\([[:alnum:]]\+\)\ \+[[:digit:]]\+\ \+[[:digit:]]\+\ \+\(.*\):\([[:digit:]]*\)\ \+\([0-9\.\:\*]\+\).\+\ \([[:digit:]]*\/[[:alnum:]\-]*\).*/\1 \2 == \3 == \4 \5/' | sort -k 4 -g | sed 's/ == \(.*\) ==/:\1/' | sed 1,2d' -> agent
2025/10/17 00:20:27 wazuh-logcollector[1546124] logcollector.c:370 at LogCollectorStart(): INFO: Monitoring full output of command(360): last -n 20
2025/10/17 00:20:27 wazuh-logcollector[1546124] logcollector.c:374 at LogCollectorStart(): DEBUG: Socket target for 'last -n 20' -> agent
2025/10/17 00:20:27 wazuh-logcollector[1546124] logcollector.c:1236 at set_read(): DEBUG: Socket target for '/var/ossec/logs/active-responses.log' -> agent
2025/10/17 00:20:27 wazuh-logcollector[1546124] logcollector.c:435 at LogCollectorStart(): INFO: (1950): Analyzing file: '/var/ossec/logs/active-responses.log'.
2025/10/17 00:20:27 wazuh-logcollector[1546124] logcollector.c:1236 at set_read(): DEBUG: Socket target for '/var/log/auth.log' -> agent
2025/10/17 00:20:27 wazuh-logcollector[1546124] logcollector.c:435 at LogCollectorStart(): INFO: (1950): Analyzing file: '/var/log/auth.log'.
2025/10/17 00:20:27 wazuh-logcollector[1546124] logcollector.c:1236 at set_read(): DEBUG: Socket target for '/var/log/syslog' -> agent
2025/10/17 00:20:27 wazuh-logcollector[1546124] logcollector.c:435 at LogCollectorStart(): INFO: (1950): Analyzing file: '/var/log/syslog'.
2025/10/17 00:20:27 wazuh-logcollector[1546124] logcollector.c:1236 at set_read(): DEBUG: Socket target for '/var/log/dpkg.log' -> agent
2025/10/17 00:20:27 wazuh-logcollector[1546124] logcollector.c:435 at LogCollectorStart(): INFO: (1950): Analyzing file: '/var/log/dpkg.log'.
2025/10/17 00:20:27 wazuh-logcollector[1546124] logcollector.c:1236 at set_read(): DEBUG: Socket target for '/var/log/kern.log' -> agent
2025/10/17 00:20:27 wazuh-logcollector[1546124] logcollector.c:435 at LogCollectorStart(): INFO: (1950): Analyzing file: '/var/log/kern.log'.
2025/10/17 00:20:27 wazuh-modulesd: INFO: Started (pid: 1546136).
2025/10/17 00:20:27 wazuh-modulesd:agent-upgrade: INFO: (8153): Module Agent Upgrade started.
2025/10/17 00:20:27 wazuh-modulesd:osquery: INFO: Module disabled. Exiting...
2025/10/17 00:20:27 wazuh-modulesd:ciscat: INFO: Module disabled. Exiting...
2025/10/17 00:20:27 sca: INFO: Module started.
2025/10/17 00:20:27 wazuh-logcollector[1546124] logcollector.c:1236 at set_read(): DEBUG: Socket target for '/var/log/mongodb/mongod.log' -> agent
2025/10/17 00:20:27 wazuh-logcollector[1546124] logcollector.c:435 at LogCollectorStart(): INFO: (1950): Analyzing file: '/var/log/mongodb/mongod.log'.
2025/10/17 00:20:27 sca: INFO: Loaded policy '/var/ossec/ruleset/sca/cis_ubuntu24-04.yml'
2025/10/17 00:20:27 wazuh-modulesd:docker-listener: INFO: Module docker-listener started.
2025/10/17 00:20:27 wazuh-modulesd:docker-listener: INFO: Starting to listening Docker events.
2025/10/17 00:20:27 wazuh-modulesd:control: INFO: Starting control thread.
2025/10/17 00:20:27 sca: INFO: Starting Security Configuration Assessment scan.
2025/10/17 00:20:27 wazuh-modulesd:syscollector: INFO: Module started.
2025/10/17 00:20:27 wazuh-modulesd:syscollector: INFO: Starting evaluation.
2025/10/17 00:20:27 wazuh-logcollector[1546124] logcollector.c:1236 at set_read(): DEBUG: Socket target for '/var/log/audit/audit.log' -> agent
2025/10/17 00:20:27 wazuh-logcollector[1546124] logcollector.c:435 at LogCollectorStart(): INFO: (1950): Analyzing file: '/var/log/audit/audit.log'.
2025/10/17 00:20:27 wazuh-logcollector[1546124] logcollector.c:1236 at set_read(): DEBUG: Socket target for '/var/log/letsencrypt/letsencrypt.log' -> agent
2025/10/17 00:20:27 wazuh-logcollector[1546124] logcollector.c:435 at LogCollectorStart(): INFO: (1950): Analyzing file: '/var/log/letsencrypt/letsencrypt.log'.
2025/10/17 00:20:27 wazuh-logcollector[1546124] logcollector.c:486 at LogCollectorStart(): INFO: Started (pid: 1546126).
2025/10/17 00:20:27 wazuh-logcollector[1546124] logcollector.c:487 at LogCollectorStart(): DEBUG: (1961): Files being monitored: 8/1000.
2025/10/17 00:20:27 wazuh-logcollector[1546124] lccom.c:511 at lccom_main(): DEBUG: Local requests thread ready
2025/10/17 00:20:28 sca: INFO: Starting evaluation of policy: '/var/ossec/ruleset/sca/cis_ubuntu24-04.yml'
2025/10/17 00:20:28 wazuh-modulesd:syscollector: INFO: Evaluation finished.
2025/10/17 00:20:29 wazuh-syscheckd: INFO: (6000): Starting daemon...
2025/10/17 00:20:29 wazuh-syscheckd: INFO: (6010): File integrity monitoring scan frequency: 43200 seconds
2025/10/17 00:20:29 wazuh-syscheckd: INFO: (6008): File integrity monitoring scan started.
2025/10/17 00:20:32 sca: INFO: Evaluation finished for policy '/var/ossec/ruleset/sca/cis_ubuntu24-04.yml'
2025/10/17 00:20:32 sca: INFO: Security Configuration Assessment scan finished. Duration: 5 seconds.
2025/10/17 00:20:53 rootcheck: INFO: Ending rootcheck scan.
2025/10/17 00:21:26 wazuh-syscheckd: INFO: (6009): File integrity monitoring scan ended.
2025/10/17 00:21:26 wazuh-syscheckd: INFO: FIM sync module started.
2025/10/17 00:21:26 wazuh-syscheckd: INFO: (6019): File integrity monitoring real-time Whodata engine started.
2025/10/17 00:21:32 wazuh-logcollector[1546124] logcollector.c:531 at LogCollectorStart(): DEBUG: Performing file check.
2025/10/17 00:21:32 wazuh-logcollector[1546124] logcollector.c:752 at LogCollectorStart(): DEBUG: File inode changed. /var/log/audit/audit.log
2025/10/17 00:21:32 wazuh-logcollector[1546124] logcollector.c:1885 at w_msg_queue_push(): WARNING: Target 'agent' message queue is full (1024). Log lines may be lost.
2025/10/17 00:21:32 wazuh-agentd: WARNING: Agent buffer at 90 %.
2025/10/17 00:21:32 wazuh-agentd: WARNING: Agent buffer is full: Events may be lost.
2025/10/17 00:21:33 wazuh-agentd: INFO: Agent buffer is under 70 %. Working properly again.
2025/10/17 00:21:34 wazuh-agentd: WARNING: Agent buffer at 90 %.
2025/10/17 00:21:34 wazuh-agentd: WARNING: Agent buffer is full: Events may be lost.
2025/10/17 00:21:35 wazuh-agentd: INFO: Agent buffer is under 70 %. Working properly again.

```

Parash Mani Kafle

Oct 17, 2025, 5:44:50 AM
to Wazuh | Mailing List
Hello,

The warning message "wazuh-logcollector[1546124] logcollector.c:1885 at w_msg_queue_push(): WARNING: Target 'agent' message queue is full (1024). Log lines may be lost" indicates that the Wazuh agent's internal message queue has reached its maximum capacity. As a result, it's unable to process or forward additional log entries to the Wazuh manager, which can lead to log data being dropped.

To better handle high log volumes, you can increase the agent’s buffer capacity by adjusting the <client_buffer> settings in the ossec.conf file. This involves raising both the internal queue size and the event forwarding rate.

Add or modify the following section in the agent’s ossec.conf:

<client_buffer>
<!-- Agent buffer settings -->
<disabled>no</disabled>
<queue_size>10000</queue_size>
<!-- Default: 5000 | Maximum: 100000 -->
<events_per_second>1000</events_per_second>
<!-- Default: 500 | Maximum: 1000 -->
</client_buffer>

This configuration helps prevent log loss by allowing the agent to queue more events and forward them at a higher rate to the manager.
After applying the configuration, please restart the Wazuh agent using the command:

systemctl restart wazuh-agent

Thank you.

M V

Oct 17, 2025, 12:17:34 PM
to Wazuh | Mailing List
Thank you for the prompt response, Parash!

Changing both queue_size (5000->25000) and eps (500->900) does not seem to alleviate the issue. Modified agent.conf:

    <!-- Mongodb log processing is rather intensive. client_buffer handling needs increased queue sizes -->
    <client_buffer>
        <!-- Agent buffer options -->
        <disabled>no</disabled>
        <!-- default queue {5000,100000} -->
        <queue_size>25000</queue_size>
        <!-- default eps {500, 1000} -->
        <events_per_second>900</events_per_second>
    </client_buffer>

I've even taken the queue_size up to max. Same result. 

2025/10/17 09:12:23 wazuh-logcollector: WARNING: Target 'agent' message queue is full (1024). Log lines may be lost.
2025/10/17 09:12:25 wazuh-agentd: WARNING: Agent buffer at 90 %.
2025/10/17 09:12:26 wazuh-agentd: INFO: Agent buffer is under 70 %. Working properly again.
2025/10/17 09:12:32 rootcheck: INFO: Ending rootcheck scan.
2025/10/17 09:13:05 wazuh-syscheckd: INFO: (6009): File integrity monitoring scan ended.
2025/10/17 09:13:05 wazuh-syscheckd: INFO: FIM sync module started.
2025/10/17 09:13:05 wazuh-syscheckd: INFO: (6019): File integrity monitoring real-time Whodata engine started.
2025/10/17 09:13:11 wazuh-agentd: WARNING: Agent buffer at 90 %.
2025/10/17 09:13:12 wazuh-agentd: INFO: Agent buffer is under 70 %. Working properly again.
2025/10/17 09:13:13 wazuh-agentd: WARNING: Agent buffer at 90 %.
2025/10/17 09:13:13 wazuh-agentd: WARNING: Agent buffer is full: Events may be lost.
2025/10/17 09:13:15 wazuh-agentd: INFO: Agent buffer is under 70 %. Working properly again.
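Could the "(1024)" in the persisting logcollector warning refer to the logcollector's own dispatch queue rather than the <client_buffer> queue? If I'm reading internal_options.conf correctly, that queue is sized by the logcollector.queue_size internal option (default 1024), so this is what I'd try next in the agent's local_internal_options.conf (an assumption on my part, not something I've verified yet):

```
# local_internal_options.conf (agent side)
# Assumption: logcollector.queue_size sizes the 1024-entry dispatch queue
# named in the "Target 'agent' message queue is full (1024)" warning.
logcollector.queue_size=16384
```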

mongodb log directory

ll /var/log/mongodb
total 1.2G
drwxr-xr-x  2 mongodb mongodb 4.0K Oct 16 22:29 ./
drwxrwxr-x 17 root    syslog  4.0K Oct 12 00:00 ../
-rw-------  1 mongodb mongodb 5.4M Oct 17 09:16 mongod.log
-rw-------  1 mongodb mongodb 1.1G Apr 12  2025 mongod.log-20250412
-rw-------  1 mongodb mongodb 3.7M Jul 15 23:59 mongod.log-2025-07-16.gz
-rw-------  1 mongodb mongodb 3.6M Jul 20 23:59 mongod.log-2025-07-21.gz
-rw-------  1 mongodb mongodb 3.7M Jul 25 23:59 mongod.log-2025-07-26.gz
-rw-------  1 mongodb mongodb 3.7M Jul 30 23:59 mongod.log-2025-07-31.gz
-rw-------  1 mongodb mongodb 3.7M Aug  4 23:59 mongod.log-2025-08-05.gz
-rw-------  1 mongodb mongodb 3.7M Aug  9 23:59 mongod.log-2025-08-10.gz
-rw-------  1 mongodb mongodb 3.7M Aug 14 23:59 mongod.log-2025-08-15.gz
-rw-------  1 mongodb mongodb 3.7M Aug 19 23:59 mongod.log-2025-08-20.gz
-rw-------  1 mongodb mongodb 3.7M Aug 24 23:59 mongod.log-2025-08-25.gz
-rw-------  1 mongodb mongodb 3.7M Aug 29 23:59 mongod.log-2025-08-30.gz
-rw-------  1 mongodb mongodb 3.7M Sep  3 23:59 mongod.log-2025-09-04.gz
-rw-------  1 mongodb mongodb 3.7M Sep  8 23:59 mongod.log-2025-09-09.gz
-rw-------  1 mongodb mongodb 3.7M Sep 13 23:59 mongod.log-2025-09-14.gz
-rw-------  1 mongodb mongodb 3.7M Sep 18 23:59 mongod.log-2025-09-19.gz
-rw-------  1 mongodb mongodb 3.7M Sep 23 23:59 mongod.log-2025-09-24.gz
-rw-------  1 mongodb mongodb 3.7M Sep 28 23:59 mongod.log-2025-09-29.gz
-rw-------  1 mongodb mongodb 3.7M Oct  3 23:59 mongod.log-2025-10-04.gz
-rw-------  1 mongodb mongodb 3.7M Oct  8 23:59 mongod.log-2025-10-09.gz
-rw-------  1 mongodb mongodb 3.7M Oct 13 23:59 mongod.log-2025-10-14.gz
-rw-------  1 mongodb mongodb 2.2M Oct 16 22:28 mongod.log-2025-10-16.gz

Parash Mani Kafle

Oct 17, 2025, 12:58:35 PM
to Wazuh | Mailing List

Could you please confirm whether any FIM (File Integrity Monitoring) directories have been enabled, and whether real-time monitoring has been implemented for directories that trigger too many alerts? For example, FIM monitoring paths such as /tmp or /var/log.

Additionally, to optimize performance and reduce noise, you can exclude logs that are not required by configuring the agent as shown below in the Wazuh agent's ossec.conf:

For example:

<localfile>
<log_format>audit</log_format>
<location>/var/log/audit/audit.log</location>
<ignore type="PCRE2">type=.+_CHANGE</ignore>
<ignore type="osregex">type=CONFIG_\.+</ignore>
</localfile>

This configuration ensures that only relevant logs are processed by the Wazuh agent, reducing the likelihood of agent queue overflow.

For more information, please refer to the official Wazuh documentation.

Thank You.

M V

Oct 17, 2025, 2:27:37 PM
to Parash Mani Kafle, Wazuh | Mailing List
Thank you, Parash, for your continued help. My responses can be found inline below.

On Fri, Oct 17, 2025 at 9:58 AM 'Parash Mani Kafle' via Wazuh | Mailing List <wa...@googlegroups.com> wrote:

Could you please confirm whether any FIM (File Integrity Monitoring) directories have been enabled, and whether real-time monitoring has been implemented for directories that trigger too many alerts? For example, FIM monitoring paths such as /tmp or /var/log.

[mv] FIM with real-time monitoring + whodata is enabled. The mentioned directories are ignored. The FIM overrides can be found in the ubuntu_24 group configuration below.

Additionally, to optimize performance and reduce noise, you can exclude logs that are not required by configuring the agent as shown below in the Wazuh agent's ossec.conf:

For example:

<localfile>
<log_format>audit</log_format>
<location>/var/log/audit/audit.log</location>
<ignore type="PCRE2">type=.+_CHANGE</ignore>
<ignore type="osregex">type=CONFIG_\.+</ignore>
</localfile>

This configuration ensures that only relevant logs are processed by the Wazuh agent, reducing the likelihood of agent queue overflow.

[mv] Certainly. audit.log already contains the above-mentioned config-change ignore lines. This VM belongs to the following main groups; the respective server-side agent.conf for each is below:

group = ubuntu_24
  <agent_config os="^Linux" profile="ubuntu_24|ubuntu24|ubuntu_24.04">
    <!-- Shared agent configuration here -->

    <client_buffer>
      <!-- Agent buffer options -->
      <disabled>no</disabled>
      <queue_size>5000</queue_size>
      <events_per_second>1000</events_per_second>
    </client_buffer>

    <localfile>
        <log_format>audit</log_format>
        <location>/var/log/audit/audit.log</location>
        <ignore type="PCRE2">type=.+_CHANGE</ignore>
        <ignore type="osregex">type=CONFIG_\.+</ignore>
    </localfile>
    <syscheck>
      <directories check_all="yes" report_changes="yes" realtime="yes" whodata="yes">/root</directories>
      <directories check_all="yes" report_changes="yes" realtime="yes" whodata="yes">/etc</directories>
      <directories check_all="yes" report_changes="yes" realtime="yes" whodata="yes">/usr/local/etc</directories>
      <directories check_all="yes" report_changes="yes" realtime="yes" whodata="yes">/usr/local/share/ca-certificates</directories>
      <ignore type="sregex">.vim/</ignore>
      <ignore type="sregex">.config/</ignore>
      <ignore type="sregex">.cache/</ignore>
      <ignore type="sregex">codeproject.ai-server</ignore>
      <ignore type="sregex">/etc/rc0.d/S01multipath-tools</ignore>
      <ignore type="sregex">/etc/rc1.d/S01multipath-tools</ignore>
      <ignore type="sregex">/etc/rc2.d/S01multipath-tools</ignore>
      <ignore type="sregex">/etc/rc3.d/S01multipath-tools</ignore>
      <ignore type="sregex">/etc/rc4.d/S01multipath-tools</ignore>
      <ignore type="sregex">/etc/rc5.d/S01multipath-tools</ignore>
      <ignore type="sregex">/etc/rc6.d/S01multipath-tools</ignore>
      <ignore type="sregex">/etc/rc0.d/K01multipath-tools</ignore>
      <ignore type="sregex">/etc/rc1.d/K01multipath-tools</ignore>
      <ignore type="sregex">/etc/rc6.d/K01multipath-tools</ignore>
      <ignore type="sregex">/etc/systemd/system/multi-user.target.wants/snapd.aa-prompt-listener.service</ignore>
      <ignore type="sregex">/etc/systemd/system/dbus-org.freedesktop.timesync1.service</ignore>
      <ignore type="sregex">/etc/systemd/system/timers.target.wants/fwupd-refresh.timer</ignore>
      <ignore type="sregex">/etc/systemd/system/vmtoolsd.service</ignore>
      <ignore type="sregex">/etc/systemd/system/open-vm-tools.service.requires/vgauth.service</ignore>
      <ignore type="sregex">/etc/systemd/system/sysinit.target.wants/systemd-timesyncd.service</ignore>
      <ignore type="sregex">/etc/systemd/system/multi-user.target.wants/open-vm-tools.service</ignore>
      <ignore type="sregex">/etc/systemd/system/multi-user.target.wants/snapd.aa-prompt-listener.service</ignore>
      <ignore type="sregex">/etc/systemd/system/multi-user.target.wants/snapd.aa-prompt-listener.service</ignore>
      <!--<ignore>/root/.vim</ignore>-->
      <!--<ignore>/root/.config</ignore>-->
      <!--<ignore>/root/.cache</ignore>-->
      <!-- ignore any dot files for now. Optimize later -->
      <ignore type="sregex">^\..*</ignore>
      <ignore>/tmp</ignore>
      <ignore>/var/log</ignore>
    </syscheck>
    <rootcheck>
      <ignore>/var/cache</ignore>
      <!--<ignore>/var/lib/kubelet</ignore>-->
      <ignore>/var/backups</ignore>
      <!--<ignore>/var/lib/containerd</ignore>-->
      <!--<ignore>/var/lib/docker/overlay2</ignore>-->
      <rootkit_files>etc/shared/rootkit_files.txt</rootkit_files>
      <rootkit_trojans>etc/shared/rootkit_trojans.txt</rootkit_trojans>
    </rootcheck>
    <!--<sca>-->
    <!--  <policies>-->
    <!--    <policy enabled="yes">etc/shared/cis_ubuntu24-04.yml</policy>-->
    <!--  </policies>-->
    <!--</sca>-->
  </agent_config>


group = mongodb
  <agent_config profile="mongodb">

    <!-- Mongodb log processing is rather intensive. client_buffer handling needs increased queue sizes -->
    <client_buffer>
      <!-- Agent buffer options -->
      <disabled>no</disabled>
      <!-- default queue {5000,100000} -->
      <queue_size>25000</queue_size>
      <!-- default eps {500, 1000} -->
      <events_per_second>900</events_per_second>
    </client_buffer>
    <!-- Mongodb custom labels help with decoder operation -->

    <localfile>
      <location>/var/log/mongodb/mongod.log</location>
      <log_format>json</log_format>
      <label key="esco.app">mongo</label>
      <label key="esco.logfile">mongod.log</label>
      <label key="esco.loglocation">/var/log/mongodb</label>
    </localfile>
  </agent_config>


group = ubuntu_24_letsencrypt
  <agent_config profile="ubuntu_24_letsencrypt">
    <!-- Shared agent configuration here -->
    <localfile>
      <location>/var/log/letsencrypt/letsencrypt.log</location>
      <log_format>syslog</log_format>
    </localfile>
  </agent_config>
