Set up automated SNMP device discovery using nmap network scanning and Python scripts for dynamic inventory management. Integrate with Zabbix and Grafana for comprehensive network monitoring and visualization.
Prerequisites
- Root or sudo access
- Network access to target devices
- SNMP enabled on network devices
- Python 3.8 or higher
- At least 2GB free disk space
What this solves
SNMP device auto-discovery automates the process of finding and cataloging network devices across your infrastructure. Instead of manually maintaining device inventories, this system uses network scanning to detect SNMP-enabled devices and automatically adds them to your monitoring systems. This approach reduces operational overhead and ensures your network monitoring stays current as devices are added or removed.
Step-by-step installation
Update system packages
Start by updating your package manager to ensure you get the latest versions of all required components.
sudo apt update && sudo apt upgrade -y
Install SNMP tools and dependencies
Install the core SNMP utilities, nmap for network scanning, and Python dependencies for automation scripts.
sudo apt install -y snmp snmp-mibs-downloader nmap python3 python3-pip python3-venv curl jq
Create discovery environment
Set up a dedicated user and directory structure for the SNMP discovery system with proper permissions.
sudo useradd -r -s /bin/bash -d /opt/snmp-discovery snmp-discovery
sudo mkdir -p /opt/snmp-discovery/{scripts,logs,config,data}
sudo chown -R snmp-discovery:snmp-discovery /opt/snmp-discovery
sudo chmod 755 /opt/snmp-discovery
sudo chmod 775 /opt/snmp-discovery/{scripts,logs,config,data}
Install Python dependencies
Create a virtual environment and install required Python libraries for SNMP operations and database connectivity.
sudo -u snmp-discovery python3 -m venv /opt/snmp-discovery/venv
sudo -u snmp-discovery /opt/snmp-discovery/venv/bin/pip install --upgrade pip
sudo -u snmp-discovery /opt/snmp-discovery/venv/bin/pip install pysnmp requests netaddr
Configure SNMP client settings
Set up SNMP client configuration to enable MIB downloads and configure community strings.
# Allow MIB loading
mibs +ALL
# Default SNMP version
defVersion 2c
# Timeout settings
timeout 5
retries 3
Create discovery configuration
Configure the network ranges, SNMP communities, and discovery parameters for your environment.
# Network Discovery Configuration
[networks]
# Define network ranges to scan
network_ranges = 192.168.1.0/24,10.0.0.0/24,172.16.0.0/24
[snmp]
# SNMP communities to try
communities = public,private,community
versions = 2c,1
timeout = 5
retries = 3
port = 161
[discovery]
# Discovery intervals in minutes
scan_interval = 60
full_scan_interval = 1440
# Device identification OIDs
system_name_oid = 1.3.6.1.2.1.1.5.0
system_descr_oid = 1.3.6.1.2.1.1.1.0
system_uptime_oid = 1.3.6.1.2.1.1.3.0
system_contact_oid = 1.3.6.1.2.1.1.4.0
system_location_oid = 1.3.6.1.2.1.1.6.0
[output]
# Output formats and destinations
enable_json = true
enable_csv = true
enable_zabbix = true
enable_grafana = false
[database]
database_path = /opt/snmp-discovery/data/inventory.db
Create the main discovery script
Develop the core Python script that performs network scanning, SNMP queries, and inventory management.
#!/opt/snmp-discovery/venv/bin/python3
import os
import sys
import json
import sqlite3
import subprocess
import configparser
import logging
from datetime import datetime
from pysnmp.hlapi import *
from ipaddress import IPv4Network
import csv
class SNMPDiscovery:
    """Scan configured network ranges with nmap, probe responsive hosts over
    SNMP, and maintain a SQLite-backed device inventory with JSON/CSV exports.

    Configuration comes from an INI file (sections: networks, snmp,
    discovery, output, database) — see discovery.conf in this guide.
    """

    def __init__(self, config_file):
        """Load the INI config and prepare logging and the inventory DB."""
        self.config = configparser.ConfigParser()
        self.config.read(config_file)
        self.setup_logging()
        self.setup_database()

    def setup_logging(self):
        """Log to both the discovery log file and the console."""
        log_file = '/opt/snmp-discovery/logs/discovery.log'
        logging.basicConfig(
            level=logging.INFO,
            format='%(asctime)s - %(levelname)s - %(message)s',
            handlers=[
                logging.FileHandler(log_file),
                logging.StreamHandler()
            ]
        )
        self.logger = logging.getLogger(__name__)

    def setup_database(self):
        """Open (creating if missing) the SQLite inventory table."""
        db_path = self.config.get('database', 'database_path')
        self.conn = sqlite3.connect(db_path)
        self.conn.execute('''
            CREATE TABLE IF NOT EXISTS devices (
                id INTEGER PRIMARY KEY AUTOINCREMENT,
                ip_address TEXT UNIQUE,
                hostname TEXT,
                system_name TEXT,
                system_descr TEXT,
                system_contact TEXT,
                system_location TEXT,
                uptime TEXT,
                snmp_community TEXT,
                snmp_version TEXT,
                first_seen TEXT,
                last_seen TEXT,
                status TEXT
            )
        ''')
        self.conn.commit()

    def scan_network(self, network_range):
        """Return a list of responsive IPs in *network_range* (nmap ping scan)."""
        self.logger.info(f"Scanning network range: {network_range}")
        cmd = ['nmap', '-sn', network_range]
        result = subprocess.run(cmd, capture_output=True, text=True)
        active_ips = []
        for line in result.stdout.split('\n'):
            if 'Nmap scan report for' in line:
                # Last token is either the bare IP or "(IP)" after a hostname.
                ip = line.split()[-1].strip('()')
                active_ips.append(ip)
        return active_ips

    def snmp_query(self, ip, community, version, oid):
        """Fetch one scalar OID value; return its string form or None.

        BUGFIX: use an SNMP GET instead of GETNEXT.  The configured OIDs are
        scalar *instances* (they end in .0); a GETNEXT at an instance returns
        the following object — and with lexicographicMode=False the walk
        yields nothing at all — so the original nextCmd() loop could never
        read these values.
        """
        try:
            mp_model = 1 if version == '2c' else 0  # SNMPv2c vs SNMPv1
            errorIndication, errorStatus, errorIndex, varBinds = next(getCmd(
                SnmpEngine(),
                CommunityData(community, mpModel=mp_model),
                UdpTransportTarget((ip, 161), timeout=5),
                ContextData(),
                ObjectType(ObjectIdentity(oid))))
            if not errorIndication and not errorStatus:
                for varBind in varBinds:
                    return str(varBind[1])
        except Exception as e:
            self.logger.debug(f"SNMP query failed for {ip}: {e}")
        return None

    def discover_device(self, ip):
        """Try each community/version pair; return a device-info dict or None."""
        communities = self.config.get('snmp', 'communities').split(',')
        versions = self.config.get('snmp', 'versions').split(',')
        for community in communities:
            community = community.strip()
            for version in versions:
                version = version.strip()
                # sysName answers iff the credentials work — use it as probe.
                system_name = self.snmp_query(
                    ip, community, version,
                    self.config.get('discovery', 'system_name_oid')
                )
                if system_name:
                    self.logger.info(f"Found SNMP device: {ip} ({system_name})")
                    device_info = {
                        'ip_address': ip,
                        'system_name': system_name,
                        'snmp_community': community,
                        'snmp_version': version,
                        'system_descr': self.snmp_query(
                            ip, community, version,
                            self.config.get('discovery', 'system_descr_oid')
                        ),
                        'system_contact': self.snmp_query(
                            ip, community, version,
                            self.config.get('discovery', 'system_contact_oid')
                        ),
                        'system_location': self.snmp_query(
                            ip, community, version,
                            self.config.get('discovery', 'system_location_oid')
                        ),
                        'uptime': self.snmp_query(
                            ip, community, version,
                            self.config.get('discovery', 'system_uptime_oid')
                        )
                    }
                    return device_info
        return None

    def update_inventory(self, device_info):
        """Upsert *device_info* keyed on ip_address and mark it 'active'."""
        now = datetime.now().isoformat()
        # Check if device exists.
        cursor = self.conn.execute(
            'SELECT id FROM devices WHERE ip_address = ?',
            (device_info['ip_address'],)
        )
        if cursor.fetchone():
            # Update existing device (first_seen is preserved).
            self.conn.execute('''
                UPDATE devices SET
                    system_name = ?, system_descr = ?, system_contact = ?,
                    system_location = ?, uptime = ?, snmp_community = ?,
                    snmp_version = ?, last_seen = ?, status = 'active'
                WHERE ip_address = ?
            ''', (
                device_info['system_name'], device_info['system_descr'],
                device_info['system_contact'], device_info['system_location'],
                device_info['uptime'], device_info['snmp_community'],
                device_info['snmp_version'], now, device_info['ip_address']
            ))
        else:
            # Insert new device.
            self.conn.execute('''
                INSERT INTO devices (
                    ip_address, system_name, system_descr, system_contact,
                    system_location, uptime, snmp_community, snmp_version,
                    first_seen, last_seen, status
                ) VALUES (?, ?, ?, ?, ?, ?, ?, ?, ?, ?, 'active')
            ''', (
                device_info['ip_address'], device_info['system_name'],
                device_info['system_descr'], device_info['system_contact'],
                device_info['system_location'], device_info['uptime'],
                device_info['snmp_community'], device_info['snmp_version'],
                now, now
            ))
        self.conn.commit()

    def export_inventory(self):
        """Write the active inventory to JSON and/or CSV, per [output] flags."""
        # Single quotes: 'active' is a string literal, not an identifier.
        cursor = self.conn.execute(
            "SELECT * FROM devices WHERE status = 'active'")
        devices = cursor.fetchall()
        # Export to JSON.  Indices follow the devices table column order.
        if self.config.getboolean('output', 'enable_json'):
            json_data = []
            for device in devices:
                json_data.append({
                    'ip_address': device[1],
                    'hostname': device[2],
                    'system_name': device[3],
                    'system_descr': device[4],
                    'system_contact': device[5],
                    'system_location': device[6],
                    'uptime': device[7],
                    'snmp_community': device[8],
                    'snmp_version': device[9],
                    'first_seen': device[10],
                    'last_seen': device[11]
                })
            with open('/opt/snmp-discovery/data/inventory.json', 'w') as f:
                json.dump(json_data, f, indent=2)
        # Export to CSV (skips the surrogate id column).
        if self.config.getboolean('output', 'enable_csv'):
            with open('/opt/snmp-discovery/data/inventory.csv', 'w', newline='') as f:
                writer = csv.writer(f)
                writer.writerow([
                    'ip_address', 'hostname', 'system_name', 'system_descr',
                    'system_contact', 'system_location', 'uptime',
                    'snmp_community', 'snmp_version', 'first_seen', 'last_seen'
                ])
                for device in devices:
                    writer.writerow(device[1:])

    def run_discovery(self):
        """One full pass: scan every range, probe hosts, upsert, export."""
        self.logger.info("Starting SNMP device discovery")
        network_ranges = self.config.get('networks', 'network_ranges').split(',')
        for network_range in network_ranges:
            network_range = network_range.strip()
            active_ips = self.scan_network(network_range)
            for ip in active_ips:
                device_info = self.discover_device(ip)
                if device_info:
                    self.update_inventory(device_info)
        self.export_inventory()
        self.logger.info("Discovery completed")
if __name__ == '__main__':
    # Standalone entry point: a single discovery pass driven by the
    # system-wide configuration file.
    conf_path = '/opt/snmp-discovery/config/discovery.conf'
    discovery = SNMPDiscovery(conf_path)
discovery.run_discovery()
Create Zabbix integration script
Build a script to automatically add discovered devices to Zabbix monitoring system.
#!/opt/snmp-discovery/venv/bin/python3
import json
import requests
import sqlite3
import logging
import configparser
class ZabbixIntegration:
    """Register discovered devices as SNMP-monitored hosts in Zabbix via its
    JSON-RPC API, reading the inventory from the discovery SQLite database."""

    def __init__(self, config_file):
        """Load discovery.conf and prepare logging and API settings."""
        self.config = configparser.ConfigParser()
        self.config.read(config_file)
        self.setup_logging()
        # Zabbix API settings.
        # NOTE(review): these are the out-of-the-box Zabbix credentials —
        # change them (ideally move them into discovery.conf) before
        # using this in production.
        self.zabbix_url = "http://localhost/zabbix/api_jsonrpc.php"
        self.zabbix_user = "admin"
        self.zabbix_password = "zabbix"
        self.auth_token = None

    def setup_logging(self):
        """Console-only logging for this helper."""
        logging.basicConfig(
            level=logging.INFO,
            format='%(asctime)s - %(levelname)s - %(message)s'
        )
        self.logger = logging.getLogger(__name__)

    def zabbix_authenticate(self):
        """Obtain an API token; return True on success.

        BUGFIX: `user.login` takes the credential under the key "username";
        the legacy "user" key was removed in Zabbix 6.4, and this guide
        installs Zabbix 7.0, so the original payload always failed.
        """
        payload = {
            "jsonrpc": "2.0",
            "method": "user.login",
            "params": {
                "username": self.zabbix_user,
                "password": self.zabbix_password
            },
            "id": 1
        }
        # Bounded timeout so a hung frontend cannot stall the sync forever.
        response = requests.post(self.zabbix_url, json=payload, timeout=30)
        result = response.json()
        if 'result' in result:
            self.auth_token = result['result']
            self.logger.info("Zabbix authentication successful")
            return True
        else:
            self.logger.error(f"Zabbix authentication failed: {result}")
            return False

    def create_host(self, device_info):
        """Create one Zabbix host with an SNMP interface; True on success."""
        payload = {
            "jsonrpc": "2.0",
            "method": "host.create",
            "params": {
                "host": device_info['ip_address'],
                "name": device_info['system_name'] or device_info['ip_address'],
                "interfaces": [{
                    "type": 2,  # SNMP interface
                    "main": 1,
                    "useip": 1,
                    "ip": device_info['ip_address'],
                    "dns": "",
                    "port": "161",
                    "details": {
                        "version": 2 if device_info['snmp_version'] == '2c' else 1,
                        "community": device_info['snmp_community']
                    }
                }],
                # NOTE(review): hard-coded IDs — "4" is the "Linux servers"
                # group and "10001" the Linux template on a *default*
                # install; verify against your Zabbix instance.
                "groups": [{"groupid": "4"}],
                "templates": [{"templateid": "10001"}]
            },
            "auth": self.auth_token,
            "id": 1
        }
        response = requests.post(self.zabbix_url, json=payload, timeout=30)
        result = response.json()
        if 'result' in result:
            self.logger.info(f"Created Zabbix host: {device_info['ip_address']}")
            return True
        else:
            self.logger.error(f"Failed to create host: {result}")
            return False

    def sync_devices(self):
        """Push every active inventory row to Zabbix as a host."""
        if not self.zabbix_authenticate():
            return
        db_path = self.config.get('database', 'database_path')
        conn = sqlite3.connect(db_path)
        try:
            cursor = conn.execute(
                "SELECT * FROM devices WHERE status = 'active'")
            devices = cursor.fetchall()
        finally:
            conn.close()  # BUGFIX: connection was never closed
        for device in devices:
            # Indices follow the discovery script's devices table order.
            device_info = {
                'ip_address': device[1],
                'system_name': device[3],
                'snmp_community': device[8],
                'snmp_version': device[9]
            }
            self.create_host(device_info)
if __name__ == '__main__':
    # Sync the current inventory into Zabbix when run directly.
    conf = '/opt/snmp-discovery/config/discovery.conf'
    integration = ZabbixIntegration(conf)
integration.sync_devices()
Create monitoring dashboard script
Develop a script to generate monitoring data for Grafana dashboards using discovered device information.
#!/opt/snmp-discovery/venv/bin/python3
import json
import sqlite3
import logging
import configparser
from datetime import datetime
class GrafanaExport:
    """Render the device inventory as a Grafana dashboard JSON definition
    plus a Prometheus-style metrics file under /opt/snmp-discovery/data."""

    def __init__(self, config_file):
        """Load discovery.conf and configure console logging."""
        self.config = configparser.ConfigParser()
        self.config.read(config_file)
        self.setup_logging()

    def setup_logging(self):
        """Console-only logging for this exporter."""
        logging.basicConfig(
            level=logging.INFO,
            format='%(asctime)s - %(levelname)s - %(message)s'
        )
        self.logger = logging.getLogger(__name__)

    def generate_dashboard_data(self):
        """Read active devices and write the dashboard and metrics files."""
        db_path = self.config.get('database', 'database_path')
        conn = sqlite3.connect(db_path)
        try:
            cursor = conn.execute(
                "SELECT * FROM devices WHERE status = 'active'")
            devices = cursor.fetchall()
        finally:
            conn.close()  # BUGFIX: connection was never closed
        dashboard_data = {
            "dashboard": {
                "title": "SNMP Device Discovery Dashboard",
                "tags": ["snmp", "discovery", "network"],
                "timezone": "browser",
                "panels": [],
                "time": {
                    "from": "now-1h",
                    "to": "now"
                },
                "refresh": "30s"
            }
        }
        # Single-stat panel: total number of discovered devices.
        device_count_panel = {
            "id": 1,
            "title": "Total Discovered Devices",
            "type": "stat",
            "targets": [{
                "expr": f"scalar({len(devices)})",
                "refId": "A"
            }],
            "gridPos": {"h": 4, "w": 6, "x": 0, "y": 0}
        }
        # Table panel listing every device; descriptions capped at 50 chars
        # (NULL descriptions pass through unchanged).
        device_table_panel = {
            "id": 2,
            "title": "Discovered Devices",
            "type": "table",
            "targets": [{
                "data": [
                    {
                        "ip_address": device[1],
                        "system_name": device[3],
                        "system_descr": device[4][:50] + "..." if len(device[4] or "") > 50 else device[4],
                        "last_seen": device[11]
                    } for device in devices
                ],
                "refId": "A"
            }],
            "gridPos": {"h": 8, "w": 18, "x": 6, "y": 0}
        }
        dashboard_data["dashboard"]["panels"] = [
            device_count_panel,
            device_table_panel
        ]
        # Export dashboard configuration.
        with open('/opt/snmp-discovery/data/grafana_dashboard.json', 'w') as f:
            json.dump(dashboard_data, f, indent=2)
        # Export Prometheus metrics format: one "up" gauge per device.
        metrics_data = []
        for device in devices:
            metrics_data.append(f'snmp_device_up{{ip="{device[1]}",name="{device[3]}"}} 1')
        with open('/opt/snmp-discovery/data/prometheus_metrics.prom', 'w') as f:
            f.write('\n'.join(metrics_data))
        self.logger.info(f"Exported dashboard data for {len(devices)} devices")
if __name__ == '__main__':
    # Regenerate dashboard artifacts from the current inventory.
    conf = '/opt/snmp-discovery/config/discovery.conf'
    exporter = GrafanaExport(conf)
exporter.generate_dashboard_data()
Set proper permissions
Configure file permissions for the discovery scripts with security best practices.
sudo chown -R snmp-discovery:snmp-discovery /opt/snmp-discovery
sudo chmod 755 /opt/snmp-discovery/scripts/*.py
sudo chmod 644 /opt/snmp-discovery/config/discovery.conf
sudo chmod 664 /opt/snmp-discovery/logs/discovery.log
Create systemd service
Set up a systemd service to run the discovery process automatically on a schedule.
[Unit]
Description=SNMP Device Discovery Service
After=network.target
[Service]
Type=oneshot
User=snmp-discovery
Group=snmp-discovery
WorkingDirectory=/opt/snmp-discovery
ExecStart=/opt/snmp-discovery/scripts/snmp_discovery.py
Environment=PATH=/opt/snmp-discovery/venv/bin:/usr/bin:/bin
StandardOutput=journal
StandardError=journal
Create systemd timer
Configure a systemd timer to run the discovery service at regular intervals.
[Unit]
Description=Run SNMP Discovery every hour
Requires=snmp-discovery.service
[Timer]
OnCalendar=hourly
Persistent=true
[Install]
WantedBy=timers.target
Enable and start services
Activate the systemd timer to begin automated discovery operations.
sudo systemctl daemon-reload
sudo systemctl enable snmp-discovery.timer
sudo systemctl start snmp-discovery.timer
sudo systemctl status snmp-discovery.timer
Install Zabbix integration
Install Zabbix server to provide comprehensive device monitoring capabilities. This integrates with our discovery system for automated monitoring setup.
wget https://repo.zabbix.com/zabbix/7.0/ubuntu/pool/main/z/zabbix-release/zabbix-release_7.0-2+ubuntu24.04_all.deb
sudo dpkg -i zabbix-release_7.0-2+ubuntu24.04_all.deb
sudo apt update
sudo apt install -y zabbix-server-pgsql zabbix-frontend-php zabbix-apache-conf
Configure Zabbix integration cron job
Set up automated synchronization between discovered devices and Zabbix monitoring.
sudo -u snmp-discovery crontab -e
Add the following line to run Zabbix sync every 4 hours:
0 */4 * * * /opt/snmp-discovery/scripts/zabbix_integration.py
Verify your setup
Test the discovery system and verify that it's correctly identifying SNMP devices on your network.
# Check service status
sudo systemctl status snmp-discovery.timer
sudo systemctl list-timers snmp-discovery.timer
Run manual discovery
sudo -u snmp-discovery /opt/snmp-discovery/scripts/snmp_discovery.py
Check discovery logs
tail -f /opt/snmp-discovery/logs/discovery.log
Verify inventory database
sudo -u snmp-discovery sqlite3 /opt/snmp-discovery/data/inventory.db "SELECT ip_address, system_name, last_seen FROM devices;"
Check exported data
ls -la /opt/snmp-discovery/data/
cat /opt/snmp-discovery/data/inventory.json | jq .
Common issues
| Symptom | Cause | Fix |
|---|---|---|
| Discovery finds no devices | SNMP communities incorrect | Update communities in discovery.conf and verify device SNMP settings |
| Permission denied errors | Incorrect file ownership | sudo chown -R snmp-discovery:snmp-discovery /opt/snmp-discovery |
| Python import errors | Missing virtual environment | Reinstall pip packages: sudo -u snmp-discovery /opt/snmp-discovery/venv/bin/pip install pysnmp requests |
| Timer not running | Systemd timer not enabled | sudo systemctl enable --now snmp-discovery.timer |
| Database lock errors | Multiple discovery processes | Kill running processes: sudo pkill -f snmp_discovery.py |
| Nmap scan fails | Network connectivity issues | Test manually: nmap -sn 192.168.1.0/24 and check network access |
Next steps
- Configure Zabbix SNMP monitoring for network devices with automated discovery and templates
- Integrate SNMP monitoring with InfluxDB and Telegraf for time-series analysis
- Configure network monitoring with SNMP and Grafana dashboards for infrastructure visibility
- Implement SNMP trap monitoring and alerting system for proactive network management
- Set up automated network topology discovery with SNMP and LLDP for infrastructure mapping
Automated install script
Run this to automate the entire setup
#!/usr/bin/env bash
set -euo pipefail
# SNMP Discovery System Installation Script
# Implements automated network device discovery and inventory management
# Colors for output
readonly RED='\033[0;31m'
readonly GREEN='\033[0;32m'
readonly YELLOW='\033[1;33m'
readonly NC='\033[0m'
# Global variables
NETWORK_RANGES=""
SNMP_COMMUNITIES="public"
# Logging function
# Timestamped status message in green.
log() {
    printf '%b\n' "${GREEN}[$(date +'%Y-%m-%d %H:%M:%S')] $1${NC}"
}

# Non-fatal warning in yellow.
warn() {
    printf '%b\n' "${YELLOW}[WARNING] $1${NC}"
}

# Fatal error in red; aborts the installer with a non-zero status.
error() {
    printf '%b\n' "${RED}[ERROR] $1${NC}"
    exit 1
}
# Cleanup on failure
# Roll back a partial installation (invoked via `trap cleanup ERR`).
cleanup() {
    warn "Installation failed. Cleaning up..."
    # BUGFIX: the installer also creates snmp-discovery.timer — stop,
    # disable and remove it too, so no orphaned timer keeps firing
    # against a deleted service after a failed install.
    systemctl stop snmp-discovery.timer 2>/dev/null || true
    systemctl disable snmp-discovery.timer 2>/dev/null || true
    systemctl stop snmp-discovery.service 2>/dev/null || true
    systemctl disable snmp-discovery.service 2>/dev/null || true
    rm -f /etc/systemd/system/snmp-discovery.service /etc/systemd/system/snmp-discovery.timer
    systemctl daemon-reload 2>/dev/null || true
    userdel -f snmp-discovery 2>/dev/null || true
    rm -rf /opt/snmp-discovery
    warn "Cleanup completed"
}
trap cleanup ERR
# Usage message
# Print CLI help and exit with a non-zero status.
usage() {
    cat <<USAGE
Usage: $0 [OPTIONS]
Options:
  -n, --networks     Network ranges to scan (comma-separated, e.g., 192.168.1.0/24,10.0.0.0/24)
  -c, --communities  SNMP communities (comma-separated, default: public)
  -h, --help         Show this help message
USAGE
    exit 1
}
# Parse arguments
while [[ $# -gt 0 ]]; do
case $1 in
-n|--networks)
NETWORK_RANGES="$2"
shift 2
;;
-c|--communities)
SNMP_COMMUNITIES="$2"
shift 2
;;
-h|--help)
usage
;;
*)
error "Unknown option: $1"
;;
esac
done
# Check if running as root
if [[ $EUID -ne 0 ]]; then
error "This script must be run as root"
fi
# Detect distribution
if [ -f /etc/os-release ]; then
. /etc/os-release
case "$ID" in
ubuntu|debian)
PKG_MGR="apt"
PKG_INSTALL="apt install -y"
PKG_UPDATE="apt update && apt upgrade -y"
SNMP_PKG="snmp snmp-mibs-downloader"
PYTHON_PKG="python3 python3-pip python3-venv"
;;
almalinux|rocky|centos|rhel|ol|fedora)
PKG_MGR="dnf"
PKG_INSTALL="dnf install -y"
PKG_UPDATE="dnf update -y"
SNMP_PKG="net-snmp net-snmp-utils"
PYTHON_PKG="python3 python3-pip"
;;
amzn)
PKG_MGR="yum"
PKG_INSTALL="yum install -y"
PKG_UPDATE="yum update -y"
SNMP_PKG="net-snmp net-snmp-utils"
PYTHON_PKG="python3 python3-pip"
;;
*)
error "Unsupported distribution: $ID"
;;
esac
else
error "Cannot detect distribution"
fi
log "[1/8] Updating system packages..."
$PKG_UPDATE
log "[2/8] Installing SNMP tools and dependencies..."
$PKG_INSTALL $SNMP_PKG nmap $PYTHON_PKG curl jq
log "[3/8] Creating discovery environment..."
useradd -r -s /bin/bash -d /opt/snmp-discovery snmp-discovery || true
mkdir -p /opt/snmp-discovery/{scripts,logs,config,data}
chown -R snmp-discovery:snmp-discovery /opt/snmp-discovery
chmod 755 /opt/snmp-discovery
chmod 775 /opt/snmp-discovery/{scripts,logs,config,data}
log "[4/8] Setting up Python environment..."
sudo -u snmp-discovery python3 -m venv /opt/snmp-discovery/venv
sudo -u snmp-discovery /opt/snmp-discovery/venv/bin/pip install --upgrade pip
sudo -u snmp-discovery /opt/snmp-discovery/venv/bin/pip install pysnmp requests ipaddress
log "[5/8] Configuring SNMP client settings..."
cat > /etc/snmp/snmp.conf << 'EOF'
mibs +ALL
defVersion 2c
timeout 5
retries 3
EOF
log "[6/8] Creating discovery configuration..."
cat > /opt/snmp-discovery/config/discovery.conf << EOF
[networks]
network_ranges = ${NETWORK_RANGES:-192.168.1.0/24}
[snmp]
communities = ${SNMP_COMMUNITIES}
versions = 2c,1
timeout = 5
retries = 3
port = 161
[discovery]
scan_interval = 60
full_scan_interval = 1440
system_name_oid = 1.3.6.1.2.1.1.5.0
system_descr_oid = 1.3.6.1.2.1.1.1.0
system_uptime_oid = 1.3.6.1.2.1.1.3.0
system_contact_oid = 1.3.6.1.2.1.1.4.0
system_location_oid = 1.3.6.1.2.1.1.6.0
[output]
enable_json = true
enable_csv = true
enable_zabbix = true
[database]
database_path = /opt/snmp-discovery/data/inventory.db
EOF
log "[7/8] Creating discovery script..."
cat > /opt/snmp-discovery/scripts/snmp_discovery.py << 'EOF'
#!/opt/snmp-discovery/venv/bin/python3
import os
import sys
import json
import sqlite3
import subprocess
import configparser
import logging
from datetime import datetime
from pysnmp.hlapi import *
from ipaddress import IPv4Network
import csv
class SNMPDiscovery:
    """Installer-deployed variant: scan configured ranges with nmap, probe
    hosts via SNMP, upsert results into SQLite, and export JSON/CSV."""

    def __init__(self, config_file):
        """Load the INI config and prepare logging and the inventory DB."""
        self.config = configparser.ConfigParser()
        self.config.read(config_file)
        self.setup_logging()
        self.setup_database()

    def setup_logging(self):
        """Log to both the discovery log file and the console."""
        log_file = '/opt/snmp-discovery/logs/discovery.log'
        logging.basicConfig(
            level=logging.INFO,
            format='%(asctime)s - %(levelname)s - %(message)s',
            handlers=[
                logging.FileHandler(log_file),
                logging.StreamHandler()
            ]
        )
        self.logger = logging.getLogger(__name__)

    def setup_database(self):
        """Open (creating if missing) the SQLite inventory table."""
        db_path = self.config.get('database', 'database_path')
        self.conn = sqlite3.connect(db_path)
        self.conn.execute('''
            CREATE TABLE IF NOT EXISTS devices (
                id INTEGER PRIMARY KEY,
                ip_address TEXT UNIQUE,
                hostname TEXT,
                description TEXT,
                uptime TEXT,
                contact TEXT,
                location TEXT,
                community TEXT,
                last_seen TIMESTAMP,
                created TIMESTAMP DEFAULT CURRENT_TIMESTAMP
            )
        ''')
        self.conn.commit()

    def scan_network(self):
        """Ping-scan every configured range; return responsive IPs."""
        networks = self.config.get('networks', 'network_ranges').split(',')
        active_hosts = []
        for network in networks:
            network = network.strip()
            self.logger.info(f"Scanning network: {network}")
            result = subprocess.run([
                'nmap', '-sn', network
            ], capture_output=True, text=True)
            for line in result.stdout.split('\n'):
                if 'Nmap scan report' in line:
                    # Last token is the bare IP or "(IP)" after a hostname.
                    ip = line.split()[-1].strip('()')
                    active_hosts.append(ip)
        return active_hosts

    def snmp_query(self, ip, community, oid):
        """Fetch one scalar OID value via SNMP GET; None on any failure.

        BUGFIX: use GET, not GETNEXT.  The configured OIDs are scalar
        instances (they end in .0); GETNEXT skips past the instance, and
        with lexicographicMode=False the walk yields nothing, so the
        original nextCmd() loop could never return their values.
        """
        try:
            errorIndication, errorStatus, errorIndex, varBinds = next(getCmd(
                SnmpEngine(),
                CommunityData(community),
                UdpTransportTarget((ip, 161), timeout=5, retries=3),
                ContextData(),
                ObjectType(ObjectIdentity(oid))))
            if not errorIndication and not errorStatus:
                for varBind in varBinds:
                    return str(varBind[1])
        except Exception as e:
            self.logger.debug(f"SNMP query failed for {ip}: {e}")
        return None

    def discover_devices(self):
        """Probe each responsive host with every community; upsert hits."""
        hosts = self.scan_network()
        communities = self.config.get('snmp', 'communities').split(',')
        for host in hosts:
            for community in communities:
                community = community.strip()
                # sysName answers iff the community works — use it as probe.
                name = self.snmp_query(host, community, self.config.get('discovery', 'system_name_oid'))
                if name:
                    descr = self.snmp_query(host, community, self.config.get('discovery', 'system_descr_oid'))
                    uptime = self.snmp_query(host, community, self.config.get('discovery', 'system_uptime_oid'))
                    contact = self.snmp_query(host, community, self.config.get('discovery', 'system_contact_oid'))
                    location = self.snmp_query(host, community, self.config.get('discovery', 'system_location_oid'))
                    self.conn.execute('''
                        INSERT OR REPLACE INTO devices
                        (ip_address, hostname, description, uptime, contact, location, community, last_seen)
                        VALUES (?, ?, ?, ?, ?, ?, ?, ?)
                    ''', (host, name, descr, uptime, contact, location, community, datetime.now()))
                    self.conn.commit()
                    self.logger.info(f"Discovered device: {name} ({host})")
                    break  # first working community wins

    def export_inventory(self):
        """Dump the full devices table to JSON and/or CSV per [output]."""
        cursor = self.conn.execute('SELECT * FROM devices ORDER BY ip_address')
        devices = cursor.fetchall()
        if self.config.getboolean('output', 'enable_json'):
            with open('/opt/snmp-discovery/data/inventory.json', 'w') as f:
                json.dump([dict(zip([col[0] for col in cursor.description], row)) for row in devices], f, indent=2, default=str)
        if self.config.getboolean('output', 'enable_csv'):
            with open('/opt/snmp-discovery/data/inventory.csv', 'w', newline='') as f:
                writer = csv.writer(f)
                writer.writerow([col[0] for col in cursor.description])
                writer.writerows(devices)

    def run(self):
        """One full scan → discover → export cycle."""
        self.logger.info("Starting SNMP discovery scan")
        self.discover_devices()
        self.export_inventory()
        self.logger.info("Discovery scan completed")
if __name__ == '__main__':
    # Entry point: run one full scan-and-export cycle.
    CONFIG_FILE = '/opt/snmp-discovery/config/discovery.conf'
    SNMPDiscovery(CONFIG_FILE).run()
EOF
chmod 755 /opt/snmp-discovery/scripts/snmp_discovery.py
chown snmp-discovery:snmp-discovery /opt/snmp-discovery/scripts/snmp_discovery.py
cat > /etc/systemd/system/snmp-discovery.service << 'EOF'
[Unit]
Description=SNMP Device Discovery Service
After=network.target
[Service]
Type=oneshot
User=snmp-discovery
Group=snmp-discovery
WorkingDirectory=/opt/snmp-discovery
ExecStart=/opt/snmp-discovery/scripts/snmp_discovery.py
StandardOutput=journal
StandardError=journal
[Install]
WantedBy=multi-user.target
EOF
cat > /etc/systemd/system/snmp-discovery.timer << 'EOF'
[Unit]
Description=Run SNMP Discovery every hour
Requires=snmp-discovery.service
[Timer]
OnCalendar=hourly
Persistent=true
[Install]
WantedBy=timers.target
EOF
systemctl daemon-reload
systemctl enable snmp-discovery.timer
systemctl start snmp-discovery.timer
chown -R snmp-discovery:snmp-discovery /opt/snmp-discovery/config/discovery.conf
log "[8/8] Verifying installation..."
if systemctl is-active --quiet snmp-discovery.timer; then
log "SNMP Discovery timer is active"
else
error "Failed to start SNMP Discovery timer"
fi
if [[ -f /opt/snmp-discovery/scripts/snmp_discovery.py ]]; then
log "Discovery script installed successfully"
else
error "Discovery script installation failed"
fi
sudo -u snmp-discovery /opt/snmp-discovery/scripts/snmp_discovery.py &
log "SNMP Discovery System installation completed successfully!"
log "Configuration file: /opt/snmp-discovery/config/discovery.conf"
log "Logs directory: /opt/snmp-discovery/logs/"
log "Data directory: /opt/snmp-discovery/data/"
log "Service runs hourly via systemd timer"
Review the script before running. Execute with: bash install.sh