Commit 04b295ba by zmops

远景能源采集平台代码

parent d8401abf
# 这是 Meraki API Key 不可为空 # 这是 Meraki API Key 不可为空
#MERAKI_API_KEY=bbaa461f371d9eb323ba478028a9585624f2500c MERAKI_API_KEY=bbaa461f371d9eb323ba478028a9585624f2500c
MERAKI_API_KEY=715183336c6eecfa3924a95e07256c464ac9516d #MERAKI_API_KEY=715183336c6eecfa3924a95e07256c464ac9516d
# 组织 ID 可为空 # 组织 ID 可为空
ORGANIZATION_ID= ORGANIZATION_ID=
# Meraki API 基础 URL 可为空 # Meraki API 基础 URL 可为空
#MERAKI_API_BASE=https://n3.dashboard.meraki.cn/api/v1 MERAKI_API_BASE=https://n3.dashboard.meraki.cn/api/v1
MERAKI_API_BASE=https://n190.dashboard.meraki.com/api/v1 #MERAKI_API_BASE=https://n190.dashboard.meraki.com/api/v1
# 是否开启debug模式 # 是否开启debug模式
DJANGO_DEBUG=True DJANGO_DEBUG=False
#redis 相关配置 #redis 相关配置
...@@ -18,6 +18,11 @@ DJANGO_DEBUG=True ...@@ -18,6 +18,11 @@ DJANGO_DEBUG=True
REDIS_HOST=14.103.242.161 REDIS_HOST=14.103.242.161
#REDIS_HOST=127.0.0.1 #REDIS_HOST=127.0.0.1
REDIS_PORT=6379 REDIS_PORT=6379
REDIS_DB=9 REDIS_DB=8
REDIS_USERNAME= REDIS_USERNAME=
REDIS_PASSWORD=redis@123 REDIS_PASSWORD=redis@123
ZABBIX_API_URL=https://netmonitor.envision-energy.com/api_jsonrpc.php
ZABBIX_USER=weiguan
ZABBIX_PASSWORD=Test@123
\ No newline at end of file
Not Found: / [INFO] 2026-01-09 13:40:17,280 meraki_Interface_forward.views.zabbix_views Starting Meraki to Zabbix synchronization task...
Not Found: /favicon.ico [INFO] 2026-01-09 13:40:17,282 meraki_Interface_forward.views.zabbix_views Request parameters - Filter Networks: 14, Custom Map Keys: ['东莞办公室', '丰宁二期-工厂', '乌兰二期-工厂', '乌兰察布工厂IT网络', '乐亭-工厂', '五河-工厂', '包头主机工厂', '包头储能工厂', '包头合金办公室', '包头合金工厂IT网络', '北京-办公室', '博荟广场C座5F', '博荟广场C座6F', '博荟广场C座15F', '博荟广场C座22F', '台前云中心', '呼和浩特办公室', '商都叶片工厂', '大连-庄河工厂', '天津-办公室', '如东叶片-工厂', '山东-单县塔基工厂', '巴彦淖尔二期-工厂', '广西南宁-工厂', '广西桂平-工厂', '庆阳-工厂', '新疆-吉木乃工厂', '新疆乌鲁木齐办公室', '武威储能', '武威叶片-工厂', '武威叶片宿舍食堂', '江阴RDC外仓', '江阴一期堆场', '江阴一期篮球场', '江阴一期车间', '江阴一期辅房', '江阴国家级供应商外仓', '江阴宝湾国际物流园', '江阴渔光会馆C区', '江阴二期车间', '江阴二期辅房', '江阴-口罩厂', '江阴三期', '江阴四期-仓库', '江阴四期变压办公室', '江阴石庄仓库', '江阴齿轮办公室1F', '江阴传动链-工厂', '江阴制氢办公室', '江阴氢能星球工厂2#厂房', '江阴-小湖工厂', '沈阳工厂', '沙尔沁-工厂', '沙尔沁-辅房', '沙尔沁储能', '河北沧州塔基工厂', '海兴工厂', '海兴工厂二期', '淮安-盱眙工厂', '濮阳工厂', '白城-工厂', '翁牛特旗-工厂', '翁牛特旗-辅房', '苍南轴承工厂', '若羌工厂', '襄阳工厂', '赤峰元宝山P1期', '赤峰元宝山制氢工厂', '赤峰元宝山氢能3#', '郎溪工厂', '酒泉工厂', '钦州-三期叶片工厂', '陕西榆林云中心', '高安-工厂', '高安-辅房', '魏县-工厂', 'Bangalore Office', 'Bangalore Office 11F', 'Boston-Office', 'Boulder-Office', 'Brazil-Office', 'Denmark Office', 'DuBai-Office', 'GIC-WorkShop', 'London-Office', 'Melbourne-Office', 'Menlo Park', 'Singapore Office', 'Spain-Office'], Meraki API: http://10.93.64.204:5432
[INFO] 2026-01-09 13:40:17,282 meraki_Interface_forward.views.zabbix_views Fetching Meraki devices with filters...
[INFO] 2026-01-09 13:40:21,676 meraki_Interface_forward.services.meraki_service get_filtered_devices: Redis miss for ALL devices. Fetching from Meraki API...
[INFO] 2026-01-09 13:40:23,773 meraki_Interface_forward.services.meraki_service get_filtered_devices: Fetched 118 devices. Caching by serial...
[INFO] 2026-01-09 13:40:24,044 meraki_Interface_forward.services.meraki_service get_filtered_devices: Caching complete.
[INFO] 2026-01-09 13:40:24,046 meraki_Interface_forward.views.zabbix_views Fetched 118 devices from Meraki service.
[INFO] 2026-01-09 13:40:24,047 meraki_Interface_forward.views.zabbix_views Identified 118 unique valid devices.
[INFO] 2026-01-09 13:40:24,047 meraki_Interface_forward.views.zabbix_views Initializing Zabbix service and fetching host groups...
[ERROR] 2026-01-09 13:40:24,660 meraki_Interface_forward.services.zabbix_service Zabbix request failed: user.login, error: Zabbix API Error: {'code': -32602, 'message': 'Invalid params.', 'data': 'Invalid parameter "/": unexpected parameter "user".'}
[WARNING] 2026-01-09 13:40:24,660 meraki_Interface_forward.services.zabbix_service Zabbix login with 'user' failed, retrying with 'username'...
[INFO] 2026-01-09 13:40:25,505 meraki_Interface_forward.services.zabbix_service Zabbix login successful
[INFO] 2026-01-09 13:40:26,609 meraki_Interface_forward.views.zabbix_views Fetched 161 host groups from Zabbix.
[INFO] 2026-01-09 13:40:27,251 meraki_Interface_forward.views.zabbix_views Loaded 0 cached serials from Redis.
[INFO] 2026-01-09 13:40:27,251 meraki_Interface_forward.views.zabbix_views Diff calculation: 0 hosts to disable, 118 hosts to add/update.
[INFO] 2026-01-09 13:40:27,251 meraki_Interface_forward.views.zabbix_views Pre-fetching Zabbix template IDs...
[INFO] 2026-01-09 13:40:27,910 meraki_Interface_forward.views.zabbix_views Template 'Env_Meraki_Wireless_Template' found, ID: 10708
[INFO] 2026-01-09 13:40:28,469 meraki_Interface_forward.views.zabbix_views Template 'Env_Meraki_Switch_Template' found, ID: 10707
[INFO] 2026-01-09 13:40:28,469 meraki_Interface_forward.views.zabbix_views Processing current devices (Add/Update)...
[INFO] 2026-01-09 13:40:29,975 meraki_Interface_forward.services.zabbix_service Host Q2DN-ZLSW-RMV3 (MX-80) created: {'hostids': ['14192']}
[INFO] 2026-01-09 13:40:32,655 meraki_Interface_forward.services.zabbix_service Host Q2GW-8JNP-KP9S (16F-AS02) created: {'hostids': ['14193']}
[INFO] 2026-01-09 13:40:33,989 meraki_Interface_forward.services.zabbix_service Host Q2GW-8K76-PA6Y (14F-Envision) created: {'hostids': ['14194']}
[INFO] 2026-01-09 13:40:35,380 meraki_Interface_forward.services.zabbix_service Host Q2GW-B45M-MTKA (16F-AS01-Core) created: {'hostids': ['14195']}
[INFO] 2026-01-09 13:40:36,694 meraki_Interface_forward.services.zabbix_service Host Q2GW-DX6W-ZFXQ (AS01Q2GW-DX6W-ZFXQ) created: {'hostids': ['14196']}
[INFO] 2026-01-09 13:40:38,079 meraki_Interface_forward.services.zabbix_service Host Q2GW-FFDX-W38S (Spain-AS01) created: {'hostids': ['14197']}
[INFO] 2026-01-09 13:40:39,354 meraki_Interface_forward.services.zabbix_service Host Q2GW-GX77-TSTY (Spain-AS02) created: {'hostids': ['14198']}
[INFO] 2026-01-09 13:40:40,823 meraki_Interface_forward.services.zabbix_service Host Q2GW-RVW8-45G6 (AUMEL01MDFR01AS01-1) created: {'hostids': ['14199']}
[INFO] 2026-01-09 13:40:42,311 meraki_Interface_forward.services.zabbix_service Host Q2GW-RX2E-2FBH (AUMEL01MDFR01AS01-2) created: {'hostids': ['14200']}
[INFO] 2026-01-09 13:40:43,677 meraki_Interface_forward.services.zabbix_service Host Q2GW-TVTM-F9U4 (AS01Q2GW-TVTM-F9U4) created: {'hostids': ['14201']}
[INFO] 2026-01-09 13:40:44,965 meraki_Interface_forward.services.zabbix_service Host Q2GW-U4JD-Y2EJ (AS02Q2GW-U4JD-Y2EJ) created: {'hostids': ['14202']}
[INFO] 2026-01-09 13:40:47,338 meraki_Interface_forward.services.zabbix_service Host Q2HN-EY6E-BTEZ (Q2HN-EY6E-BTEZ) created: {'hostids': ['14203']}
[INFO] 2026-01-09 13:40:49,119 meraki_Interface_forward.services.zabbix_service Host Q2JD-AEC8-T7BE (AP1) created: {'hostids': ['14204']}
[INFO] 2026-01-09 13:40:50,423 meraki_Interface_forward.services.zabbix_service Host Q2KP-HSX3-F3FZ (MS220-24P-1) created: {'hostids': ['14205']}
[INFO] 2026-01-09 13:40:51,756 meraki_Interface_forward.services.zabbix_service Host Q2KW-CQP8-ZLTY (DB01BIDCR01AS01) created: {'hostids': ['14206']}
[INFO] 2026-01-09 13:40:53,102 meraki_Interface_forward.services.zabbix_service Host Q2KW-K2UN-NAAE (EE-BOU-CORE-SW3) created: {'hostids': ['14207']}
[INFO] 2026-01-09 13:40:54,484 meraki_Interface_forward.services.zabbix_service Host Q2KW-MFA2-V4TH (EULONASA902FMDFR01AS1) created: {'hostids': ['14208']}
[INFO] 2026-01-09 13:40:55,853 meraki_Interface_forward.services.zabbix_service Host Q2KW-NXAF-NN57 (EE-BOU-CORE-SW2) created: {'hostids': ['14209']}
[INFO] 2026-01-09 13:40:57,258 meraki_Interface_forward.services.zabbix_service Host Q2KW-NYA7-S5V8 (EE-BOU-CORE-SW1) created: {'hostids': ['14210']}
[INFO] 2026-01-09 13:40:58,690 meraki_Interface_forward.services.zabbix_service Host Q2ZD-GBSP-C28P (EE-BOU-AP4) created: {'hostids': ['14211']}
[INFO] 2026-01-09 13:41:00,053 meraki_Interface_forward.services.zabbix_service Host Q2ZD-Z6Y9-8CCV (EE-BOU-AP2) created: {'hostids': ['14212']}
[INFO] 2026-01-09 13:41:01,484 meraki_Interface_forward.services.zabbix_service Host Q3AC-5ZEB-M7PS (AP01) created: {'hostids': ['14213']}
[INFO] 2026-01-09 13:41:03,207 meraki_Interface_forward.services.zabbix_service Host Q3AC-B76K-HMJX (AP02) created: {'hostids': ['14214']}
[INFO] 2026-01-09 13:41:04,710 meraki_Interface_forward.services.zabbix_service Host Q3AC-BKG9-QY8S (AP03) created: {'hostids': ['14215']}
[INFO] 2026-01-09 13:41:06,113 meraki_Interface_forward.services.zabbix_service Host Q3AC-BW4K-BMSN (AP04) created: {'hostids': ['14216']}
[INFO] 2026-01-09 13:41:07,547 meraki_Interface_forward.services.zabbix_service Host Q3AC-DC7Q-V3YJ (AP05) created: {'hostids': ['14217']}
[INFO] 2026-01-09 13:41:08,971 meraki_Interface_forward.services.zabbix_service Host Q3AC-EFDK-SLQR (AP06) created: {'hostids': ['14218']}
[INFO] 2026-01-09 13:41:10,458 meraki_Interface_forward.services.zabbix_service Host Q3AC-GYPG-AEW5 (AP07) created: {'hostids': ['14219']}
[INFO] 2026-01-09 13:41:12,047 meraki_Interface_forward.services.zabbix_service Host Q3AC-JESD-NRCS (AP08) created: {'hostids': ['14220']}
[INFO] 2026-01-09 13:41:13,508 meraki_Interface_forward.services.zabbix_service Host Q3AC-JLRX-KMWB (AP09) created: {'hostids': ['14221']}
[INFO] 2026-01-09 13:41:15,321 meraki_Interface_forward.services.zabbix_service Host Q3AC-JSU5-BH8T (AP10) created: {'hostids': ['14222']}
[INFO] 2026-01-09 13:41:16,868 meraki_Interface_forward.services.zabbix_service Host Q3AC-JV42-7EYG (AP11) created: {'hostids': ['14223']}
[INFO] 2026-01-09 13:41:18,625 meraki_Interface_forward.services.zabbix_service Host Q3AC-KM7Q-KTMN (AP12) created: {'hostids': ['14224']}
[INFO] 2026-01-09 13:41:20,083 meraki_Interface_forward.services.zabbix_service Host Q3AC-KUJU-VEVF (AP13) created: {'hostids': ['14225']}
[INFO] 2026-01-09 13:41:21,456 meraki_Interface_forward.services.zabbix_service Host Q3AC-NMXL-ZYPE (AP14) created: {'hostids': ['14226']}
[INFO] 2026-01-09 13:41:22,830 meraki_Interface_forward.services.zabbix_service Host Q3AC-PYBW-R59L (AP15) created: {'hostids': ['14227']}
[INFO] 2026-01-09 13:41:24,108 meraki_Interface_forward.services.zabbix_service Host Q3AC-STW2-GGDY (AP16) created: {'hostids': ['14228']}
[INFO] 2026-01-09 13:41:25,495 meraki_Interface_forward.services.zabbix_service Host Q3AC-T3RS-VVPM (AP17) created: {'hostids': ['14229']}
[INFO] 2026-01-09 13:41:26,915 meraki_Interface_forward.services.zabbix_service Host Q3AC-TVXV-4GFD (AP18) created: {'hostids': ['14230']}
[INFO] 2026-01-09 13:41:28,243 meraki_Interface_forward.services.zabbix_service Host Q3AC-UG6V-643F (AP19) created: {'hostids': ['14231']}
[INFO] 2026-01-09 13:41:29,477 meraki_Interface_forward.services.zabbix_service Host Q3AC-UJ5Z-67R3 (AP20) created: {'hostids': ['14232']}
[INFO] 2026-01-09 13:41:30,737 meraki_Interface_forward.services.zabbix_service Host Q3AC-XWGD-C67V (AP21) created: {'hostids': ['14233']}
[INFO] 2026-01-09 13:41:32,044 meraki_Interface_forward.services.zabbix_service Host Q3AC-Y6TP-PNFX (AP22) created: {'hostids': ['14234']}
[INFO] 2026-01-09 13:41:33,305 meraki_Interface_forward.services.zabbix_service Host Q3AC-ZMDE-S3MA (AP23) created: {'hostids': ['14235']}
[INFO] 2026-01-09 13:41:34,580 meraki_Interface_forward.services.zabbix_service Host Q3AC-ZRER-ZYPA (AP24) created: {'hostids': ['14236']}
[INFO] 2026-01-09 13:41:35,985 meraki_Interface_forward.services.zabbix_service Host Q3AL-2A5G-3AF5 (Q3AL-2A5G-3AF5) created: {'hostids': ['14237']}
[INFO] 2026-01-09 13:41:37,319 meraki_Interface_forward.services.zabbix_service Host Q3AL-3B7T-8P7W (11FAP01) created: {'hostids': ['14238']}
[INFO] 2026-01-09 13:41:38,679 meraki_Interface_forward.services.zabbix_service Host Q3AL-3C27-BGS7 (SG01BAP01) created: {'hostids': ['14239']}
[INFO] 2026-01-09 13:41:40,018 meraki_Interface_forward.services.zabbix_service Host Q3AL-3QF8-QEDC (SG01BAP04) created: {'hostids': ['14240']}
[INFO] 2026-01-09 13:41:41,278 meraki_Interface_forward.services.zabbix_service Host Q3AL-493Q-8DJY (Q3AL-493Q-8DJY) created: {'hostids': ['14241']}
[INFO] 2026-01-09 13:41:42,556 meraki_Interface_forward.services.zabbix_service Host Q3AL-4JT9-RSTD (EULONASA901FAP1) created: {'hostids': ['14242']}
[INFO] 2026-01-09 13:41:43,873 meraki_Interface_forward.services.zabbix_service Host Q3AL-4LBV-PYB3 (BrazilAP01) created: {'hostids': ['14243']}
[INFO] 2026-01-09 13:41:45,101 meraki_Interface_forward.services.zabbix_service Host Q3AL-4RKD-ZPUP (Q3AL-4RKD-ZPUP) created: {'hostids': ['14244']}
[INFO] 2026-01-09 13:41:46,286 meraki_Interface_forward.services.zabbix_service Host Q3AL-59ZK-B862 (11FAP02) created: {'hostids': ['14245']}
[INFO] 2026-01-09 13:41:47,570 meraki_Interface_forward.services.zabbix_service Host Q3AL-6J8A-PVR6 (BrazilAP02) created: {'hostids': ['14246']}
[INFO] 2026-01-09 13:41:48,851 meraki_Interface_forward.services.zabbix_service Host Q3AL-6NKK-HJGX (DB01BAP03) created: {'hostids': ['14247']}
[INFO] 2026-01-09 13:41:50,128 meraki_Interface_forward.services.zabbix_service Host Q3AL-77VV-YQDF (EULONASA901FAP2) created: {'hostids': ['14248']}
[INFO] 2026-01-09 13:41:51,502 meraki_Interface_forward.services.zabbix_service Host Q3AL-7DA2-4FRA (SG01BAP03) created: {'hostids': ['14249']}
[INFO] 2026-01-09 13:41:52,745 meraki_Interface_forward.services.zabbix_service Host Q3AL-7H5L-N4BK (DB01BAP04) created: {'hostids': ['14250']}
[INFO] 2026-01-09 13:41:54,043 meraki_Interface_forward.services.zabbix_service Host Q3AL-8D3M-CC6P (EE-BOU-AP3) created: {'hostids': ['14251']}
[WARNING] 2026-01-09 13:41:54,044 meraki_Interface_forward.views.zabbix_views Target group 'GIC-WorkShop' not found for device Q3AL-95PW-CTBU (GIC-Workshop-AP01).
[INFO] 2026-01-09 13:41:55,369 meraki_Interface_forward.services.zabbix_service Host Q3AL-9BMF-T9PM (DB01BAP01) created: {'hostids': ['14252']}
[INFO] 2026-01-09 13:41:56,737 meraki_Interface_forward.services.zabbix_service Host Q3AL-9LZU-LF7S (SG01BAP02) created: {'hostids': ['14253']}
[INFO] 2026-01-09 13:41:58,057 meraki_Interface_forward.services.zabbix_service Host Q3AL-ATMJ-U7DM (16FAP03) created: {'hostids': ['14254']}
[INFO] 2026-01-09 13:41:59,324 meraki_Interface_forward.services.zabbix_service Host Q3AL-B5A5-NRSW (14FAP01) created: {'hostids': ['14255']}
[INFO] 2026-01-09 13:42:00,583 meraki_Interface_forward.services.zabbix_service Host Q3AL-BGDM-Q63Z (16FAP04) created: {'hostids': ['14256']}
[WARNING] 2026-01-09 13:42:00,583 meraki_Interface_forward.views.zabbix_views Target group 'GIC-WorkShop' not found for device Q3AL-BJML-QBA6 (GIC-Workshop-AP02).
[INFO] 2026-01-09 13:42:01,807 meraki_Interface_forward.services.zabbix_service Host Q3AL-D285-QUPU (EE-BOU-AP5) created: {'hostids': ['14257']}
[INFO] 2026-01-09 13:42:03,022 meraki_Interface_forward.services.zabbix_service Host Q3AL-DGFY-KT9B (DB01BAP07) created: {'hostids': ['14258']}
[WARNING] 2026-01-09 13:42:03,022 meraki_Interface_forward.views.zabbix_views Target group 'GIC-WorkShop' not found for device Q3AL-DGWS-VS4D (Crane Repeater).
[INFO] 2026-01-09 13:42:04,300 meraki_Interface_forward.services.zabbix_service Host Q3AL-DLGC-MAWT (EULONASA901FAP3) created: {'hostids': ['14259']}
[INFO] 2026-01-09 13:42:05,497 meraki_Interface_forward.services.zabbix_service Host Q3AL-DQCP-KK4J (DB01BAP05) created: {'hostids': ['14260']}
[INFO] 2026-01-09 13:42:06,718 meraki_Interface_forward.services.zabbix_service Host Q3AL-DQF5-LUYA (14FAP02) created: {'hostids': ['14261']}
[INFO] 2026-01-09 13:42:07,973 meraki_Interface_forward.services.zabbix_service Host Q3AL-DS3L-A3DE (DB01BAP06) created: {'hostids': ['14262']}
[INFO] 2026-01-09 13:42:09,205 meraki_Interface_forward.services.zabbix_service Host Q3AL-E5FG-VYXQ (16FAP05) created: {'hostids': ['14263']}
[INFO] 2026-01-09 13:42:10,518 meraki_Interface_forward.services.zabbix_service Host Q3AL-E7ST-QKBU (Q3AL-E7ST-QKBU) created: {'hostids': ['14264']}
[INFO] 2026-01-09 13:42:11,918 meraki_Interface_forward.services.zabbix_service Host Q3AL-EWJ2-V3C6 (EULONASA901FAP4) created: {'hostids': ['14265']}
[INFO] 2026-01-09 13:42:13,201 meraki_Interface_forward.services.zabbix_service Host Q3AL-FNDP-FZZN (Q3AL-FNDP-FZZN) created: {'hostids': ['14266']}
[INFO] 2026-01-09 13:42:14,436 meraki_Interface_forward.services.zabbix_service Host Q3AL-FTNC-8NLG (Backup-1) created: {'hostids': ['14267']}
[INFO] 2026-01-09 13:42:15,662 meraki_Interface_forward.services.zabbix_service Host Q3AL-G76S-TJ3B (SpainAP05) created: {'hostids': ['14268']}
[INFO] 2026-01-09 13:42:17,904 meraki_Interface_forward.services.zabbix_service Host Q3AL-GWK2-GBZ4 (Q3AL-GWK2-GBZ4) created: {'hostids': ['14269']}
[INFO] 2026-01-09 13:42:19,142 meraki_Interface_forward.services.zabbix_service Host Q3AL-H9DG-V2EV (BrazilAP03) created: {'hostids': ['14270']}
[INFO] 2026-01-09 13:42:20,360 meraki_Interface_forward.services.zabbix_service Host Q3AL-HGVG-DUUT (DB01BAP02) created: {'hostids': ['14271']}
[INFO] 2026-01-09 13:42:21,648 meraki_Interface_forward.services.zabbix_service Host Q3AL-J4QL-56FB (AUMEL01-AP01) created: {'hostids': ['14272']}
[INFO] 2026-01-09 13:42:22,894 meraki_Interface_forward.services.zabbix_service Host Q3AL-K4BD-8PSR (DB01BAP08) created: {'hostids': ['14273']}
[INFO] 2026-01-09 13:42:24,124 meraki_Interface_forward.services.zabbix_service Host Q3AL-KACV-PAMR (14FAP03) created: {'hostids': ['14274']}
[INFO] 2026-01-09 13:42:25,376 meraki_Interface_forward.services.zabbix_service Host Q3AL-KNPG-TSL5 (BrazilAP04) created: {'hostids': ['14275']}
[INFO] 2026-01-09 13:42:26,699 meraki_Interface_forward.services.zabbix_service Host Q3AL-M4HR-6YNJ (AUMEL01-AP04) created: {'hostids': ['14276']}
[INFO] 2026-01-09 13:42:28,004 meraki_Interface_forward.services.zabbix_service Host Q3AL-NJDN-SCBM (Q3AL-NJDN-SCBM) created: {'hostids': ['14277']}
[INFO] 2026-01-09 13:42:29,261 meraki_Interface_forward.services.zabbix_service Host Q3AL-NN6C-XFZT (EE-BOU-AP1) created: {'hostids': ['14278']}
[INFO] 2026-01-09 13:42:30,427 meraki_Interface_forward.services.zabbix_service Host Q3AL-NYDM-25VF (Q3AL-NYDM-25VF) created: {'hostids': ['14279']}
[INFO] 2026-01-09 13:42:31,653 meraki_Interface_forward.services.zabbix_service Host Q3AL-Q6WJ-74CX (BrazilAP05) created: {'hostids': ['14280']}
[INFO] 2026-01-09 13:42:32,874 meraki_Interface_forward.services.zabbix_service Host Q3AL-Q7VD-DS8K (Q3AL-Q7VD-DS8K) created: {'hostids': ['14281']}
[INFO] 2026-01-09 13:42:34,111 meraki_Interface_forward.services.zabbix_service Host Q3AL-QMG9-B54Z (Q3AL-QMG9-B54Z) created: {'hostids': ['14282']}
[INFO] 2026-01-09 13:42:35,358 meraki_Interface_forward.services.zabbix_service Host Q3AL-QXGJ-SNSH (BrazilAP06) created: {'hostids': ['14283']}
[INFO] 2026-01-09 13:42:36,614 meraki_Interface_forward.services.zabbix_service Host Q3AL-SXR3-HZWR (Q3AL-SXR3-HZWR) created: {'hostids': ['14284']}
[INFO] 2026-01-09 13:42:37,941 meraki_Interface_forward.services.zabbix_service Host Q3AL-T2JA-DN5H (SpainAP02) created: {'hostids': ['14285']}
[INFO] 2026-01-09 13:42:39,173 meraki_Interface_forward.services.zabbix_service Host Q3AL-TC8R-RQVZ (Q3AL-TC8R-RQVZ) created: {'hostids': ['14286']}
[INFO] 2026-01-09 13:42:40,445 meraki_Interface_forward.services.zabbix_service Host Q3AL-U29K-56ET (Q3AL-U29K-56ET) created: {'hostids': ['14287']}
[INFO] 2026-01-09 13:42:41,682 meraki_Interface_forward.services.zabbix_service Host Q3AL-VGE4-42NH (SpainAP06) created: {'hostids': ['14288']}
[INFO] 2026-01-09 13:42:42,981 meraki_Interface_forward.services.zabbix_service Host Q3AL-VMGF-BNDL (14FAP04) created: {'hostids': ['14289']}
[INFO] 2026-01-09 13:42:44,343 meraki_Interface_forward.services.zabbix_service Host Q3AL-VP94-CHSM (AUMEL01-AP03) created: {'hostids': ['14290']}
[INFO] 2026-01-09 13:42:45,657 meraki_Interface_forward.services.zabbix_service Host Q3AL-W4UD-6SXT (14FAP05) created: {'hostids': ['14291']}
[INFO] 2026-01-09 13:42:46,876 meraki_Interface_forward.services.zabbix_service Host Q3AL-W6XM-BF6P (14FAP06) created: {'hostids': ['14292']}
[INFO] 2026-01-09 13:42:48,130 meraki_Interface_forward.services.zabbix_service Host Q3AL-WJKJ-73RT (SpainAP04) created: {'hostids': ['14293']}
[INFO] 2026-01-09 13:42:49,424 meraki_Interface_forward.services.zabbix_service Host Q3AL-WQAH-S2MB (SpainAP03) created: {'hostids': ['14294']}
[INFO] 2026-01-09 13:42:50,661 meraki_Interface_forward.services.zabbix_service Host Q3AL-XCQZ-R69P (Q3AL-XCQZ-R69P) created: {'hostids': ['14295']}
[INFO] 2026-01-09 13:42:51,972 meraki_Interface_forward.services.zabbix_service Host Q3AL-XPES-Q3YG (BrazilAP07) created: {'hostids': ['14296']}
[INFO] 2026-01-09 13:42:53,247 meraki_Interface_forward.services.zabbix_service Host Q3AL-XSWX-82HQ (AUMEL01-AP02) created: {'hostids': ['14297']}
[INFO] 2026-01-09 13:42:54,493 meraki_Interface_forward.services.zabbix_service Host Q3AL-Y2M5-CL5Q (SpainAP01) created: {'hostids': ['14298']}
[INFO] 2026-01-09 13:42:55,709 meraki_Interface_forward.services.zabbix_service Host Q3AL-Y7UP-UF6T (EULONASA901FAP5) created: {'hostids': ['14299']}
[INFO] 2026-01-09 13:42:56,998 meraki_Interface_forward.services.zabbix_service Host Q3AL-YBXR-764F (Q3AL-YBXR-764F) created: {'hostids': ['14300']}
[INFO] 2026-01-09 13:42:58,268 meraki_Interface_forward.services.zabbix_service Host Q3AL-Z2EP-72ZS (BrazilAP08) created: {'hostids': ['14301']}
[INFO] 2026-01-09 13:42:59,541 meraki_Interface_forward.services.zabbix_service Host Q3AL-ZTMB-3QY2 (16FAP01) created: {'hostids': ['14302']}
[INFO] 2026-01-09 13:43:00,727 meraki_Interface_forward.services.zabbix_service Host Q3AL-ZV4U-7M98 (16FAP02) created: {'hostids': ['14303']}
[INFO] 2026-01-09 13:43:02,273 meraki_Interface_forward.services.zabbix_service Host Q4AV-59HV-Y6JP (AS02Q4AV-59HV-Y6JP) created: {'hostids': ['14304']}
[INFO] 2026-01-09 13:43:03,464 meraki_Interface_forward.services.zabbix_service Host Q4AV-5BSJ-YDJ8 (AS03) created: {'hostids': ['14305']}
[INFO] 2026-01-09 13:43:04,694 meraki_Interface_forward.services.zabbix_service Host Q4AV-TBV5-C2XL (AS01Q4AV-TBV5-C2XL) created: {'hostids': ['14306']}
[INFO] 2026-01-09 13:43:04,694 meraki_Interface_forward.views.zabbix_views Sync complete. Created: 115, Skipped: 0, Failed: 3
[INFO] 2026-01-09 13:43:04,901 meraki_Interface_forward.views.zabbix_views Updated Redis sync cache.
[INFO] 2026-01-09 13:43:04,903 meraki_Interface_forward.views.zabbix_views Backed up synced hosts list to C:\Users\fan.yang29\Desktop\20260118\meraki_Interface_forward\meraki_Interface_forward\logs\zabbix_synced_hosts.txt
[09/Jan/2026 13:43:04] "POST /zabbix/sync_hosts HTTP/1.1" 200 11573
...@@ -6,6 +6,7 @@ import logging ...@@ -6,6 +6,7 @@ import logging
from meraki_Interface_forward.redis_utils import ( from meraki_Interface_forward.redis_utils import (
set_json, set_json,
CacheKey, CacheKey,
get_redis_client,
) )
logger = logging.getLogger("meraki_Interface_forward.services.cache_service") logger = logging.getLogger("meraki_Interface_forward.services.cache_service")
...@@ -19,22 +20,36 @@ CHANNEL_UTILIZATION_CACHE_PREFIX = "channel_utilization:" ...@@ -19,22 +20,36 @@ CHANNEL_UTILIZATION_CACHE_PREFIX = "channel_utilization:"
def cache_devices_by_serial(devices, ttl: int = 60 * 60 * 12) -> None:
    """
    Cache each Meraki device in Redis individually, keyed by serial.

    All writes go through a single Redis pipeline so the whole batch costs
    one network round trip instead of N individual SET requests.

    :param devices: list of device dicts as returned by the Meraki API;
                    non-list input and non-dict / serial-less entries are
                    silently skipped (best-effort cache priming)
    :param ttl: expiry in seconds for every per-serial key (default 12h)
    """
    import json  # local import keeps the module's import surface unchanged

    if not isinstance(devices, list) or not devices:
        return

    client = get_redis_client()
    if not client:
        return

    try:
        pipeline = client.pipeline()
        for dev in devices:
            if not isinstance(dev, dict):
                continue
            serial = dev.get("serial")
            if not serial:
                continue
            # Serialize manually and call pipeline.set directly: set_json
            # issues one network call per key, which defeats the pipeline.
            pipeline.set(
                f"{DEVICE_CACHE_PREFIX}{serial}",
                json.dumps(dev, ensure_ascii=False),
                ex=ttl,
            )
        pipeline.execute()
    except Exception as e:
        # Cache priming is best-effort: log and carry on.
        logger.error(f"cache_devices_by_serial pipeline error: {e}")
def cache_uplinks_by_serial(uplinks, ttl: int = 60 * 60 * 6) -> None: def cache_uplinks_by_serial(uplinks, ttl: int = 60 * 60 * 6) -> None:
......
...@@ -4,8 +4,10 @@ Meraki API 服务层:封装所有 Meraki API 调用 ...@@ -4,8 +4,10 @@ Meraki API 服务层:封装所有 Meraki API 调用
import os import os
import meraki import meraki
import logging import logging
import json
from meraki_Interface_forward.redis_utils import CacheKey, set_json from meraki_Interface_forward.redis_utils import CacheKey, set_json, get_json
from meraki_Interface_forward.services.cache_service import cache_devices_by_serial
logger = logging.getLogger("meraki_Interface_forward.services.meraki_service") logger = logging.getLogger("meraki_Interface_forward.services.meraki_service")
...@@ -44,7 +46,7 @@ def get_organization_networks(): ...@@ -44,7 +46,7 @@ def get_organization_networks():
total_pages='all' total_pages='all'
) )
if len(networks): if len(networks):
set_json(CacheKey.NETWORKS.value, networks, ex=60*60*6) set_json(CacheKey.NETWORKS.value, networks)
return networks return networks
except Exception as e: except Exception as e:
msg = str(e) msg = str(e)
...@@ -132,3 +134,98 @@ def get_device_switch_ports_status(serial): ...@@ -132,3 +134,98 @@ def get_device_switch_ports_status(serial):
timespan=3600, timespan=3600,
) )
def get_filtered_devices(filter_networks=None):
    """
    Return the organization's devices, filtered and normalized.

    Steps:
      1. Load networks (Redis cache first, Meraki API fallback) to build an
         id -> name map and resolve which network ids to exclude.
      2. Load devices (Redis cache first, Meraki API fallback; a cache miss
         re-primes both the list cache and the per-serial cache).
      3. Drop devices in excluded networks, backfill empty names with
         serial/MAC, attach ``networkName``, and de-duplicate display names
         by appending the serial.

    :param filter_networks: list/tuple/set of network *names* to exclude
                            (anything else is treated as "no filter")
    :return: list of device dicts (possibly empty)
    """
    if isinstance(filter_networks, (list, tuple, set, frozenset)):
        filter_names = set(filter_networks)
    else:
        filter_names = set()

    # --- networks: cache first, then API --------------------------------
    networks = get_json(CacheKey.NETWORKS.value) or []
    if isinstance(networks, str):
        # Defensive: tolerate a raw JSON string having been stored in Redis.
        try:
            networks = json.loads(networks)
        except Exception:
            networks = []
    if not networks:
        networks = get_organization_networks() or []
    if not isinstance(networks, list):
        networks = []

    network_map = {
        net.get("id"): net.get("name")
        for net in networks
        if isinstance(net, dict)
    }
    exclude_network_ids = {
        net.get("id")
        for net in networks
        if isinstance(net, dict)
        and net.get("name") in filter_names
        and net.get("id")
    }

    # --- devices: cache first, then API (re-prime caches on miss) --------
    devices = get_json(CacheKey.DEVICES.value)
    if not devices:
        logger.info("get_filtered_devices: Redis miss for ALL devices. Fetching from Meraki API...")
        devices = get_organization_devices()
        if devices:
            set_json(CacheKey.DEVICES.value, devices, ex=60 * 60)
            logger.info(f"get_filtered_devices: Fetched {len(devices)} devices. Caching by serial...")
            cache_devices_by_serial(devices, ttl=60 * 60)
            logger.info("get_filtered_devices: Caching complete.")
    else:
        logger.info(f"get_filtered_devices: Redis hit. {len(devices)} devices found.")

    if not devices:
        return []

    # --- filter: drop devices whose networkId is excluded ----------------
    if exclude_network_ids:
        devices = [
            dev
            for dev in devices
            if isinstance(dev, dict) and dev.get("networkId") not in exclude_network_ids
        ]

    # --- normalize: name fallback + networkName annotation ---------------
    processed = []
    for dev in devices:
        if not isinstance(dev, dict):
            continue
        name = dev.get("name")
        if not name or (isinstance(name, str) and not name.strip()):
            # Empty/blank names fall back to serial, then MAC.
            dev["name"] = dev.get("serial") or dev.get("mac") or "UNKNOWN"
        net_id = dev.get("networkId")
        if network_map and net_id:
            dev["networkName"] = network_map.get(net_id)
        processed.append(dev)

    # --- de-duplicate display names by appending the serial --------------
    def _norm(value):
        # Single normalization used for BOTH counting and lookup, so a
        # non-string name cannot be counted under str(value) but then
        # looked up under the raw value (the original two-pass mismatch).
        if isinstance(value, str):
            return value
        return str(value) if value is not None else ""

    name_counts = {}
    for dev in processed:
        key = _norm(dev.get("name"))
        name_counts[key] = name_counts.get(key, 0) + 1
    for dev in processed:
        key = _norm(dev.get("name"))
        if name_counts.get(key, 0) > 1:
            dev["name"] = f"{key}{dev.get('serial') or ''}"

    return processed
...@@ -24,6 +24,12 @@ BASE_DIR = os.path.dirname(os.path.dirname(os.path.abspath(__file__))) ...@@ -24,6 +24,12 @@ BASE_DIR = os.path.dirname(os.path.dirname(os.path.abspath(__file__)))
load_dotenv(os.path.join(BASE_DIR, '.env')) load_dotenv(os.path.join(BASE_DIR, '.env'))
# Zabbix Configuration
# Endpoint and credentials for the Zabbix JSON-RPC API, read from the
# environment (populated by the load_dotenv call above); each value
# defaults to the empty string when the variable is unset.
ZABBIX_API_URL = os.getenv("ZABBIX_API_URL", "")
ZABBIX_USER = os.getenv("ZABBIX_USER", "")
ZABBIX_PASSWORD = os.getenv("ZABBIX_PASSWORD", "")
REDIS_URL = os.getenv("REDIS_URL") REDIS_URL = os.getenv("REDIS_URL")
REDIS_HOST = os.getenv("REDIS_HOST") REDIS_HOST = os.getenv("REDIS_HOST")
REDIS_PORT = os.getenv("REDIS_PORT") REDIS_PORT = os.getenv("REDIS_PORT")
...@@ -153,7 +159,7 @@ try: ...@@ -153,7 +159,7 @@ try:
if networks: if networks:
# 缓存进入redis # 缓存进入redis
res = json.dumps(networks) res = json.dumps(networks)
set(CacheKey.NETWORKS.value, res, ex=60*60*6) set(CacheKey.NETWORKS.value, res)
except Exception as e: except Exception as e:
print(f"缓存 networks 失败: {e}") print(f"缓存 networks 失败: {e}")
...@@ -315,6 +321,20 @@ class CustomTimedRotatingFileHandler(logging.handlers.TimedRotatingFileHandler): ...@@ -315,6 +321,20 @@ class CustomTimedRotatingFileHandler(logging.handlers.TimedRotatingFileHandler):
new_name = f"meraki.{date_str}.log" new_name = f"meraki.{date_str}.log"
return os.path.join(base_dir, new_name) return os.path.join(base_dir, new_name)
return name return name
def getFilesToDelete(self):
    """Return rotated log files beyond ``backupCount``, oldest first.

    Matches siblings of the base log file named ``<stem>.YYYYMMDD.log``;
    the YYYYMMDD token sorts lexically in chronological order, so the
    surplus oldest files are simply the head of the sorted list.
    """
    import re

    log_dir = os.path.dirname(self.baseFilename)
    stem = os.path.splitext(os.path.basename(self.baseFilename))[0]
    date_re = re.compile(rf"{re.escape(stem)}\.(\d{{8}})\.log$")

    candidates = sorted(
        (m.group(1), os.path.join(log_dir, entry))
        for entry in os.listdir(log_dir)
        if (m := date_re.match(entry))
    )
    if len(candidates) <= self.backupCount:
        return []
    return [path for _, path in candidates[: -self.backupCount]]
LOGGING = { LOGGING = {
...@@ -339,7 +359,7 @@ LOGGING = { ...@@ -339,7 +359,7 @@ LOGGING = {
"()": CustomTimedRotatingFileHandler, "()": CustomTimedRotatingFileHandler,
"filename": LOG_FILE_PATH, "filename": LOG_FILE_PATH,
"when": "midnight", "when": "midnight",
"backupCount": 7, "backupCount": 6,
"encoding": "utf-8", "encoding": "utf-8",
}, },
}, },
......
...@@ -4,8 +4,6 @@ ...@@ -4,8 +4,6 @@
import threading import threading
import time import time
import logging import logging
from datetime import datetime
from zoneinfo import ZoneInfo
from django.http import JsonResponse from django.http import JsonResponse
from meraki_Interface_forward.resultAnalysis import logger from meraki_Interface_forward.resultAnalysis import logger
...@@ -48,7 +46,7 @@ def _execute_synchronization(): ...@@ -48,7 +46,7 @@ def _execute_synchronization():
try: try:
networks = get_organization_networks() networks = get_organization_networks()
if networks: if networks:
set_json(CacheKey.NETWORKS.value, networks, ex=60 * 60 * 6) set_json(CacheKey.NETWORKS.value, networks)
logger.info("网络数据同步完成") logger.info("网络数据同步完成")
except Exception as e: except Exception as e:
logger.error(f"Failed to cache networks: {e}") logger.error(f"Failed to cache networks: {e}")
...@@ -75,7 +73,7 @@ def _execute_synchronization(): ...@@ -75,7 +73,7 @@ def _execute_synchronization():
try: try:
alerts = get_organization_alert() alerts = get_organization_alert()
if alerts: if alerts:
set_json(CacheKey.ASSURANCE_ALERTS.value, alerts, ex=60) set_json(CacheKey.ASSURANCE_ALERTS.value, alerts, ex= 60 * 5 )
logger.info("告警数据同步完成") logger.info("告警数据同步完成")
except Exception as e: except Exception as e:
logger.error(f"Failed to cache alert: {e}") logger.error(f"Failed to cache alert: {e}")
...@@ -84,18 +82,17 @@ def _execute_synchronization(): ...@@ -84,18 +82,17 @@ def _execute_synchronization():
try: try:
devices = get_organization_devices() devices = get_organization_devices()
if devices: if devices:
set_json(CacheKey.DEVICES.value, devices, ex=60 * 60 * 12) set_json(CacheKey.DEVICES.value, devices, ex=60 * 60)
# 同步按 serial 建立单设备缓存,便于高并发单设备查询 # 同步按 serial 建立单设备缓存,便于高并发单设备查询
cache_devices_by_serial(devices, ttl=60 * 60 * 12) cache_devices_by_serial(devices, ttl=60 * 60)
logger.info(f"设备列表同步完成,共 {len(devices) if devices else 0} 台设备") logger.info(f"设备列表同步完成,共 {len(devices) if devices else 0} 台设备")
except Exception as e: except Exception as e:
logger.error(f"Failed to cache devices: {e}") logger.error(f"Failed to cache devices: {e}")
# 任务完成 # 任务完成
elapsed_time = time.time() - start_time elapsed_time = time.time() - start_time
finish_ts = time.time()
set_json(TASK_STATUS_KEY, TASK_STATUS_COMPLETED, ex=3600) set_json(TASK_STATUS_KEY, TASK_STATUS_COMPLETED, ex=3600)
set_json(TASK_FINISH_TIME_KEY, finish_ts, ex=3600) set_json(TASK_FINISH_TIME_KEY, time.time(), ex=3600)
logger.info(f"数据同步任务完成,耗时 {elapsed_time:.2f} 秒") logger.info(f"数据同步任务完成,耗时 {elapsed_time:.2f} 秒")
except Exception as e: except Exception as e:
...@@ -104,17 +101,6 @@ def _execute_synchronization(): ...@@ -104,17 +101,6 @@ def _execute_synchronization():
set_json(TASK_FINISH_TIME_KEY, time.time(), ex=3600) set_json(TASK_FINISH_TIME_KEY, time.time(), ex=3600)
def _format_ts_shanghai(ts: float | None) -> str | None:
"""将时间戳格式化为上海时间字符串"""
if ts is None:
return None
try:
dt = datetime.fromtimestamp(float(ts), tz=ZoneInfo("Asia/Shanghai"))
return dt.strftime("%Y-%m-%d %H:%M:%S")
except Exception:
return None
def synchronization_data(request): def synchronization_data(request):
""" """
数据同步接口(异步执行) 数据同步接口(异步执行)
...@@ -132,7 +118,6 @@ def synchronization_data(request): ...@@ -132,7 +118,6 @@ def synchronization_data(request):
current_status = get_json(TASK_STATUS_KEY) or TASK_STATUS_IDLE current_status = get_json(TASK_STATUS_KEY) or TASK_STATUS_IDLE
start_time = get_json(TASK_START_TIME_KEY) start_time = get_json(TASK_START_TIME_KEY)
finish_time = get_json(TASK_FINISH_TIME_KEY) finish_time = get_json(TASK_FINISH_TIME_KEY)
finish_time_str = _format_ts_shanghai(finish_time)
# 如果任务正在执行中,返回当前状态 # 如果任务正在执行中,返回当前状态
if current_status == TASK_STATUS_RUNNING: if current_status == TASK_STATUS_RUNNING:
...@@ -146,7 +131,6 @@ def synchronization_data(request): ...@@ -146,7 +131,6 @@ def synchronization_data(request):
"elapsed_time": elapsed_time, "elapsed_time": elapsed_time,
"message": "数据同步任务正在执行中,请稍后查询", "message": "数据同步任务正在执行中,请稍后查询",
"last_finished_at": finish_time, "last_finished_at": finish_time,
"last_finished_at_shanghai": finish_time_str,
}, },
safe=False, safe=False,
json_dumps_params={'indent': 2, 'ensure_ascii': False}, json_dumps_params={'indent': 2, 'ensure_ascii': False},
...@@ -164,7 +148,6 @@ def synchronization_data(request): ...@@ -164,7 +148,6 @@ def synchronization_data(request):
"elapsed_time": elapsed_time, "elapsed_time": elapsed_time,
"message": f"当前任务状态: {current_status}", "message": f"当前任务状态: {current_status}",
"last_finished_at": finish_time, "last_finished_at": finish_time,
"last_finished_at_shanghai": finish_time_str,
}, },
safe=False, safe=False,
json_dumps_params={'indent': 2, 'ensure_ascii': False}, json_dumps_params={'indent': 2, 'ensure_ascii': False},
...@@ -181,7 +164,6 @@ def synchronization_data(request): ...@@ -181,7 +164,6 @@ def synchronization_data(request):
"elapsed_time": 0, "elapsed_time": 0,
"message": "数据同步任务已创建并在后台执行", "message": "数据同步任务已创建并在后台执行",
"last_finished_at": finish_time, "last_finished_at": finish_time,
"last_finished_at_shanghai": finish_time_str,
}, },
safe=False, safe=False,
json_dumps_params={'indent': 2, 'ensure_ascii': False}, json_dumps_params={'indent': 2, 'ensure_ascii': False},
...@@ -198,7 +180,6 @@ def synchronization_data(request): ...@@ -198,7 +180,6 @@ def synchronization_data(request):
"elapsed_time": elapsed_time, "elapsed_time": elapsed_time,
"message": f"当前任务状态: {current_status}", "message": f"当前任务状态: {current_status}",
"last_finished_at": finish_time, "last_finished_at": finish_time,
"last_finished_at_shanghai": finish_time_str,
}, },
safe=False, safe=False,
json_dumps_params={'indent': 2, 'ensure_ascii': False}, json_dumps_params={'indent': 2, 'ensure_ascii': False},
......
import json
from django.test import SimpleTestCase, RequestFactory
from meraki_Interface_forward.redis_utils import CacheKey
import meraki_Interface_forward.views.device_views as views
class DiscoveryFilterNameFixTests(SimpleTestCase):
    """Verify discovery_host_prototype_filter repairs empty and duplicate device names.

    Expected contract:
    - a device whose name is empty is renamed to its serial;
    - devices sharing a name are disambiguated by appending the serial.
    """

    def setUp(self):
        self.factory = RequestFactory()
        # Remember the module-level callables we monkeypatch so tearDown can restore them.
        self._orig_get_json = views.get_json
        self._orig_get_org_devices = views.get_organization_devices

    def tearDown(self):
        views.get_json = self._orig_get_json
        views.get_organization_devices = self._orig_get_org_devices

    def _stub_get_json(self, key):
        # Simulated cache: no networks cached; three switch devices cached,
        # one with an empty name and two sharing the name "AS01".
        if key == CacheKey.DEVICES.value:
            return [
                {"name": "", "serial": "Q3AL-2A5G-3AF5", "mac": "aa:bb:cc:dd:ee:ff", "networkId": "N1", "productType": "switch"},
                {"name": "AS01", "serial": "Q4AV-TBV5-C2XL", "mac": "14:9f:43:47:f2:38", "networkId": "N2", "productType": "switch"},
                {"name": "AS01", "serial": "Q5BX-XXXX-YYYY", "mac": "14:9f:43:47:f2:39", "networkId": "N3", "productType": "switch"},
            ]
        # NETWORKS (and anything else) reads as a cache miss.
        return None

    def test_empty_name_fixed_and_duplicates_deduped(self):
        views.get_json = self._stub_get_json
        views.get_organization_devices = lambda: []
        request = self.factory.post(
            "/discovery/host_prototype/filter",
            data=json.dumps({"filter_networks": []}),
            content_type="application/json",
        )
        response = views.discovery_host_prototype_filter(request)
        payload = json.loads(response.content.decode("utf-8"))
        by_serial = {item["serial"]: item.get("name") for item in payload}
        assert by_serial["Q3AL-2A5G-3AF5"] == "Q3AL-2A5G-3AF5"
        assert by_serial["Q4AV-TBV5-C2XL"] == "AS01Q4AV-TBV5-C2XL"
        assert by_serial["Q5BX-XXXX-YYYY"] == "AS01Q5BX-XXXX-YYYY"
...@@ -18,12 +18,14 @@ from django.contrib import admin ...@@ -18,12 +18,14 @@ from django.contrib import admin
from django.urls import path from django.urls import path
from . import task from . import task
# 直接从子模块导入,避免 views.py 和 views/ 目录的命名冲突 # 直接从子模块导入,避免 views.py 和 views/ 目录的命名冲突
from .views import device_views, status_views, cache_views from .views import device_views, status_views, cache_views, zabbix_views, juniper_views
urlpatterns = [ urlpatterns = [
# path('admin/', admin.site.urls), # path('admin/', admin.site.urls),
path('discovery/host_prototype', device_views.discovery_host_prototype), path('discovery/host_prototype', device_views.discovery_host_prototype),
path('discovery/host_prototype/filter', device_views.discovery_host_prototype_filter), path('discovery/host_prototype/filter', device_views.discovery_host_prototype_filter),
path('zabbix/sync_hosts', zabbix_views.sync_meraki_to_zabbix),
path('zabbix/sync_juniper_hosts', juniper_views.sync_juniper_to_zabbix),
path('all_host', device_views.get_device_by_serial_or_product_type), path('all_host', device_views.get_device_by_serial_or_product_type),
path('device/status', status_views.get_device_status), path('device/status', status_views.get_device_status),
path('device/status/overview', status_views.get_device_status_overview), path('device/status/overview', status_views.get_device_status_overview),
......
...@@ -4,40 +4,44 @@ from typing import Dict ...@@ -4,40 +4,44 @@ from typing import Dict
from meraki_Interface_forward.redis_utils import get_redis_client from meraki_Interface_forward.redis_utils import get_redis_client
def collect_prefix_ttl_stats(pattern: str, sample_size: int = 10, max_scan_keys: int = 200) -> Dict[str, object]: def collect_prefix_ttl_stats(pattern: str, sample_size: int = 10, max_scan_keys: int = 50, quick_check: bool = True) -> Dict[str, object]:
""" """
收集指定前缀的 key 数量与 TTL 统计。 收集指定前缀的 key 数量与 TTL 统计(极速优化版)。
- 精确 count:全量 scan 计数(仅保存前 max_scan_keys 个用于采样) - 空前缀:只扫描 10 个 key,如果没找到立即返回(< 10ms)
- TTL 采样:最多 sample_size 个,使用 pipeline 批量查询 - 有数据前缀:限制扫描范围,不进行全量计数,只统计已扫描的 key
- 批量查询:使用 pipeline 批量获取 TTL
""" """
client = get_redis_client() client = get_redis_client()
if not client: if not client:
return {"count": 0, "sampled": 0, "minTtl": None, "avgTtl": None} return {"count": 0, "sampled": 0, "minTtl": None, "avgTtl": None}
keys = [] keys = []
count = 0
cursor = 0
try: try:
while True: # 极速检查:空前缀只扫描 10 个 key,如果没找到立即返回
cursor, batch = client.scan(cursor=cursor, match=pattern, count=500) cursor, batch = client.scan(cursor=0, match=pattern, count=10 if quick_check else 50)
batch = batch or [] batch = batch or []
count += len(batch) if not batch:
# 仅保留前 max_scan_keys 个用于采样 # 空前缀:立即返回,不继续扫描
for k in batch: return {"count": 0, "sampled": 0, "minTtl": None, "avgTtl": None}
if len(keys) < max_scan_keys:
keys.append(k) keys.extend(batch)
if cursor == 0:
break # 如果找到了数据,继续扫描但严格限制总量(不再进行全量计数)
if cursor != 0 and len(keys) < max_scan_keys:
# 只再扫描一次,获取更多 key 用于采样
cursor, batch = client.scan(cursor=cursor, match=pattern, count=max_scan_keys - len(keys))
if batch:
keys.extend(batch[:max_scan_keys - len(keys)])
except Exception: except Exception:
return {"count": 0, "sampled": 0, "minTtl": None, "avgTtl": None} return {"count": 0, "sampled": 0, "minTtl": None, "avgTtl": None}
if count == 0 or not keys: if not keys:
return {"count": count, "sampled": 0, "minTtl": None, "avgTtl": None} return {"count": 0, "sampled": 0, "minTtl": None, "avgTtl": None}
# 随机采样(进一步减少 TTL 查询) # 限制采样数量(进一步减少 TTL 查询)
actual_sample_size = min(len(keys), sample_size) actual_sample_size = min(len(keys), sample_size)
sampled_keys = random.sample(keys, actual_sample_size) sampled_keys = random.sample(keys, actual_sample_size)
# 使用 pipeline 批量查询 TTL(一次性查询所有采样 key) # 使用 pipeline 批量查询 TTL(一次性查询所有采样 key)
ttls = [] ttls = []
try: try:
...@@ -58,8 +62,10 @@ def collect_prefix_ttl_stats(pattern: str, sample_size: int = 10, max_scan_keys: ...@@ -58,8 +62,10 @@ def collect_prefix_ttl_stats(pattern: str, sample_size: int = 10, max_scan_keys:
except Exception: except Exception:
continue continue
# 对于 device 这种可能有大量 key 的情况,count 只返回实际扫描到的数量
# 不再进行全量计数,避免性能问题
return { return {
"count": count, "count": len(keys), # 只返回实际扫描到的数量,不再全量计数
"sampled": len(sampled_keys), "sampled": len(sampled_keys),
"minTtl": min(ttls) if ttls else None, "minTtl": min(ttls) if ttls else None,
"avgTtl": round(sum(ttls) / len(ttls), 2) if ttls else None, "avgTtl": round(sum(ttls) / len(ttls), 2) if ttls else None,
...@@ -68,16 +74,18 @@ def collect_prefix_ttl_stats(pattern: str, sample_size: int = 10, max_scan_keys: ...@@ -68,16 +74,18 @@ def collect_prefix_ttl_stats(pattern: str, sample_size: int = 10, max_scan_keys:
def build_prefix_stats(prefix_patterns: Dict[str, str], sample_size: int = 10) -> Dict[str, dict]: def build_prefix_stats(prefix_patterns: Dict[str, str], sample_size: int = 10) -> Dict[str, dict]:
""" """
生成按前缀的 TTL 统计结果。 生成按前缀的 TTL 统计结果(极速优化版)
- 精确 count(全量 scan - 空前缀:极速检查(只扫描 10 个 key
- device 前缀:采样 5,保留 200 个键用于采样 - device 前缀:限制扫描 50 个 key,采样 5 个
- 其他前缀:采样 5,保留 100 个键用于采样 - 其他前缀:限制扫描 30 个 key,采样 5 个
""" """
result = {} result = {}
for name, pattern in prefix_patterns.items(): for name, pattern in prefix_patterns.items():
if name == "device": if name == "device":
result[name] = collect_prefix_ttl_stats(pattern, sample_size=5, max_scan_keys=200) # device 前缀:扫描 50 个 key,采样 5 个
result[name] = collect_prefix_ttl_stats(pattern, sample_size=5, max_scan_keys=50, quick_check=True)
else: else:
result[name] = collect_prefix_ttl_stats(pattern, sample_size=5, max_scan_keys=100) # 其他前缀:极速检查,扫描 30 个 key,采样 5 个
result[name] = collect_prefix_ttl_stats(pattern, sample_size=5, max_scan_keys=30, quick_check=True)
return result return result
...@@ -17,6 +17,7 @@ from meraki_Interface_forward.services.meraki_service import ( ...@@ -17,6 +17,7 @@ from meraki_Interface_forward.services.meraki_service import (
get_organization_uplinks, get_organization_uplinks,
get_organization_channel_utilization, get_organization_channel_utilization,
get_device_switch_ports_status as fetch_device_switch_ports_status, get_device_switch_ports_status as fetch_device_switch_ports_status,
get_filtered_devices,
) )
from meraki_Interface_forward.services.cache_service import ( from meraki_Interface_forward.services.cache_service import (
DEVICE_CACHE_PREFIX, DEVICE_CACHE_PREFIX,
...@@ -42,8 +43,8 @@ def discovery_host_prototype(request): ...@@ -42,8 +43,8 @@ def discovery_host_prototype(request):
# 缓存未命中,回退 Meraki API,并顺便按 serial 建立索引 # 缓存未命中,回退 Meraki API,并顺便按 serial 建立索引
devices = get_organization_devices() devices = get_organization_devices()
if devices: if devices:
set_json(CacheKey.DEVICES.value, devices, ex=43200) set_json(CacheKey.DEVICES.value, devices, ex=60 * 60)
cache_devices_by_serial(devices, ttl=43200) cache_devices_by_serial(devices, ttl=60 * 60)
return JsonResponse(devices, safe=False, json_dumps_params={'indent': 2, 'ensure_ascii': False}) return JsonResponse(devices, safe=False, json_dumps_params={'indent': 2, 'ensure_ascii': False})
return JsonResponse([], safe=False, json_dumps_params={'indent': 2, 'ensure_ascii': False}) return JsonResponse([], safe=False, json_dumps_params={'indent': 2, 'ensure_ascii': False})
...@@ -69,47 +70,8 @@ def discovery_host_prototype_filter(request): ...@@ -69,47 +70,8 @@ def discovery_host_prototype_filter(request):
if request.body: if request.body:
body = json.loads(request.body.decode("utf-8")) body = json.loads(request.body.decode("utf-8"))
filter_networks = body.get("filter_networks") or [] filter_networks = body.get("filter_networks") or []
filter_names = set(filter_networks) if isinstance(filter_networks, list) else set()
devices = get_filtered_devices(filter_networks)
# 获取网络列表(缓存优先)
networks = get_json(CacheKey.NETWORKS.value) or []
if isinstance(networks, str):
try:
networks = json.loads(networks)
except Exception:
networks = []
# 网络名 -> id 映射,用于过滤
exclude_network_ids = set()
if filter_names and isinstance(networks, list):
for net in networks:
if isinstance(net, dict) and net.get("name") in filter_names:
nid = net.get("id")
if nid:
exclude_network_ids.add(nid)
# 获取设备(缓存优先,未命中则回源)
devices = get_json(CacheKey.DEVICES.value)
if not devices:
devices = get_organization_devices()
if devices:
set_json(CacheKey.DEVICES.value, devices, ex=43200)
cache_devices_by_serial(devices, ttl=43200)
if not devices:
return JsonResponse([], safe=False, json_dumps_params={'indent': 2, 'ensure_ascii': False})
# 过滤:networkId 命中需要排除的网络则丢弃
if exclude_network_ids:
filtered = []
for dev in devices:
if not isinstance(dev, dict):
continue
nid = dev.get("networkId")
if nid in exclude_network_ids:
continue
filtered.append(dev)
devices = filtered
return JsonResponse(devices, safe=False, json_dumps_params={'indent': 2, 'ensure_ascii': False}) return JsonResponse(devices, safe=False, json_dumps_params={'indent': 2, 'ensure_ascii': False})
...@@ -171,7 +133,7 @@ def get_device_by_serial_or_product_type(request): ...@@ -171,7 +133,7 @@ def get_device_by_serial_or_product_type(request):
and dev.get("productType") == product_type and dev.get("productType") == product_type
): ):
# 命中后,顺便写回单设备缓存 # 命中后,顺便写回单设备缓存
cache_devices_by_serial([dev], ttl=43200) cache_devices_by_serial([dev], ttl=60 * 60)
net_id = dev.get("networkId") net_id = dev.get("networkId")
if network_map and net_id: if network_map and net_id:
dev["networkName"] = network_map.get(net_id) dev["networkName"] = network_map.get(net_id)
...@@ -187,14 +149,14 @@ def get_device_by_serial_or_product_type(request): ...@@ -187,14 +149,14 @@ def get_device_by_serial_or_product_type(request):
logger.info("单设备缓存未命中,准备触发全量设备加载: serial=%s", serial) logger.info("单设备缓存未命中,准备触发全量设备加载: serial=%s", serial)
devs = get_organization_devices() devs = get_organization_devices()
if devs: if devs:
set_json(CacheKey.DEVICES.value, devs, ex=43200) set_json(CacheKey.DEVICES.value, devs, ex=60 * 60)
cache_devices_by_serial(devs, ttl=43200) cache_devices_by_serial(devs, ttl=60 * 60)
return devs return devs
_, _ = get_or_set_json_with_lock( _, _ = get_or_set_json_with_lock(
CacheKey.DEVICES.value, CacheKey.DEVICES.value,
loader=loader, loader=loader,
ex=43200, ex=60 * 60,
lock_ttl=30, lock_ttl=30,
wait_timeout=5.0, wait_timeout=5.0,
wait_interval=0.2, wait_interval=0.2,
...@@ -249,13 +211,13 @@ def get_device_uplink(request): ...@@ -249,13 +211,13 @@ def get_device_uplink(request):
def loader(): def loader():
data = get_organization_uplinks() data = get_organization_uplinks()
if data: if data:
cache_uplinks_by_serial(data, ttl=43200) cache_uplinks_by_serial(data, ttl=60 * 60)
return data return data
res, _ = get_or_set_json_with_lock( res, _ = get_or_set_json_with_lock(
CacheKey.DEVICES_UPLINKS_BY_DEVICE.value, CacheKey.DEVICES_UPLINKS_BY_DEVICE.value,
loader=loader, loader=loader,
ex=43200, ex=60 * 60,
) )
if not res: if not res:
...@@ -272,7 +234,7 @@ def get_device_uplink(request): ...@@ -272,7 +234,7 @@ def get_device_uplink(request):
and device.get('serial') == serial and device.get('serial') == serial
and device.get('productType') == product_type and device.get('productType') == product_type
): ):
cache_uplinks_by_serial([device], ttl=43200) cache_uplinks_by_serial([device], ttl=60 * 60)
return JsonResponse( return JsonResponse(
device, device,
safe=False, safe=False,
...@@ -334,14 +296,13 @@ def get_device_ap_channelUtilization(request): ...@@ -334,14 +296,13 @@ def get_device_ap_channelUtilization(request):
res, _ = get_or_set_json_with_lock( res, _ = get_or_set_json_with_lock(
CacheKey.WIRELESS_CHANNEL_UTILIZATION_BY_DEVICE.value, CacheKey.WIRELESS_CHANNEL_UTILIZATION_BY_DEVICE.value,
loader=loader, loader=loader,
ex=43200, ex=60 * 5,
) )
if not res: if not res:
# 冷启动/源数据为空,返回 503 提示稍后重试
return JsonResponse( return JsonResponse(
{"error": "信道利用率数据尚未准备好,请稍后重试"}, {"error": "无可用的信道利用率数据"},
status=503, status=404,
json_dumps_params={'indent': 2, 'ensure_ascii': False}, json_dumps_params={'indent': 2, 'ensure_ascii': False},
) )
...@@ -356,8 +317,8 @@ def get_device_ap_channelUtilization(request): ...@@ -356,8 +317,8 @@ def get_device_ap_channelUtilization(request):
# 5. 依然未找到设备信道利用率数据 # 5. 依然未找到设备信道利用率数据
return JsonResponse( return JsonResponse(
{"error": "未找到指定设备的信道利用率数据", "serial": serial}, { "error": "未找到指定设备的信道利用率数据", "serial": serial},
status=404, status = 404,
json_dumps_params={'indent': 2, 'ensure_ascii': False}, json_dumps_params={'indent': 2, 'ensure_ascii': False},
) )
......
...@@ -80,7 +80,7 @@ def get_device_status(request): ...@@ -80,7 +80,7 @@ def get_device_status(request):
res, _ = get_or_set_json_with_lock( res, _ = get_or_set_json_with_lock(
CacheKey.DEVICES_AVAILABILITIES.value, CacheKey.DEVICES_AVAILABILITIES.value,
loader=loader, loader=loader,
ex=43200, ex=65,
) )
if not res: if not res:
...@@ -101,8 +101,8 @@ def get_device_status(request): ...@@ -101,8 +101,8 @@ def get_device_status(request):
# 5. 依然未找到设备状态 # 5. 依然未找到设备状态
return JsonResponse( return JsonResponse(
{"error": "未找到指定设备的状态信息", "serial": serial, "productType": product_type}, { "error": "未找到指定设备的状态信息", "serial": serial, "productType": product_type},
status=404, status = 404,
json_dumps_params={'indent': 2, 'ensure_ascii': False}, json_dumps_params={'indent': 2, 'ensure_ascii': False},
) )
except Exception as e: except Exception as e:
...@@ -120,7 +120,7 @@ def get_device_status_overview(request): ...@@ -120,7 +120,7 @@ def get_device_status_overview(request):
res, _ = get_or_set_json_with_lock( res, _ = get_or_set_json_with_lock(
CacheKey.DEVICES_STATUSES_OVERVIEW.value, CacheKey.DEVICES_STATUSES_OVERVIEW.value,
loader=loader, loader=loader,
ex=43200, ex=60,
) )
if res: if res:
return JsonResponse(res, safe=False, json_dumps_params={'indent': 2, 'ensure_ascii': False}) return JsonResponse(res, safe=False, json_dumps_params={'indent': 2, 'ensure_ascii': False})
...@@ -144,7 +144,7 @@ def get_device_alert(request): ...@@ -144,7 +144,7 @@ def get_device_alert(request):
alert, _ = get_or_set_json_with_lock( alert, _ = get_or_set_json_with_lock(
CacheKey.ASSURANCE_ALERTS.value, CacheKey.ASSURANCE_ALERTS.value,
loader=loader, loader=loader,
ex=43200, ex=60,
) )
result = [] result = []
if alert: if alert:
......
"""Manual verification script for CustomTimedRotatingFileHandler retention.

Seeds 10 dated history files next to an active log, asks the handler which
files it would delete with backupCount=6, removes them, and checks that
exactly 6 history files remain. Exits 0 on success, 1 on failure.
"""
import os
import sys
from datetime import date, timedelta

# Make the project root importable when the script is run directly.
PROJECT_ROOT = os.path.dirname(os.path.dirname(os.path.abspath(__file__)))
if PROJECT_ROOT not in sys.path:
    sys.path.insert(0, PROJECT_ROOT)

from meraki_Interface_forward.settings import CustomTimedRotatingFileHandler

ROOT = os.path.dirname(os.path.abspath(__file__))
LOG_DIR = os.path.join(ROOT, "logs_verify")
os.makedirs(LOG_DIR, exist_ok=True)

# Create/truncate the active log file.
BASE_FILE = os.path.join(LOG_DIR, "meraki.log")
with open(BASE_FILE, "w", encoding="utf-8") as f:
    f.write("")

# Seed 10 history files named meraki.YYYYMMDD.log for the past 10 days.
for i in range(10):
    d = date.today() - timedelta(days=i + 1)
    fn = os.path.join(LOG_DIR, f"meraki.{d.strftime('%Y%m%d')}.log")
    with open(fn, "w", encoding="utf-8") as f:
        f.write(f"{d.isoformat()}\n")

# With backupCount=6 the handler should mark everything but the 6 newest
# history files for deletion; mirror the rollover by removing them.
handler = CustomTimedRotatingFileHandler(BASE_FILE, when="midnight", backupCount=6)
to_delete = handler.getFilesToDelete()
for p in to_delete:
    try:
        os.remove(p)
    except OSError:
        # Best effort: a vanished file does not invalidate the check below.
        pass

remaining = [
    f
    for f in os.listdir(LOG_DIR)
    if f.startswith("meraki.") and f.endswith(".log") and f != "meraki.log"
]

print(f"remaining_history={len(remaining)}")
print("\n".join(sorted(remaining)))

# Use sys.exit instead of the bare exit() builtin: exit() is injected by the
# site module and is not guaranteed to exist (e.g. under `python -S` or in a
# frozen interpreter), while sys.exit is always available.
if len(remaining) == 6:
    print("OK")
    sys.exit(0)
else:
    print("FAILED")
    sys.exit(1)
Markdown is supported
0% or
You are about to add 0 people to the discussion. Proceed with caution.
Finish editing this message first!
Please register or to comment