Completely reworked the data visualization
@@ -17,9 +17,8 @@ class ElasticConfig(AppConfig):
             return

         # 延迟导入,避免循环导入或过早加载
-        from .es_connect import create_index_with_mapping, start_daily_analytics_scheduler
+        from .es_connect import create_index_with_mapping
         try:
             create_index_with_mapping()
-            start_daily_analytics_scheduler()
         except Exception as e:
             print(f"❌ ES 初始化失败: {e}")
@@ -9,8 +9,8 @@ from .documents import AchievementDocument, UserDocument, GlobalDocument
 from .indexes import ACHIEVEMENT_INDEX_NAME, USER_INDEX_NAME, GLOBAL_INDEX_NAME
 import hashlib
 import time
-from datetime import datetime, timezone, timedelta
-import threading
+from datetime import datetime, timezone
+import json

 # 使用环境变量配置ES连接,默认为本机
 _ES_URL = os.environ.get('ELASTICSEARCH_URL', 'http://localhost:9200')
@@ -309,67 +309,193 @@ def search_by_any_field(keyword):
         print(f"模糊搜索失败: {str(e)}")
         return []

-ANALYTICS_CACHE = {"data": None, "ts": 0}
-
-def _compute_hist(range_gte: str, interval: str, fmt: str):
-    from elasticsearch_dsl import Search
-    s = AchievementDocument.search()
-    s = s.filter('range', time={'gte': range_gte, 'lte': 'now'})
-    s = s.extra(size=0)
-    s.aggs.bucket('b', 'date_histogram', field='time', calendar_interval=interval, format=fmt, min_doc_count=0)
-    resp = s.execute()
-    buckets = getattr(resp.aggs, 'b').buckets
-    return [{"label": b.key_as_string, "count": b.doc_count} for b in buckets]
-
-def _compute_type_counts(range_gte: str, types: list):
-    counts = []
-    for t in types:
-        s = AchievementDocument.search()
-        s = s.filter('range', time={'gte': range_gte, 'lte': 'now'})
-        s = s.query('match_phrase', data=str(t))
-        total = s.count()
-        counts.append({"type": str(t), "count": int(total)})
-    return counts
-
-def compute_analytics():
-    types = get_type_list()
-    days = _compute_hist('now-10d/d', 'day', 'yyyy-MM-dd')
-    weeks = _compute_hist('now-10w/w', 'week', 'yyyy-ww')
-    months = _compute_hist('now-10M/M', 'month', 'yyyy-MM')
-    pie_1m = _compute_type_counts('now-1M/M', types)
-    pie_12m = _compute_type_counts('now-12M/M', types)
-    return {
-        "last_10_days": days[-10:],
-        "last_10_weeks": weeks[-10:],
-        "last_10_months": months[-10:],
-        "type_pie_1m": pie_1m,
-        "type_pie_12m": pie_12m,
-    }
-
-def get_analytics_overview(force: bool = False):
-    now_ts = time.time()
-    if force or ANALYTICS_CACHE["data"] is None or (now_ts - ANALYTICS_CACHE["ts"]) > 3600:
-        ANALYTICS_CACHE["data"] = compute_analytics()
-        ANALYTICS_CACHE["ts"] = now_ts
-    return ANALYTICS_CACHE["data"]
-
-def _seconds_until_hour(h: int):
-    now = datetime.now()
-    tgt = now.replace(hour=h, minute=0, second=0, microsecond=0)
-    if tgt <= now:
-        tgt = tgt + timedelta(days=1)
-    return max(0, int((tgt - now).total_seconds()))
-
-def start_daily_analytics_scheduler():
-    def _run_and_reschedule():
-        try:
-            get_analytics_overview(force=True)
-        except Exception as e:
-            print(f"分析任务失败: {e}")
-        finally:
-            threading.Timer(24 * 3600, _run_and_reschedule).start()
-    delay = _seconds_until_hour(3)
-    threading.Timer(delay, _run_and_reschedule).start()
+def _type_filters_from_list(limit: int = None):
+    try:
+        types = get_type_list()
+    except Exception:
+        types = ['软著', '专利', '奖状']
+    if isinstance(limit, int) and limit > 0:
+        types = types[:limit]
+    filters = {}
+    for t in types:
+        key = str(t)
+        # 精确匹配键与值之间的关系,避免其它字段中的同名值造成误匹配
+        pattern = f'*"数据类型": "{key}"*'
+        filters[key] = {"wildcard": {"data.keyword": {"value": pattern}}}
+    return filters
+
+
+def analytics_trend(gte: str = None, lte: str = None, interval: str = "day"):
+    try:
+        search = AchievementDocument.search()
+        body = {
+            "size": 0,
+            "aggs": {
+                "trend": {
+                    "date_histogram": {
+                        "field": "time",
+                        "calendar_interval": interval,
+                        "min_doc_count": 0
+                    }
+                }
+            }
+        }
+        if gte or lte:
+            rng = {}
+            if gte:
+                rng["gte"] = gte
+            if lte:
+                rng["lte"] = lte
+            body["query"] = {"range": {"time": rng}}
+        search = search.update_from_dict(body)
+        resp = search.execute()
+        buckets = resp.aggregations.trend.buckets if hasattr(resp, 'aggregations') else []
+        return [{"key_as_string": b.key_as_string, "key": b.key, "doc_count": b.doc_count} for b in buckets]
+    except Exception as e:
+        print(f"分析趋势失败: {str(e)}")
+        return []
+
+
+def analytics_types(gte: str = None, lte: str = None, size: int = 10):
+    try:
+        filters = _type_filters_from_list(limit=size)
+        body = {
+            "size": 0,
+            "aggs": {
+                "by_type": {
+                    "filters": {
+                        "filters": filters
+                    }
+                }
+            }
+        }
+        if gte or lte:
+            rng = {}
+            if gte:
+                rng["gte"] = gte
+            if lte:
+                rng["lte"] = lte
+            body["query"] = {"range": {"time": rng}}
+        resp = es.search(index=DATA_INDEX_NAME, body=body)
+        buckets = resp.get("aggregations", {}).get("by_type", {}).get("buckets", {})
+        out = []
+        for k, v in buckets.items():
+            try:
+                out.append({"key": k, "doc_count": int(v.get("doc_count", 0))})
+            except Exception:
+                out.append({"key": str(k), "doc_count": 0})
+        return out
+    except Exception as e:
+        print(f"分析类型占比失败: {str(e)}")
+        return []
+
+
+def analytics_types_trend(gte: str = None, lte: str = None, interval: str = "week", size: int = 8):
+    try:
+        filters = _type_filters_from_list(limit=size)
+        body = {
+            "size": 0,
+            "aggs": {
+                "by_interval": {
+                    "date_histogram": {
+                        "field": "time",
+                        "calendar_interval": interval,
+                        "min_doc_count": 0
+                    },
+                    "aggs": {
+                        "by_type": {
+                            "filters": {"filters": filters}
+                        }
+                    }
+                }
+            }
+        }
+        if gte or lte:
+            rng = {}
+            if gte:
+                rng["gte"] = gte
+            if lte:
+                rng["lte"] = lte
+            body["query"] = {"range": {"time": rng}}
+        resp = es.search(index=DATA_INDEX_NAME, body=body)
+        by_interval = resp.get("aggregations", {}).get("by_interval", {}).get("buckets", [])
+        out = []
+        for ib in by_interval:
+            t_buckets = ib.get("by_type", {}).get("buckets", {})
+            types_arr = []
+            for k, v in t_buckets.items():
+                types_arr.append({"key": k, "doc_count": int(v.get("doc_count", 0))})
+            out.append({
+                "key_as_string": ib.get("key_as_string"),
+                "key": ib.get("key"),
+                "doc_count": ib.get("doc_count", 0),
+                "types": types_arr
+            })
+        return out
+    except Exception as e:
+        print(f"分析类型变化失败: {str(e)}")
+        return []
+
+
+def analytics_recent(limit: int = 10, gte: str = None, lte: str = None):
+    try:
+        def _extract_type(s: str):
+            if not s:
+                return ""
+            try:
+                obj = json.loads(s)
+                if isinstance(obj, dict):
+                    v = obj.get("数据类型")
+                    if isinstance(v, str) and v:
+                        return v
+            except Exception:
+                pass
+            try:
+                m = re.search(r'"数据类型"\s*:\s*"([^"]+)"', s)
+                if m:
+                    return m.group(1)
+            except Exception:
+                pass
+            return ""
+
+        search = AchievementDocument.search()
+        body = {
+            "size": max(1, min(limit, 100)),
+            "sort": [{"time": {"order": "desc"}}]
+        }
+        if gte or lte:
+            rng = {}
+            if gte:
+                rng["gte"] = gte
+            if lte:
+                rng["lte"] = lte
+            body["query"] = {"range": {"time": rng}}
+        search = search.update_from_dict(body)
+        resp = search.execute()
+        results = []
+        for hit in resp:
+            w = getattr(hit, 'writer_id', '')
+            uname = None
+            try:
+                uname_lookup = get_user_by_id(w)
+                uname = (uname_lookup or {}).get("username")
+            except Exception:
+                uname = None
+            if not uname:
+                try:
+                    uname_lookup = get_user_by_id(int(w))
+                    uname = (uname_lookup or {}).get("username")
+                except Exception:
+                    uname = None
+            tval = _extract_type(getattr(hit, 'data', ''))
+            results.append({
+                "_id": hit.meta.id,
+                "writer_id": w,
+                "username": uname or "",
+                "type": tval or "",
+                "time": getattr(hit, 'time', None)
+            })
+        return results
+    except Exception as e:
+        print(f"获取最近活动失败: {str(e)}")
+        return []
+

 def write_user_data(user_data):
     """
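For reference, below is a minimal standalone sketch of the aggregation body that _type_filters_from_list() feeds into analytics_types(): one wildcard filter per type over data.keyword, matching the serialized "数据类型" fragment inside the data field. The hard-coded type list here is an assumption for illustration only; the real code pulls it from get_type_list().

    # Illustrative only: rebuilds the same "filters" aggregation shape as
    # _type_filters_from_list(), using an assumed type list.
    import json

    def build_type_filters(types):
        filters = {}
        for t in types:
            key = str(t)
            # one wildcard filter per type, matching the JSON fragment stored in data.keyword
            filters[key] = {"wildcard": {"data.keyword": {"value": f'*"数据类型": "{key}"*'}}}
        return filters

    body = {"size": 0, "aggs": {"by_type": {"filters": {"filters": build_type_filters(["软著", "专利"])}}}}
    print(json.dumps(body, ensure_ascii=False, indent=2))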
@@ -1,5 +1,5 @@
-INDEX_NAME = "wordsearch266666"
+INDEX_NAME = "wordsearch2666661"
 USER_NAME = "users11111"
 ACHIEVEMENT_INDEX_NAME = INDEX_NAME
 USER_INDEX_NAME = USER_NAME
-GLOBAL_INDEX_NAME = "global11111"
+GLOBAL_INDEX_NAME = "global11111111211"
@@ -17,7 +17,6 @@ urlpatterns = [
     path('search/', views.search, name='search'),
     path('fuzzy-search/', views.fuzzy_search, name='fuzzy_search'),
     path('all-data/', views.get_all_data, name='get_all_data'),
-    path('analytics/overview/', views.analytics_overview, name='analytics_overview'),

     # 用户管理
     path('users/', views.get_users, name='get_users'),
@@ -33,4 +32,10 @@ urlpatterns = [
     # 管理页面
     path('manage/', views.manage_page, name='manage_page'),
     path('user_manage/', views.user_manage, name='user_manage'),
+
+    # 分析接口
+    path('analytics/trend/', views.analytics_trend_view, name='analytics_trend'),
+    path('analytics/types/', views.analytics_types_view, name='analytics_types'),
+    path('analytics/types_trend/', views.analytics_types_trend_view, name='analytics_types_trend'),
+    path('analytics/recent/', views.analytics_recent_view, name='analytics_recent'),
 ]
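The four new routes accept from/to (Elasticsearch date math or ISO dates) plus interval, size, or limit, and the template calls them under the /elastic/ prefix. A hedged sketch of exercising them with the requests library; the host, port, and URL prefix are assumptions taken from the template's fetch() calls, not confirmed project settings.

    # Illustrative only: assumes the dev server runs on localhost:8000 and the
    # app is mounted under /elastic/.
    import requests

    BASE = "http://localhost:8000/elastic"

    print(requests.get(f"{BASE}/analytics/trend/", params={"from": "now-90d", "to": "now", "interval": "day"}).json())
    print(requests.get(f"{BASE}/analytics/types/", params={"from": "now-30d", "to": "now", "size": 10}).json())
    print(requests.get(f"{BASE}/analytics/types_trend/", params={"from": "now-180d", "to": "now", "interval": "week", "size": 6}).json())
    print(requests.get(f"{BASE}/analytics/recent/", params={"from": "now-7d", "to": "now", "limit": 10}).json())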
@@ -14,6 +14,12 @@ from django.views.decorators.csrf import ensure_csrf_cookie
 from django.views.decorators.csrf import csrf_exempt, ensure_csrf_cookie, csrf_protect
 from .es_connect import *
 from .es_connect import update_user_by_id as es_update_user_by_id, delete_user_by_id as es_delete_user_by_id
+from .es_connect import (
+    analytics_trend as es_analytics_trend,
+    analytics_types as es_analytics_types,
+    analytics_types_trend as es_analytics_types_trend,
+    analytics_recent as es_analytics_recent,
+)
 from PIL import Image

@@ -80,16 +86,6 @@ def get_all_data(request):
     except Exception as e:
         return JsonResponse({"status": "error", "message": str(e)}, status=500)
-
-@require_http_methods(["GET"])
-def analytics_overview(request):
-    try:
-        force = request.GET.get("force") == "1"
-        data = get_analytics_overview(force=force)
-        return JsonResponse({"status": "success", "data": data})
-    except Exception as e:
-        return JsonResponse({"status": "error", "message": str(e)}, status=500)
-

 @require_http_methods(["DELETE"])
 @csrf_exempt
 def delete_data(request, doc_id):
@@ -159,7 +155,11 @@ def update_data(request, doc_id):
         if isinstance(v, dict):
             updated["data"] = json.dumps(v, ensure_ascii=False)
         else:
-            updated["data"] = str(v)
+            try:
+                obj = json.loads(str(v))
+                updated["data"] = json.dumps(obj, ensure_ascii=False)
+            except Exception:
+                updated["data"] = str(v)

         success = update_by_id(doc_id, updated)
         if success:
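With this change, a string value that parses as JSON is re-serialized with ensure_ascii=False before being stored, and only non-JSON strings fall back to str(v). A minimal standalone sketch of that normalization, for illustration only:

    # Illustrative only: mirrors the new normalization branch in update_data().
    import json

    def normalize_data_field(v):
        if isinstance(v, dict):
            return json.dumps(v, ensure_ascii=False)
        try:
            return json.dumps(json.loads(str(v)), ensure_ascii=False)
        except Exception:
            return str(v)

    print(normalize_data_field('{"数据类型": "专利", "名称": "示例"}'))  # re-serialized JSON, non-ASCII preserved
    print(normalize_data_field("plain text"))                            # falls back to str(v)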
@@ -511,6 +511,67 @@ def manage_page(request):
     context = {"items": results, "user_id": user_id_qs or session_user_id}
     return render(request, "elastic/manage.html", context)


+@require_http_methods(["GET"])
+def analytics_trend_view(request):
+    try:
+        gte = request.GET.get("from")
+        lte = request.GET.get("to")
+        interval = request.GET.get("interval", "day")
+        data = es_analytics_trend(gte=gte, lte=lte, interval=interval)
+        return JsonResponse({"status": "success", "data": data})
+    except Exception as e:
+        return JsonResponse({"status": "error", "message": str(e)}, status=500)
+
+
+@require_http_methods(["GET"])
+def analytics_types_view(request):
+    try:
+        gte = request.GET.get("from")
+        lte = request.GET.get("to")
+        size = request.GET.get("size")
+        try:
+            size_int = int(size) if size is not None else 10
+        except Exception:
+            size_int = 10
+        data = es_analytics_types(gte=gte, lte=lte, size=size_int)
+        return JsonResponse({"status": "success", "data": data})
+    except Exception as e:
+        return JsonResponse({"status": "error", "message": str(e)}, status=500)
+
+
+@require_http_methods(["GET"])
+def analytics_types_trend_view(request):
+    try:
+        gte = request.GET.get("from")
+        lte = request.GET.get("to")
+        interval = request.GET.get("interval", "week")
+        size = request.GET.get("size")
+        try:
+            size_int = int(size) if size is not None else 8
+        except Exception:
+            size_int = 8
+        data = es_analytics_types_trend(gte=gte, lte=lte, interval=interval, size=size_int)
+        return JsonResponse({"status": "success", "data": data})
+    except Exception as e:
+        return JsonResponse({"status": "error", "message": str(e)}, status=500)
+
+
+@require_http_methods(["GET"])
+def analytics_recent_view(request):
+    try:
+        limit = request.GET.get("limit")
+        gte = request.GET.get("from")
+        lte = request.GET.get("to")
+        try:
+            limit_int = int(limit) if limit is not None else 10
+        except Exception:
+            limit_int = 10
+        data = es_analytics_recent(limit=limit_int, gte=gte, lte=lte)
+        return JsonResponse({"status": "success", "data": data})
+    except Exception as e:
+        return JsonResponse({"status": "error", "message": str(e)}, status=500)
+
+
 @require_http_methods(["GET"])
 @ensure_csrf_cookie
 def user_manage(request):
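Each new view wraps its result in a {"status": ..., "data": ...} envelope and falls back to HTTP 500 with an error message. A hedged smoke test using Django's test client; it assumes the project URLconf mounts this app under /elastic/ (as the template's fetch calls suggest), that Django settings are configured, and that Elasticsearch is reachable.

    # Illustrative only: a quick smoke test for one of the new analytics views.
    from django.test import Client

    client = Client()
    resp = client.get("/elastic/analytics/trend/", {"from": "now-90d", "to": "now", "interval": "day"})
    payload = resp.json()
    assert resp.status_code == 200 and payload["status"] == "success"
    print(len(payload["data"]), "trend buckets returned")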
@@ -3,6 +3,7 @@
 <head>
   <meta charset="UTF-8" />
   <title>数据管理系统</title>
+  <script src="https://cdn.jsdelivr.net/npm/echarts@5/dist/echarts.min.js"></script>
   <style>
     body {
       margin: 0;
@@ -154,42 +155,31 @@
   <div class="main-content">
     <div class="card">
       <div class="header">
-        <h2>数据概览</h2>
-        <div style="display:flex; gap:8px; align-items:center;">
-          <span class="badge">用户:{{ user_id }}</span>
-          {% if is_admin %}
-          <button id="triggerAnalyze" class="btn btn-primary">手动开始分析</button>
-          {% endif %}
-        </div>
-      </div>
-      <div class="grid-3">
-        <div>
-          <div class="legend"><span class="dot" style="background:#4f46e5;"></span><span class="muted">最近十天录入</span></div>
-          <canvas id="chartDays" height="140"></canvas>
-        </div>
-        <div>
-          <div class="legend"><span class="dot" style="background:#16a34a;"></span><span class="muted">最近十周录入</span></div>
-          <canvas id="chartWeeks" height="140"></canvas>
-        </div>
-        <div>
-          <div class="legend"><span class="dot" style="background:#ea580c;"></span><span class="muted">最近十个月录入</span></div>
-          <canvas id="chartMonths" height="140"></canvas>
-        </div>
+        <h2>主页</h2>
+        <span class="badge">用户:{{ user_id }}</span>
       </div>
+      <div class="muted">数据可视化概览:录入量变化、类型占比、类型变化、最近活动</div>
     </div>
     <div class="grid" style="margin-top:16px;">
       <div class="card">
-        <div class="header"><h2>近1个月成果类型</h2></div>
-        <canvas id="pie1m" height="200"></canvas>
+        <div class="header"><h3>录入量变化(近90天)</h3></div>
+        <div id="chartTrend" style="width:100%;height:320px;"></div>
       </div>
       <div class="card">
-        <div class="header"><h2>近12个月成果类型</h2></div>
-        <canvas id="pie12m" height="200"></canvas>
+        <div class="header"><h3>类型占比(近30天)</h3></div>
+        <div id="chartTypes" style="width:100%;height:320px;"></div>
+      </div>
+      <div class="card">
+        <div class="header"><h3>类型变化(近180天,按周)</h3></div>
+        <div id="chartTypesTrend" style="width:100%;height:320px;"></div>
+      </div>
+      <div class="card">
+        <div class="header"><h3>最近活动(近7天)</h3></div>
+        <ul id="recentList" style="list-style:none;padding:0;margin:0;"></ul>
       </div>
     </div>
   </div>

-  <script src="https://cdn.jsdelivr.net/npm/chart.js"></script>
   <script>
     // 获取CSRF令牌的函数
     function getCookie(name) {
@@ -256,88 +246,101 @@
       }
     });

-    async function loadAnalytics() {
-      const resp = await fetch('/elastic/analytics/overview/');
-      const d = await resp.json();
-      if (!resp.ok || d.status !== 'success') return;
-      const data = d.data || {};
-      renderLine('chartDays', data.last_10_days || [], '#4f46e5');
-      renderLine('chartWeeks', data.last_10_weeks || [], '#16a34a');
-      renderLine('chartMonths', data.last_10_months || [], '#ea580c');
-      renderPie('pie1m', data.type_pie_1m || []);
-      renderPie('pie12m', data.type_pie_12m || []);
-    }
+    function fetchJSON(url){ return fetch(url, {credentials:'same-origin'}).then(r=>r.json()); }
+    function qs(params){ const u = new URLSearchParams(params); return u.toString(); }

-    const btn = document.getElementById('triggerAnalyze');
-    if (btn) {
-      btn.addEventListener('click', async () => {
-        btn.disabled = true;
-        btn.textContent = '分析中…';
-        try {
-          const resp = await fetch('/elastic/analytics/overview/?force=1');
-          const d = await resp.json();
-          if (!resp.ok || d.status !== 'success') throw new Error('分析失败');
-          window.location.reload();
-        } catch (e) {
-          btn.textContent = '重试';
-          btn.disabled = false;
-        }
+    const trendChart = echarts.init(document.getElementById('chartTrend'));
+    const typesChart = echarts.init(document.getElementById('chartTypes'));
+    const typesTrendChart = echarts.init(document.getElementById('chartTypesTrend'));
+
+    async function loadTrend(){
+      const url = '/elastic/analytics/trend/?' + qs({ from:'now-90d', to:'now', interval:'day' });
+      const res = await fetchJSON(url);
+      if(res.status!=='success') return;
+      const buckets = res.data || [];
+      const x = buckets.map(b=>b.key_as_string||'');
+      const y = buckets.map(b=>b.doc_count||0);
+      trendChart.setOption({
+        tooltip:{trigger:'axis'},
+        xAxis:{type:'category', data:x},
+        yAxis:{type:'value'},
+        series:[{ type:'line', areaStyle:{}, data:y, smooth:true, color:'#4f46e5' }]
       });
     }

-    function hexWithAlpha(hex, alphaHex) {
-      if (!hex || !hex.startsWith('#')) return hex;
-      if (hex.length === 7) return hex + alphaHex;
-      return hex;
-    }
-    function renderLine(id, items, color) {
-      const ctx = document.getElementById(id);
-      const labels = items.map(x => x.label);
-      const values = items.map(x => x.count);
-      new Chart(ctx, {
-        type: 'line',
-        data: {
-          labels,
-          datasets: [{
-            data: values,
-            borderColor: color,
-            backgroundColor: hexWithAlpha(color, '26'),
-            tension: 0.25,
-            fill: true,
-            pointRadius: 3,
-          }]
-        },
-        options: {
-          responsive: true,
-          plugins: { legend: { display: false } },
-          animation: { duration: 800, easing: 'easeOutQuart' },
-          scales: {
-            x: { grid: { display: false } },
-            y: { grid: { color: 'rgba(31,35,40,0.06)' }, beginAtZero: true }
-          }
-        }
-      });
-    }
-    function renderPie(id, items) {
-      const ctx = document.getElementById(id);
-      const labels = items.map(x => x.type);
-      const values = items.map(x => x.count);
-      const colors = ['#2563eb','#22c55e','#f59e0b','#ef4444','#a855f7','#06b6d4','#84cc16','#ec4899','#475569','#d946ef'];
-      new Chart(ctx, {
-        type: 'doughnut',
-        data: {
-          labels,
-          datasets: [{ data: values, backgroundColor: colors.slice(0, labels.length) }]
-        },
-        options: {
-          responsive: true,
-          animation: { duration: 900, easing: 'easeOutQuart' },
-          plugins: { legend: { position: 'bottom' } }
-        }
+    async function loadTypes(){
+      const url = '/elastic/analytics/types/?' + qs({ from:'now-30d', to:'now', size:10 });
+      const res = await fetchJSON(url);
+      if(res.status!=='success') return;
+      const buckets = res.data || [];
+      const data = buckets.map(b=>({ name: String(b.key||'未知'), value: b.doc_count||0 }));
+      typesChart.setOption({
+        tooltip:{trigger:'item'},
+        legend:{type:'scroll'},
+        series:[{ type:'pie', radius:['40%','70%'], data }]
       });
     }

-    loadAnalytics();
+    async function loadTypesTrend(){
+      const url = '/elastic/analytics/types_trend/?' + qs({ from:'now-180d', to:'now', interval:'week', size:6 });
+      const res = await fetchJSON(url);
+      if(res.status!=='success') return;
+      const rows = res.data || [];
+      const x = rows.map(r=>r.key_as_string||'');
+      const typeSet = new Set();
+      rows.forEach(r=> (r.types||[]).forEach(t=> typeSet.add(String(t.key||'未知'))));
+      const types = Array.from(typeSet);
+      const series = types.map(tp=>({
+        name: tp,
+        type:'line',
+        smooth:true,
+        data: rows.map(r=>{
+          const b = (r.types||[]).find(x=>String(x.key||'')===tp);
+          return b? b.doc_count||0 : 0;
+        })
+      }));
+      typesTrendChart.setOption({
+        tooltip:{trigger:'axis'},
+        legend:{type:'scroll'},
+        xAxis:{type:'category', data:x},
+        yAxis:{type:'value'},
+        series
+      });
+    }
+
+    function formatTime(t){
+      try{
+        const d = new Date(t);
+        if(String(d) !== 'Invalid Date'){
+          const pad = n=> String(n).padStart(2,'0');
+          return `${d.getFullYear()}-${pad(d.getMonth()+1)}-${pad(d.getDate())} ${pad(d.getHours())}:${pad(d.getMinutes())}`;
+        }
+      }catch(e){}
+      return t||'';
+    }
+
+    async function loadRecent(){
+      const listEl = document.getElementById('recentList');
+      const url = '/elastic/analytics/recent/?' + qs({ from:'now-7d', to:'now', limit:10 });
+      const res = await fetchJSON(url);
+      if(res.status!=='success') return;
+      const items = res.data || [];
+      listEl.innerHTML = '';
+      items.forEach(it=>{
+        const li = document.createElement('li');
+        const t = formatTime(it.time);
+        const u = it.username || '';
+        const ty = it.type || '未知';
+        li.textContent = `${t},${u},${ty}`;
+        listEl.appendChild(li);
+      });
+    }
+
+    loadTrend();
+    loadTypes();
+    loadTypesTrend();
+    loadRecent();
   </script>
 </body>
 </html>