Back to home page

EIC code displayed by LXR

 
 

    


File indexing completed on 2026-04-25 08:29:11

0001 from django.shortcuts import render, redirect, get_object_or_404
0002 from django.http import HttpResponse, JsonResponse
0003 from django.urls import reverse
0004 from django.db.models import Count, Max
0005 from django.core.paginator import Paginator
0006 from rest_framework import viewsets, generics
0007 from rest_framework.decorators import action, api_view, authentication_classes, permission_classes
0008 from rest_framework.response import Response
0009 from rest_framework import status
0010 from rest_framework.permissions import AllowAny, IsAuthenticated
0011 from rest_framework.authentication import SessionAuthentication, TokenAuthentication
0012 from django.contrib.auth.decorators import login_required
0013 from django.contrib.auth.forms import PasswordChangeForm
0014 from django.contrib.auth import update_session_auth_hash
0015 from django.contrib import messages
0016 from django.contrib.auth.decorators import user_passes_test
0017 from django.core.exceptions import PermissionDenied
0018 from .models import SystemAgent, AppLog, Run, StfFile, Subscriber, FastMonFile, PersistentState, PandaQueue, RucioEndpoint, TFSlice, Worker, RunState, SystemStateEvent
0019 from .workflow_models import STFWorkflow, AgentWorkflowStage, WorkflowMessage, WorkflowStatus, AgentType, WorkflowDefinition, WorkflowExecution
0020 from .serializers import (
0021     SystemAgentSerializer, AppLogSerializer, LogSummarySerializer,
0022     STFWorkflowSerializer, AgentWorkflowStageSerializer, WorkflowMessageSerializer,
0023     RunSerializer, StfFileSerializer, SubscriberSerializer, FastMonFileSerializer,
0024     WorkflowDefinitionSerializer, WorkflowExecutionSerializer,
0025     TFSliceSerializer, WorkerSerializer, RunStateSerializer, SystemStateEventSerializer
0026 )
0027 from .forms import SystemAgentForm
0028 from rest_framework.views import APIView
0029 from django.apps import apps
0030 from django.db import connection
0031 from django.utils import timezone
0032 from django.conf import settings as django_settings
0033 import logging
0034 
0035 logger = logging.getLogger(__name__)
0036 
0037 
def oauth_protected_resource(request):
    """
    OAuth 2.0 Protected Resource Metadata endpoint (RFC 9728).

    Publishes discovery metadata for this protected resource, pointing
    OAuth clients at the Auth0 authorization server. Responds 503 when
    AUTH0_DOMAIN is not configured in settings.
    """
    auth0_domain = getattr(django_settings, 'AUTH0_DOMAIN', None)
    if not auth0_domain:
        return JsonResponse({"error": "OAuth not configured"}, status=503)

    scheme = "https" if request.is_secure() else "http"
    # FORCE_SCRIPT_NAME is the URL prefix when served behind a path-mounted proxy.
    prefix = getattr(django_settings, 'FORCE_SCRIPT_NAME', None) or ""
    return JsonResponse({
        "resource": f"{scheme}://{request.get_host()}{prefix}/mcp",
        "authorization_servers": [f"https://{auth0_domain}/"],
        "scopes_supported": ["openid", "profile", "email"],
        "bearer_methods_supported": ["header"],
    })
0061 
0062 
0063 # Create your views here.
def home(request):
    """Unauthenticated landing page.

    Emits a tiny HTML page whose inline script redirects the browser to
    the testbed or production hub based on the 'navMode' value stored in
    localStorage (defaulting to production).
    """
    # Use the module-level django_settings alias instead of re-importing
    # django.conf.settings locally (consistent with the rest of this file).
    prefix = getattr(django_settings, 'FORCE_SCRIPT_NAME', '') or ''
    return HttpResponse(f"""<!DOCTYPE html><html><head><script>
var mode = localStorage.getItem('navMode') || 'production';
window.location.replace(mode === 'testbed' ? '{prefix}/testbed/' : '{prefix}/prod/');
</script></head><body></body></html>""", content_type='text/html')
0071 
def authenticated_home(request):
    """Send logged-in users straight to the production hub view."""
    return redirect('monitor_app:prod_hub')
0074 
def about(request):
    """Render the static About page."""
    return render(request, 'monitor_app/about.html')
0077 
@login_required
def index(request):
    """Render the simple landing page shown to authenticated users."""
    return render(request, 'monitor_app/index.html')
0082 
def staff_member_required(view_func):
    """Decorator restricting a view to staff users.

    Raises PermissionDenied (rendered as HTTP 403) when
    ``request.user.is_staff`` is false; otherwise delegates to the
    wrapped view unchanged.
    """
    from functools import wraps

    # wraps() preserves __name__/__doc__/__module__ of the original view,
    # which URL resolvers, logging, and debugging tools rely on.
    @wraps(view_func)
    def _wrapped_view(request, *args, **kwargs):
        if not request.user.is_staff:
            raise PermissionDenied
        return view_func(request, *args, **kwargs)
    return _wrapped_view
0089 
@login_required
@staff_member_required
def system_agent_create(request):
    """Render and process the creation form for a new SystemAgent."""
    is_post = request.method == 'POST'
    form = SystemAgentForm(request.POST) if is_post else SystemAgentForm()
    if is_post and form.is_valid():
        form.save()
        return redirect('monitor_app:index')
    # GET, or POST with validation errors: show the (possibly bound) form.
    return render(request, 'monitor_app/system_agent_form.html', {'form': form})
0101 
@login_required
@staff_member_required
def system_agent_update(request, pk):
    """Render and process the edit form for an existing SystemAgent (404 if absent)."""
    agent = get_object_or_404(SystemAgent, pk=pk)
    is_post = request.method == 'POST'
    form = SystemAgentForm(request.POST, instance=agent) if is_post else SystemAgentForm(instance=agent)
    if is_post and form.is_valid():
        form.save()
        return redirect('monitor_app:index')
    # GET, or POST with validation errors: show the (possibly bound) form.
    return render(request, 'monitor_app/system_agent_form.html', {'form': form})
0114 
@login_required
@staff_member_required
def system_agent_delete(request, pk):
    """Confirm (GET) and perform (POST) deletion of a SystemAgent."""
    agent = get_object_or_404(SystemAgent, pk=pk)
    if request.method != 'POST':
        # Non-POST requests get the confirmation page.
        return render(request, 'monitor_app/system_agent_confirm_delete.html', {'agent': agent})
    agent.delete()
    return redirect('monitor_app:index')
0123 
@login_required
def get_system_agents_data(request):
    """Return a JSON snapshot (id, name, status) of every SystemAgent."""
    rows = [
        {'id': a.id, 'name': a.instance_name, 'status': a.status}
        for a in SystemAgent.objects.all()
    ]
    return JsonResponse({'agents': rows})
0131 
@login_required
def account_view(request):
    """Account page with a password-change form.

    On a valid POST the password is saved and the session auth hash is
    refreshed so the user remains logged in; otherwise the (possibly
    bound) form is re-rendered with messages.
    """
    if request.method != 'POST':
        form = PasswordChangeForm(request.user)
    else:
        form = PasswordChangeForm(request.user, request.POST)
        if form.is_valid():
            user = form.save()
            # Keep the current session valid after the password change.
            update_session_auth_hash(request, user)
            messages.success(request, 'Your password was successfully updated!')
            return redirect('monitor_app:account')
        messages.error(request, 'Please correct the error below.')
    return render(request, 'monitor_app/account.html', {
        'form': form,
        'user': request.user
    })
0149 
0150 
class SystemAgentViewSet(viewsets.ModelViewSet):
    """CRUD API for SystemAgent records, plus a heartbeat action for agents."""
    queryset = SystemAgent.objects.all()
    serializer_class = SystemAgentSerializer
    authentication_classes = [SessionAuthentication, TokenAuthentication]
    permission_classes = [IsAuthenticated]

    @action(detail=False, methods=['post'], url_path='heartbeat')
    def heartbeat(self, request):
        """
        Register an agent or refresh its heartbeat.

        Creates the SystemAgent row on first contact and updates it on
        every later heartbeat. Responds 201 on creation, 200 on update,
        400 when instance_name is missing.
        """
        name = request.data.get('instance_name')
        if not name:
            return Response({"instance_name": ["This field is required."]}, status=status.HTTP_400_BAD_REQUEST)

        # Fields refreshed on every heartbeat (update_or_create handles both
        # registration and subsequent heartbeats in one call).
        payload = {
            'agent_type': request.data.get('agent_type', 'other'),
            'description': request.data.get('description', ''),
            'status': request.data.get('status', 'OK'),
            'agent_url': request.data.get('agent_url', None),
            'namespace': request.data.get('namespace'),
            'last_heartbeat': timezone.now(),
        }
        # Optional fields are written only when the heartbeat explicitly
        # carries them, so an omitted field never clobbers a stored value.
        for optional in ('workflow_enabled', 'operational_state', 'pid', 'hostname'):
            if optional in request.data:
                payload[optional] = request.data[optional]

        agent, created = SystemAgent.objects.update_or_create(
            instance_name=name,
            defaults=payload,
        )

        # Piggyback: opportunistically mark agents that stopped reporting as EXITED.
        SystemAgent.mark_stale_agents()

        http_status = status.HTTP_201_CREATED if created else status.HTTP_200_OK
        return Response(self.get_serializer(agent).data, status=http_status)
0196 
0197 
class STFWorkflowViewSet(viewsets.ModelViewSet):
    """API endpoint for STF Workflows (full CRUD via ModelViewSet)."""
    queryset = STFWorkflow.objects.all()
    serializer_class = STFWorkflowSerializer
    # Session auth for the browsable API, token auth for agents/scripts.
    authentication_classes = [SessionAuthentication, TokenAuthentication]
    permission_classes = [IsAuthenticated]
0204 
class AgentWorkflowStageViewSet(viewsets.ModelViewSet):
    """API endpoint for Agent Workflow Stages (full CRUD via ModelViewSet)."""
    queryset = AgentWorkflowStage.objects.all()
    serializer_class = AgentWorkflowStageSerializer
    # Session auth for the browsable API, token auth for agents/scripts.
    authentication_classes = [SessionAuthentication, TokenAuthentication]
    permission_classes = [IsAuthenticated]
0211 
class WorkflowMessageViewSet(viewsets.ModelViewSet):
    """API endpoint for Workflow Messages (full CRUD via ModelViewSet)."""
    queryset = WorkflowMessage.objects.all()
    serializer_class = WorkflowMessageSerializer
    # Session auth for the browsable API, token auth for agents/scripts.
    authentication_classes = [SessionAuthentication, TokenAuthentication]
    permission_classes = [IsAuthenticated]
0218 
0219 
class AppLogViewSet(viewsets.ModelViewSet):
    """
    API endpoint that allows logs to be viewed or created.

    NOTE(review): AllowAny means unauthenticated clients can post (and
    read) logs — confirm this remains intended outside development.
    """
    queryset = AppLog.objects.all()
    serializer_class = AppLogSerializer
    permission_classes = [AllowAny] # For now, allow any client to post logs
0227 
0228 
class RunViewSet(viewsets.ModelViewSet):
    """API endpoint for data-taking runs (full CRUD via ModelViewSet)."""
    queryset = Run.objects.all()
    serializer_class = RunSerializer
    # Session auth for the browsable API, token auth for agents/scripts.
    authentication_classes = [SessionAuthentication, TokenAuthentication]
    permission_classes = [IsAuthenticated]
0235 
0236 
class StfFileViewSet(viewsets.ModelViewSet):
    """API endpoint for STF file tracking (full CRUD via ModelViewSet)."""
    queryset = StfFile.objects.all()
    serializer_class = StfFileSerializer
    # Session auth for the browsable API, token auth for agents/scripts.
    authentication_classes = [SessionAuthentication, TokenAuthentication]
    permission_classes = [IsAuthenticated]
0243 
0244 
class SubscriberViewSet(viewsets.ModelViewSet):
    """API endpoint for message queue subscribers (full CRUD via ModelViewSet)."""
    queryset = Subscriber.objects.all()
    serializer_class = SubscriberSerializer
    # Session auth for the browsable API, token auth for agents/scripts.
    authentication_classes = [SessionAuthentication, TokenAuthentication]
    permission_classes = [IsAuthenticated]
0251 
0252 
0253 
class FastMonFileViewSet(viewsets.ModelViewSet):
    """API endpoint for Fast Monitoring files (full CRUD via ModelViewSet)."""
    queryset = FastMonFile.objects.all()
    serializer_class = FastMonFileSerializer
    # Session auth for the browsable API, token auth for agents/scripts.
    authentication_classes = [SessionAuthentication, TokenAuthentication]
    permission_classes = [IsAuthenticated]
0260 
0261 
class WorkflowDefinitionViewSet(viewsets.ModelViewSet):
    """
    API endpoint for Workflow Definitions.

    Supports optional narrowing of the list via the ``workflow_name``
    and ``version`` query parameters.
    """
    queryset = WorkflowDefinition.objects.all()
    serializer_class = WorkflowDefinitionSerializer
    authentication_classes = [SessionAuthentication, TokenAuthentication]
    permission_classes = [IsAuthenticated]

    def get_queryset(self):
        """Apply workflow_name/version query-parameter filters when present."""
        queryset = super().get_queryset()
        params = self.request.query_params
        name = params.get('workflow_name')
        if name:
            queryset = queryset.filter(workflow_name=name)
        version = params.get('version')
        if version:
            queryset = queryset.filter(version=version)
        return queryset
0278 
0279 
class WorkflowExecutionViewSet(viewsets.ModelViewSet):
    """API endpoint for Workflow Executions (full CRUD via ModelViewSet)."""
    queryset = WorkflowExecution.objects.all()
    serializer_class = WorkflowExecutionSerializer
    # Session auth for the browsable API, token auth for agents/scripts.
    authentication_classes = [SessionAuthentication, TokenAuthentication]
    permission_classes = [IsAuthenticated]
    # Honored when a filter backend (e.g. django-filter) is configured —
    # NOTE(review): confirm DjangoFilterBackend is enabled in settings.
    filterset_fields = ['status', 'namespace', 'executed_by']
0287 
0288 
0289 # Fast Processing API ViewSets
0290 
class TFSliceViewSet(viewsets.ModelViewSet):
    """API endpoint for TF Slices (fast processing workflow)."""
    queryset = TFSlice.objects.all()
    serializer_class = TFSliceSerializer
    # Session auth for the browsable API, token auth for agents/scripts.
    authentication_classes = [SessionAuthentication, TokenAuthentication]
    permission_classes = [IsAuthenticated]
    # Query-parameter filters honored when a filter backend is configured.
    filterset_fields = ['run_number', 'status', 'stf_filename', 'assigned_worker', 'tf_filename', 'slice_id']
0298 
0299 
class WorkerViewSet(viewsets.ModelViewSet):
    """API endpoint for Workers (fast processing workflow)."""
    queryset = Worker.objects.all()
    serializer_class = WorkerSerializer
    # Session auth for the browsable API, token auth for agents/scripts.
    authentication_classes = [SessionAuthentication, TokenAuthentication]
    permission_classes = [IsAuthenticated]
    # Query-parameter filters honored when a filter backend is configured.
    filterset_fields = ['run_number', 'status', 'location']
0307 
0308 
class RunStateViewSet(viewsets.ModelViewSet):
    """API endpoint for Run State (fast processing workflow)."""
    queryset = RunState.objects.all()
    serializer_class = RunStateSerializer
    # Session auth for the browsable API, token auth for agents/scripts.
    authentication_classes = [SessionAuthentication, TokenAuthentication]
    permission_classes = [IsAuthenticated]
0315 
0316 
class SystemStateEventViewSet(viewsets.ModelViewSet):
    """API endpoint for System State Events (fast processing workflow)."""
    queryset = SystemStateEvent.objects.all()
    serializer_class = SystemStateEventSerializer
    # Session auth for the browsable API, token auth for agents/scripts.
    authentication_classes = [SessionAuthentication, TokenAuthentication]
    permission_classes = [IsAuthenticated]
    # Query-parameter filters honored when a filter backend is configured.
    filterset_fields = ['run_number', 'event_type', 'state']
0324 
0325 
@login_required
def log_summary(request):
    """
    Log summary page backed by server-side DataTables.

    Renders the table shell and filter option lists; row data is fetched
    asynchronously from log_summary_datatable_ajax.
    """
    # Initial filter state, passed through to the template.
    app_name = request.GET.get('app_name')
    instance_type = request.GET.get('instance_type')
    instance_name = request.GET.get('instance_name')
    levelname = request.GET.get('levelname')

    # Distinct, case-insensitively sorted names for the filter links.
    app_names = sorted(
        {n for n in AppLog.objects.values_list('app_name', flat=True) if n},
        key=str.lower,
    )
    instance_names = sorted(
        {n for n in AppLog.objects.values_list('instance_name', flat=True) if n},
        key=str.lower,
    )

    def extract_type(name):
        # An instance name like "workflow_runner-agent-wenauseic-25" maps to
        # the agent type "workflow_runner-agent-wenauseic" (trailing -<number>
        # stripped); names without a trailing number pass through unchanged.
        if not name:
            return None
        stem, sep, tail = name.rpartition('-')
        if sep and tail.isdigit():
            return stem
        return name

    instance_types = sorted(
        {t for t in (extract_type(n) for n in instance_names) if t},
        key=str.lower,
    )

    # DataTables column configuration: identity columns, one column per log
    # level, then totals and the actions column.
    columns = [
        {'name': 'app_name', 'title': 'Application Name', 'orderable': True},
        {'name': 'instance_name', 'title': 'Instance Name', 'orderable': True},
        {'name': 'latest_timestamp', 'title': 'Latest Timestamp', 'orderable': True},
    ]
    columns += [
        {'name': f'{level.lower()}_count', 'title': level, 'orderable': True}
        for level in ('INFO', 'WARNING', 'ERROR', 'CRITICAL', 'DEBUG')
    ]
    columns += [
        {'name': 'total_count', 'title': 'Total', 'orderable': True},
        {'name': 'actions', 'title': 'Actions', 'orderable': False},
    ]

    context = {
        'table_title': 'Log Summary',
        'table_description': 'Server-side aggregated log counts by application and instance, with level breakdowns and drill-down access.',
        'ajax_url': reverse('monitor_app:log_summary_datatable_ajax'),
        'columns': columns,
        'app_names': app_names,
        'instance_types': instance_types,
        'instance_names': instance_names,
        'selected_app': app_name,
        'selected_instance_type': instance_type,
        'selected_instance': instance_name,
        'selected_levelname': levelname,
    }
    return render(request, 'monitor_app/log_summary_ajax.html', context)
0383 
0384 
0385 
0386 
def log_summary_datatable_ajax(request):
    """
    AJAX endpoint for server-side DataTables processing of log summary data.
    Handles pagination, searching, ordering, and filtering.

    Returns one row per (app_name, instance_name) pair with per-level
    counts rendered as drill-down links into the log list view.
    """
    from .utils import DataTablesProcessor, get_filter_params, apply_filters, format_datetime
    from django.db.models import Q, Count, Max
    from urllib.parse import urlencode

    # Initialize DataTables processor (default sort: latest_timestamp desc)
    columns = ['app_name', 'instance_name', 'latest_timestamp', 'info_count', 'warning_count', 'error_count', 'critical_count', 'debug_count', 'total_count', 'actions']
    dt = DataTablesProcessor(request, columns, default_order_column=2, default_order_direction='desc')

    # Apply filters to base queryset
    base_queryset = AppLog.objects.all()
    filters = get_filter_params(request, ['app_name', 'instance_name', 'levelname'])
    base_queryset = apply_filters(base_queryset, filters)

    # Apply instance_type filter (match instances with this base name, with or without trailing -number)
    instance_type = request.GET.get('instance_type')
    if instance_type:
        base_queryset = base_queryset.filter(
            Q(instance_name__startswith=instance_type + '-') | Q(instance_name=instance_type)
        )

    # Create summary queryset - one row per app/instance pair
    summary_queryset = (
        base_queryset.values('app_name', 'instance_name')
        .annotate(
            latest_timestamp=Max('timestamp'),
            info_count=Count('id', filter=Q(levelname='INFO')),
            warning_count=Count('id', filter=Q(levelname='WARNING')),
            error_count=Count('id', filter=Q(levelname='ERROR')),
            critical_count=Count('id', filter=Q(levelname='CRITICAL')),
            debug_count=Count('id', filter=Q(levelname='DEBUG')),
            total_count=Count('id')
        )
    )

    # records_total = unfiltered number of app/instance pairs;
    # records_filtered reflects the DataTables search box.
    records_total = AppLog.objects.values('app_name', 'instance_name').annotate(count=Count('id')).count()
    search_fields = ['app_name', 'instance_name']
    summary_queryset = dt.apply_search(summary_queryset, search_fields)
    records_filtered = summary_queryset.count()

    # Apply ordering (all columns can use default ordering)
    summary_queryset = summary_queryset.order_by(dt.get_order_by())
    summary_data = dt.apply_pagination(summary_queryset)

    # Helper for drill-down links: zero counts render as plain text,
    # non-zero counts link to the filtered log list.
    def create_level_link(count, level, app_name, instance_name):
        if count == 0:
            return str(count)
        params = {'app_name': app_name, 'instance_name': instance_name, 'levelname': level}
        base_url = reverse('monitor_app:log_list')
        url = f'{base_url}?{urlencode(params)}'
        return f'<a href="{url}">{count}</a>'

    # Format data for DataTables
    data = []
    logs_url = reverse('monitor_app:log_list')
    for item in summary_data:
        timestamp_str = format_datetime(item['latest_timestamp'])

        # Build filter-preserving drill-down links with urlencode(), which
        # both joins and percent-escapes the parameters.  (The previous code
        # concatenated a fragment that already began with '?' after another
        # '?', producing malformed '...??app_name=...' URLs, and never
        # escaped the names.)
        app_params = {'app_name': item['app_name']}
        if filters['instance_name']:
            app_params['instance_name'] = filters['instance_name']
        app_name_link = f'<a href="{logs_url}?{urlencode(app_params)}">{item["app_name"]}</a>'

        instance_params = {'instance_name': item['instance_name']}
        if filters['app_name']:
            instance_params['app_name'] = filters['app_name']
        instance_name_link = f'<a href="{logs_url}?{urlencode(instance_params)}">{item["instance_name"]}</a>'

        view_logs_params = urlencode({'app_name': item['app_name'], 'instance_name': item['instance_name']})
        view_logs_link = f'<a href="{logs_url}?{view_logs_params}">View Logs</a>'

        data.append([
            app_name_link, instance_name_link, timestamp_str,
            create_level_link(item['info_count'], 'INFO', item['app_name'], item['instance_name']),
            create_level_link(item['warning_count'], 'WARNING', item['app_name'], item['instance_name']),
            create_level_link(item['error_count'], 'ERROR', item['app_name'], item['instance_name']),
            create_level_link(item['critical_count'], 'CRITICAL', item['app_name'], item['instance_name']),
            create_level_link(item['debug_count'], 'DEBUG', item['app_name'], item['instance_name']),
            item['total_count'], view_logs_link
        ])

    return dt.create_response(data, records_total, records_filtered)
0478 
0479 
@login_required
def log_list(request):
    """
    Log list page backed by server-side DataTables.

    Renders the table shell and filter definitions; rows are loaded via
    logs_datatable_ajax and filter option counts via log_filter_counts.
    (Removed an unused local import of parse_datetime — time-range parsing
    happens in the AJAX endpoint, not here.)
    """
    # Initial filter state (pre-selects the filter controls in the template)
    app_name = request.GET.get('app_name')
    username = request.GET.get('username')
    levelname = request.GET.get('levelname')

    # Column definitions for DataTables
    columns = [
        {'name': 'timestamp', 'title': 'Timestamp', 'orderable': True},
        {'name': 'app_name', 'title': 'App Name', 'orderable': True},
        {'name': 'instance_name', 'title': 'Instance Name', 'orderable': True},
        {'name': 'levelname', 'title': 'Level', 'orderable': True},
        {'name': 'message', 'title': 'Message', 'orderable': False},
        {'name': 'module', 'title': 'Module', 'orderable': True},
        {'name': 'funcname', 'title': 'Function', 'orderable': True},
    ]

    # Filter field definitions for dynamic filtering
    filter_fields = [
        {'name': 'app_name', 'label': 'Applications', 'type': 'select'},
        {'name': 'username', 'label': 'Users', 'type': 'select'},
        {'name': 'levelname', 'label': 'Levels', 'type': 'select'},
    ]

    context = {
        'table_title': 'Log List',
        'table_description': 'View and search application logs with dynamic filtering by source, user, and level.',
        'ajax_url': reverse('monitor_app:logs_datatable_ajax'),
        'filter_counts_url': reverse('monitor_app:log_filter_counts'),
        'columns': columns,
        'filter_fields': filter_fields,
        'selected_app': app_name,
        'selected_username': username,
        'selected_levelname': levelname,
    }
    return render(request, 'monitor_app/log_list_dynamic.html', context)
0523 
0524 
@login_required
def log_detail(request, log_id):
    """Render the detail page for a single AppLog entry (404 if absent)."""
    entry = get_object_or_404(AppLog, id=log_id)
    return render(request, 'monitor_app/log_detail.html', {'log': entry})
0530 
0531 
def logs_datatable_ajax(request):
    """
    AJAX endpoint for server-side DataTables processing of logs.
    Handles pagination, searching, ordering, and filtering.

    Supports app_name/levelname filters, a username filter matched against
    the instance_name suffix, and optional start_time/end_time bounds.
    """
    import re
    from .utils import DataTablesProcessor, get_filter_params, apply_filters, format_datetime
    from django.utils.dateparse import parse_datetime

    # Initialize DataTables processor (default sort: timestamp desc)
    columns = ['timestamp', 'app_name', 'instance_name', 'levelname', 'message', 'module', 'funcname']
    dt = DataTablesProcessor(request, columns, default_order_column=0, default_order_direction='desc')

    # Build base queryset and apply standard filters (app_name, levelname)
    queryset = AppLog.objects.all()
    filters = get_filter_params(request, ['app_name', 'levelname'])
    queryset = apply_filters(queryset, filters)

    # Handle username filter (username is segment before trailing numeric ID).
    # re.escape() keeps regex metacharacters in the untrusted query parameter
    # from being interpreted by the database regex engine.
    username = request.GET.get('username')
    if username:
        queryset = queryset.filter(instance_name__regex=rf'-{re.escape(username)}-\d+$')
    filters['username'] = username

    # Handle time range filters (silently ignore values that fail to parse)
    start_time = request.GET.get('start_time')
    end_time = request.GET.get('end_time')
    if start_time:
        dt_parsed = parse_datetime(start_time)
        if dt_parsed:
            queryset = queryset.filter(timestamp__gte=dt_parsed)
    if end_time:
        dt_parsed = parse_datetime(end_time)
        if dt_parsed:
            queryset = queryset.filter(timestamp__lte=dt_parsed)

    # Get counts and apply search/pagination
    records_total = AppLog.objects.count()
    search_fields = ['app_name', 'instance_name', 'levelname', 'message', 'module', 'funcname']
    queryset = dt.apply_search(queryset, search_fields)
    records_filtered = queryset.count()

    queryset = queryset.order_by(dt.get_order_by())
    logs = dt.apply_pagination(queryset)

    # Format data for DataTables
    data = []
    for log in logs:
        timestamp_str = format_datetime(log.timestamp)
        # Link timestamp to detail page
        timestamp_link = f'<a href="{reverse("monitor_app:log_detail", args=[log.id])}">{timestamp_str}</a>'

        # Create filter-preserving links
        app_filter_url = f"?app_name={log.app_name}"
        if filters['username']:
            app_filter_url += f"&username={filters['username']}"
        app_name_link = f'<a href="{app_filter_url}">{log.app_name}</a>'

        # Instance name displayed but not filterable (too many entries)
        instance_name_display = log.instance_name

        # Use plain text level (consistent with other views)
        level_text = log.levelname

        # Truncate message if too long
        message = log.message[:200] + '...' if len(log.message) > 200 else log.message
        func_display = f"{log.funcname}:{log.lineno}"

        data.append([
            timestamp_link, app_name_link, instance_name_display,
            level_text, message, log.module, func_display
        ])

    return dt.create_response(data, records_total, records_filtered)
0605 
0606 
def get_log_filter_counts(request):
    """
    AJAX endpoint that returns dynamic filter options with counts.
    Only shows options that have >0 matches in the current filtered dataset.

    Usernames are derived from instance_name values shaped like
    "...-<username>-<number>".
    """
    from .utils import get_filter_counts, get_filter_params
    from django.db.models import Count
    import re

    # Get current filters for actual model fields only
    current_filters = get_filter_params(request, ['app_name', 'levelname'])
    username = request.GET.get('username')

    # Build base queryset with username filter applied if set.
    # re.escape() keeps regex metacharacters in the untrusted query parameter
    # from being interpreted by the database regex engine.
    base_queryset = AppLog.objects.all()
    if username:
        base_queryset = base_queryset.filter(instance_name__regex=rf'-{re.escape(username)}-\d+$')

    # Use standard utility for app_name and levelname counts
    # NOTE: current_filters only contains model fields, not username
    filter_counts = get_filter_counts(base_queryset, ['app_name', 'levelname'], current_filters)

    # Extract usernames from instance_name (segment before trailing numeric ID)
    username_pattern = re.compile(r'-([^-]+)-\d+$')

    # Build queryset for username counts (apply app_name and levelname filters)
    qs_for_username = AppLog.objects.all()
    if current_filters.get('app_name'):
        qs_for_username = qs_for_username.filter(app_name=current_filters['app_name'])
    if current_filters.get('levelname'):
        qs_for_username = qs_for_username.filter(levelname=current_filters['levelname'])

    # Count usernames extracted from instance_name
    username_counts = {}
    for item in qs_for_username.values('instance_name').annotate(count=Count('id')):
        match = username_pattern.search(item['instance_name'])
        if match:
            uname = match.group(1)
            username_counts[uname] = username_counts.get(uname, 0) + item['count']

    # Sort by descending count, then alphabetically (list of (name, count) tuples)
    filter_counts['username'] = sorted(username_counts.items(), key=lambda x: (-x[1], x[0]))

    # Add username to current_filters for UI state (after utility call, not before)
    current_filters['username'] = username

    return JsonResponse({
        'filter_counts': filter_counts,
        'current_filters': current_filters
    })
0657 
0658 
0659 
0660 
class LogSummaryView(generics.ListAPIView):
    """
    API endpoint that provides a summary of logs grouped by app and instance, with error rollups.
    """
    serializer_class = LogSummarySerializer
    authentication_classes = [SessionAuthentication, TokenAuthentication]
    permission_classes = [IsAuthenticated]
    queryset = AppLog.objects.all()  # Provide a queryset for DRF permissions

    def get(self, request, format=None):
        """Build {app: {instance: {error_counts, recent_errors}}} and return it."""
        summary = {}
        pairs = AppLog.objects.all().values('app_name', 'instance_name').distinct()
        for pair in pairs:
            app_name = pair['app_name']
            instance_name = pair['instance_name']
            per_app = summary.setdefault(app_name, {})

            # Per-level counts for this app/instance pair.
            level_rows = (
                AppLog.objects.filter(app_name=app_name, instance_name=instance_name)
                .values('levelname')
                .annotate(count=Count('id'))
            )

            # Five most recent ERROR/CRITICAL entries for this pair.
            recent = list(
                AppLog.objects.filter(
                    app_name=app_name,
                    instance_name=instance_name,
                    levelname__in=['ERROR', 'CRITICAL'],
                )
                .order_by('-timestamp')[:5]
                .values('timestamp', 'levelname', 'message', 'module', 'funcname', 'lineno')
            )

            per_app[instance_name] = {
                'error_counts': {row['levelname']: row['count'] for row in level_rows},
                'recent_errors': recent,
            }
        return Response(summary, status=status.HTTP_200_OK)
0696 
@login_required
def database_tables_list(request):
    """
    Modern database tables list view using server-side DataTables.
    Shows all swf_ tables with counts and last insert times.
    """
    from django.urls import reverse

    # (field, header) pairs expanded into DataTables column dicts; all
    # three columns are sortable.
    column_specs = [
        ('name', 'Table Name'),
        ('count', 'Row Count'),
        ('last_insert', 'Last Insert'),
    ]
    columns = [
        {'name': field, 'title': title, 'orderable': True}
        for field, title in column_specs
    ]

    return render(request, 'monitor_app/database_tables_server.html', {
        'table_title': 'Database Overview',
        'table_description': 'Server-side processing view of all swf_ tables in the database with row counts and last insert times.',
        'ajax_url': reverse('monitor_app:database_tables_datatable_ajax'),
        'columns': columns,
    })
0719 
0720 
@login_required
def database_tables_datatable_ajax(request):
    """
    AJAX endpoint for server-side DataTables processing of database tables.
    Uses proper DataTables pattern with simulated queryset for table metadata.

    Login is required for consistency with database_tables_list (the page
    that calls this endpoint) and with the per-table AJAX endpoint below,
    both of which are already login-protected.
    """
    from .utils import DataTablesProcessor, format_datetime

    # Initialize DataTables processor
    columns = ['name', 'count', 'last_insert']
    dt = DataTablesProcessor(request, columns, default_order_column=0, default_order_direction='asc')

    # Build table metadata as a list of dicts (simulating queryset records),
    # one per Django model whose table belongs to the swf_ schema.
    table_records = []
    for model in apps.get_models():
        if not model._meta.db_table.startswith('swf_'):
            continue

        record = {
            'name': model._meta.db_table,
            'count': 0,
            'last_insert': None
        }

        try:
            record['count'] = model.objects.count()
            # Use the model's first DateTimeField (declaration order) as a
            # proxy for "last insert" time.
            dt_fields = [f.name for f in model._meta.fields if f.get_internal_type() == 'DateTimeField']
            if dt_fields:
                last_obj = model.objects.order_by('-' + dt_fields[0]).first()
                if last_obj:
                    record['last_insert'] = getattr(last_obj, dt_fields[0])
        except Exception:
            pass  # Table may not exist or be accessible

        table_records.append(record)

    # Get total counts
    records_total = len(table_records)

    # Case-insensitive substring search on the table name only.
    if dt.search_value:
        search_term = dt.search_value.lower()
        table_records = [r for r in table_records if search_term in r['name'].lower()]

    records_filtered = len(table_records)

    # Sort in Python; the (is None, value) key tuple groups None entries
    # together instead of raising on None-vs-datetime comparisons.
    table_records.sort(key=lambda r: (r[dt.order_column] is None, r[dt.order_column]), reverse=(dt.order_direction == 'desc'))

    # Pagination: a non-positive DataTables length means "show all".
    start = dt.start
    length = dt.length if dt.length > 0 else len(table_records)
    paginated_records = table_records[start:start + length]

    # Render each row as the [link, count, last_insert] cells DataTables expects.
    data = []
    for record in paginated_records:
        table_url = reverse('monitor_app:database_table_list', args=[record['name']])
        table_link = f'<a href="{table_url}">{record["name"]}</a>'
        count_str = str(record['count'])
        last_insert_str = format_datetime(record['last_insert'])

        data.append([table_link, count_str, last_insert_str])

    return dt.create_response(data, records_total, records_filtered)
0787 
0788 
0789 from django.http import Http404
0790 
@login_required
def database_table_list(request, table_name):
    """
    Render the contents page for a single swf_ database table.

    A small sample (first 100 rows) is fetched directly; the actual data
    grid is served by database_table_datatable_ajax.

    Raises Http404 unless table_name is a plain swf_-prefixed identifier.
    The strict whitelist also prevents SQL injection: startswith('swf_')
    alone would still allow quote characters to break out of the quoted
    identifier in the raw query below.
    """
    import re
    if not re.fullmatch(r'swf_[A-Za-z0-9_]+', table_name):
        raise Http404()
    with connection.cursor() as cursor:
        cursor.execute(f'SELECT * FROM "{table_name}" LIMIT 100')
        columns = [col[0] for col in cursor.description]
        rows = [dict(zip(columns, row)) for row in cursor.fetchall()]
    # Identify datetime columns using the Django model, if one maps to this table.
    dt_columns = []
    for model in apps.get_models():
        if model._meta.db_table == table_name:
            dt_columns = [f.name for f in model._meta.fields if f.get_internal_type() == 'DateTimeField']
            break
    def get_item(row, key):
        return row.get(key, '')
    # NOTE(review): registering a template filter at request time mutates the
    # global template library; the template appears to rely on 'get_item'.
    from django.template.defaulttags import register
    register.filter('get_item', get_item)
    from django.urls import reverse

    # Convert columns for DataTables format
    datatable_columns = [{'name': col, 'title': col.replace('_', ' ').title(), 'orderable': True} for col in columns]

    context = {
        'table_title': f'Table: {table_name}',
        'table_description': f'Database table contents for {table_name} with search, sorting, and pagination.',
        'ajax_url': reverse('monitor_app:database_table_datatable_ajax', kwargs={'table_name': table_name}),
        'columns': datatable_columns,
        'table_name': table_name,
    }
    return render(request, 'monitor_app/database_table_list.html', context)
0822 
0823 
@login_required
def database_table_datatable_ajax(request, table_name):
    """
    AJAX endpoint for server-side DataTables processing of individual database table.
    Provides pagination, search, and sorting for any swf_ table.

    Raises Http404 unless table_name is a plain swf_-prefixed identifier;
    the strict whitelist also prevents SQL injection through the quoted
    table name used in the raw queries below.
    """
    import re
    if not re.fullmatch(r'swf_[A-Za-z0-9_]+', table_name):
        raise Http404()

    from .utils import DataTablesProcessor, format_datetime

    # Discover the column names with a one-row probe query.
    with connection.cursor() as cursor:
        cursor.execute(f'SELECT * FROM "{table_name}" LIMIT 1')
        columns = [col[0] for col in cursor.description]

    # Initialize DataTables processor
    dt = DataTablesProcessor(request, columns, default_order_column=0, default_order_direction='asc')

    # Build base query
    query = f'SELECT * FROM "{table_name}"'
    count_query = f'SELECT COUNT(*) FROM "{table_name}"'
    params = []

    # Get total count
    with connection.cursor() as cursor:
        cursor.execute(count_query)
        records_total = cursor.fetchone()[0]

    # Case-insensitive search across every column; the search term goes in
    # as a bound parameter, never into the SQL text.
    where_conditions = []
    if dt.search_value:
        search_conditions = []
        for column in columns:
            search_conditions.append(f'CAST("{column}" AS TEXT) ILIKE %s')
            params.append(f'%{dt.search_value}%')
        where_conditions.append(f"({' OR '.join(search_conditions)})")

    # Build filtered query
    filtered_query = query
    filtered_count_query = count_query
    if where_conditions:
        where_clause = ' WHERE ' + ' AND '.join(where_conditions)
        filtered_query += where_clause
        filtered_count_query += where_clause

    # Get filtered count
    with connection.cursor() as cursor:
        cursor.execute(filtered_count_query, params)
        records_filtered = cursor.fetchone()[0]

    # Apply ordering; the column name is validated against the probe result,
    # and the direction is normalized to a known-safe keyword.
    if dt.order_column and dt.order_column in columns:
        direction = 'DESC' if dt.order_direction == 'desc' else 'ASC'
        filtered_query += f' ORDER BY "{dt.order_column}" {direction}'

    # Apply pagination. A non-positive DataTables length means "show all";
    # a literal negative LIMIT would be rejected by the database.
    if dt.length > 0:
        filtered_query += f' LIMIT {int(dt.length)} OFFSET {int(dt.start)}'
    else:
        filtered_query += f' OFFSET {int(dt.start)}'

    # Execute final query
    with connection.cursor() as cursor:
        cursor.execute(filtered_query, params)
        results = cursor.fetchall()

    # Identify datetime columns using Django model if available
    dt_columns = []
    for model in apps.get_models():
        if model._meta.db_table == table_name:
            dt_columns = [f.name for f in model._meta.fields if f.get_internal_type() == 'DateTimeField']
            break

    # Format results for DataTables
    data = []
    for row in results:
        row_data = []
        for i, value in enumerate(row):
            column_name = columns[i]
            if column_name in dt_columns and value:
                # Format datetime values
                row_data.append(format_datetime(value))
            else:
                row_data.append(str(value) if value is not None else '')
        data.append(row_data)

    return dt.create_response(data, records_total, records_filtered)
0909 
0910 
0911 # Views for SWF Data Models
0912 
@login_required
def runs_list(request):
    """
    Professional runs list view using server-side DataTables.
    Provides high-performance access to all run records with filtering.
    """
    from django.urls import reverse

    # (field, header, sortable) specs expanded into DataTables column dicts;
    # only the actions column is non-sortable.
    specs = [
        ('run_number', 'Run Number', True),
        ('start_time', 'Start Time', True),
        ('end_time', 'End Time', True),
        ('duration', 'Duration', True),
        ('stf_files_count', 'STF Files', True),
        ('actions', 'Actions', False),
    ]
    columns = [
        {'name': name, 'title': title, 'orderable': sortable}
        for name, title, sortable in specs
    ]

    return render(request, 'monitor_app/runs_list.html', {
        'table_title': 'Simulation Runs',
        'table_description': 'Monitor testbed runs with start/end times, duration, and associated STF files.',
        'ajax_url': reverse('monitor_app:runs_datatable_ajax'),
        'columns': columns,
    })
0938 
0939 
@login_required
def runs_datatable_ajax(request):
    """
    AJAX endpoint for server-side DataTables processing of runs.
    Handles pagination, searching, ordering, and filtering.

    Login is required for consistency with runs_list, the page that calls
    this endpoint (the other list views in this module are login-protected).
    """
    from .utils import DataTablesProcessor, format_run_duration, format_datetime
    from django.db.models import Count, Case, When, F, DurationField
    from django.utils import timezone

    # Initialize DataTables processor
    columns = ['run_number', 'start_time', 'end_time', 'duration', 'stf_files_count', 'actions']
    special_order_cases = {
        'stf_files_count': 'stf_files_count',
        'duration': 'calculated_duration'  # Sort by the calculated duration field
    }
    dt = DataTablesProcessor(request, columns, default_order_column=1, default_order_direction='desc')

    # Annotate each run with its STF file count and a sortable duration.
    # timezone.now() is evaluated once per request, so in-progress durations
    # are consistent within a single response.
    queryset = Run.objects.annotate(
        stf_files_count=Count('stf_files'),
        calculated_duration=Case(
            # If end_time exists, calculate duration: end_time - start_time
            When(end_time__isnull=False, then=F('end_time') - F('start_time')),
            # If still in progress (end_time is NULL), duration = now - start_time
            default=timezone.now() - F('start_time'),
            output_field=DurationField()
        )
    ).all()

    # Get counts and apply search/pagination
    records_total = Run.objects.count()
    search_fields = ['run_number', 'start_time', 'end_time']
    queryset = dt.apply_search(queryset, search_fields)
    records_filtered = queryset.count()

    queryset = queryset.order_by(dt.get_order_by(special_order_cases))
    runs = dt.apply_pagination(queryset)

    # Format data for DataTables
    data = []
    for run in runs:
        start_time_str = format_datetime(run.start_time)
        end_time_str = format_datetime(run.end_time) if run.end_time else '—'
        duration_str = format_run_duration(run.start_time, run.end_time)
        # Resolve the detail URL once; it backs both the run-number link
        # and the View action link.
        run_detail_url = reverse('monitor_app:run_detail', args=[run.run_number])
        run_number_link = f'<a href="{run_detail_url}">{run.run_number}</a>'

        # Make STF files count clickable to filter STF files by this run
        if run.stf_files_count > 0:
            stf_files_url = reverse('monitor_app:stf_files_list')
            stf_files_link = f'<a href="{stf_files_url}?run_number={run.run_number}">{run.stf_files_count}</a>'
        else:
            stf_files_link = str(run.stf_files_count)

        view_link = f'<a href="{run_detail_url}">View</a>'

        data.append([
            run_number_link, start_time_str, end_time_str,
            duration_str, stf_files_link, view_link
        ])

    return dt.create_response(data, records_total, records_filtered)
1003 
@login_required
def run_detail(request, run_number):
    """Display detailed view of a specific run, with per-status STF file counts."""
    run = get_object_or_404(Run, run_number=run_number)
    stf_files = run.stf_files.all().order_by('-created_at')

    # Count TF files for this run across all STF files
    tf_files_count = FastMonFile.objects.filter(stf_file__run=run).count()

    # One grouped query for per-status counts instead of one COUNT query per
    # status choice. order_by() clears the -created_at ordering, which would
    # otherwise leak into the GROUP BY and break the aggregation.
    status_counts = {
        row['status']: row['count']
        for row in stf_files.order_by().values('status').annotate(count=Count('id'))
    }
    # Keep the original shape: every declared status choice appears, with 0
    # for statuses that have no files.
    file_stats = {
        choice[0]: status_counts.get(choice[0], 0)
        for choice in StfFile._meta.get_field('status').choices
    }

    context = {
        'run': run,
        'stf_files': stf_files,
        'file_stats': file_stats,
        'tf_files_count': tf_files_count,
    }
    return render(request, 'monitor_app/run_detail.html', context)
1026 
@login_required
def stf_files_list(request):
    """
    Professional STF files list view using server-side DataTables.
    Provides high-performance access to all STF file records with filtering.
    """
    from django.urls import reverse

    # Initial filter state pulled straight from the query string.
    params = request.GET
    run_number = params.get('run_number')
    status_filter = params.get('status')
    machine_state = params.get('machine_state')

    # Options for the filter dropdown links.
    run_numbers = Run.objects.values_list('run_number', flat=True).distinct()
    statuses = [choice[0] for choice in StfFile._meta.get_field('status').choices]
    machine_states = StfFile.objects.values_list('machine_state', flat=True).distinct()

    # DataTables column definitions expanded from (field, header) pairs;
    # only the actions column is non-sortable.
    header_by_field = [
        ('stf_filename', 'STF Filename'),
        ('run__run_number', 'Run'),
        ('tf_files_count', 'TF Files'),
        ('machine_state', 'State'),
        ('status', 'Status'),
        ('created_at', 'Created'),
        ('actions', 'Actions'),
    ]
    columns = [
        {'name': field, 'title': title, 'orderable': field != 'actions'}
        for field, title in header_by_field
    ]

    context = {
        'table_title': 'STF Files',
        'table_description': 'Track STF files by run, machine state, and processing status.',
        'ajax_url': reverse('monitor_app:stf_files_datatable_ajax'),
        'columns': columns,
        'run_numbers': sorted(run_numbers, reverse=True),
        'statuses': statuses,
        'machine_states': sorted(s for s in machine_states if s),
        'selected_run_number': run_number,
        'selected_status': status_filter,
        'selected_machine_state': machine_state,
    }
    return render(request, 'monitor_app/stf_files_list.html', context)
1069 
1070 
@login_required
def stf_files_datatable_ajax(request):
    """
    AJAX endpoint for server-side DataTables processing of STF files.
    Handles pagination, searching, ordering, and filtering.

    Login is required for consistency with stf_files_list, the page that
    calls this endpoint.
    """
    from .utils import DataTablesProcessor, get_filter_params, format_datetime

    # Initialize DataTables processor
    columns = ['stf_filename', 'run__run_number', 'tf_files_count', 'machine_state', 'status', 'created_at', 'actions']
    dt = DataTablesProcessor(request, columns, default_order_column=5, default_order_direction='desc')

    # Base queryset with the related run prefetched and a TF files count.
    # Count is already imported at module level; no local import needed.
    queryset = StfFile.objects.select_related('run').annotate(
        tf_files_count=Count('tf_files')
    )

    # Map request filter parameters to their actual ORM field lookups.
    filter_mapping = {
        'run_number': 'run__run_number',  # Map filter param to actual field
        'status': 'status',
        'machine_state': 'machine_state'
    }
    filters = get_filter_params(request, filter_mapping.keys())
    # Apply filters with correct field names; .get() tolerates absent keys.
    for param_name, field_name in filter_mapping.items():
        if filters.get(param_name):
            queryset = queryset.filter(**{field_name: filters[param_name]})

    # Get counts and apply search/pagination
    records_total = StfFile.objects.count()
    search_fields = ['stf_filename', 'run__run_number', 'machine_state', 'status']
    queryset = dt.apply_search(queryset, search_fields)
    records_filtered = queryset.count()

    queryset = queryset.order_by(dt.get_order_by())
    stf_files = dt.apply_pagination(queryset)

    # Format data for DataTables
    data = []
    for file in stf_files:
        # Use plain text status (consistent with runs view)
        status_text = file.get_status_display()
        timestamp_str = format_datetime(file.created_at)
        run_link = f'<a href="{reverse("monitor_app:run_detail", args=[file.run.run_number])}">{file.run.run_number}</a>' if file.run else 'N/A'

        # Make TF files count clickable to filter TF files by this STF
        if file.tf_files_count > 0:
            tf_files_url = reverse('monitor_app:fastmon_files_list')
            tf_files_link = f'<a href="{tf_files_url}?stf_filename={file.stf_filename}">{file.tf_files_count}</a>'
        else:
            tf_files_link = str(file.tf_files_count)

        stf_file_detail_url = reverse('monitor_app:stf_file_detail', args=[file.file_id])
        view_link = f'<a href="{stf_file_detail_url}">View</a>'

        data.append([
            file.stf_filename, run_link, tf_files_link, file.machine_state or '',
            status_text, timestamp_str, view_link
        ])

    return dt.create_response(data, records_total, records_filtered)
1133 
1134 
@login_required
def stf_file_detail(request, file_id):
    """Render the detail page for one STF file, looked up by its file_id."""
    return render(
        request,
        'monitor_app/stf_file_detail.html',
        {'stf_file': get_object_or_404(StfFile, file_id=file_id)},
    )
1144 
@login_required
def subscribers_list(request):
    """Professional subscribers list view using server-side DataTables."""
    from django.urls import reverse

    # DataTables columns; every field is sortable except the actions column.
    field_titles = [
        ('subscriber_name', 'Subscriber Name'),
        ('description', 'Description'),
        ('fraction', 'Fraction'),
        ('is_active', 'is_active'),
        ('created_at', 'Created'),
        ('updated_at', 'Updated'),
        ('actions', 'Actions'),
    ]
    columns = [
        {'name': field, 'title': title, 'orderable': field != 'actions'}
        for field, title in field_titles
    ]

    # Dynamic filter definitions (generic auto-discovery, select widget).
    filter_fields = [
        {'name': 'is_active', 'label': 'is_active', 'type': 'select'},
    ]

    return render(request, 'monitor_app/subscribers_list_dynamic.html', {
        'table_title': 'Message Queue Subscribers',
        'table_description': 'Monitor message queue subscribers and their activity status.',
        'ajax_url': reverse('monitor_app:subscribers_datatable_ajax'),
        'filter_counts_url': reverse('monitor_app:subscribers_filter_counts'),
        'columns': columns,
        'filter_fields': filter_fields,
        'selected_is_active': request.GET.get('is_active'),
    })
1176 
@login_required
def subscribers_datatable_ajax(request):
    """
    AJAX endpoint for server-side DataTables processing of subscribers.
    Handles pagination, searching, ordering, and filtering using utils.py functions.

    Login is required for consistency with subscribers_list, the page that
    calls this endpoint.
    """
    from .utils import DataTablesProcessor, get_filter_params, apply_filters, format_datetime

    # Initialize DataTables processor
    columns = ['subscriber_name', 'description', 'fraction', 'is_active', 'created_at', 'updated_at', 'actions']
    dt = DataTablesProcessor(request, columns, default_order_column=0, default_order_direction='asc')

    # Build base queryset and apply filters using utils.py
    queryset = Subscriber.objects.all()
    filters = get_filter_params(request, ['is_active'])
    queryset = apply_filters(queryset, filters)

    # Get counts and apply search/pagination
    records_total = Subscriber.objects.count()
    search_fields = ['subscriber_name', 'description']
    queryset = dt.apply_search(queryset, search_fields)
    records_filtered = queryset.count()

    queryset = queryset.order_by(dt.get_order_by())
    subscribers = dt.apply_pagination(queryset)

    # Format data for DataTables
    data = []
    for subscriber in subscribers:
        # Resolve the detail URL once; it backs both the name link and the
        # View action link.
        subscriber_detail_url = reverse('monitor_app:subscriber_detail', args=[subscriber.subscriber_id])
        subscriber_name_link = f'<a href="{subscriber_detail_url}">{subscriber.subscriber_name}</a>'
        # Truncate long descriptions for the table cell.
        description = subscriber.description[:100] + '...' if subscriber.description and len(subscriber.description) > 100 else (subscriber.description or '')
        fraction_str = f"{subscriber.fraction:.3f}" if subscriber.fraction is not None else 'N/A'
        # Show raw DB value, not massaged badges
        is_active_value = str(subscriber.is_active).lower()  # True -> 'true', False -> 'false'
        created_str = format_datetime(subscriber.created_at)
        updated_str = format_datetime(subscriber.updated_at)
        view_link = f'<a href="{subscriber_detail_url}">View</a>'

        data.append([
            subscriber_name_link, description, fraction_str, is_active_value,
            created_str, updated_str, view_link
        ])

    return dt.create_response(data, records_total, records_filtered)
1222 
1223 
def get_subscribers_filter_counts(request):
    """
    AJAX endpoint that returns dynamic filter options with counts for subscribers.
    Uses utils.py get_filter_counts() for generic auto-discovery of field values.
    """
    from .utils import get_filter_counts, get_filter_params, apply_filters

    # Only is_active is filterable for subscribers.
    filter_fields = ['is_active']
    current_filters = get_filter_params(request, filter_fields)

    # Auto-discover field values and their counts over all subscribers.
    filter_counts = get_filter_counts(
        Subscriber.objects.all(), filter_fields, current_filters
    )

    return JsonResponse({
        'filter_counts': filter_counts,
        'current_filters': current_filters,
    })
1245 
1246 
@login_required
def subscriber_detail(request, subscriber_id):
    """Display details for a specific subscriber."""
    return render(
        request,
        'monitor_app/subscriber_detail.html',
        {'subscriber': get_object_or_404(Subscriber, subscriber_id=subscriber_id)},
    )
1257 
1258 
1259 # ==================== WORKFLOW VIEWS ====================
1260 
@login_required
def workflow_dashboard(request):
    """Main workflow dashboard showing pipeline status and statistics."""

    # Headline statistics for the dashboard cards. "Active" is anything not
    # in a terminal state (complete or failed).
    terminal_states = [WorkflowStatus.WORKFLOW_COMPLETE, WorkflowStatus.FAILED]
    total_workflows = STFWorkflow.objects.count()
    active_workflows = STFWorkflow.objects.exclude(
        current_status__in=terminal_states
    ).count()
    completed_workflows = STFWorkflow.objects.filter(
        current_status=WorkflowStatus.WORKFLOW_COMPLETE
    ).count()
    failed_workflows = STFWorkflow.objects.filter(
        current_status=WorkflowStatus.FAILED
    ).count()

    # Twenty most recently created workflows.
    recent_workflows = STFWorkflow.objects.all().order_by('-created_at')[:20]

    # Distribution of workflows per status value.
    status_counts = (
        STFWorkflow.objects.values('current_status')
        .annotate(count=Count('current_status'))
        .order_by('current_status')
    )

    # Distribution of workflows per DAQ state.
    daq_state_counts = (
        STFWorkflow.objects.values('daq_state')
        .annotate(count=Count('daq_state'))
        .order_by('daq_state')
    )

    # Agents participating in the workflow pipeline.
    workflow_agents = SystemAgent.objects.filter(workflow_enabled=True)

    return render(request, 'monitor_app/workflow_dashboard.html', {
        'total_workflows': total_workflows,
        'active_workflows': active_workflows,
        'completed_workflows': completed_workflows,
        'failed_workflows': failed_workflows,
        'recent_workflows': recent_workflows,
        'status_counts': status_counts,
        'workflow_agents': workflow_agents,
        'daq_state_counts': daq_state_counts,
    })
1305 
1306 
@login_required
def workflow_list(request):
    """Professional workflow list view using server-side DataTables with dynamic filtering."""
    from django.urls import reverse
    from .utils import get_filter_counts
    from .workflow_models import STFWorkflow

    # DataTables column definitions; every column is sortable here.
    titles = [
        ('filename', 'Filename'),
        ('msg_type', 'Type'),
        ('current_status', 'Status'),
        ('current_agent', 'Current Agent'),
        ('daq_state', 'DAQ State'),
        ('generated_time', 'Generated'),
        ('updated_at', 'Updated'),
    ]
    columns = [
        {'name': field, 'title': title, 'orderable': True}
        for field, title in titles
    ]

    # Per-value counts of the filterable fields, for the filter widgets.
    filter_fields = ['current_status', 'current_agent', 'daq_state']
    filter_counts = get_filter_counts(STFWorkflow.objects.all(), filter_fields)

    return render(request, 'monitor_app/workflow_list.html', {
        'table_title': 'Workflow List',
        'table_description': 'Monitor workflow progress through the processing pipeline from generation to completion.',
        'ajax_url': reverse('monitor_app:workflow_datatable_ajax'),
        'columns': columns,
        'filter_counts': filter_counts,
    })
1337 
1338 
@login_required
def workflow_datatable_ajax(request):
    """
    AJAX endpoint for server-side DataTables processing of workflows.
    Handles pagination, searching, ordering, and filtering.

    Login is required for consistency with workflow_list, the page that
    calls this endpoint.
    """
    from .utils import DataTablesProcessor, format_datetime, apply_filters, get_filter_params
    from .workflow_models import STFWorkflow

    # Initialize DataTables processor
    columns = ['filename', 'msg_type', 'current_status', 'current_agent', 'daq_state', 'generated_time', 'updated_at']
    dt = DataTablesProcessor(request, columns, default_order_column=5, default_order_direction='desc')

    # Build base queryset
    queryset = STFWorkflow.objects.all()

    # Apply dynamic filters
    filter_fields = ['current_status', 'current_agent', 'daq_state']
    filters = get_filter_params(request, filter_fields)
    queryset = apply_filters(queryset, filters)

    # Get counts and apply search/pagination
    records_total = STFWorkflow.objects.count()
    search_fields = ['filename', 'current_status', 'current_agent', 'daq_state']
    queryset = dt.apply_search(queryset, search_fields)
    records_filtered = queryset.count()

    queryset = queryset.order_by(dt.get_order_by())
    workflows = dt.apply_pagination(queryset)

    # Format data for DataTables
    data = []
    for workflow in workflows:
        workflow_detail_url = reverse('monitor_app:workflow_detail', args=[workflow.workflow_id])
        filename_link = f'<a href="{workflow_detail_url}">{workflow.filename}</a>'

        # Extract msg_type from JSON metadata safely; stf_metadata may be
        # missing or not a dict.
        msg_type = 'N/A'
        if workflow.stf_metadata and isinstance(workflow.stf_metadata, dict):
            msg_type = workflow.stf_metadata.get('msg_type', 'N/A')

        status_display = workflow.get_current_status_display()
        agent_display = workflow.get_current_agent_display()
        daq_state_str = f"{workflow.daq_state} / {workflow.daq_substate}"
        generated_time_str = format_datetime(workflow.generated_time)
        updated_time_str = format_datetime(workflow.updated_at)

        data.append([
            filename_link, msg_type, status_display, agent_display,
            daq_state_str, generated_time_str, updated_time_str
        ])

    return dt.create_response(data, records_total, records_filtered)
1391 
1392 
@login_required
def workflow_detail(request, workflow_id):
    """Render the detail page for one workflow: its stages, messages, and duration."""
    workflow = get_object_or_404(STFWorkflow, workflow_id=workflow_id)

    # Stages in creation order, messages in send order.
    stage_qs = AgentWorkflowStage.objects.filter(workflow=workflow).order_by('created_at')
    message_qs = WorkflowMessage.objects.filter(workflow=workflow).order_by('sent_at')

    # Duration runs from creation to whichever terminal timestamp is set
    # (completion takes precedence over failure); None while still active.
    workflow_duration = None
    end_time = workflow.completed_at or workflow.failed_at
    if end_time:
        workflow_duration = (end_time - workflow.created_at).total_seconds()

    return render(request, 'monitor_app/workflow_detail.html', {
        'workflow': workflow,
        'stages': stage_qs,
        'messages': message_qs,
        'workflow_duration': workflow_duration,
    })
1424 
1425 
@login_required
def workflow_agents_list(request):
    """View showing the status of all workflow agents using server-side DataTables."""
    from django.urls import reverse

    def _distinct_values(field, base=None):
        # Distinct, sorted values of one SystemAgent field for the filter bars.
        qs = SystemAgent.objects if base is None else base
        return list(qs.values_list(field, flat=True).distinct().order_by(field))

    # Namespace filter bar excludes null and empty values.
    with_namespace = SystemAgent.objects.exclude(namespace__isnull=True).exclude(namespace='')

    column_titles = [
        'Agent Name', 'Type', 'Status', 'Namespace', 'Last Heartbeat',
        'Currently Processing', 'Recently Completed (1hr)', 'Total Processed',
    ]

    context = {
        'table_title': 'Agent Status',
        'table_description': 'Status and statistics for all agents.',
        'ajax_url': reverse('monitor_app:workflow_agents_datatable_ajax'),
        'columns': [{'title': title, 'orderable': True} for title in column_titles],
        'filter_fields': [],  # Handled in template
        'default_order': [[4, 'desc']],  # Default sort by Last Heartbeat descending
        # Current filter selections carried on the URL.
        'selected_type': request.GET.get('agent_type', ''),
        'selected_status': request.GET.get('status', ''),
        'selected_namespace': request.GET.get('namespace', ''),
        # Distinct values populating the filter bars.
        'agent_types': _distinct_values('agent_type'),
        'statuses': _distinct_values('status'),
        'namespaces': _distinct_values('namespace', with_namespace),
    }

    return render(request, 'monitor_app/workflow_agents_list_dynamic.html', context)
1476 
1477 
@login_required
def workflow_agents_datatable_ajax(request):
    """AJAX endpoint for workflow agents DataTable server-side processing.

    Supports pagination, global search, column ordering, and the
    agent_type / status / namespace filter bars.  Returns the standard
    DataTables JSON envelope via DataTablesProcessor.create_response().
    """
    from datetime import timedelta
    from .utils import DataTablesProcessor, format_datetime

    # Column definitions matching the template order
    columns = ['instance_name', 'agent_type', 'status', 'namespace', 'last_heartbeat', 'current_processing', 'recent_completed', 'total_stf_processed']

    dt = DataTablesProcessor(request, columns, default_order_column=4, default_order_direction='desc')  # Sort by last_heartbeat descending

    # Base queryset - show all agents, not just workflow-enabled ones
    queryset = SystemAgent.objects.all()

    # Apply filters if provided (empty values mean "no filter").
    agent_type = request.GET.get('agent_type')
    if agent_type:
        queryset = queryset.filter(agent_type=agent_type)

    status = request.GET.get('status')
    if status:
        queryset = queryset.filter(status=status)

    namespace = request.GET.get('namespace')
    if namespace:
        queryset = queryset.filter(namespace=namespace)

    # For sorting current_processing and recent_completed, we need to annotate
    # But for now, let's keep it simple and sort by the basic fields
    special_cases = {
        'current_processing': 'instance_name',  # Fallback to instance name
        'recent_completed': 'instance_name',     # Fallback to instance name
    }

    order_by = dt.get_order_by(special_cases)
    queryset = queryset.order_by(order_by)

    # Apply search if provided
    search_fields = ['instance_name', 'agent_type', 'status']
    queryset = dt.apply_search(queryset, search_fields)

    # Get counts: records_total is the unfiltered table size, as the
    # DataTables protocol expects.
    records_total = SystemAgent.objects.count()
    records_filtered = queryset.count()

    # Apply pagination
    agents = dt.apply_pagination(queryset)

    # Build data rows.
    # NOTE(review): the two AgentWorkflowStage counts below run per agent
    # on the current page (N+1 queries); acceptable at page size, but an
    # annotate() on the main queryset would avoid it.
    data = []
    for agent in agents:
        # Calculate current processing stages: stages still in one of the
        # in-flight statuses for this agent.
        current_stages = AgentWorkflowStage.objects.filter(
            agent_name=agent.instance_name,
            status__in=[
                WorkflowStatus.DATA_RECEIVED,
                WorkflowStatus.DATA_PROCESSING,
                WorkflowStatus.PROCESSING_RECEIVED,
                WorkflowStatus.PROCESSING_PROCESSING,
                WorkflowStatus.FASTMON_RECEIVED,
            ]
        ).count()

        # Calculate recent completion rate (last hour)
        recent_completed = AgentWorkflowStage.objects.filter(
            agent_name=agent.instance_name,
            completed_at__gte=timezone.now() - timedelta(hours=1)
        ).count()

        # Format status badge (unknown statuses fall back to 'secondary').
        status_class = {
            'OK': 'success',
            'WARNING': 'warning',
            'ERROR': 'danger'
        }.get(agent.status, 'secondary')

        status_badge = f'<span class="badge bg-{status_class}">{agent.status}</span>'

        # Create agent name link
        agent_detail_url = reverse('monitor_app:agent_detail', args=[agent.instance_name])
        agent_link = f'<a href="{agent_detail_url}">{agent.instance_name}</a>'

        # Format heartbeat - sorting is now handled at database level
        heartbeat_cell = format_datetime(agent.last_heartbeat) if agent.last_heartbeat else 'Never'

        # Format namespace as link if set
        if agent.namespace:
            namespace_url = reverse('monitor_app:namespace_detail', args=[agent.namespace])
            namespace_cell = f'<a href="{namespace_url}">{agent.namespace}</a>'
        else:
            namespace_cell = '<em class="text-muted">-</em>'

        row = [
            agent_link,
            agent.get_agent_type_display(),
            status_badge,
            namespace_cell,
            heartbeat_cell,
            str(current_stages),
            str(recent_completed),
            str(agent.total_stf_processed or 0),
        ]
        data.append(row)

    return dt.create_response(data, records_total, records_filtered)
1583 
1584 
@login_required
def agent_detail(request, instance_name):
    """Render the detail page for the agent identified by instance_name (404 if absent)."""
    context = {'agent': get_object_or_404(SystemAgent, instance_name=instance_name)}
    return render(request, 'monitor_app/agent_detail.html', context)
1590 
1591 
@login_required
def namespace_detail(request, namespace):
    """Render summary counts (agents, messages, executions) for one namespace."""
    from .workflow_models import WorkflowMessage, WorkflowExecution, Namespace

    # A namespace may exist only implicitly (referenced by agents/messages
    # without a Namespace row), so a missing record is not an error.
    ns_record = Namespace.objects.filter(name=namespace).first()

    context = {
        'namespace': namespace,
        'ns_record': ns_record,
        'agent_count': SystemAgent.objects.filter(namespace=namespace).count(),
        'message_count': WorkflowMessage.objects.filter(namespace=namespace).count(),
        'execution_count': WorkflowExecution.objects.filter(namespace=namespace).count(),
    }
    return render(request, 'monitor_app/namespace_detail.html', context)
1615 
1616 
@login_required
def message_detail(request, message_id):
    """Render the detail page for one workflow message, flattening its JSON payloads."""
    from .workflow_models import WorkflowMessage
    message = get_object_or_404(WorkflowMessage, message_id=message_id)

    def _as_items(payload):
        # Only dict payloads can be flattened into key/value rows; anything
        # else (None, list, scalar) yields an empty table.
        if payload and isinstance(payload, dict):
            return [{'key': k, 'value': v} for k, v in payload.items()]
        return []

    context = {
        'message': message,
        'content_items': _as_items(message.message_content),
        'metadata_items': _as_items(message.message_metadata),
    }
    return render(request, 'monitor_app/message_detail.html', context)
1641 
1642 
@login_required
def workflow_messages(request):
    """View showing all workflow messages with dynamic filtering."""
    from django.urls import reverse

    # The filter bars and the selected_* template variables share one list.
    filter_names = ['namespace', 'execution_id', 'message_type', 'sender_agent', 'workflow', 'is_successful']

    context = {
        'table_title': 'Workflow Messages',
        'table_description': 'All messages exchanged in the workflow system with filtering capabilities.',
        'ajax_url': reverse('monitor_app:workflow_messages_datatable_ajax'),
        'filter_counts_url': reverse('monitor_app:workflow_messages_filter_counts'),
        'columns': [
            {'title': 'Timestamp', 'orderable': True},
            {'title': 'namespace', 'orderable': True},
            {'title': 'message_type', 'orderable': True},
            {'title': 'sender_agent', 'orderable': True},
            {'title': 'source', 'orderable': True},
            {'title': 'workflow', 'orderable': True},
            {'title': 'is_successful', 'orderable': True},
        ],
        'filter_fields': [
            {'name': name, 'label': name, 'type': 'select'} for name in filter_names
        ],
    }
    # Seed the template with any filter values carried on the URL.
    for name in filter_names:
        context[f'selected_{name}'] = request.GET.get(name)

    return render(request, 'monitor_app/workflow_messages_dynamic.html', context)
1680 
1681 
@login_required
def workflow_messages_datatable_ajax(request):
    """AJAX endpoint for workflow messages DataTable server-side processing.

    Supports pagination, global search, column ordering, and the dynamic
    filter bars (namespace, execution, message type, agents, workflow,
    success flag).  Returns the standard DataTables JSON envelope.
    """
    from .utils import DataTablesProcessor, get_filter_params, apply_filters, format_datetime

    # Column definitions matching the template order
    columns = ['sent_at', 'namespace', 'message_type', 'sender_agent', 'source', 'workflow', 'is_successful']

    dt = DataTablesProcessor(request, columns, default_order_column=0, default_order_direction='desc')

    # Base queryset; select_related avoids one extra query per row when
    # building the workflow links below.
    queryset = WorkflowMessage.objects.select_related('workflow')

    # Apply filters
    filter_params = get_filter_params(request, ['namespace', 'execution_id', 'message_type', 'sender_agent', 'recipient_agent', 'workflow', 'is_successful'])

    # The workflow filter arrives as a display filename; translate it to the
    # underlying workflow_id before filtering.  'N/A' is the UI's label for
    # "messages without a workflow".  The raw filename is always popped so it
    # can never leak into apply_filters as an invalid FK value.
    if filter_params.get('workflow'):
        workflow_value = filter_params.pop('workflow')
        if workflow_value == 'N/A':
            filter_params['workflow__isnull'] = True
        else:
            workflow_obj = STFWorkflow.objects.filter(filename=workflow_value).first()
            if workflow_obj:
                filter_params['workflow'] = workflow_obj.workflow_id
            else:
                # Unknown filename: no message can match.
                queryset = queryset.none()

    queryset = apply_filters(queryset, filter_params)

    # Apply global search over the free-text columns.
    search_fields = ['message_type', 'sender_agent', 'recipient_agent']
    queryset = dt.apply_search(queryset, search_fields)

    # Apply ordering
    queryset = queryset.order_by(dt.get_order_by())

    # DataTables wants the unfiltered total alongside the filtered count.
    records_total = WorkflowMessage.objects.count()
    records_filtered = queryset.count()

    # Apply pagination
    messages = dt.apply_pagination(queryset)

    # Build one row (list of HTML cell strings) per message.
    data = []
    for message in messages:
        # Success badge: True/False/None map to success/danger/secondary.
        if message.is_successful is True:
            status = '<span class="badge bg-success">Success</span>'
        elif message.is_successful is False:
            status = '<span class="badge bg-danger">Failed</span>'
        else:
            status = '<span class="badge bg-secondary">Unknown</span>'

        # Workflow link (plain 'N/A' when the message has no workflow).
        if message.workflow:
            workflow_detail_url = reverse('monitor_app:workflow_detail', args=[message.workflow.workflow_id])
            workflow_link = f'<a href="{workflow_detail_url}" style="font-size: 0.8rem;">{message.workflow.filename}</a>'
        else:
            workflow_link = 'N/A'

        # Sender agent link.
        sender_link = f'<a href="{reverse("monitor_app:agent_detail", args=[message.sender_agent])}">{message.sender_agent}</a>' if message.sender_agent else 'N/A'

        # 'created_by' from the message metadata identifies the source.
        source = 'Unknown'
        if message.message_metadata and isinstance(message.message_metadata, dict):
            source = message.message_metadata.get('created_by', 'Unknown')

        # Apply smaller font to source for less column width
        source = f'<span style="font-size: 0.8rem;">{source}</span>'

        # The timestamp doubles as the link to the message detail page.
        message_detail_url = reverse('monitor_app:message_detail', args=[message.message_id])
        timestamp_link = f'<a href="{message_detail_url}">{format_datetime(message.sent_at)}</a>'

        # Namespace link (empty cell when unset).
        if message.namespace:
            namespace_url = reverse('monitor_app:namespace_detail', args=[message.namespace])
            namespace_link = f'<a href="{namespace_url}">{message.namespace}</a>'
        else:
            namespace_link = ''

        data.append([
            timestamp_link,
            namespace_link,
            message.message_type,
            sender_link,
            source,
            workflow_link,
            status,
        ])

    return dt.create_response(data, records_total, records_filtered)
1786 
1787 
@login_required
def get_workflow_messages_filter_counts(request):
    """Get filter counts for workflow messages filters.

    Returns JSON mapping each filter field to a list of (value, count)
    pairs.  The workflow filter is special-cased so the UI shows
    filenames rather than raw workflow IDs.
    """
    from .utils import get_filter_params, apply_filters, get_filter_counts
    from django.http import JsonResponse

    # Get current filters
    current_filters = get_filter_params(request, ['namespace', 'execution_id', 'message_type', 'sender_agent', 'workflow', 'is_successful'])

    # Base queryset
    queryset = WorkflowMessage.objects.select_related('workflow')

    # Calculate counts for each filter
    filter_fields = ['namespace', 'execution_id', 'message_type', 'sender_agent', 'is_successful']
    filter_counts = get_filter_counts(queryset, filter_fields, current_filters)

    # Handle workflow filter specially - show filenames instead of IDs.
    # Apply every active filter except 'workflow' itself so the workflow
    # counts reflect the user's other selections.
    workflow_queryset = queryset
    temp_filters = {k: v for k, v in current_filters.items() if k != 'workflow' and v}
    workflow_queryset = apply_filters(workflow_queryset, temp_filters)

    # Get workflow counts with filenames
    workflow_counts = []

    # Count messages with workflows, most frequent first.
    workflow_msgs = workflow_queryset.filter(workflow__isnull=False).values('workflow__filename').annotate(count=Count('message_id')).filter(count__gt=0).order_by('-count', 'workflow__filename')
    for item in workflow_msgs:
        workflow_counts.append((item['workflow__filename'], item['count']))

    # Count messages without workflows; shown in the UI as 'N/A'.
    null_count = workflow_queryset.filter(workflow__isnull=True).count()
    if null_count > 0:
        workflow_counts.append(('N/A', null_count))

    filter_counts['workflow'] = workflow_counts

    return JsonResponse({'filter_counts': filter_counts})
1825 
1826 
@login_required
def workflow_performance(request):
    """View showing workflow performance metrics and analytics.

    Builds per-agent-type processing-time aggregates plus 24-hour
    throughput counters for the performance template.
    """

    # Get processing time statistics
    from django.db.models import Avg, Min, Max, Count

    # Overall workflow completion times
    # (queryset is handed to the template unevaluated).
    completed_workflows = STFWorkflow.objects.filter(
        current_status=WorkflowStatus.WORKFLOW_COMPLETE,
        completed_at__isnull=False
    )

    # Agent performance statistics: one aggregate entry per agent type
    # that has at least one stage with a recorded processing time.
    agent_performance = []
    for agent_type in AgentType.choices:
        agent_code = agent_type[0]
        agent_name = agent_type[1]

        stages = AgentWorkflowStage.objects.filter(
            agent_type=agent_code,
            processing_time_seconds__isnull=False
        )

        if stages.exists():
            stats = stages.aggregate(
                avg_time=Avg('processing_time_seconds'),
                min_time=Min('processing_time_seconds'),
                max_time=Max('processing_time_seconds'),
                count=Count('stage_id')
            )

            agent_performance.append({
                'agent_type': agent_name,
                'agent_code': agent_code,
                'avg_time': stats['avg_time'],
                'min_time': stats['min_time'],
                'max_time': stats['max_time'],
                'count': stats['count']
            })

    # Recent throughput (last 24 hours)
    from datetime import timedelta
    recent_time = timezone.now() - timedelta(hours=24)

    recent_workflows = STFWorkflow.objects.filter(
        created_at__gte=recent_time
    ).count()

    recent_completed = STFWorkflow.objects.filter(
        completed_at__gte=recent_time
    ).count()

    context = {
        'completed_workflows': completed_workflows,
        'agent_performance': agent_performance,
        'recent_workflows': recent_workflows,
        'recent_completed': recent_completed,
    }

    return render(request, 'monitor_app/workflow_performance.html', context)
1888 
1889 
@login_required
def workflow_realtime_dashboard(request):
    """Render the real-time workflow dashboard with its initial counters."""
    # A workflow is "active" while its status is neither terminal state.
    terminal_statuses = [WorkflowStatus.WORKFLOW_COMPLETE, WorkflowStatus.FAILED]

    context = {
        'total_workflows': STFWorkflow.objects.count(),
        'active_workflows': STFWorkflow.objects.exclude(
            current_status__in=terminal_statuses
        ).count(),
        'completed_workflows': STFWorkflow.objects.filter(
            current_status=WorkflowStatus.WORKFLOW_COMPLETE
        ).count(),
        'failed_workflows': STFWorkflow.objects.filter(
            current_status=WorkflowStatus.FAILED
        ).count(),
        'workflow_agents': SystemAgent.objects.filter(workflow_enabled=True),
    }

    return render(request, 'monitor_app/workflow_realtime_dashboard.html', context)
1917 
1918 
@login_required
def workflow_realtime_data_api(request):
    """API endpoint providing real-time data for dashboard updates.

    Returns JSON with headline metrics, pipeline stage counts, agent
    status, the 10 most recent messages, and chart series (per-minute
    throughput over the last 10 minutes, mean processing time per
    agent type).
    """

    from datetime import timedelta

    # Basic metrics.
    # "Active" = anything not yet in a terminal (complete/failed) status.
    total_workflows = STFWorkflow.objects.count()
    active_workflows = STFWorkflow.objects.exclude(
        current_status__in=[WorkflowStatus.WORKFLOW_COMPLETE, WorkflowStatus.FAILED]
    ).count()
    completed_workflows = STFWorkflow.objects.filter(
        current_status=WorkflowStatus.WORKFLOW_COMPLETE
    ).count()
    failed_workflows = STFWorkflow.objects.filter(
        current_status=WorkflowStatus.FAILED
    ).count()

    # Pipeline stage counts: how many workflows currently sit in each
    # agent's portion of the pipeline.
    pipeline_counts = {
        'daqsim': STFWorkflow.objects.filter(current_status=WorkflowStatus.GENERATED).count(),
        'data': STFWorkflow.objects.filter(
            current_status__in=[
                WorkflowStatus.DATA_RECEIVED,
                WorkflowStatus.DATA_PROCESSING,
                WorkflowStatus.DATA_COMPLETE
            ]
        ).count(),
        'processing': STFWorkflow.objects.filter(
            current_status__in=[
                WorkflowStatus.PROCESSING_RECEIVED,
                WorkflowStatus.PROCESSING_PROCESSING,
                WorkflowStatus.PROCESSING_COMPLETE
            ]
        ).count(),
        'fastmon': STFWorkflow.objects.filter(
            current_status__in=[
                WorkflowStatus.FASTMON_RECEIVED,
                WorkflowStatus.FASTMON_COMPLETE
            ]
        ).count(),
    }

    # Agent status (workflow-enabled agents only).
    agents_data = []
    for agent in SystemAgent.objects.filter(workflow_enabled=True):
        agents_data.append({
            'instance_name': agent.instance_name,
            'agent_type': agent.agent_type,
            'status': agent.status,
            'current_stf_count': agent.current_stf_count,
            'total_stf_processed': agent.total_stf_processed,
            'last_heartbeat': agent.last_heartbeat.isoformat() if agent.last_heartbeat else None,
        })

    # Recent messages (last 10)
    recent_messages = []
    for message in WorkflowMessage.objects.all().order_by('-sent_at')[:10]:
        recent_messages.append({
            'message_type': message.message_type,
            'sender_agent': message.sender_agent,
            'recipient_agent': message.recipient_agent,
            'timestamp': message.sent_at.strftime('%H:%M:%S'),
            'filename': message.workflow.filename if message.workflow else None,
            'is_successful': message.is_successful,
        })

    # Chart data
    # Throughput over last 10 minutes (data points every minute)
    now = timezone.now()
    throughput_labels = []
    throughput_data = []

    for i in range(10, 0, -1):
        time_point = now - timedelta(minutes=i)
        label = time_point.strftime('%H:%M')
        throughput_labels.append(label)

        # Count workflows created in this minute
        count = STFWorkflow.objects.filter(
            created_at__gte=time_point,
            created_at__lt=time_point + timedelta(minutes=1)
        ).count()
        throughput_data.append(count)

    # Processing times by agent type
    # (0 is reported when an agent type has no timed stages yet).
    from django.db.models import Avg
    processing_times = []
    for agent_type in [AgentType.DATA, AgentType.PROCESSING, AgentType.FASTMON]:
        avg_time = AgentWorkflowStage.objects.filter(
            agent_type=agent_type,
            processing_time_seconds__isnull=False
        ).aggregate(avg=Avg('processing_time_seconds'))['avg']
        processing_times.append(round(avg_time, 2) if avg_time else 0)

    data = {
        'metrics': {
            'total_workflows': total_workflows,
            'active_workflows': active_workflows,
            'completed_workflows': completed_workflows,
            'failed_workflows': failed_workflows,
        },
        'pipeline': pipeline_counts,
        'agents': agents_data,
        'recent_messages': recent_messages,
        'charts': {
            'throughput': {
                'labels': throughput_labels,
                'data': throughput_data,
            },
            'processing_times': processing_times,
        }
    }

    return JsonResponse(data)
2034 
2035 
@login_required
def persistent_state_view(request):
    """Render the current persistent state, both as a table and as pretty JSON."""
    import json
    from .utils import format_datetime
    from monitor_app.utils import format_timestamp_fields

    raw_state = PersistentState.get_state()

    # The singleton row (id=1) carries the last-updated timestamp; it may
    # not exist yet on a fresh database.
    state_obj = PersistentState.objects.filter(id=1).first()
    updated_at = format_datetime(state_obj.updated_at) if state_obj else None

    # Human-readable timestamps for both the table and the raw-JSON views.
    display_state = format_timestamp_fields(raw_state)

    context = {
        'state_data': display_state,
        'updated_at': updated_at,
        'state_json': json.dumps(display_state, indent=2),
    }

    return render(request, 'monitor_app/persistent_state.html', context)
2065 
2066 
@api_view(['POST'])
@authentication_classes([TokenAuthentication])
@permission_classes([IsAuthenticated])
def get_next_run_number(request):
    """Hand out the next run number from the atomic PersistentState counter."""
    try:
        next_number = PersistentState.get_next_run_number()
    except Exception as e:
        # Surface the failure as a 500 with the error text in the payload.
        return Response(
            {'error': str(e), 'status': 'error'},
            status=status.HTTP_500_INTERNAL_SERVER_ERROR,
        )
    return Response({'run_number': next_number, 'status': 'success'})
2083 
2084 
@api_view(['POST'])
@authentication_classes([TokenAuthentication])
@permission_classes([IsAuthenticated])
def get_next_agent_id(request):
    """Hand out the next agent ID from the atomic PersistentState counter."""
    try:
        next_id = PersistentState.get_next_agent_id()
    except Exception as exc:
        # Surface the failure as a 500 with the error text in the payload.
        payload = {'error': str(exc), 'status': 'error'}
        return Response(payload, status=status.HTTP_500_INTERNAL_SERVER_ERROR)
    return Response({'agent_id': next_id, 'status': 'success'})
2101 
2102 
@api_view(['POST'])
@authentication_classes([SessionAuthentication, TokenAuthentication])
@permission_classes([IsAuthenticated])
def get_next_workflow_execution_id(request):
    """Hand out the next workflow execution sequence number atomically."""
    try:
        seq = PersistentState.get_next_workflow_execution_id()
    except Exception as exc:
        # Surface the failure as a 500 with the error text in the payload.
        return Response(
            {'error': str(exc), 'status': 'error'},
            status=status.HTTP_500_INTERNAL_SERVER_ERROR,
        )
    return Response({'sequence': seq, 'status': 'success'})
2119 
2120 
@api_view(['POST'])
@authentication_classes([SessionAuthentication, TokenAuthentication])
@permission_classes([IsAuthenticated])
def ensure_namespace(request):
    """
    API endpoint to ensure a namespace exists, creating it if not.

    Request body:
        name: namespace name (required)
        owner: owner username (required)
        description: optional description (defaults to empty string)

    Returns the namespace record (created or existing).
    """
    from .workflow_models import Namespace

    name = request.data.get('name')
    owner = request.data.get('owner')
    description = request.data.get('description', '')

    # Validate required fields, reporting the first one missing.
    for field_name, value in (('name', name), ('owner', owner)):
        if not value:
            return Response(
                {'error': f'{field_name} is required', 'status': 'error'},
                status=status.HTTP_400_BAD_REQUEST,
            )

    try:
        # defaults only apply on creation; an existing namespace keeps its
        # current owner and description.
        namespace, created = Namespace.objects.get_or_create(
            name=name,
            defaults={'owner': owner, 'description': description},
        )
    except Exception as exc:
        return Response(
            {'error': str(exc), 'status': 'error'},
            status=status.HTTP_500_INTERNAL_SERVER_ERROR,
        )

    return Response({
        'name': namespace.name,
        'owner': namespace.owner,
        'description': namespace.description,
        'created': created,
        'status': 'success',
    })
2170 
2171 
2172 # ==================== AI MEMORY REST ENDPOINTS ====================
2173 
2174 
@api_view(['POST'])
@authentication_classes([])
@permission_classes([AllowAny])
def ai_memory_record(request):
    """Record a dialogue exchange for AI memory persistence.

    Called by Claude Code hooks (not MCP clients) to store dialogue.
    No auth required — local hook scripts only.
    """
    from .models import AIMemory

    payload = request.data
    username = payload.get('username')
    session_id = payload.get('session_id')
    role = payload.get('role')
    content = payload.get('content')

    # All four identity/content fields are mandatory.
    if not (username and session_id and role and content):
        return Response(
            {'error': 'username, session_id, role, and content are required'},
            status=status.HTTP_400_BAD_REQUEST,
        )

    # Only the two dialogue roles are accepted.
    if role != 'user' and role != 'assistant':
        return Response(
            {'error': f"Invalid role '{role}'. Must be 'user' or 'assistant'."},
            status=status.HTTP_400_BAD_REQUEST,
        )

    try:
        record = AIMemory.objects.create(
            username=username,
            session_id=session_id,
            role=role,
            content=content,
            namespace=payload.get('namespace'),
            project_path=payload.get('project_path'),
        )
    except Exception as exc:
        return Response(
            {'error': str(exc), 'status': 'error'},
            status=status.HTTP_500_INTERNAL_SERVER_ERROR,
        )

    return Response({'id': record.id, 'status': 'success'})
2220 
2221 
@api_view(['GET'])
@authentication_classes([])
@permission_classes([AllowAny])
def ai_memory_load(request):
    """Load recent dialogue history for session context.

    Called by Claude Code hooks at session start.
    No auth required — local hook scripts only.

    Query params:
        username: required; whose history to load.
        turns: optional number of dialogue turns (default 20); each turn
            counts as two stored messages (user + assistant).
        namespace: optional filter on the stored namespace.

    Returns {'items': [...], 'count': N} with messages in chronological
    order (oldest first).
    """
    from .models import AIMemory

    username = request.query_params.get('username')
    if not username:
        return Response(
            {'error': 'username query parameter is required'},
            status=status.HTTP_400_BAD_REQUEST,
        )

    try:
        turns = int(request.query_params.get('turns', '20'))
    except ValueError:
        turns = 20  # fall back to the default on a malformed value

    namespace = request.query_params.get('namespace')
    max_messages = turns * 2  # one turn == user message + assistant reply

    qs = AIMemory.objects.filter(username=username)
    if namespace:
        qs = qs.filter(namespace=namespace)

    # Fetch newest-first so the slice keeps the most recent messages, then
    # reverse so callers receive chronological order. Local renamed from
    # `messages` to `items` to stop shadowing django.contrib.messages,
    # which this module imports at file level.
    recent = qs.order_by('-created_at')[:max_messages]
    items = list(reversed([
        {
            'role': m.role,
            'content': m.content,
            'created_at': m.created_at.isoformat() if m.created_at else None,
            'session_id': m.session_id,
            # NOTE(review): 'post_id'/'root_id' carry namespace/project_path;
            # the key names look legacy — confirm consumers before renaming.
            'post_id': m.namespace or '',
            'root_id': m.project_path or '',
        }
        for m in recent
    ]))

    return Response({
        'items': items,
        'count': len(items),
    })
2269 
2270 
2271 # ==================== DATA PROVENANCE (DPID) ====================
2272 
@api_view(['GET'])
@authentication_classes([])
@permission_classes([AllowAny])
def dpid_verify(request):
    """Verify a Data Provenance ID or list recent DPIDs.

    GET /api/dpid/verify/?dpid=A7K2M9  — verify a specific DPID
    GET /api/dpid/verify/              — list recent DPIDs
    No auth required — read-only, no AI in the loop.

    The explicit AllowAny/empty-auth decorators make the documented
    "no auth required" contract independent of DRF default settings,
    matching the other no-auth endpoints in this module.
    """
    from .models import DataProvenance

    dpid = request.query_params.get('dpid')
    if dpid:
        try:
            record = DataProvenance.objects.get(dpid=dpid)
        except DataProvenance.DoesNotExist:
            # Named status constant for consistency with the rest of the file.
            return Response(
                {'dpid': dpid, 'verified': False},
                status=status.HTTP_404_NOT_FOUND,
            )
        return Response({
            'dpid': record.dpid,
            'tool_name': record.tool_name,
            'tool_args': record.tool_args,
            'created_at': record.created_at.isoformat(),
            'verified': True,
        })

    # No dpid given: list the 20 most recent records.
    recent = DataProvenance.objects.order_by('-created_at')[:20]
    return Response({
        'items': [{
            'dpid': r.dpid,
            'tool_name': r.tool_name,
            'created_at': r.created_at.isoformat(),
        } for r in recent],
        'count': len(recent),
    })
2307 
2308 
2309 # ==================== USERS ====================
2310 
@api_view(['GET'])
@permission_classes([IsAuthenticated])
def users_list(request):
    """List active users with identity fields, password hash, and timestamps.

    GET /api/users/ — requires authentication (session or token).
    Tunnel-only; password hash included for account sync to swf-remote.
    """
    from django.contrib.auth import get_user_model

    # Fields returned verbatim to the caller (hash included by design).
    fields = (
        'username', 'email', 'first_name', 'last_name',
        'password', 'is_active', 'date_joined', 'last_login',
    )
    user_model = get_user_model()
    active = user_model.objects.filter(is_active=True).order_by('username')
    return Response({'users': list(active.values(*fields))})
2326 
2327 
2328 # ==================== SLASH COMMANDS (Mattermost) ====================
2329 
@api_view(['POST'])
@authentication_classes([])
@permission_classes([AllowAny])
def panda_slash_command(request):
    """Handle /panda slash commands from Mattermost.

    POST with token, command, text, user_name, channel_id.
    Returns JSON with text (Markdown) for Mattermost to display.

    Authenticated only by the shared Mattermost slash token, so DRF auth
    is disabled here.

    Subcommands:
        status              — PanDA activity overview (last 24h)
        errors              — Top error patterns (last 7 days)
        jobs                — Recent jobs (pages on repeat)
        job <pandaid>       — Deep study of a single job
        tasks               — Recent tasks (pages on repeat)
        task <taskid>       — Single task details
        sites               — List EIC compute queues
        site <name>         — Queue configuration
        help                — This help text
    """
    from decouple import config as decouple_config
    from .panda import queries

    # Validate the shared-secret token Mattermost sends with every request.
    expected_token = decouple_config('MATTERMOST_SLASH_TOKEN', default='')
    received_token = request.data.get('token', '')
    if not expected_token or received_token != expected_token:
        return JsonResponse({'text': 'Unauthorized.'}, status=401)

    text = (request.data.get('text') or '').strip()
    user = request.data.get('user_name', 'unknown')
    parts = text.split(None, 1)
    subcmd = parts[0].lower() if parts else 'help'
    arg = parts[1].strip() if len(parts) > 1 else ''

    try:
        if subcmd == 'help' or not text:
            return JsonResponse({'text': _slash_help()})

        if subcmd == 'status':
            # Activity overview: job/task counts by status plus per-site totals.
            data = queries.get_activity(days=1)
            jobs = data.get('jobs', {})
            tasks = data.get('tasks', {})
            lines = ['#### PanDA Activity (last 24h)']
            lines.append(f"**Jobs:** {jobs.get('total', 0)}")
            for s, c in sorted(jobs.get('by_status', {}).items()):
                lines.append(f"  {s}: {c}")
            lines.append(f"**Tasks:** {tasks.get('total', 0)}")
            for s, c in sorted(tasks.get('by_status', {}).items()):
                lines.append(f"  {s}: {c}")
            if jobs.get('by_site'):
                lines.append('**By site:**')
                for entry in jobs['by_site']:
                    lines.append(f"  {entry['site']}: {entry['total']}")
            return JsonResponse({'text': '\n'.join(lines)})

        if subcmd == 'errors':
            # Optional numeric argument overrides the lookback window (days).
            days = int(arg) if arg.isdigit() else 7
            data = queries.error_summary(days=days, limit=10)
            lines = [f"#### Top Errors (last {days} days)"]
            lines.append(f"Total error occurrences: {data.get('total_errors', 0)}")
            for e in data.get('errors', []):
                diag = (e.get('error_diag') or '')[:80]
                lines.append(
                    f"- **{e['error_source']}:{e['error_code']}** × {e['count']} "
                    f"({e.get('task_count', '?')} tasks) — {diag}"
                )
            return JsonResponse({'text': '\n'.join(lines)})

        if subcmd == 'jobs':
            # Cursor key is per user and per status filter so repeat
            # invocations page through the right result set.
            status_filter = arg if arg and not arg.isdigit() else None
            cursor_key = f'slash_cursor_{user}_jobs_{status_filter or "all"}'
            cursor = _get_cursor(cursor_key)
            limit = 20
            data = queries.list_jobs(days=7, limit=limit, before_id=cursor, status=status_filter)
            pag = data.get('pagination', {})
            _set_cursor(cursor_key, pag.get('next_before_id'))
            label = f" ({status_filter})" if status_filter else ""
            lines = [f"#### Recent Jobs{label} (page {_cursor_page(cursor_key)})"]
            summary = data.get('summary', {})
            if summary:
                lines.append(' | '.join(f"{s}: {c}" for s, c in sorted(summary.items())))
            for j in data.get('jobs', []):
                ts = str(j.get('modificationtime', ''))[:16]
                lines.append(
                    f"| {j.get('pandaid')} | {j.get('jobstatus', '')} "
                    f"| {j.get('computingsite', '')[:30]} | {ts} |"
                )
            if pag.get('has_more'):
                lines.append('_Type `/panda jobs` again for next page_')
            else:
                lines.append('_End of results_')
                _clear_cursor(cursor_key)
            return JsonResponse({'text': '\n'.join(lines)})

        if subcmd == 'job':
            if not arg or not arg.isdigit():
                return JsonResponse({'text': 'Usage: `/panda job <pandaid>`'})
            data = queries.study_job(pandaid=int(arg))
            if 'error' in data:
                return JsonResponse({'text': f"Error: {data['error']}"})
            job = data.get('job', {})
            task = data.get('task', {})
            lines = [f"#### Job {arg}"]
            lines.append(f"**Status:** {job.get('jobstatus', '?')}")
            lines.append(f"**Site:** {job.get('computingsite', '?')}")
            lines.append(f"**Owner:** {job.get('produsername', '?')}")
            lines.append(f"**Task:** {job.get('jeditaskid', '?')} — {task.get('taskname', '')}")
            lines.append(f"**Container:** {job.get('container_name', '?')}")
            lines.append(f"**Transformation:** {job.get('transformation', '?')}")
            lines.append(f"**Cores:** {job.get('actualcorecount') or job.get('corecount', '?')}")
            cpu = job.get('cpuconsumptiontime')
            if cpu:
                lines.append(f"**CPU time:** {cpu}s")
            lines.append(f"**Created:** {str(job.get('creationtime', ''))[:16]}")
            lines.append(f"**Started:** {str(job.get('starttime', ''))[:16]}")
            lines.append(f"**Modified:** {str(job.get('modificationtime', ''))[:16]}")
            if job.get('errors'):
                lines.append('**Errors:**')
                for e in job['errors']:
                    lines.append(f"  - {e.get('component', '?')}:{e.get('code', '?')} — {e.get('diag', '')[:120]}")
            files = data.get('files', [])
            if files:
                out = [f for f in files if f.get('type') == 'output']
                log = [f for f in files if f.get('type') == 'log']
                lines.append(f"**Files:** {len(files)} total ({len(out)} output, {len(log)} log)")
            la = data.get('log_analysis', {})
            if la.get('log_available'):
                src = la.get('log_source', 'filebrowser')
                lines.append(f"**Log analysis** (via {src}):")
                lines.append(f"  - Failure type: **{la.get('failure_type', '?')}**")
                excerpt = la.get('log_excerpt', '')
                if excerpt:
                    # Show last few meaningful lines, truncated for Mattermost
                    excerpt_lines = [
                        l for l in excerpt.strip().splitlines() if l.strip()
                    ][-8:]
                    lines.append('```')
                    lines.extend(excerpt_lines)
                    lines.append('```')
            if data.get('monitor_url'):
                lines.append(f"[PanDA Monitor]({data['monitor_url']})")
            return JsonResponse({'text': '\n'.join(lines)})

        if subcmd == 'tasks':
            # Same paging scheme as 'jobs', with a smaller page size.
            status_filter = arg if arg and not arg.isdigit() else None
            cursor_key = f'slash_cursor_{user}_tasks_{status_filter or "all"}'
            cursor = _get_cursor(cursor_key)
            limit = 15
            data = queries.list_tasks(days=7, limit=limit, before_id=cursor, status=status_filter)
            pag = data.get('pagination', {})
            _set_cursor(cursor_key, pag.get('next_before_id'))
            label = f" ({status_filter})" if status_filter else ""
            lines = [f"#### Recent Tasks{label} (page {_cursor_page(cursor_key)})"]
            summary = data.get('summary', {})
            if summary:
                lines.append(' | '.join(f"{s}: {c}" for s, c in sorted(summary.items())))
            for t in data.get('tasks', []):
                ts = str(t.get('modificationtime', ''))[:16]
                name = (t.get('taskname') or '')[:40]
                lines.append(
                    f"| {t.get('jeditaskid')} | {t.get('status', '')} "
                    f"| {name} | {ts} |"
                )
            if pag.get('has_more'):
                lines.append('_Type `/panda tasks` again for next page_')
            else:
                lines.append('_End of results_')
                _clear_cursor(cursor_key)
            return JsonResponse({'text': '\n'.join(lines)})

        if subcmd == 'task':
            if not arg or not arg.isdigit():
                return JsonResponse({'text': 'Usage: `/panda task <taskid>`'})
            data = queries.get_task(jeditaskid=int(arg))
            if 'error' in data:
                return JsonResponse({'text': f"Error: {data['error']}"})
            lines = [f"#### Task {arg}"]
            lines.append(f"**Name:** {data.get('taskname', '?')}")
            lines.append(f"**Status:** {data.get('status', '?')}")
            lines.append(f"**Owner:** {data.get('username', '?')}")
            lines.append(f"**Working group:** {data.get('workinggroup', '?')}")
            lines.append(f"**Created:** {str(data.get('creationdate', ''))[:16]}")
            lines.append(f"**Modified:** {str(data.get('modificationtime', ''))[:16]}")
            if data.get('errordialog'):
                lines.append(f"**Error:** {data['errordialog'][:200]}")
            return JsonResponse({'text': '\n'.join(lines)})

        if subcmd == 'sites':
            # Join queue configuration with the last-24h per-site job counts.
            activity = queries.get_activity(days=1)
            by_site = {e['site']: e for e in activity.get('jobs', {}).get('by_site', [])}
            queues = queries.list_queues(vo='eic')
            lines = ['#### EIC Compute Sites']
            lines.append('| Site | Status | Running | Finished | Failed | Total |')
            lines.append('|------|--------|---------|----------|--------|-------|')
            for q in queues.get('queues', []):
                name = q.get('panda_queue', '')
                st = q.get('status', '')
                s = by_site.get(name, {})
                running = s.get('running', 0)
                finished = s.get('finished', 0)
                failed = s.get('failed', 0)
                total = s.get('total', 0)
                lines.append(f"| {name} | {st} | {running} | {finished} | {failed} | {total} |")
            return JsonResponse({'text': '\n'.join(lines)})

        if subcmd == 'site':
            if not arg:
                return JsonResponse({'text': 'Usage: `/panda site <queue_name>`'})
            data = queries.get_queue(panda_queue=arg)
            if 'error' in data:
                return JsonResponse({'text': f"Error: {data['error']}"})
            queue = data.get('queue', data)
            lines = [f"#### Site: {arg}"]
            for key in ('status', 'state', 'corecount', 'maxrss', 'maxtime',
                        'container_options'):
                val = queue.get(key)
                if val is not None:
                    lines.append(f"**{key}:** {val}")
            # Job breakdown
            activity = queries.get_activity(days=1, site=arg)
            jobs = activity.get('jobs', {})
            by_status = jobs.get('by_status', {})
            if by_status:
                lines.append(f"**Jobs (24h):** {jobs.get('total', 0)}")
                lines.append(' | '.join(f"{s}: {c}" for s, c in sorted(by_status.items())))
            else:
                lines.append('**Jobs (24h):** 0')
            return JsonResponse({'text': '\n'.join(lines)})

        return JsonResponse({'text': f"Unknown subcommand `{subcmd}`. Try `/panda help`"})

    except Exception as e:
        # logger.exception already records the full traceback, so the
        # previously-unused `import traceback` has been removed. Only the
        # short message is shown to the Mattermost user.
        logger.exception(f"Slash command error: {subcmd} {arg}")
        return JsonResponse({'text': f"Error: {e}"})
2569 
2570 
2571 # Slash command cursor state (in-memory, per-process — fine for single bot)
2572 _slash_cursors = {}
2573 _slash_pages = {}
2574 
2575 
2576 def _get_cursor(key):
2577     return _slash_cursors.get(key)
2578 
2579 
2580 def _set_cursor(key, value):
2581     if value:
2582         _slash_cursors[key] = value
2583         _slash_pages[key] = _slash_pages.get(key, 0) + 1
2584     else:
2585         _slash_cursors.pop(key, None)
2586 
2587 
2588 def _clear_cursor(key):
2589     _slash_cursors.pop(key, None)
2590     _slash_pages.pop(key, None)
2591 
2592 
2593 def _cursor_page(key):
2594     return _slash_pages.get(key, 1)
2595 
2596 
2597 def _slash_help():
2598     return """#### /panda — PanDA Slash Commands
2599 | Command | Description |
2600 |---------|-------------|
2601 | `/panda status` | Activity overview (last 24h) |
2602 | `/panda errors [days]` | Top error patterns (default 7 days) |
2603 | `/panda jobs [status]` | Recent jobs — repeat to page |
2604 | `/panda job <id>` | Deep study of a single job |
2605 | `/panda tasks [status]` | Recent tasks — repeat to page |
2606 | `/panda task <id>` | Single task details |
2607 | `/panda sites` | EIC compute queues |
2608 | `/panda site <name>` | Queue configuration |
2609 | `/panda help` | This help |"""
2610 
2611 
2612 # ==================== PANDA QUEUES AND RUCIO ENDPOINTS VIEWS ====================
2613 
@login_required
def panda_queues_list(request):
    """
    Professional PanDA queues list view using server-side DataTables.
    Displays computing queue configurations with key fields and JSON links.
    """
    from django.urls import reverse

    # Column definitions consumed by the DataTables template.
    column_specs = (
        ('queue_name', 'Name', True),
        ('site', 'Site', True),
        ('status', 'Status', True),
        ('queue_type', 'Type', True),
        ('updated_at', 'Updated', True),
        ('json', 'JSON', False),
    )
    columns = [
        {'name': n, 'title': t, 'orderable': o}
        for n, t, o in column_specs
    ]

    context = {
        'table_title': 'PanDA Queues',
        'table_description': 'Computing queue configurations for the PanDA workload management system.',
        'ajax_url': reverse('monitor_app:panda_queues_datatable_ajax'),
        'columns': columns,
    }
    return render(request, 'monitor_app/panda_queues_list.html', context)
2639 
2640 
def panda_queues_datatable_ajax(request):
    """
    AJAX endpoint for server-side DataTables processing of PanDA queues.
    Handles pagination, searching, and ordering.

    NOTE(review): unlike the list view this endpoint has no @login_required
    decorator — confirm whether unauthenticated access is intentional.
    """
    from .utils import DataTablesProcessor, format_datetime

    column_names = ['queue_name', 'site', 'status', 'queue_type', 'updated_at', 'json']
    dt = DataTablesProcessor(request, column_names, default_order_column=0, default_order_direction='asc')

    base_qs = PandaQueue.objects.all()
    records_total = PandaQueue.objects.count()

    # Narrow by the user's search text, then order and page.
    searchable = ['queue_name', 'site', 'status', 'queue_type']
    filtered_qs = dt.apply_search(base_qs, searchable)
    records_filtered = filtered_qs.count()
    page = dt.apply_pagination(filtered_qs.order_by(dt.get_order_by()))

    rows = []
    for queue in page:
        detail_url = reverse("monitor_app:panda_queue_detail", args=[queue.queue_name])
        name_cell = f'<a href="{detail_url}">{queue.queue_name}</a>'

        # Backfill display fields from the raw config when unset.
        if not queue.site and queue.config_data:
            queue.site = queue.config_data.get('site', '')
        if not queue.queue_type and queue.config_data:
            queue.queue_type = queue.config_data.get('type', '')

        json_url = reverse("monitor_app:panda_queue_json", args=[queue.queue_name])
        rows.append([
            name_cell,
            queue.site or '',
            queue.status,
            queue.queue_type or '',
            format_datetime(queue.updated_at),
            f'<a href="{json_url}">View JSON</a>',
        ])

    return dt.create_response(rows, records_total, records_filtered)
2684 
2685 
@login_required
def panda_queue_detail(request, queue_name):
    """Display detailed view of a specific PanDA queue configuration."""
    queue = get_object_or_404(PandaQueue, queue_name=queue_name)

    # Pull a handful of commonly useful fields out of the raw config
    # for the summary panel; missing fields are simply omitted.
    interesting = ('resource_type', 'cloud', 'country', 'site')
    config = queue.config_data or {}
    summary_fields = {f: config[f] for f in interesting if f in config}

    return render(request, 'monitor_app/panda_queue_detail.html', {
        'queue': queue,
        'summary_fields': summary_fields,
    })
2704 
2705 
@login_required
def panda_queue_json(request, queue_name):
    """Display JSON view of a PanDA queue configuration using renderjson."""
    import json

    queue = get_object_or_404(PandaQueue, queue_name=queue_name)
    return render(request, 'monitor_app/json_viewer.html', {
        'queue': queue,
        'config_json': json.dumps(queue.config_data, indent=2),
        'title': f'PanDA Queue: {queue.queue_name}',
    })
2718 
2719 
@login_required
def rucio_endpoints_list(request):
    """
    Professional Rucio endpoints list view using server-side DataTables.
    Displays DDM endpoint configurations with key fields and JSON links.
    """
    from django.urls import reverse

    # Column definitions consumed by the DataTables template.
    column_specs = (
        ('endpoint_name', 'Endpoint Name', True),
        ('site', 'Site', True),
        ('endpoint_type', 'Type', True),
        ('is_tape', 'Tape', True),
        ('is_active', 'Active', True),
        ('updated_at', 'Updated', True),
        ('json', 'JSON', False),
    )
    columns = [
        {'name': n, 'title': t, 'orderable': o}
        for n, t, o in column_specs
    ]

    context = {
        'table_title': 'Rucio Endpoints',
        'table_description': 'Distributed data management endpoints for the Rucio system.',
        'ajax_url': reverse('monitor_app:rucio_endpoints_datatable_ajax'),
        'columns': columns,
    }
    return render(request, 'monitor_app/rucio_endpoints_list.html', context)
2746 
2747 
def rucio_endpoints_datatable_ajax(request):
    """
    AJAX endpoint for server-side DataTables processing of Rucio endpoints.
    Handles pagination, searching, and ordering.

    NOTE(review): unlike the list view this endpoint has no @login_required
    decorator — confirm whether unauthenticated access is intentional.
    """
    from .utils import DataTablesProcessor, format_datetime

    column_names = ['endpoint_name', 'site', 'endpoint_type', 'is_tape', 'is_active', 'updated_at', 'json']
    dt = DataTablesProcessor(request, column_names, default_order_column=0, default_order_direction='asc')

    base_qs = RucioEndpoint.objects.all()
    records_total = RucioEndpoint.objects.count()

    # Narrow by the user's search text, then order and page.
    searchable = ['endpoint_name', 'site', 'endpoint_type']
    filtered_qs = dt.apply_search(base_qs, searchable)
    records_filtered = filtered_qs.count()
    page = dt.apply_pagination(filtered_qs.order_by(dt.get_order_by()))

    rows = []
    for endpoint in page:
        detail_url = reverse("monitor_app:rucio_endpoint_detail", args=[endpoint.endpoint_name])
        name_cell = f'<a href="{detail_url}">{endpoint.endpoint_name}</a>'

        # Backfill display fields from the raw config when unset.
        # NOTE(review): looks up 'rcsite' here while the detail view uses
        # 'rc_site' — confirm which key the config actually carries.
        if not endpoint.site and endpoint.config_data:
            endpoint.site = endpoint.config_data.get('rcsite', endpoint.config_data.get('site', ''))
        if not endpoint.endpoint_type and endpoint.config_data:
            endpoint.endpoint_type = 'tape' if endpoint.config_data.get('is_tape') else 'disk'

        # Render the tape/active flags as Bootstrap badges.
        tape_badge = '<span class="badge bg-warning">Tape</span>' if endpoint.is_tape else '<span class="badge bg-info">Disk</span>'
        active_badge = '<span class="badge bg-success">Active</span>' if endpoint.is_active else '<span class="badge bg-secondary">Inactive</span>'

        json_url = reverse("monitor_app:rucio_endpoint_json", args=[endpoint.endpoint_name])
        rows.append([
            name_cell,
            endpoint.site or '',
            endpoint.endpoint_type or '',
            tape_badge,
            active_badge,
            format_datetime(endpoint.updated_at),
            f'<a href="{json_url}">View JSON</a>',
        ])

    return dt.create_response(rows, records_total, records_filtered)
2795 
2796 
@login_required
def rucio_endpoint_detail(request, endpoint_name):
    """Display detailed view of a specific Rucio endpoint configuration."""
    endpoint = get_object_or_404(RucioEndpoint, endpoint_name=endpoint_name)

    summary_fields = {}
    config = endpoint.config_data
    if config:
        # Commonly useful top-level fields for the summary panel.
        # NOTE(review): 'rc_site' here vs 'rcsite' in the list AJAX view —
        # confirm which key the config actually carries.
        for field in ['cloud', 'rc_site']:
            if field in config:
                summary_fields[field] = config[field]

        # Nested resource endpoint, when present and well-formed.
        resource = config.get('resource')
        if isinstance(resource, dict):
            summary_fields['resource_endpoint'] = resource.get('endpoint', '')

    context = {
        'endpoint': endpoint,
        'summary_fields': summary_fields,
    }
    return render(request, 'monitor_app/rucio_endpoint_detail.html', context)
2820 
2821 
@login_required
def rucio_endpoint_json(request, endpoint_name):
    """Display JSON view of a Rucio endpoint configuration using renderjson."""
    import json

    endpoint = get_object_or_404(RucioEndpoint, endpoint_name=endpoint_name)
    return render(request, 'monitor_app/json_viewer.html', {
        'endpoint': endpoint,
        'config_json': json.dumps(endpoint.config_data, indent=2),
        'title': f'Rucio Endpoint: {endpoint.endpoint_name}',
    })
2834 
2835 
@login_required
def panda_queues_all_json(request):
    """Display JSON view of all PanDA queue configurations."""
    import json

    # Map queue name -> raw config, ordered by name.
    queues_data = {
        q.queue_name: q.config_data
        for q in PandaQueue.objects.all().order_by('queue_name')
    }
    context = {
        'config_json': json.dumps(queues_data, indent=2),
        'title': 'All PanDA Queues Configuration',
    }
    return render(request, 'monitor_app/json_viewer.html', context)
2849 
2850 
@login_required
def rucio_endpoints_all_json(request):
    """Render every Rucio endpoint's configuration as one JSON document.

    Builds an endpoint_name -> config_data mapping over all endpoints
    (ordered by name for stable output) and shows it in the json_viewer
    template.
    """
    import json

    all_configs = {
        ep.endpoint_name: ep.config_data
        for ep in RucioEndpoint.objects.all().order_by('endpoint_name')
    }
    return render(request, 'monitor_app/json_viewer.html', {
        'config_json': json.dumps(all_configs, indent=2),
        'title': 'All Rucio Endpoints Configuration',
    })
2864 
2865 
@login_required
@user_passes_test(lambda u: u.is_superuser)
def update_panda_queues_from_github(request):
    """Reload the PandaQueue table from the testbed config on GitHub.

    Fetches config/panda_queues.json from the main branch of
    BNLNPPS/swf-testbed, atomically replaces the entire PandaQueue table
    with its contents, and reports success or failure via Django messages.
    Superuser only.

    Returns a redirect to the PanDA queues list view in all cases.
    """
    import json
    import urllib.request
    from django.db import transaction

    github_url = "https://raw.githubusercontent.com/BNLNPPS/swf-testbed/main/config/panda_queues.json"
    repo_location = "BNLNPPS/swf-testbed"
    file_path = "config/panda_queues.json"
    github_file_url = "https://github.com/BNLNPPS/swf-testbed/blob/main/config/panda_queues.json"

    try:
        # Fetch JSON from GitHub; the timeout keeps a hung request from
        # blocking this worker indefinitely.
        with urllib.request.urlopen(github_url, timeout=30) as response:
            data = json.loads(response.read().decode())

        # Delete-and-reload inside one transaction so a failure mid-loop
        # cannot leave the table half-deleted or partially loaded.
        with transaction.atomic():
            PandaQueue.objects.all().delete()

            created_count = 0
            for queue_name, config in data.items():
                # Extract key fields from the per-queue config dict.
                site = config.get('site', '')
                queue_type = config.get('type', '')

                # Default to active unless explicitly marked offline.
                # Named queue_status (not status) to avoid shadowing the
                # module-level DRF `status` import.
                queue_status = 'offline' if config.get('status') == 'offline' else 'active'

                PandaQueue.objects.create(
                    queue_name=queue_name,
                    site=site,
                    queue_type=queue_type,
                    status=queue_status,
                    config_data=config,
                )
                created_count += 1

        messages.success(request,
            f'Successfully updated {created_count} PanDA queues from GitHub<br>'
            f'<strong>Repository:</strong> {repo_location}<br>'
            f'<strong>File:</strong> {file_path}<br>'
            f'<strong>View on GitHub:</strong> <a href="{github_file_url}" target="_blank">Click here to see what was loaded</a>',
            extra_tags='safe'
        )

    except Exception as e:
        # Broad catch is deliberate: any network/parse/DB error becomes a
        # user-visible message instead of a 500 page.
        messages.error(request, f'Failed to update from GitHub: {str(e)}')

    return redirect('monitor_app:panda_queues_list')
2923 
2924 
@login_required
@user_passes_test(lambda u: u.is_superuser)
def update_rucio_endpoints_from_github(request):
    """Reload the RucioEndpoint table from the testbed config on GitHub.

    Fetches config/ddm_endpoints.json from the main branch of
    BNLNPPS/swf-testbed, atomically replaces the entire RucioEndpoint table
    with its contents, and reports success or failure via Django messages.
    Superuser only.

    Returns a redirect to the Rucio endpoints list view in all cases.
    """
    import json
    import urllib.request
    from django.db import transaction

    github_url = "https://raw.githubusercontent.com/BNLNPPS/swf-testbed/main/config/ddm_endpoints.json"
    repo_location = "BNLNPPS/swf-testbed"
    file_path = "config/ddm_endpoints.json"
    github_file_url = "https://github.com/BNLNPPS/swf-testbed/blob/main/config/ddm_endpoints.json"

    try:
        # Fetch JSON from GitHub; the timeout keeps a hung request from
        # blocking this worker indefinitely.
        with urllib.request.urlopen(github_url, timeout=30) as response:
            data = json.loads(response.read().decode())

        # Delete-and-reload inside one transaction so a failure mid-loop
        # cannot leave the table half-deleted or partially loaded.
        with transaction.atomic():
            RucioEndpoint.objects.all().delete()

            created_count = 0
            for endpoint_name, config in data.items():
                # Prefer 'rcsite' but fall back to 'site' for the site name.
                site = config.get('rcsite', config.get('site', ''))
                is_tape = config.get('is_tape', False)

                # Classify the endpoint: tape wins, then cache, else disk.
                if is_tape:
                    endpoint_type = 'tape'
                elif config.get('is_cache'):
                    endpoint_type = 'cache'
                else:
                    endpoint_type = 'disk'

                # Active only when the site state is explicitly ACTIVE.
                is_active = config.get('rc_site_state') == 'ACTIVE'

                RucioEndpoint.objects.create(
                    endpoint_name=endpoint_name,
                    site=site,
                    endpoint_type=endpoint_type,
                    is_tape=is_tape,
                    is_active=is_active,
                    config_data=config,
                )
                created_count += 1

        messages.success(request,
            f'Successfully updated {created_count} Rucio endpoints from GitHub<br>'
            f'<strong>Repository:</strong> {repo_location}<br>'
            f'<strong>File:</strong> {file_path}<br>'
            f'<strong>View on GitHub:</strong> <a href="{github_file_url}" target="_blank">Click here to see what was loaded</a>',
            extra_tags='safe'
        )

    except Exception as e:
        # Broad catch is deliberate: any network/parse/DB error becomes a
        # user-visible message instead of a 500 page.
        messages.error(request, f'Failed to update from GitHub: {str(e)}')

    return redirect('monitor_app:rucio_endpoints_list')
2989 
2990 
@login_required
def panda_hub(request):
    """PanDA & Rucio hub page — testbed-oriented, full BNL access."""
    template_name = 'monitor_app/panda_hub.html'
    return render(request, template_name)
2995 
2996 
def prod_hub(request):
    """ePIC Production home — production monitor + PCS sections."""
    # Imported lazily to avoid a module-load dependency on the pcs app.
    from pcs.views import pcs_hub_counts

    hub_context = pcs_hub_counts()
    return render(request, 'monitor_app/prod_hub.html', hub_context)
3001 
3002 
def testbed_hub(request):
    """ePIC Testbed home — workflow system overview."""
    template_name = 'monitor_app/testbed_hub.html'
    return render(request, template_name)