# File indexing completed on 2026-04-25 08:29:10
0001 import logging
0002 import uuid
0003 from datetime import timedelta
0004 from django.db import models
0005 from django.utils import timezone
0006
0007
class SystemAgent(models.Model):
    """A registered agent process in the testbed.

    Tracks identity, heartbeat-driven health (``status``), what the agent is
    currently doing (``operational_state``), and STF processing counters.
    """

    # Health status values, driven by heartbeats (see mark_stale_agents()).
    STATUS_CHOICES = [
        ('UNKNOWN', 'Unknown'),
        ('OK', 'OK'),
        ('WARNING', 'Warning'),
        ('ERROR', 'Error'),
        ('EXITED', 'Exited'),
    ]

    # Activity states: what the agent is doing right now.
    OPERATIONAL_STATE_CHOICES = [
        ('STARTING', 'Starting'),
        ('READY', 'Ready'),
        ('PROCESSING', 'Processing'),
        ('EXITED', 'Exited'),
    ]

    # Known agent roles; the first four participate in the STF workflow
    # (see is_workflow_agent()).
    AGENT_TYPE_CHOICES = [
        ('daqsim', 'DAQ Simulator'),
        ('data', 'Data Agent'),
        ('processing', 'Processing Agent'),
        ('fastmon', 'Fast Monitoring Agent'),
        ('workflow_runner', 'Workflow Runner'),
        ('monitor', 'Monitor System'),
        ('sse_sender', 'SSE Test Sender'),
        ('sse_receiver', 'SSE Client/Receiver'),
        ('test', 'Test Agent'),
        ('other', 'Other'),
    ]

    # Identity.
    instance_name = models.CharField(max_length=100, unique=True)
    agent_type = models.CharField(max_length=20, choices=AGENT_TYPE_CHOICES)
    description = models.TextField(blank=True)

    # Health, updated from heartbeats.
    status = models.CharField(
        max_length=10,
        choices=STATUS_CHOICES,
        default='UNKNOWN',
    )
    last_heartbeat = models.DateTimeField(null=True, blank=True)
    agent_url = models.URLField(max_length=200, blank=True, null=True)

    # STF workflow participation and statistics (see update_stf_stats()).
    workflow_enabled = models.BooleanField(default=True)
    current_stf_count = models.IntegerField(default=0)
    total_stf_processed = models.IntegerField(default=0)
    last_stf_processed = models.DateTimeField(null=True, blank=True)

    # Process bookkeeping for operational control.
    pid = models.IntegerField(null=True, blank=True,
                              help_text="Process ID for kill operations")
    hostname = models.CharField(max_length=100, null=True, blank=True,
                                help_text="Host where agent is running")
    operational_state = models.CharField(
        max_length=20,
        choices=OPERATIONAL_STATE_CHOICES,
        default='STARTING',
        help_text="What the agent is doing (STARTING/READY/PROCESSING/EXITED)"
    )

    namespace = models.CharField(max_length=100, null=True, blank=True, db_index=True,
                                 help_text="Testbed namespace for workflow delineation")

    metadata = models.JSONField(null=True, blank=True,
                                help_text="Extensible metadata for agent configuration and state")

    created_at = models.DateTimeField(auto_now_add=True)
    updated_at = models.DateTimeField(auto_now=True)

    class Meta:
        db_table = 'swf_systemagent'

    def __str__(self):
        return self.instance_name

    def is_workflow_agent(self):
        """Check if this agent participates in STF workflow."""
        return self.agent_type in ['daqsim', 'data', 'processing', 'fastmon']

    @classmethod
    def mark_stale_agents(cls):
        """Mark agents as EXITED if they haven't sent a heartbeat in 3+ minutes."""
        threshold = timezone.now() - timedelta(minutes=3)
        # Rows with last_heartbeat=None never match __lt and are left alone;
        # already-EXITED rows are excluded so the bulk update (and the log
        # line below) stays idempotent.
        stale = cls.objects.filter(
            last_heartbeat__lt=threshold,
        ).exclude(
            status='EXITED',
        ).exclude(
            operational_state='EXITED',
        )
        count = stale.update(status='EXITED', operational_state='EXITED')
        if count:
            logging.getLogger(__name__).info(f"Marked {count} stale agent(s) as EXITED")
        return count

    def update_stf_stats(self, increment_current=0, increment_total=0):
        """Update STF processing statistics.

        NOTE(review): read-modify-write on the loaded instance, so concurrent
        updates to the same row can lose increments — acceptable if each agent
        row is only updated by its own process; confirm with callers.
        """
        self.current_stf_count += increment_current
        self.total_stf_processed += increment_total
        if increment_total > 0:
            self.last_stf_processed = timezone.now()
        self.save()
0110
class AppLog(models.Model):
    """A single log record captured from an application/agent instance."""

    # Integer levels mirror the stdlib ``logging`` constants so records can
    # be filtered numerically (e.g. level >= logging.WARNING).
    LEVEL_CHOICES = [
        (logging.CRITICAL, 'CRITICAL'),
        (logging.ERROR, 'ERROR'),
        (logging.WARNING, 'WARNING'),
        (logging.INFO, 'INFO'),
        (logging.DEBUG, 'DEBUG'),
        (logging.NOTSET, 'NOTSET'),
    ]
    # Origin of the record.
    app_name = models.CharField(max_length=100, db_index=True)
    instance_name = models.CharField(max_length=100, db_index=True)
    timestamp = models.DateTimeField(db_index=True)
    # Numeric level plus the level name string as emitted by the logger.
    level = models.IntegerField(choices=LEVEL_CHOICES, default=logging.NOTSET, db_index=True)
    levelname = models.CharField(max_length=50)
    message = models.TextField()
    # Call-site details and process/thread identifiers.
    module = models.CharField(max_length=255)
    funcname = models.CharField(max_length=255)
    lineno = models.IntegerField()
    process = models.IntegerField()
    thread = models.BigIntegerField()
    extra_data = models.JSONField(null=True, blank=True)

    class Meta:
        db_table = 'swf_applog'
        ordering = ['-timestamp']
        verbose_name_plural = "App Logs"
        indexes = [
            models.Index(fields=['timestamp', 'app_name', 'instance_name']),
        ]

    def __str__(self):
        return f'{self.timestamp} - {self.app_name}:{self.instance_name} - {self.get_level_display()} - {self.message}'
0143
0144 """
0145 Django models for the SWF Fast Monitoring Agent database.
0146
0147 This module defines the core data models for tracking Super Time Frame (STF) files, message queue subscribers,
0148 and dispatch operations in the ePIC streaming workflow testbed.
0149 """
0150
class FileStatus(models.TextChoices):
    """
    Status choices for STF file processing lifecycle.
    Tracks the processing state of Super Time Frame files from initial registration through final message queue dispatch.

    Registered: file added to the DB
    Processing: Any pre-treatment before dispatching to MQ
    Processed: Pre-treatment complete, ready to dispatch
    Done: sent to MQ
    Failed: Some problem in the workflow
    """
    # Each member is (value stored in the DB, human-readable label).
    REGISTERED = "registered", "Registered"
    PROCESSING = "processing", "Processing"
    PROCESSED = "processed", "Processed"
    FAILED = "failed", "Failed"
    DONE = "done", "Done"
0167
0168
class Run(models.Model):
    """
    Represents a data-taking run in the ePIC detector system.

    Attributes:
        run_id: Auto-incrementing primary key
        run_number: Unique identifier for the run, defined by DAQ
        start_time: When the run began
        end_time: When the run ended (null if still active)
        run_conditions: JSON field storing experimental conditions
    """
    run_id = models.AutoField(primary_key=True)
    run_number = models.IntegerField(unique=True)
    start_time = models.DateTimeField()
    # Null while the run is still in progress.
    end_time = models.DateTimeField(null=True, blank=True)
    run_conditions = models.JSONField(null=True, blank=True)

    class Meta:
        db_table = 'swf_runs'

    def __str__(self):
        return f"Run {self.run_number}"
0191
0192
class StfFile(models.Model):
    """
    Represents a Super Time Frame (STF) file in the data acquisition system.
    Each file is tracked with metadata, processing status, and location
    information for monitoring and message queue dispatch.

    Attributes:
        file_id: UUID primary key for unique file identification
        run: Foreign key to the associated Run
        machine_state: Detector state during data collection (e.g., "physics", "cosmics")
        stf_filename: Unique filename of the STF file
        file_size_bytes: Size of the file in bytes
        checksum: File integrity checksum
        created_at: Timestamp when file record was created
        status: Current processing status (FileStatus enum)
        metadata: JSON field for additional file metadata
        workflow_id: Optional UUID of the associated workflow
        daq_state: Optional DAQ state string
        daq_substate: Optional DAQ substate string
        workflow_status: Optional workflow-level status string
        tf_count: Number of Time Frames in the file, if recorded
    """
    file_id = models.UUIDField(primary_key=True, default=uuid.uuid4, editable=False)
    run = models.ForeignKey(Run, on_delete=models.CASCADE, related_name='stf_files')
    machine_state = models.CharField(max_length=64, default="physics")
    stf_filename = models.CharField(max_length=255, unique=True)
    file_size_bytes = models.BigIntegerField(null=True, blank=True)
    checksum = models.CharField(max_length=64, null=True, blank=True)
    created_at = models.DateTimeField(auto_now_add=True)
    status = models.CharField(
        max_length=20,
        choices=FileStatus.choices,
        default=FileStatus.REGISTERED
    )
    metadata = models.JSONField(null=True, blank=True)

    # Optional workflow/DAQ context; presumably populated by workflow agents
    # (see .workflow_models) — confirm with writers.
    workflow_id = models.UUIDField(null=True, blank=True, db_index=True)
    daq_state = models.CharField(max_length=20, null=True, blank=True)
    daq_substate = models.CharField(max_length=20, null=True, blank=True)
    workflow_status = models.CharField(max_length=30, null=True, blank=True)

    tf_count = models.IntegerField(null=True, blank=True)

    class Meta:
        db_table = 'swf_stf_files'

    def __str__(self):
        return f"STF File {self.file_id}"
0237
0238
class Subscriber(models.Model):
    """
    Represents a message queue subscriber in the monitoring system.
    Subscribers receive notifications about STF files via ActiveMQ directly or SSE.

    Attributes:
        subscriber_id: Auto-incrementing primary key
        subscriber_name: Unique name identifying the subscriber
        fraction: Fraction of messages to receive
        description: Human-readable description of the subscriber
        is_active: Whether the subscriber is currently active
        created_at: Timestamp when record was created
        updated_at: Timestamp when record was last updated
        delivery_type: How messages are delivered (activemq or sse)
        client_ip: IP address for SSE subscribers
        client_location: Geographic location for SSE subscribers
        connected_at: When SSE subscriber connected
        disconnected_at: When SSE subscriber disconnected
        last_activity: Last activity timestamp for SSE subscribers
        message_filters: JSON filters for SSE message selection
        messages_received: Count of messages received
        messages_sent: Count of messages sent (SSE)
        messages_dropped: Count of messages dropped due to queue overflow (SSE)
    """
    DELIVERY_TYPE_CHOICES = [
        ('activemq', 'ActiveMQ Direct'),
        ('sse', 'Server-Sent Events'),
    ]

    subscriber_id = models.AutoField(primary_key=True)
    subscriber_name = models.CharField(max_length=255, unique=True)
    fraction = models.FloatField(null=True, blank=True)
    description = models.TextField(null=True, blank=True)
    is_active = models.BooleanField(default=True)
    created_at = models.DateTimeField(auto_now_add=True)
    updated_at = models.DateTimeField(auto_now=True)

    delivery_type = models.CharField(
        max_length=20,
        choices=DELIVERY_TYPE_CHOICES,
        default='activemq'
    )

    # Connection details; only meaningful for SSE subscribers.
    client_ip = models.GenericIPAddressField(null=True, blank=True)
    client_location = models.CharField(max_length=255, blank=True, default='')
    connected_at = models.DateTimeField(null=True, blank=True)
    disconnected_at = models.DateTimeField(null=True, blank=True)
    last_activity = models.DateTimeField(null=True, blank=True)

    # JSON filters for SSE message selection (empty dict = no filtering).
    message_filters = models.JSONField(default=dict, blank=True)

    # Delivery counters.
    messages_received = models.IntegerField(default=0)
    messages_sent = models.IntegerField(default=0)
    messages_dropped = models.IntegerField(default=0)

    class Meta:
        db_table = 'swf_subscribers'
        indexes = [
            models.Index(fields=['delivery_type', 'is_active']),
        ]

    def __str__(self):
        return self.subscriber_name
0307
0308
0309
class FastMonFile(models.Model):
    """
    Represents a Time Frame (TF) file for fast monitoring.
    TF files are subsamples of Super Time Frame (STF) files, processed for rapid monitoring.

    Attributes:
        tf_file_id: UUID primary key for unique TF file identification
        stf_file: Foreign key to the parent STF file this TF is derived from
        tf_filename: Unique filename for the TF file
        file_size_bytes: Size of the TF file in bytes
        checksum: File integrity checksum
        status: Current processing status (FileStatus enum)
        metadata: JSON field for flexible storage of TF-specific metadata
        created_at: Timestamp when TF record was created
        updated_at: Timestamp when TF record was last modified
        tf_count: Number of TFs contained in the file, if recorded
        tf_first: Index of the first TF covered, if recorded
        tf_last: Index of the last TF covered, if recorded
    """
    tf_file_id = models.UUIDField(primary_key=True, default=uuid.uuid4, editable=False)
    stf_file = models.ForeignKey(StfFile, on_delete=models.CASCADE, related_name='tf_files')
    tf_filename = models.CharField(max_length=255, unique=True)
    file_size_bytes = models.BigIntegerField(null=True, blank=True)
    checksum = models.CharField(max_length=64, null=True, blank=True)
    status = models.CharField(
        max_length=20,
        choices=FileStatus.choices,
        default=FileStatus.REGISTERED
    )
    metadata = models.JSONField(null=True, blank=True)
    created_at = models.DateTimeField(auto_now_add=True)
    updated_at = models.DateTimeField(auto_now=True)

    # TF range covered by this file (semantics set by the producer).
    tf_count = models.IntegerField(null=True, blank=True)
    tf_first = models.IntegerField(null=True, blank=True)
    tf_last = models.IntegerField(null=True, blank=True)

    class Meta:
        db_table = 'swf_fastmon_files'

    def __str__(self):
        return f"TF File {self.tf_filename}"
0349
0350
class TFSlice(models.Model):
    """
    Represents a Time Frame slice for fast processing workflow.
    Each TF slice is a small portion (~15 per STF sample) that can be
    processed independently by workers in ~30 seconds.
    """
    # Slice identity: slice_id is unique only within its tf_filename
    # (see unique_together below).
    slice_id = models.IntegerField()
    tf_first = models.IntegerField()
    tf_last = models.IntegerField()
    tf_count = models.IntegerField()
    tf_filename = models.CharField(max_length=255, db_index=True)
    stf_filename = models.CharField(max_length=255, db_index=True)
    run_number = models.IntegerField(db_index=True)
    # Free-form processing state; new slices start as 'queued'.
    status = models.CharField(max_length=20, default='queued')
    retries = models.IntegerField(default=0)
    metadata = models.JSONField(null=True, blank=True)
    created_at = models.DateTimeField(auto_now_add=True)
    updated_at = models.DateTimeField(auto_now=True)

    # Worker-assignment tracking.
    assigned_worker = models.CharField(max_length=255, null=True, blank=True)
    assigned_at = models.DateTimeField(null=True, blank=True)
    completed_at = models.DateTimeField(null=True, blank=True)

    class Meta:
        db_table = 'swf_tf_slices'
        indexes = [
            models.Index(fields=['run_number', 'status']),
            models.Index(fields=['stf_filename', 'status']),
            models.Index(fields=['status', 'created_at']),
        ]
        unique_together = [['tf_filename', 'slice_id']]

    def __str__(self):
        return f"Slice {self.slice_id} of {self.tf_filename}"
0386
0387
class Worker(models.Model):
    """
    Tracks workers processing TF slices in the fast processing workflow.
    Records both active and inactive workers for historical analysis.
    """
    worker_id = models.CharField(max_length=255, primary_key=True)
    run_number = models.IntegerField(db_index=True)
    panda_job = models.CharField(max_length=255)
    location = models.CharField(max_length=255)
    status = models.CharField(max_length=20)
    # What the worker is working on right now (null when idle/finished).
    current_slice_id = models.IntegerField(null=True, blank=True)
    tf_filename = models.CharField(max_length=255, null=True, blank=True)
    slices_completed = models.IntegerField(default=0)
    last_heartbeat = models.DateTimeField(null=True, blank=True)
    started_at = models.DateTimeField()
    # Null while the worker is still alive.
    ended_at = models.DateTimeField(null=True, blank=True)
    metadata = models.JSONField(null=True, blank=True)

    class Meta:
        db_table = 'swf_workers'
        indexes = [
            models.Index(fields=['run_number', 'status']),
        ]

    def __str__(self):
        return f"Worker {self.worker_id}"
0414
0415
class RunState(models.Model):
    """
    Tracks the current processing state for each run in the fast processing workflow.
    Provides quick access to run-level statistics and status.
    """
    # One row per run; the run number itself is the primary key.
    run_number = models.IntegerField(primary_key=True)
    phase = models.CharField(max_length=20)
    state = models.CharField(max_length=20)
    substate = models.CharField(max_length=20, null=True, blank=True)
    # Worker pool: desired vs currently active size.
    target_worker_count = models.IntegerField()
    active_worker_count = models.IntegerField(default=0)
    # Slice pipeline counters.
    stf_samples_received = models.IntegerField(default=0)
    slices_created = models.IntegerField(default=0)
    slices_queued = models.IntegerField(default=0)
    slices_processing = models.IntegerField(default=0)
    slices_completed = models.IntegerField(default=0)
    slices_failed = models.IntegerField(default=0)
    state_changed_at = models.DateTimeField()
    updated_at = models.DateTimeField(auto_now=True)
    metadata = models.JSONField(null=True, blank=True)

    class Meta:
        db_table = 'swf_run_state'

    def __str__(self):
        return f"Run {self.run_number} - {self.state}/{self.phase}"
0442
0443
class SystemStateEvent(models.Model):
    """
    Event log for time-travel replay of system state.
    Records all significant events in the fast processing workflow.
    """
    event_id = models.AutoField(primary_key=True)
    timestamp = models.DateTimeField(db_index=True)
    run_number = models.IntegerField(db_index=True)
    event_type = models.CharField(max_length=50, db_index=True)
    state = models.CharField(max_length=20, db_index=True)
    substate = models.CharField(max_length=20, null=True, blank=True, db_index=True)
    # Full event payload; schema varies by event_type.
    event_data = models.JSONField()

    class Meta:
        db_table = 'swf_system_state_events'
        indexes = [
            models.Index(fields=['timestamp', 'run_number']),
        ]

    def __str__(self):
        return f"Event {self.event_id} - {self.event_type} at {self.timestamp}"
0465
0466
class PersistentState(models.Model):
    """
    Persistent state store with stable schema - just stores JSON.
    Never modify this schema - it must remain stable across all deployments.

    Single record (id=1) stores all persistent state as JSON blob.
    Use get_state() and update_state() methods to access nested data.
    The get_next_*() classmethods hand out monotonically increasing
    identifiers atomically via a row-level lock on the singleton row.
    """
    id = models.AutoField(primary_key=True)
    state_data = models.JSONField(default=dict)
    updated_at = models.DateTimeField(auto_now=True)

    class Meta:
        db_table = 'swf_persistent_state'

    @classmethod
    def _locked_singleton(cls, defaults=None):
        """Fetch (or create) the singleton row under a row lock.

        Must be called inside an open transaction (select_for_update()
        requires one). ``defaults`` seeds state_data only on first creation.
        """
        obj, _created = cls.objects.select_for_update().get_or_create(
            id=1,
            defaults={'state_data': defaults if defaults is not None else {}},
        )
        return obj

    @classmethod
    def get_state(cls):
        """Get the complete state JSON object."""
        obj, created = cls.objects.get_or_create(id=1, defaults={'state_data': {}})
        return obj.state_data

    @classmethod
    def update_state(cls, updates):
        """Update state with new values (shallow dict merge) and return the result."""
        from django.db import transaction
        with transaction.atomic():
            obj = cls._locked_singleton()
            obj.state_data.update(updates)
            obj.save()
        return obj.state_data

    @classmethod
    def _get_next_counter(cls, counter_key, last_key, time_key, initial):
        """Atomically consume and return the next value of a named counter.

        Reads state_data[counter_key] (seeding it with ``initial`` if the key
        is missing — e.g. the row predates this counter), stores counter+1
        back, and records the consumed value under ``last_key`` plus the
        current time (ISO format) under ``time_key``.

        Returns:
            The consumed counter value (an int).
        """
        from django.db import transaction
        with transaction.atomic():
            obj = cls._locked_singleton(defaults={
                counter_key: initial,
                last_key: None,
                time_key: None,
            })
            current = obj.state_data.get(counter_key, initial)
            obj.state_data.update({
                counter_key: current + 1,
                last_key: current,
                time_key: timezone.now().isoformat(),
            })
            obj.save()
        return current

    @classmethod
    def get_next_run_number(cls):
        """Get next run number atomically and update last run info."""
        # Run numbers start at 100010 by convention (original seed value).
        return cls._get_next_counter(
            'next_run_number', 'last_run_number', 'last_run_start_time',
            initial=100010,
        )

    @classmethod
    def get_next_agent_id(cls):
        """Get next agent ID atomically and update last agent info."""
        return cls._get_next_counter(
            'next_agent_id', 'last_agent_id', 'last_agent_registration_time',
            initial=1,
        )

    @classmethod
    def get_next_workflow_execution_id(cls):
        """Get next workflow execution sequence number atomically."""
        return cls._get_next_counter(
            'next_workflow_execution_id', 'last_workflow_execution_id',
            'last_workflow_execution_time', initial=1,
        )
0594
0595
class PandaQueue(models.Model):
    """
    Represents a PanDA compute queue configuration.
    Stores the queue name and full configuration as JSON.
    """
    # NOTE(review): primary_key=True already implies a unique constraint, so
    # unique=True is redundant here; kept as-is to avoid a schema migration.
    queue_name = models.CharField(max_length=100, unique=True, primary_key=True)
    site = models.CharField(max_length=100, blank=True)
    status = models.CharField(max_length=50, default='active')
    queue_type = models.CharField(max_length=50, blank=True)
    # Full queue configuration blob.
    config_data = models.JSONField()
    created_at = models.DateTimeField(auto_now_add=True)
    updated_at = models.DateTimeField(auto_now=True)

    class Meta:
        db_table = 'swf_panda_queues'
        ordering = ['queue_name']
        verbose_name = 'PanDA Queue'
        verbose_name_plural = 'PanDA Queues'

    def __str__(self):
        return self.queue_name
0617
0618
class RucioEndpoint(models.Model):
    """
    Represents a Rucio DDM (Distributed Data Management) endpoint configuration.
    Stores the endpoint name and full configuration as JSON.
    """
    # NOTE(review): primary_key=True already implies a unique constraint, so
    # unique=True is redundant here; kept as-is to avoid a schema migration.
    endpoint_name = models.CharField(max_length=100, unique=True, primary_key=True)
    site = models.CharField(max_length=100, blank=True)
    endpoint_type = models.CharField(max_length=50, blank=True)
    is_tape = models.BooleanField(default=False)
    is_active = models.BooleanField(default=True)
    # Full endpoint configuration blob.
    config_data = models.JSONField()
    created_at = models.DateTimeField(auto_now_add=True)
    updated_at = models.DateTimeField(auto_now=True)

    class Meta:
        db_table = 'swf_rucio_endpoints'
        ordering = ['endpoint_name']
        verbose_name = 'Rucio Endpoint'
        verbose_name_plural = 'Rucio Endpoints'

    def __str__(self):
        return self.endpoint_name
0641
0642
class AIMemory(models.Model):
    """
    AI dialogue history for cross-session context.

    Records exchanges between developers and AI assistants (Claude Code)
    to enable context continuity across sessions. Opt-in via SWF_DIALOGUE_TURNS env var.
    """
    id = models.AutoField(primary_key=True)
    username = models.CharField(max_length=100, db_index=True,
                                help_text="Developer username")
    session_id = models.CharField(max_length=255, db_index=True,
                                  help_text="Claude Code session ID")
    role = models.CharField(max_length=20,
                            help_text="'user' or 'assistant'")
    content = models.TextField(help_text="Message content")
    created_at = models.DateTimeField(auto_now_add=True, db_index=True)

    # Optional context about where the dialogue happened.
    namespace = models.CharField(max_length=100, null=True, blank=True,
                                 help_text="Testbed namespace if applicable")
    project_path = models.CharField(max_length=500, null=True, blank=True,
                                    help_text="Project directory path")

    class Meta:
        db_table = 'swf_ai_memory'
        ordering = ['-created_at']
        indexes = [
            models.Index(fields=['username', '-created_at']),
        ]

    def __str__(self):
        # Truncate long messages so admin/list displays stay readable.
        preview = self.content[:50] + '...' if len(self.content) > 50 else self.content
        return f"{self.username}/{self.role}: {preview}"
0676
0677
class DataProvenance(models.Model):
    """
    Data Provenance ID (DPID) — tracks every MCP tool call made by the bot.
    Provides an independent verification that data in a bot response
    came from a real tool call, not LLM fabrication.
    """
    # Short unique token quoted in bot responses; looked up here to verify
    # the corresponding tool call really happened.
    dpid = models.CharField(max_length=12, unique=True, db_index=True)
    tool_name = models.CharField(max_length=100)
    tool_args = models.JSONField(default=dict)
    created_at = models.DateTimeField(auto_now_add=True)

    class Meta:
        db_table = 'swf_data_provenance'
        ordering = ['-created_at']

    def __str__(self):
        return f"DPID:{self.dpid} {self.tool_name}"
0695
0696
0697
0698 from .workflow_models import (
0699 STFWorkflow,
0700 AgentWorkflowStage,
0701 WorkflowMessage,
0702 DAQState,
0703 DAQSubstate,
0704 WorkflowStatus,
0705 AgentType,
0706 )