Job Instance
API endpoint that allows jobs to be viewed or edited.
Pass the start_now url parameter to the POST method to enqueue the job immediately.
Use the delay field to create a test job with fake duration of work (in seconds).
Actions
/jobs/{id}/run/ (POST)
Run a job (add it to the queue).
/jobs/{id}/cancel/ (POST)
Cancel a job (terminate the background task).
GET /api/v2/jobs/1121/?format=api
{ "id": 1121, "details": "https://api.beluga.insectai.org/api/v2/jobs/1121/?format=api", "name": "Sync captures for deployment 255", "delay": 0, "limit": null, "shuffle": true, "project": { "id": 90, "name": "Singapore- Labelling Project", "details": "https://api.beluga.insectai.org/api/v2/projects/90/?format=api", "user_permissions": [] }, "deployment": { "id": 255, "name": "BTNR", "details": "https://api.beluga.insectai.org/api/v2/deployments/255/?format=api", "user_permissions": [] }, "source_image_collection": null, "source_image_single": null, "pipeline": null, "status": "SUCCESS", "created_at": "2025-01-21T10:55:49.813935", "updated_at": "2025-01-21T14:01:18.764016", "started_at": "2025-01-21T13:58:08.493423", "finished_at": "2025-01-21T14:01:18.540102", "duration": "190.046679", "progress": { "summary": { "status": "SUCCESS", "progress": 1.0 }, "stages": [ { "status": "SUCCESS", "progress": 1.0, "key": "data_storage_sync", "name": "Data storage sync", "params": [ { "name": "Total files", "key": "total_files", "category": "default", "value": 53608 } ] }, { "status": "SUCCESS", "progress": 1.0, "key": "update_deployment_cache", "name": "Update deployment cache", "params": [] } ], "errors": [], "logs": [ "[2025-01-21 14:01:18] INFO Finished job #1121 \"Sync captures for deployment 255\" (SUCCESS)", "[2025-01-21 14:01:18] INFO Changing status of job 1121 to SUCCESS", "[2025-01-21 14:01:18] INFO Finished syncing captures for deployment #255 BTNR", "[2025-01-21 14:01:17] INFO Finished syncing captures for deployment #255 BTNR", "[2025-01-21 14:01:16] INFO Saving and recalculating sessions for deployment", "[2025-01-21 14:01:16] INFO Processed 53608 files", "[2025-01-21 14:01:15] INFO Processed 53608 files", "[2025-01-21 14:01:13] INFO Processed 53000 files", "[2025-01-21 14:01:12] INFO Processed 53000 files", "[2025-01-21 14:01:09] INFO Processed 52000 files", "[2025-01-21 14:01:08] INFO Processed 52000 files", "[2025-01-21 14:01:05] INFO Processed 51000 files", 
"[2025-01-21 14:01:01] INFO Processed 50000 files", "[2025-01-21 14:00:58] INFO Processed 49000 files", "[2025-01-21 14:00:57] INFO Processed 49000 files", "[2025-01-21 14:00:54] INFO Processed 48000 files", "[2025-01-21 14:00:51] INFO Processed 47000 files", "[2025-01-21 14:00:50] INFO Processed 47000 files", "[2025-01-21 14:00:47] INFO Processed 46000 files", "[2025-01-21 14:00:43] INFO Processed 45000 files", "[2025-01-21 14:00:40] INFO Processed 44000 files", "[2025-01-21 14:00:39] INFO Processed 44000 files", "[2025-01-21 14:00:36] INFO Processed 43000 files", "[2025-01-21 14:00:33] INFO Processed 42000 files", "[2025-01-21 14:00:32] INFO Processed 42000 files", "[2025-01-21 14:00:29] INFO Processed 41000 files", "[2025-01-21 14:00:25] INFO Processed 40000 files", "[2025-01-21 14:00:22] INFO Processed 39000 files", "[2025-01-21 14:00:18] INFO Processed 38000 files", "[2025-01-21 14:00:14] INFO Processed 37000 files", "[2025-01-21 14:00:11] INFO Processed 36000 files", "[2025-01-21 14:00:08] INFO Processed 35000 files", "[2025-01-21 14:00:07] INFO Processed 35000 files", "[2025-01-21 14:00:04] INFO Processed 34000 files", "[2025-01-21 14:00:01] INFO Processed 33000 files", "[2025-01-21 14:00:00] INFO Processed 33000 files", "[2025-01-21 13:59:57] INFO Processed 32000 files", "[2025-01-21 13:59:53] INFO Processed 31000 files", "[2025-01-21 13:59:50] INFO Processed 30000 files", "[2025-01-21 13:59:47] INFO Processed 29000 files", "[2025-01-21 13:59:46] INFO Processed 29000 files", "[2025-01-21 13:59:43] INFO Processed 28000 files", "[2025-01-21 13:59:40] INFO Processed 27000 files", "[2025-01-21 13:59:39] INFO Processed 27000 files", "[2025-01-21 13:59:36] INFO Processed 26000 files", "[2025-01-21 13:59:33] INFO Processed 25000 files", "[2025-01-21 13:59:29] INFO Processed 24000 files", "[2025-01-21 13:59:26] INFO Processed 23000 files", "[2025-01-21 13:59:23] INFO Processed 22000 files", "[2025-01-21 13:59:20] INFO Processed 21000 files", "[2025-01-21 13:59:19] 
INFO Processed 21000 files", "[2025-01-21 13:59:16] INFO Processed 20000 files", "[2025-01-21 13:59:13] INFO Processed 19000 files", "[2025-01-21 13:59:12] INFO Processed 19000 files", "[2025-01-21 13:59:09] INFO Processed 18000 files", "[2025-01-21 13:59:06] INFO Processed 17000 files", "[2025-01-21 13:59:02] INFO Processed 16000 files", "[2025-01-21 13:58:59] INFO Processed 15000 files", "[2025-01-21 13:58:56] INFO Processed 14000 files", "[2025-01-21 13:58:55] INFO Processed 14000 files", "[2025-01-21 13:58:52] INFO Processed 13000 files", "[2025-01-21 13:58:49] INFO Processed 12000 files", "[2025-01-21 13:58:45] INFO Processed 11000 files", "[2025-01-21 13:58:42] INFO Processed 10000 files", "[2025-01-21 13:58:39] INFO Processed 9000 files", "[2025-01-21 13:58:35] INFO Processed 8000 files", "[2025-01-21 13:58:32] INFO Processed 7000 files", "[2025-01-21 13:58:28] INFO Processed 6000 files", "[2025-01-21 13:58:25] INFO Processed 5000 files", "[2025-01-21 13:58:21] INFO Processed 4000 files", "[2025-01-21 13:58:18] INFO Processed 3000 files", "[2025-01-21 13:58:15] INFO Processed 2000 files", "[2025-01-21 13:58:14] INFO Processed 2000 files", "[2025-01-21 13:58:11] INFO Processed 1000 files", "[2025-01-21 13:58:08] INFO Syncing captures for deployment #255 BTNR", "[2025-01-21 13:58:08] INFO Changing status of job 1121 to STARTED", "[2025-01-21 13:58:08] INFO Running job #1121 \"Sync captures for deployment 255\" (PENDING)" ] }, "logs": { "stdout": [ "[2025-01-21 14:01:18] INFO Finished job #1121 \"Sync captures for deployment 255\" (SUCCESS)", "[2025-01-21 14:01:18] INFO Changing status of job 1121 to SUCCESS", "[2025-01-21 14:01:18] INFO Finished syncing captures for deployment #255 BTNR", "[2025-01-21 14:01:17] INFO Finished syncing captures for deployment #255 BTNR", "[2025-01-21 14:01:16] INFO Saving and recalculating sessions for deployment", "[2025-01-21 14:01:16] INFO Processed 53608 files", "[2025-01-21 14:01:15] INFO Processed 53608 files", "[2025-01-21 
14:01:13] INFO Processed 53000 files", "[2025-01-21 14:01:12] INFO Processed 53000 files", "[2025-01-21 14:01:09] INFO Processed 52000 files", "[2025-01-21 14:01:08] INFO Processed 52000 files", "[2025-01-21 14:01:05] INFO Processed 51000 files", "[2025-01-21 14:01:01] INFO Processed 50000 files", "[2025-01-21 14:00:58] INFO Processed 49000 files", "[2025-01-21 14:00:57] INFO Processed 49000 files", "[2025-01-21 14:00:54] INFO Processed 48000 files", "[2025-01-21 14:00:51] INFO Processed 47000 files", "[2025-01-21 14:00:50] INFO Processed 47000 files", "[2025-01-21 14:00:47] INFO Processed 46000 files", "[2025-01-21 14:00:43] INFO Processed 45000 files", "[2025-01-21 14:00:40] INFO Processed 44000 files", "[2025-01-21 14:00:39] INFO Processed 44000 files", "[2025-01-21 14:00:36] INFO Processed 43000 files", "[2025-01-21 14:00:33] INFO Processed 42000 files", "[2025-01-21 14:00:32] INFO Processed 42000 files", "[2025-01-21 14:00:29] INFO Processed 41000 files", "[2025-01-21 14:00:25] INFO Processed 40000 files", "[2025-01-21 14:00:22] INFO Processed 39000 files", "[2025-01-21 14:00:18] INFO Processed 38000 files", "[2025-01-21 14:00:14] INFO Processed 37000 files", "[2025-01-21 14:00:11] INFO Processed 36000 files", "[2025-01-21 14:00:08] INFO Processed 35000 files", "[2025-01-21 14:00:07] INFO Processed 35000 files", "[2025-01-21 14:00:04] INFO Processed 34000 files", "[2025-01-21 14:00:01] INFO Processed 33000 files", "[2025-01-21 14:00:00] INFO Processed 33000 files", "[2025-01-21 13:59:57] INFO Processed 32000 files", "[2025-01-21 13:59:53] INFO Processed 31000 files", "[2025-01-21 13:59:50] INFO Processed 30000 files", "[2025-01-21 13:59:47] INFO Processed 29000 files", "[2025-01-21 13:59:46] INFO Processed 29000 files", "[2025-01-21 13:59:43] INFO Processed 28000 files", "[2025-01-21 13:59:40] INFO Processed 27000 files", "[2025-01-21 13:59:39] INFO Processed 27000 files", "[2025-01-21 13:59:36] INFO Processed 26000 files", "[2025-01-21 13:59:33] INFO 
Processed 25000 files", "[2025-01-21 13:59:29] INFO Processed 24000 files", "[2025-01-21 13:59:26] INFO Processed 23000 files", "[2025-01-21 13:59:23] INFO Processed 22000 files", "[2025-01-21 13:59:20] INFO Processed 21000 files", "[2025-01-21 13:59:19] INFO Processed 21000 files", "[2025-01-21 13:59:16] INFO Processed 20000 files", "[2025-01-21 13:59:13] INFO Processed 19000 files", "[2025-01-21 13:59:12] INFO Processed 19000 files", "[2025-01-21 13:59:09] INFO Processed 18000 files", "[2025-01-21 13:59:06] INFO Processed 17000 files", "[2025-01-21 13:59:02] INFO Processed 16000 files", "[2025-01-21 13:58:59] INFO Processed 15000 files", "[2025-01-21 13:58:56] INFO Processed 14000 files", "[2025-01-21 13:58:55] INFO Processed 14000 files", "[2025-01-21 13:58:52] INFO Processed 13000 files", "[2025-01-21 13:58:49] INFO Processed 12000 files", "[2025-01-21 13:58:45] INFO Processed 11000 files", "[2025-01-21 13:58:42] INFO Processed 10000 files", "[2025-01-21 13:58:39] INFO Processed 9000 files", "[2025-01-21 13:58:35] INFO Processed 8000 files", "[2025-01-21 13:58:32] INFO Processed 7000 files", "[2025-01-21 13:58:28] INFO Processed 6000 files", "[2025-01-21 13:58:25] INFO Processed 5000 files", "[2025-01-21 13:58:21] INFO Processed 4000 files", "[2025-01-21 13:58:18] INFO Processed 3000 files", "[2025-01-21 13:58:15] INFO Processed 2000 files", "[2025-01-21 13:58:14] INFO Processed 2000 files", "[2025-01-21 13:58:11] INFO Processed 1000 files", "[2025-01-21 13:58:08] INFO Syncing captures for deployment #255 BTNR", "[2025-01-21 13:58:08] INFO Changing status of job 1121 to STARTED", "[2025-01-21 13:58:08] INFO Running job #1121 \"Sync captures for deployment 255\" (PENDING)" ], "stderr": [] }, "job_type": { "name": "Data storage sync", "key": "data_storage_sync" }, "data_export": null, "dispatch_mode": "internal", "result": null, "user_permissions": [] }