Job Instance
API endpoint that allows jobs to be viewed or edited.
Pass the start_now url parameter to the POST method to enqueue the job immediately.
Use the delay field to create a test job with a simulated work duration (in seconds).
Actions
/jobs/{id}/run/ (POST)
Run a job (add it to the queue).
/jobs/{id}/cancel/ (POST)
Cancel a job (terminate the background task).
GET /api/v2/jobs/1125/?format=api
{ "id": 1125, "details": "https://api.beluga.insectai.org/api/v2/jobs/1125/?format=api", "name": "Sync captures for deployment 256", "delay": 0, "limit": null, "shuffle": true, "project": { "id": 90, "name": "Singapore- Labelling Project", "details": "https://api.beluga.insectai.org/api/v2/projects/90/?format=api", "user_permissions": [] }, "deployment": { "id": 256, "name": "W47", "details": "https://api.beluga.insectai.org/api/v2/deployments/256/?format=api", "user_permissions": [] }, "source_image_collection": null, "source_image_single": null, "pipeline": null, "status": "SUCCESS", "created_at": "2025-01-21T10:57:30.145587", "updated_at": "2025-01-21T14:08:28.997160", "started_at": "2025-01-21T14:05:31.630764", "finished_at": "2025-01-21T14:08:28.243755", "duration": "176.612991", "progress": { "summary": { "status": "SUCCESS", "progress": 1.0 }, "stages": [ { "status": "SUCCESS", "progress": 1.0, "key": "data_storage_sync", "name": "Data storage sync", "params": [ { "name": "Total files", "key": "total_files", "category": "default", "value": 50389 } ] }, { "status": "SUCCESS", "progress": 1.0, "key": "update_deployment_cache", "name": "Update deployment cache", "params": [] } ], "errors": [], "logs": [ "[2025-01-21 14:08:28] INFO Finished job #1125 \"Sync captures for deployment 256\" (SUCCESS)", "[2025-01-21 14:08:28] INFO Changing status of job 1125 to SUCCESS", "[2025-01-21 14:08:27] INFO Changing status of job 1125 to SUCCESS", "[2025-01-21 14:08:27] INFO Finished syncing captures for deployment #256 W47", "[2025-01-21 14:08:26] INFO Saving and recalculating sessions for deployment", "[2025-01-21 14:08:26] INFO Processed 50389 files", "[2025-01-21 14:08:25] INFO Processed 50389 files", "[2025-01-21 14:08:23] INFO Processed 50000 files", "[2025-01-21 14:08:20] INFO Processed 49000 files", "[2025-01-21 14:08:19] INFO Processed 49000 files", "[2025-01-21 14:08:16] INFO Processed 48000 files", "[2025-01-21 14:08:13] INFO Processed 47000 files", "[2025-01-21 
14:08:12] INFO Processed 47000 files", "[2025-01-21 14:08:09] INFO Processed 46000 files", "[2025-01-21 14:08:06] INFO Processed 45000 files", "[2025-01-21 14:08:05] INFO Processed 45000 files", "[2025-01-21 14:08:02] INFO Processed 44000 files", "[2025-01-21 14:07:59] INFO Processed 43000 files", "[2025-01-21 14:07:58] INFO Processed 43000 files", "[2025-01-21 14:07:55] INFO Processed 42000 files", "[2025-01-21 14:07:54] INFO Processed 42000 files", "[2025-01-21 14:07:51] INFO Processed 41000 files", "[2025-01-21 14:07:47] INFO Processed 40000 files", "[2025-01-21 14:07:44] INFO Processed 39000 files", "[2025-01-21 14:07:41] INFO Processed 38000 files", "[2025-01-21 14:07:40] INFO Processed 38000 files", "[2025-01-21 14:07:37] INFO Processed 37000 files", "[2025-01-21 14:07:34] INFO Processed 36000 files", "[2025-01-21 14:07:33] INFO Processed 36000 files", "[2025-01-21 14:07:30] INFO Processed 35000 files", "[2025-01-21 14:07:27] INFO Processed 34000 files", "[2025-01-21 14:07:23] INFO Processed 33000 files", "[2025-01-21 14:07:20] INFO Processed 32000 files", "[2025-01-21 14:07:19] INFO Processed 32000 files", "[2025-01-21 14:07:16] INFO Processed 31000 files", "[2025-01-21 14:07:12] INFO Processed 30000 files", "[2025-01-21 14:07:09] INFO Processed 29000 files", "[2025-01-21 14:07:05] INFO Processed 28000 files", "[2025-01-21 14:07:02] INFO Processed 27000 files", "[2025-01-21 14:07:01] INFO Processed 27000 files", "[2025-01-21 14:06:58] INFO Processed 26000 files", "[2025-01-21 14:06:55] INFO Processed 25000 files", "[2025-01-21 14:06:52] INFO Processed 24000 files", "[2025-01-21 14:06:51] INFO Processed 24000 files", "[2025-01-21 14:06:47] INFO Processed 23000 files", "[2025-01-21 14:06:44] INFO Processed 22000 files", "[2025-01-21 14:06:41] INFO Processed 21000 files", "[2025-01-21 14:06:37] INFO Processed 20000 files", "[2025-01-21 14:06:34] INFO Processed 19000 files", "[2025-01-21 14:06:31] INFO Processed 18000 files", "[2025-01-21 14:06:27] INFO 
Processed 17000 files", "[2025-01-21 14:06:24] INFO Processed 16000 files", "[2025-01-21 14:06:21] INFO Processed 15000 files", "[2025-01-21 14:06:18] INFO Processed 14000 files", "[2025-01-21 14:06:17] INFO Processed 14000 files", "[2025-01-21 14:06:14] INFO Processed 13000 files", "[2025-01-21 14:06:11] INFO Processed 12000 files", "[2025-01-21 14:06:08] INFO Processed 11000 files", "[2025-01-21 14:06:07] INFO Processed 11000 files", "[2025-01-21 14:06:04] INFO Processed 10000 files", "[2025-01-21 14:06:01] INFO Processed 9000 files", "[2025-01-21 14:05:57] INFO Processed 8000 files", "[2025-01-21 14:05:54] INFO Processed 7000 files", "[2025-01-21 14:05:51] INFO Processed 6000 files", "[2025-01-21 14:05:47] INFO Processed 5000 files", "[2025-01-21 14:05:44] INFO Processed 4000 files", "[2025-01-21 14:05:41] INFO Processed 3000 files", "[2025-01-21 14:05:37] INFO Processed 2000 files", "[2025-01-21 14:05:34] INFO Processed 1000 files", "[2025-01-21 14:05:31] INFO Syncing captures for deployment #256 W47", "[2025-01-21 14:05:31] INFO Changing status of job 1125 to STARTED", "[2025-01-21 14:05:31] INFO Running job #1125 \"Sync captures for deployment 256\" (PENDING)" ] }, "logs": { "stdout": [ "[2025-01-21 14:08:28] INFO Finished job #1125 \"Sync captures for deployment 256\" (SUCCESS)", "[2025-01-21 14:08:28] INFO Changing status of job 1125 to SUCCESS", "[2025-01-21 14:08:27] INFO Changing status of job 1125 to SUCCESS", "[2025-01-21 14:08:27] INFO Finished syncing captures for deployment #256 W47", "[2025-01-21 14:08:26] INFO Saving and recalculating sessions for deployment", "[2025-01-21 14:08:26] INFO Processed 50389 files", "[2025-01-21 14:08:25] INFO Processed 50389 files", "[2025-01-21 14:08:23] INFO Processed 50000 files", "[2025-01-21 14:08:20] INFO Processed 49000 files", "[2025-01-21 14:08:19] INFO Processed 49000 files", "[2025-01-21 14:08:16] INFO Processed 48000 files", "[2025-01-21 14:08:13] INFO Processed 47000 files", "[2025-01-21 14:08:12] INFO 
Processed 47000 files", "[2025-01-21 14:08:09] INFO Processed 46000 files", "[2025-01-21 14:08:06] INFO Processed 45000 files", "[2025-01-21 14:08:05] INFO Processed 45000 files", "[2025-01-21 14:08:02] INFO Processed 44000 files", "[2025-01-21 14:07:59] INFO Processed 43000 files", "[2025-01-21 14:07:58] INFO Processed 43000 files", "[2025-01-21 14:07:55] INFO Processed 42000 files", "[2025-01-21 14:07:54] INFO Processed 42000 files", "[2025-01-21 14:07:51] INFO Processed 41000 files", "[2025-01-21 14:07:47] INFO Processed 40000 files", "[2025-01-21 14:07:44] INFO Processed 39000 files", "[2025-01-21 14:07:41] INFO Processed 38000 files", "[2025-01-21 14:07:40] INFO Processed 38000 files", "[2025-01-21 14:07:37] INFO Processed 37000 files", "[2025-01-21 14:07:34] INFO Processed 36000 files", "[2025-01-21 14:07:33] INFO Processed 36000 files", "[2025-01-21 14:07:30] INFO Processed 35000 files", "[2025-01-21 14:07:27] INFO Processed 34000 files", "[2025-01-21 14:07:23] INFO Processed 33000 files", "[2025-01-21 14:07:20] INFO Processed 32000 files", "[2025-01-21 14:07:19] INFO Processed 32000 files", "[2025-01-21 14:07:16] INFO Processed 31000 files", "[2025-01-21 14:07:12] INFO Processed 30000 files", "[2025-01-21 14:07:09] INFO Processed 29000 files", "[2025-01-21 14:07:05] INFO Processed 28000 files", "[2025-01-21 14:07:02] INFO Processed 27000 files", "[2025-01-21 14:07:01] INFO Processed 27000 files", "[2025-01-21 14:06:58] INFO Processed 26000 files", "[2025-01-21 14:06:55] INFO Processed 25000 files", "[2025-01-21 14:06:52] INFO Processed 24000 files", "[2025-01-21 14:06:51] INFO Processed 24000 files", "[2025-01-21 14:06:47] INFO Processed 23000 files", "[2025-01-21 14:06:44] INFO Processed 22000 files", "[2025-01-21 14:06:41] INFO Processed 21000 files", "[2025-01-21 14:06:37] INFO Processed 20000 files", "[2025-01-21 14:06:34] INFO Processed 19000 files", "[2025-01-21 14:06:31] INFO Processed 18000 files", "[2025-01-21 14:06:27] INFO Processed 17000 files", 
"[2025-01-21 14:06:24] INFO Processed 16000 files", "[2025-01-21 14:06:21] INFO Processed 15000 files", "[2025-01-21 14:06:18] INFO Processed 14000 files", "[2025-01-21 14:06:17] INFO Processed 14000 files", "[2025-01-21 14:06:14] INFO Processed 13000 files", "[2025-01-21 14:06:11] INFO Processed 12000 files", "[2025-01-21 14:06:08] INFO Processed 11000 files", "[2025-01-21 14:06:07] INFO Processed 11000 files", "[2025-01-21 14:06:04] INFO Processed 10000 files", "[2025-01-21 14:06:01] INFO Processed 9000 files", "[2025-01-21 14:05:57] INFO Processed 8000 files", "[2025-01-21 14:05:54] INFO Processed 7000 files", "[2025-01-21 14:05:51] INFO Processed 6000 files", "[2025-01-21 14:05:47] INFO Processed 5000 files", "[2025-01-21 14:05:44] INFO Processed 4000 files", "[2025-01-21 14:05:41] INFO Processed 3000 files", "[2025-01-21 14:05:37] INFO Processed 2000 files", "[2025-01-21 14:05:34] INFO Processed 1000 files", "[2025-01-21 14:05:31] INFO Syncing captures for deployment #256 W47", "[2025-01-21 14:05:31] INFO Changing status of job 1125 to STARTED", "[2025-01-21 14:05:31] INFO Running job #1125 \"Sync captures for deployment 256\" (PENDING)" ], "stderr": [] }, "job_type": { "name": "Data storage sync", "key": "data_storage_sync" }, "data_export": null, "dispatch_mode": "internal", "result": null, "user_permissions": [] }