{
  "type": "bundle",
  "id": "bundle--4fad99fd-bde0-40eb-8fbf-ade474cc4d6a",
  "objects": [
    {
      "type": "attack-pattern",
      "spec_version": "2.1",
      "id": "attack-pattern--7b6c328e-b050-4d76-8e11-ff3b3fe7dea3",
      "created_by_ref": "identity--f1a0f560-2d9e-4c5d-bf47-7e96e805de82",
      "created": "2023-09-28T21:25:13.233365Z",
      "modified": "2023-09-28T21:25:13.233365Z",
      "name": "Develop AI-Generated Images (Deepfakes)",
      "description": "Deepfakes refer to AI-generated falsified photos, videos, or soundbites. An influence operation may use deepfakes to depict an inauthentic situation by synthetically recreating an individual\u2019s face, body, voice, and physical gestures.",
      "kill_chain_phases": [
        {
          "kill_chain_name": "mitre-attack",
          "phase_name": "develop-content"
        }
      ],
      "external_references": [
        {
          "source_name": "mitre-attack",
          "url": "https://github.com/DISARMFoundation/DISARMframeworks/blob/main/generated_pages/techniques/T0086.002.md",
          "external_id": "T0086.002"
        }
      ],
      "object_marking_refs": [
        "marking-definition--f79f25d2-8b96-4580-b169-eb7b613a7c31"
      ],
      "x_mitre_is_subtechnique": true,
      "x_mitre_platforms": [
        "Windows",
        "Linux",
        "Mac"
      ],
      "x_mitre_version": "2.1"
    }
  ]
}