{ "type": "bundle", "id": "bundle--a1ebb9d0-8c55-4dae-b7c7-65a194fb844c", "objects": [ { "type": "attack-pattern", "spec_version": "2.1", "id": "attack-pattern--e60f54a3-9972-43b8-8359-ee21d781acae", "created_by_ref": "identity--f1a0f560-2d9e-4c5d-bf47-7e96e805de82", "created": "2024-08-02T17:12:32.376186Z", "modified": "2024-08-02T17:12:32.376186Z", "name": "Develop AI-Generated Audio (Deepfakes)", "description": "Deepfakes refer to AI-generated falsified photos, videos, or soundbites. An influence operation may use deepfakes to depict an inauthentic situation by synthetically recreating an individual\u2019s face, body, voice, and physical gestures.", "kill_chain_phases": [ { "kill_chain_name": "mitre-attack", "phase_name": "develop-content" } ], "external_references": [ { "source_name": "mitre-attack", "url": "https://github.com/DISARMFoundation/DISARMframeworks/blob/main/generated_pages/techniques/T0088.001.md", "external_id": "T0088.001" } ], "object_marking_refs": [ "marking-definition--f79f25d2-8b96-4580-b169-eb7b613a7c31" ], "x_mitre_is_subtechnique": true, "x_mitre_platforms": [ "Windows", "Linux", "Mac" ], "x_mitre_version": "2.1" } ] }