diff --git a/generated_files/disarm_blue_framework_clickable.html b/generated_files/disarm_blue_framework_clickable.html index 8a0b9c2..94624f5 100644 --- a/generated_files/disarm_blue_framework_clickable.html +++ b/generated_files/disarm_blue_framework_clickable.html @@ -56,7 +56,7 @@ function handleTechniqueClick(box) { TA14 Develop Narratives TA15 Establish Social Assets TA16 Establish Legitimacy -TA17 Maximize Exposure +TA17 Maximise Exposure TA18 Drive Online Harms @@ -190,9 +190,9 @@ function handleTechniqueClick(box) { C00070 Block access to disinformation resources C00078 Change Search Algorithms for Disinformation Content -C00195 Redirect searches away from disinformation or extremist content +C00195 Redirect searches away from disinformation or extremist content C00118 Repurpose images with new text -C00129 Use banking to cut off access +C00129 Use banking to cut off access @@ -241,11 +241,11 @@ function handleTechniqueClick(box) { C00024 Promote healthy narratives -C00156 Better tell your country or organization story +C00156 Better tell your country or organisation story C00082 Ground truthing as automated response to pollution -C00121 Tool transparency and literacy for channels people follow. +C00121 Tool transparency and literacy for channels people follow. C00200 Respected figure (influencer) disavows misinfo @@ -632,9 +632,9 @@ function handleTechniqueClick(box) { - + - + @@ -649,9 +649,9 @@ function handleTechniqueClick(box) { - + - + diff --git a/generated_files/disarm_red_framework_clickable.html b/generated_files/disarm_red_framework_clickable.html index 4ab627e..7cd54e0 100644 --- a/generated_files/disarm_red_framework_clickable.html +++ b/generated_files/disarm_red_framework_clickable.html @@ -56,14 +56,14 @@ function handleTechniqueClick(box) { TA14 Develop Narratives TA15 Establish Social Assets TA16 Establish Legitimacy -TA17 Maximize Exposure +TA17 Maximise Exposure TA18 Drive Online Harms T0073 Determine Target Audiences T0002 Facilitate State Propaganda T0016 Create Clickbait -T0015 Create Hashtags and Search Artifacts +T0015 Create Hashtags and Search Artefacts T0029 Online Polls T0020 Trial Content T0114 Deliver Ads @@ -83,7 +83,7 @@ function handleTechniqueClick(box) { T0018 Purchase Targeted Advertisements T0019 Generate Information Pollution T0043 Chat Apps -T0039 Bait Legitimate Influencers +T0039 Bait Legitimate Influencers T0114.001 Social Media T0017.001 Conduct Crowdfunding Campaigns T0060 Continue to Amplify @@ -91,19 +91,19 @@ function handleTechniqueClick(box) { T0072.001 Geographic Segmentation T0004 Develop Competing Narratives T0010 Cultivate Ignorant Agents -T0009.001 Utilize Academic/Pseudoscientific Justifications +T0009.001 Utilise Academic/Pseudoscientific Justifications T0049.001 Trolls Amplify and Manipulate T0048 Harass T0075 Dismiss -T0101 Create Localized Content +T0101 Create Localised Content T0019.001 Create Fake Research T0043.001 Use Encrypted Chat Apps T0042 Seed Kernel of Truth T0114.002 Traditional Media -T0057 Organize Events +T0057 Organise Events T0128 Conceal People T0132.002 Content Focused T0072.002 Demographic Segmentation @@ -146,7 +146,7 @@ function handleTechniqueClick(box) { T0022.002 Develop Original Conspiracy Theory Narratives T0014.001 Raise Funds from Malign Actors T0097.001 Backstop Personas -T0049.004 Utilize Spamoflauge +T0049.004 Utilise Spamoflauge T0048.003 Threaten to Dox @@ -155,11 +155,11 @@ function handleTechniqueClick(box) { T0102.002 Create Echo Chambers/Filter Bubbles T0023.001 Reframe Context 
T0103.001 Video Livestream -T0046 Use Search Engine Optimization +T0046 Use Search Engine Optimisation T0115.002 Post Violative Content to Provoke Takedown and Backlash T0061 Sell Merchandise T0128.003 Distance Reputable Individuals from Operation -T0133.001 Behavior Changes +T0133.001 Behaviour Changes T0072.005 Political Segmentation T0040 Demand Insurmountable Proof T0014.002 Raise Funds from Ignorant Agents @@ -225,7 +225,7 @@ function handleTechniqueClick(box) { -T0084.002 Plagiarize Content +T0084.002 Plagiarise Content T0104.002 Dating Apps T0117 Attract Traditional Media @@ -243,7 +243,7 @@ function handleTechniqueClick(box) { -T0084.003 Deceptively Labeled or Translated +T0084.003 Deceptively Labelled or Translated T0104.003 Private/Closed Social Networks @@ -326,7 +326,7 @@ function handleTechniqueClick(box) { T0091.002 Recruit Partisans T0100.003 Co-Opt Influencers -T0120.001 Use Affiliate Marketing Programs +T0120.001 Use Affiliate Marketing Programmes T0124.003 Exploit Platform TOS/Content Moderation @@ -378,7 +378,7 @@ function handleTechniqueClick(box) { T0081.005 Identify Existing Conspiracy Narratives/Suspicions -T0092.001 Create Organizations +T0092.001 Create Organisations T0121.001 Bypass Content Blocking @@ -446,7 +446,7 @@ function handleTechniqueClick(box) { -T0130.002 Utilize Bulletproof Hosting +T0130.002 Utilise Bulletproof Hosting @@ -464,7 +464,7 @@ function handleTechniqueClick(box) { -T0130.003 Use Shell Organizations +T0130.003 Use Shell Organisations @@ -522,7 +522,7 @@ function handleTechniqueClick(box) { -T0094.002 Utilize Butterfly Attacks +T0094.002 Utilise Butterfly Attacks @@ -594,7 +594,7 @@ function handleTechniqueClick(box) { -T0096.002 Outsource Content Creation to External Organizations +T0096.002 Outsource Content Creation to External Organisations @@ -625,7 +625,7 @@ function handleTechniqueClick(box) { - + @@ -643,7 +643,7 @@ function handleTechniqueClick(box) { - + @@ -651,16 +651,16 @@ function handleTechniqueClick(box) { - + - + - + @@ -697,17 +697,17 @@ function handleTechniqueClick(box) { - + - + - + @@ -754,7 +754,7 @@ function handleTechniqueClick(box) { - + @@ -765,7 +765,7 @@ function handleTechniqueClick(box) { - + @@ -808,7 +808,7 @@ function handleTechniqueClick(box) { - + @@ -827,7 +827,7 @@ function handleTechniqueClick(box) { - + @@ -847,11 +847,11 @@ function handleTechniqueClick(box) { - + - + @@ -864,7 +864,7 @@ function handleTechniqueClick(box) { - + @@ -877,7 +877,7 @@ function handleTechniqueClick(box) { - + diff --git a/generated_files/red_framework_ids.csv b/generated_files/red_framework_ids.csv index a4fee48..e8111a3 100644 --- a/generated_files/red_framework_ids.csv +++ b/generated_files/red_framework_ids.csv @@ -1,7 +1,7 @@ P01,P01,P02,P02,P02,P03,P03,P03,P03,P04,P01,P02,P02,P02,P03,P03 TA01,TA02,TA05,TA06,TA07,TA08,TA09,TA10,TA11,TA12,TA13,TA14,TA15,TA16,TA17,TA18 T0073,T0002,T0016,T0015,T0029,T0020,T0114,T0017,T0059,T0132,T0072,T0003,T0007,T0009,T0049,T0047 -T0074,T0066,T0018,T0019,T0043,T0039 ,T0114.001,T0017.001,T0060,T0132.001,T0072.001,T0004,T0010,T0009.001,T0049.001,T0048 +T0074,T0066,T0018,T0019,T0043,T0039,T0114.001,T0017.001,T0060,T0132.001,T0072.001,T0004,T0010,T0009.001,T0049.001,T0048 ,T0075,T0101,T0019.001,T0043.001,T0042,T0114.002,T0057,T0128,T0132.002,T0072.002,T0022,T0013,T0011,T0049.002,T0048.001 ,T0075.001,T0102,T0019.002,T0043.002,T0044,T0115,T0057.001,T0128.001,T0132.003,T0072.003,T0022.001,T0014,T0097,T0049.003,T0048.002 
,T0076,T0102.001,T0023,T0103,T0045,T0115.001,T0057.002,T0128.002,T0133,T0072.004,T0022.002,T0014.001,T0097.001,T0049.004,T0048.003 diff --git a/generated_pages/actortypes/A001.md b/generated_pages/actortypes/A001.md index de657ab..bbd79c0 100644 --- a/generated_pages/actortypes/A001.md +++ b/generated_pages/actortypes/A001.md @@ -1,4 +1,4 @@ -# Actor A001: data scientist +# Actor A001: data scientist * **Summary:** Person who can wrangle data, implement machine learning algorithms etc diff --git a/generated_pages/actortypes/A003.md b/generated_pages/actortypes/A003.md index 9ba29cf..628f146 100644 --- a/generated_pages/actortypes/A003.md +++ b/generated_pages/actortypes/A003.md @@ -1,4 +1,4 @@ -# Actor A003: trusted authority +# Actor A003: trusted authority * **Summary:** Influencer diff --git a/generated_pages/actortypes/A010.md b/generated_pages/actortypes/A010.md index 2f7435c..8f751d8 100644 --- a/generated_pages/actortypes/A010.md +++ b/generated_pages/actortypes/A010.md @@ -1,4 +1,4 @@ -# Actor A010: religious organisation +# Actor A010: religious organisation * **Summary:** diff --git a/generated_pages/actortypes/A011.md b/generated_pages/actortypes/A011.md index 1408338..4cdaa0f 100644 --- a/generated_pages/actortypes/A011.md +++ b/generated_pages/actortypes/A011.md @@ -1,4 +1,4 @@ -# Actor A011: school +# Actor A011: school * **Summary:** diff --git a/generated_pages/actortypes/A012.md b/generated_pages/actortypes/A012.md index 607b9a3..2fda5b6 100644 --- a/generated_pages/actortypes/A012.md +++ b/generated_pages/actortypes/A012.md @@ -4,8 +4,7 @@ * **Sector:** S006 -* **Viewpoint:** FW01 -FW02 +* **Viewpoint:** FW01 FW02 | Counters | Response types | diff --git a/generated_pages/actortypes/A013.md b/generated_pages/actortypes/A013.md index 3e919a1..d80c45a 100644 --- a/generated_pages/actortypes/A013.md +++ b/generated_pages/actortypes/A013.md @@ -1,11 +1,10 @@ -# Actor A013: content creator +# Actor A013: content creator * **Summary:** * **Sector:** S006 -* **Viewpoint:** FW01 -FW02 +* **Viewpoint:** FW01 FW02 | Counters | Response types | diff --git a/generated_pages/actortypes/A016.md b/generated_pages/actortypes/A016.md index 96a74f1..bfac5e7 100644 --- a/generated_pages/actortypes/A016.md +++ b/generated_pages/actortypes/A016.md @@ -4,8 +4,7 @@ * **Sector:** S006 -* **Viewpoint:** FW01 -FW02 +* **Viewpoint:** FW01 FW02 | Counters | Response types | diff --git a/generated_pages/actortypes/A018.md b/generated_pages/actortypes/A018.md index 9bda91f..7c5ce57 100644 --- a/generated_pages/actortypes/A018.md +++ b/generated_pages/actortypes/A018.md @@ -1,17 +1,16 @@ -# Actor A018: government +# Actor A018: government * **Summary:** Government agencies * **Sector:** S003 -* **Viewpoint:** FW01 -FW02 +* **Viewpoint:** FW01 FW02 | Counters | Response types | | -------- | -------------- | | [C00012 Platform regulation](../../generated_pages/counters/C00012.md) | D02 | -| [C00156 Better tell your country or organization story](../../generated_pages/counters/C00156.md) | D03 | +| [C00156 Better tell your country or organisation story](../../generated_pages/counters/C00156.md) | D03 | | [C00203 Stop offering press credentials to propaganda outlets](../../generated_pages/counters/C00203.md) | D03 | | [C00205 strong dialogue between the federal government and private sector to encourage better reporting](../../generated_pages/counters/C00205.md) | D03 | | [C00207 Run a competing disinformation campaign - not recommended](../../generated_pages/counters/C00207.md) | D07 | diff --git 
a/generated_pages/actortypes/A019.md b/generated_pages/actortypes/A019.md index 71f59c4..6ce7431 100644 --- a/generated_pages/actortypes/A019.md +++ b/generated_pages/actortypes/A019.md @@ -1,4 +1,4 @@ -# Actor A019: military +# Actor A019: military * **Summary:** @@ -9,7 +9,7 @@ | Counters | Response types | | -------- | -------------- | -| [C00156 Better tell your country or organization story](../../generated_pages/counters/C00156.md) | D03 | +| [C00156 Better tell your country or organisation story](../../generated_pages/counters/C00156.md) | D03 | DO NOT EDIT ABOVE THIS LINE - PLEASE ADD NOTES BELOW \ No newline at end of file diff --git a/generated_pages/actortypes/A021.md b/generated_pages/actortypes/A021.md index 3b263a4..ade1a95 100644 --- a/generated_pages/actortypes/A021.md +++ b/generated_pages/actortypes/A021.md @@ -4,8 +4,7 @@ * **Sector:** S010 -* **Viewpoint:** FW01 -FW02 +* **Viewpoint:** FW01 FW02 | Counters | Response types | diff --git a/generated_pages/actortypes/A029.md b/generated_pages/actortypes/A029.md index 037ffe3..9d318e5 100644 --- a/generated_pages/actortypes/A029.md +++ b/generated_pages/actortypes/A029.md @@ -1,4 +1,4 @@ -# Actor A029: server admininistrator +# Actor A029: server admininistrator * **Summary:** diff --git a/generated_pages/actortypes/A030.md b/generated_pages/actortypes/A030.md index 317b17c..c7dc29a 100644 --- a/generated_pages/actortypes/A030.md +++ b/generated_pages/actortypes/A030.md @@ -1,4 +1,4 @@ -# Actor A030: platforms +# Actor A030: platforms * **Summary:** diff --git a/generated_pages/actortypes/A031.md b/generated_pages/actortypes/A031.md index 509e952..a23f3e5 100644 --- a/generated_pages/actortypes/A031.md +++ b/generated_pages/actortypes/A031.md @@ -1,6 +1,6 @@ # Actor A031: social media platform adminstrator -* **Summary:** Person with the authority to make changes to algorithms, take down content etc. +* **Summary:** Person with the authority to make changes to algorithms, take down content etc. * **Sector:** S007 diff --git a/generated_pages/actortypes/A032.md b/generated_pages/actortypes/A032.md index 283e460..cdee9a3 100644 --- a/generated_pages/actortypes/A032.md +++ b/generated_pages/actortypes/A032.md @@ -1,4 +1,4 @@ -# Actor A032: social media platform outreach +# Actor A032: social media platform outreach * **Summary:** diff --git a/generated_pages/actortypes_index.md b/generated_pages/actortypes_index.md index 5f3f4bb..48e004f 100644 --- a/generated_pages/actortypes_index.md +++ b/generated_pages/actortypes_index.md @@ -9,7 +9,7 @@ A001 -data scientist +data scientist Person who can wrangle data, implement machine learning algorithms etc S001, S002, S003, S004, S005, S006, S007, S008, S009, S010 @@ -21,7 +21,7 @@ A003 -trusted authority +trusted authority Influencer S001, S002, S003, S004, S005, S006, S007, S008, S009, S010 @@ -63,13 +63,13 @@ A010 -religious organisation +religious organisation S002 A011 -school +school S002 @@ -81,7 +81,7 @@ A013 -content creator +content creator S006 @@ -111,13 +111,13 @@ A018 -government +government Government agencies S003 A019 -military +military S003 @@ -177,25 +177,25 @@ A029 -server admininistrator +server admininistrator S008 A030 -platforms +platforms S007 A031 social media platform adminstrator -Person with the authority to make changes to algorithms, take down content etc. +Person with the authority to make changes to algorithms, take down content etc. 
S007 A032 -social media platform outreach +social media platform outreach S007 diff --git a/generated_pages/counters/C00006.md b/generated_pages/counters/C00006.md index cb9aca9..690690a 100644 --- a/generated_pages/counters/C00006.md +++ b/generated_pages/counters/C00006.md @@ -1,10 +1,10 @@ # Counter C00006: Charge for social media -* **Summary**: Include a paid-for privacy option, e.g. pay Facebook for an option of them not collecting your personal information. There are examples of this not working, e.g. most people don’t use proton mail etc. +* **Summary**: Include a paid-for privacy option, e.g. pay Facebook for an option of them not collecting your personal information. There are examples of this not working, e.g. most people don’t use proton mail etc. * **Playbooks**: -* **Metatechnique**: M004 - friction +* **Metatechnique**: M004 - Friction * **Resources needed:** @@ -25,9 +25,6 @@ | Counters these Techniques | | ------------------------- | | [T0007 Create Inauthentic Social Media Pages and Groups](../../generated_pages/techniques/T0007.md) | -| [T0015 Create Hashtags and Search Artifacts](../../generated_pages/techniques/T0015.md) | -| [T0018 Purchase Targeted Advertisements](../../generated_pages/techniques/T0018.md) | -| [T0043 Chat Apps](../../generated_pages/techniques/T0043.md) | diff --git a/generated_pages/counters/C00008.md b/generated_pages/counters/C00008.md index facaf2d..8c42c8b 100644 --- a/generated_pages/counters/C00008.md +++ b/generated_pages/counters/C00008.md @@ -1,10 +1,10 @@ # Counter C00008: Create shared fact-checking database -* **Summary**: Share fact-checking resources - tips, responses, countermessages, across respose groups. +* **Summary**: Share fact-checking resources - tips, responses, countermessages, across respose groups. * **Playbooks**: -* **Metatechnique**: M006 - scoring +* **Metatechnique**: M006 - Scoring * **Resources needed:** @@ -24,13 +24,6 @@ | Counters these Techniques | | ------------------------- | -| [T0002 Facilitate State Propaganda](../../generated_pages/techniques/T0002.md) | -| [T0003 Leverage Existing Narratives](../../generated_pages/techniques/T0003.md) | -| [T0009 Create Fake Experts](../../generated_pages/techniques/T0009.md) | -| [T0013 Create Inauthentic Websites](../../generated_pages/techniques/T0013.md) | -| [T0014 Prepare Fundraising Campaigns](../../generated_pages/techniques/T0014.md) | -| [T0044 Seed Distortions](../../generated_pages/techniques/T0044.md) | -| [T0045 Use Fake Experts](../../generated_pages/techniques/T0045.md) | diff --git a/generated_pages/counters/C00009.md b/generated_pages/counters/C00009.md index 3dcadbc..ac7b669 100644 --- a/generated_pages/counters/C00009.md +++ b/generated_pages/counters/C00009.md @@ -1,10 +1,10 @@ # Counter C00009: Educate high profile influencers on best practices -* **Summary**: Find online influencers. Provide training in the mechanisms of disinformation, how to spot campaigns, and/or how to contribute to responses by countermessaging, boosting information sites etc. +* **Summary**: Find online influencers. Provide training in the mechanisms of disinformation, how to spot campaigns, and/or how to contribute to responses by countermessaging, boosting information sites etc. 
* **Playbooks**: -* **Metatechnique**: M001 - resilience +* **Metatechnique**: M001 - Resilience * **Resources needed:** @@ -26,9 +26,6 @@ | Counters these Techniques | | ------------------------- | | [T0010 Cultivate Ignorant Agents](../../generated_pages/techniques/T0010.md) | -| [T0044 Seed Distortions](../../generated_pages/techniques/T0044.md) | -| [T0042 Seed Kernel of Truth](../../generated_pages/techniques/T0042.md) | -| [T0048 Harass](../../generated_pages/techniques/T0048.md) | diff --git a/generated_pages/counters/C00010.md b/generated_pages/counters/C00010.md index a55d317..4dc800f 100644 --- a/generated_pages/counters/C00010.md +++ b/generated_pages/counters/C00010.md @@ -1,10 +1,10 @@ # Counter C00010: Enhanced privacy regulation for social media -* **Summary**: Implement stronger privacy standards, to reduce the ability to microtarget community members. +* **Summary**: Implement stronger privacy standards, to reduce the ability to microtarget community members. * **Playbooks**: -* **Metatechnique**: M004 - friction +* **Metatechnique**: M004 - Friction * **Resources needed:** @@ -24,7 +24,6 @@ | Counters these Techniques | | ------------------------- | -| [T0018 Purchase Targeted Advertisements](../../generated_pages/techniques/T0018.md) | diff --git a/generated_pages/counters/C00011.md b/generated_pages/counters/C00011.md index 6b9fca8..389d296 100644 --- a/generated_pages/counters/C00011.md +++ b/generated_pages/counters/C00011.md @@ -1,10 +1,10 @@ # Counter C00011: Media literacy. Games to identify fake news -* **Summary**: Create and use games to show people the mechanics of disinformation, and how to counter them. +* **Summary**: Create and use games to show people the mechanics of disinformation, and how to counter them. * **Playbooks**: -* **Metatechnique**: M001 - resilience +* **Metatechnique**: M001 - Resilience * **Resources needed:** @@ -26,12 +26,6 @@ | Counters these Techniques | | ------------------------- | -| [T0002 Facilitate State Propaganda](../../generated_pages/techniques/T0002.md) | -| [T0003 Leverage Existing Narratives](../../generated_pages/techniques/T0003.md) | -| [T0009 Create Fake Experts](../../generated_pages/techniques/T0009.md) | -| [T0013 Create Inauthentic Websites](../../generated_pages/techniques/T0013.md) | -| [T0044 Seed Distortions](../../generated_pages/techniques/T0044.md) | -| [T0045 Use Fake Experts](../../generated_pages/techniques/T0045.md) | diff --git a/generated_pages/counters/C00012.md b/generated_pages/counters/C00012.md index 6412ade..aed4904 100644 --- a/generated_pages/counters/C00012.md +++ b/generated_pages/counters/C00012.md @@ -1,10 +1,10 @@ # Counter C00012: Platform regulation -* **Summary**: Empower existing regulators to govern social media. Also covers Destroy. Includes: Include the role of social media in the regulatory framework for media. The U.S. approach will need to be carefully crafted to protect First Amendment principles, create needed transparency, ensure liability, and impose costs for noncompliance. Includes Create policy that makes social media police disinformation. Includes: Use fraud legislation to clean up social media +* **Summary**: Empower existing regulators to govern social media. Also covers Destroy. Includes: Include the role of social media in the regulatory framework for media. The U.S. approach will need to be carefully crafted to protect First Amendment principles, create needed transparency, ensure liability, and impose costs for noncompliance. 
Includes Create policy that makes social media police disinformation. Includes: Use fraud legislation to clean up social media * **Playbooks**: -* **Metatechnique**: M007 - metatechnique +* **Metatechnique**: M007 - Metatechnique * **Resources needed:** @@ -13,7 +13,7 @@ | Actor types | Sectors | | ----------- | ------- | -| [A018 government ](../../generated_pages/actortypes/A018.md) | S003 | +| [A018 government](../../generated_pages/actortypes/A018.md) | S003 | | [A020 policy maker](../../generated_pages/actortypes/A020.md) | S003 | | [A033 social media platform owner](../../generated_pages/actortypes/A033.md) | S007 | @@ -26,25 +26,6 @@ | Counters these Techniques | | ------------------------- | -| [T0002 Facilitate State Propaganda](../../generated_pages/techniques/T0002.md) | -| [T0049 Flooding the Information Space](../../generated_pages/techniques/T0049.md) | -| [T0048 Harass](../../generated_pages/techniques/T0048.md) | -| [T0047 Censor Social Media as a Political Force](../../generated_pages/techniques/T0047.md) | -| [T0046 Use Search Engine Optimization](../../generated_pages/techniques/T0046.md) | -| [T0045 Use Fake Experts](../../generated_pages/techniques/T0045.md) | -| [T0043 Chat Apps](../../generated_pages/techniques/T0043.md) | -| [T0022 Leverage Conspiracy Theory Narratives](../../generated_pages/techniques/T0022.md) | -| [T0057 Organize Events](../../generated_pages/techniques/T0057.md) | -| [T0018 Purchase Targeted Advertisements](../../generated_pages/techniques/T0018.md) | -| [T0016 Create Clickbait](../../generated_pages/techniques/T0016.md) | -| [T0015 Create Hashtags and Search Artifacts](../../generated_pages/techniques/T0015.md) | -| [T0014 Prepare Fundraising Campaigns](../../generated_pages/techniques/T0014.md) | -| [T0013 Create Inauthentic Websites](../../generated_pages/techniques/T0013.md) | -| [T0009 Create Fake Experts](../../generated_pages/techniques/T0009.md) | -| [T0007 Create Inauthentic Social Media Pages and Groups](../../generated_pages/techniques/T0007.md) | -| [T0003 Leverage Existing Narratives](../../generated_pages/techniques/T0003.md) | -| [T0017 Conduct Fundraising](../../generated_pages/techniques/T0017.md) | -| [T0061 Sell Merchandise](../../generated_pages/techniques/T0061.md) | diff --git a/generated_pages/counters/C00013.md b/generated_pages/counters/C00013.md index 1a215d9..7cd915f 100644 --- a/generated_pages/counters/C00013.md +++ b/generated_pages/counters/C00013.md @@ -1,10 +1,10 @@ # Counter C00013: Rating framework for news -* **Summary**: This is "strategic innoculation", raising the standards of what people expect in terms of evidence when consuming news. Example: journalistic ethics, or journalistic licensing body. Include full transcripts, link source, add items. +* **Summary**: This is "strategic innoculation", raising the standards of what people expect in terms of evidence when consuming news. Example: journalistic ethics, or journalistic licencing body. Include full transcripts, link source, add items. 
* **Playbooks**: -* **Metatechnique**: M006 - scoring +* **Metatechnique**: M006 - Scoring * **Resources needed:** @@ -23,8 +23,6 @@ | Counters these Techniques | | ------------------------- | -| [T0002 Facilitate State Propaganda](../../generated_pages/techniques/T0002.md) | -| [T0003 Leverage Existing Narratives](../../generated_pages/techniques/T0003.md) | diff --git a/generated_pages/counters/C00014.md b/generated_pages/counters/C00014.md index b7b719d..ad1ddd8 100644 --- a/generated_pages/counters/C00014.md +++ b/generated_pages/counters/C00014.md @@ -1,10 +1,10 @@ # Counter C00014: Real-time updates to fact-checking database -* **Summary**: Update fact-checking databases and resources in real time. Especially import for time-limited events like natural disasters. +* **Summary**: Update fact-checking databases and resources in real time. Especially import for time-limited events like natural disasters. * **Playbooks**: -* **Metatechnique**: M006 - scoring +* **Metatechnique**: M006 - Scoring * **Resources needed:** @@ -24,13 +24,6 @@ | Counters these Techniques | | ------------------------- | -| [T0002 Facilitate State Propaganda](../../generated_pages/techniques/T0002.md) | -| [T0003 Leverage Existing Narratives](../../generated_pages/techniques/T0003.md) | -| [T0009 Create Fake Experts](../../generated_pages/techniques/T0009.md) | -| [T0013 Create Inauthentic Websites](../../generated_pages/techniques/T0013.md) | -| [T0014 Prepare Fundraising Campaigns](../../generated_pages/techniques/T0014.md) | -| [T0044 Seed Distortions](../../generated_pages/techniques/T0044.md) | -| [T0045 Use Fake Experts](../../generated_pages/techniques/T0045.md) | diff --git a/generated_pages/counters/C00016.md b/generated_pages/counters/C00016.md index 806db70..28ebf15 100644 --- a/generated_pages/counters/C00016.md +++ b/generated_pages/counters/C00016.md @@ -1,10 +1,10 @@ # Counter C00016: Censorship -* **Summary**: Alter and/or block the publication/dissemination of information controlled by disinformation creators. Not recommended. +* **Summary**: Alter and/or block the publication/dissemination of information controlled by disinformation creators. Not recommended. 
* **Playbooks**: -* **Metatechnique**: M005 - removal +* **Metatechnique**: M005 - Removal * **Resources needed:** @@ -24,22 +24,6 @@ | Counters these Techniques | | ------------------------- | -| [T0002 Facilitate State Propaganda](../../generated_pages/techniques/T0002.md) | -| [T0003 Leverage Existing Narratives](../../generated_pages/techniques/T0003.md) | -| [T0009 Create Fake Experts](../../generated_pages/techniques/T0009.md) | -| [T0007 Create Inauthentic Social Media Pages and Groups](../../generated_pages/techniques/T0007.md) | -| [T0013 Create Inauthentic Websites](../../generated_pages/techniques/T0013.md) | -| [T0014 Prepare Fundraising Campaigns](../../generated_pages/techniques/T0014.md) | -| [T0015 Create Hashtags and Search Artifacts](../../generated_pages/techniques/T0015.md) | -| [T0016 Create Clickbait](../../generated_pages/techniques/T0016.md) | -| [T0017 Conduct Fundraising](../../generated_pages/techniques/T0017.md) | -| [T0018 Purchase Targeted Advertisements](../../generated_pages/techniques/T0018.md) | -| [T0022 Leverage Conspiracy Theory Narratives](../../generated_pages/techniques/T0022.md) | -| [T0044 Seed Distortions](../../generated_pages/techniques/T0044.md) | -| [T0045 Use Fake Experts](../../generated_pages/techniques/T0045.md) | -| [T0043 Chat Apps](../../generated_pages/techniques/T0043.md) | -| [T0049 Flooding the Information Space](../../generated_pages/techniques/T0049.md) | -| [T0057 Organize Events](../../generated_pages/techniques/T0057.md) | diff --git a/generated_pages/counters/C00017.md b/generated_pages/counters/C00017.md index 05ce1c1..6865c8c 100644 --- a/generated_pages/counters/C00017.md +++ b/generated_pages/counters/C00017.md @@ -1,10 +1,10 @@ # Counter C00017: Repair broken social connections -* **Summary**: For example, use a media campaign to promote in-group to out-group in person communication / activities . Technique could be in terms of forcing a reality-check by talking to people instead of reading about bogeymen. +* **Summary**: For example, use a media campaign to promote in-group to out-group in person communication / activities . Technique could be in terms of forcing a reality-check by talking to people instead of reading about bogeymen. 
* **Playbooks**: -* **Metatechnique**: M010 - countermessaging +* **Metatechnique**: M010 - Countermessaging * **Resources needed:** @@ -24,10 +24,6 @@ | Counters these Techniques | | ------------------------- | -| [T0002 Facilitate State Propaganda](../../generated_pages/techniques/T0002.md) | -| [T0003 Leverage Existing Narratives](../../generated_pages/techniques/T0003.md) | -| [T0022 Leverage Conspiracy Theory Narratives](../../generated_pages/techniques/T0022.md) | -| [T0023 Distort Facts](../../generated_pages/techniques/T0023.md) | diff --git a/generated_pages/counters/C00019.md b/generated_pages/counters/C00019.md index 212c4de..6586a4c 100644 --- a/generated_pages/counters/C00019.md +++ b/generated_pages/counters/C00019.md @@ -4,7 +4,7 @@ * **Playbooks**: -* **Metatechnique**: M003 - daylight +* **Metatechnique**: M003 - Daylight * **Resources needed:** @@ -23,11 +23,6 @@ | Counters these Techniques | | ------------------------- | -| [T0002 Facilitate State Propaganda](../../generated_pages/techniques/T0002.md) | -| [T0003 Leverage Existing Narratives](../../generated_pages/techniques/T0003.md) | -| [T0022 Leverage Conspiracy Theory Narratives](../../generated_pages/techniques/T0022.md) | -| [T0023 Distort Facts](../../generated_pages/techniques/T0023.md) | -| [T0044 Seed Distortions](../../generated_pages/techniques/T0044.md) | diff --git a/generated_pages/counters/C00021.md b/generated_pages/counters/C00021.md index 20feff1..befa888 100644 --- a/generated_pages/counters/C00021.md +++ b/generated_pages/counters/C00021.md @@ -4,7 +4,7 @@ * **Playbooks**: -* **Metatechnique**: M001 - resilience +* **Metatechnique**: M001 - Resilience * **Resources needed:** @@ -23,11 +23,6 @@ | Counters these Techniques | | ------------------------- | -| [T0002 Facilitate State Propaganda](../../generated_pages/techniques/T0002.md) | -| [T0003 Leverage Existing Narratives](../../generated_pages/techniques/T0003.md) | -| [T0022 Leverage Conspiracy Theory Narratives](../../generated_pages/techniques/T0022.md) | -| [T0023 Distort Facts](../../generated_pages/techniques/T0023.md) | -| [T0044 Seed Distortions](../../generated_pages/techniques/T0044.md) | diff --git a/generated_pages/counters/C00022.md b/generated_pages/counters/C00022.md index 9ea0fc2..05b7d5a 100644 --- a/generated_pages/counters/C00022.md +++ b/generated_pages/counters/C00022.md @@ -4,7 +4,7 @@ * **Playbooks**: -* **Metatechnique**: M001 - resilience +* **Metatechnique**: M001 - Resilience * **Resources needed:** @@ -23,11 +23,6 @@ | Counters these Techniques | | ------------------------- | -| [T0002 Facilitate State Propaganda](../../generated_pages/techniques/T0002.md) | -| [T0003 Leverage Existing Narratives](../../generated_pages/techniques/T0003.md) | -| [T0022 Leverage Conspiracy Theory Narratives](../../generated_pages/techniques/T0022.md) | -| [T0023 Distort Facts](../../generated_pages/techniques/T0023.md) | -| [T0044 Seed Distortions](../../generated_pages/techniques/T0044.md) | diff --git a/generated_pages/counters/C00024.md b/generated_pages/counters/C00024.md index 605da21..56dcd8c 100644 --- a/generated_pages/counters/C00024.md +++ b/generated_pages/counters/C00024.md @@ -1,10 +1,10 @@ # Counter C00024: Promote healthy narratives -* **Summary**: Includes promoting constructive narratives i.e. not polarising (e.g. pro-life, pro-choice, pro-USA). Includes promoting identity neutral narratives. +* **Summary**: Includes promoting constructive narratives i.e. not polarising (e.g. pro-life, pro-choice, pro-USA). 
Includes promoting identity neutral narratives. * **Playbooks**: -* **Metatechnique**: M001 - resilience +* **Metatechnique**: M001 - Resilience * **Resources needed:** @@ -23,11 +23,6 @@ | Counters these Techniques | | ------------------------- | -| [T0002 Facilitate State Propaganda](../../generated_pages/techniques/T0002.md) | -| [T0003 Leverage Existing Narratives](../../generated_pages/techniques/T0003.md) | -| [T0022 Leverage Conspiracy Theory Narratives](../../generated_pages/techniques/T0022.md) | -| [T0023 Distort Facts](../../generated_pages/techniques/T0023.md) | -| [T0044 Seed Distortions](../../generated_pages/techniques/T0044.md) | diff --git a/generated_pages/counters/C00026.md b/generated_pages/counters/C00026.md index 3658e2d..662448d 100644 --- a/generated_pages/counters/C00026.md +++ b/generated_pages/counters/C00026.md @@ -4,7 +4,7 @@ * **Playbooks**: -* **Metatechnique**: M010 - countermessaging +* **Metatechnique**: M010 - Countermessaging * **Resources needed:** @@ -23,7 +23,6 @@ | Counters these Techniques | | ------------------------- | -| [T0002 Facilitate State Propaganda](../../generated_pages/techniques/T0002.md) | diff --git a/generated_pages/counters/C00027.md b/generated_pages/counters/C00027.md index 3edef8e..ed70bf1 100644 --- a/generated_pages/counters/C00027.md +++ b/generated_pages/counters/C00027.md @@ -1,10 +1,10 @@ # Counter C00027: Create culture of civility -* **Summary**: This is passive. Includes promoting civility as an identity that people will defend. +* **Summary**: This is passive. Includes promoting civility as an identity that people will defend. * **Playbooks**: -* **Metatechnique**: M001 - resilience +* **Metatechnique**: M001 - Resilience * **Resources needed:** @@ -23,12 +23,6 @@ | Counters these Techniques | | ------------------------- | -| [T0002 Facilitate State Propaganda](../../generated_pages/techniques/T0002.md) | -| [T0003 Leverage Existing Narratives](../../generated_pages/techniques/T0003.md) | -| [T0022 Leverage Conspiracy Theory Narratives](../../generated_pages/techniques/T0022.md) | -| [T0023 Distort Facts](../../generated_pages/techniques/T0023.md) | -| [T0048 Harass](../../generated_pages/techniques/T0048.md) | -| [T0044 Seed Distortions](../../generated_pages/techniques/T0044.md) | diff --git a/generated_pages/counters/C00028.md b/generated_pages/counters/C00028.md index 8bcd1c2..08946fc 100644 --- a/generated_pages/counters/C00028.md +++ b/generated_pages/counters/C00028.md @@ -1,12 +1,10 @@ # Counter C00028: Make information provenance available -* **Summary**: Blockchain audit log and validation with collaborative decryption to post comments. Use blockchain technology to require collaborative validation before posts or comments are submitted. - -This could be used to adjust upvote weight via a trust factor of people and organisations you trust, or other criteria. +* **Summary**: Blockchain audit log and validation with collaborative decryption to post comments. Use blockchain technology to require collaborative validation before posts or comments are submitted. This could be used to adjust upvote weight via a trust factor of people and organisations you trust, or other criteria. 
* **Playbooks**: -* **Metatechnique**: M011 - verification +* **Metatechnique**: M011 - Verification * **Resources needed:** diff --git a/generated_pages/counters/C00029.md b/generated_pages/counters/C00029.md index 2882a73..c9da168 100644 --- a/generated_pages/counters/C00029.md +++ b/generated_pages/counters/C00029.md @@ -1,10 +1,10 @@ # Counter C00029: Create fake website to issue counter narrative and counter narrative through physical merchandise -* **Summary**: Create websites in disinformation voids - spaces where people are looking for known disinformation. +* **Summary**: Create websites in disinformation voids - spaces where people are looking for known disinformation. * **Playbooks**: -* **Metatechnique**: M002 - diversion +* **Metatechnique**: M002 - Diversion * **Resources needed:** @@ -24,8 +24,6 @@ | Counters these Techniques | | ------------------------- | | [T0002 Facilitate State Propaganda](../../generated_pages/techniques/T0002.md) | -| [T0003 Leverage Existing Narratives](../../generated_pages/techniques/T0003.md) | -| [T0022 Leverage Conspiracy Theory Narratives](../../generated_pages/techniques/T0022.md) | diff --git a/generated_pages/counters/C00030.md b/generated_pages/counters/C00030.md index 8748091..e3fa1b2 100644 --- a/generated_pages/counters/C00030.md +++ b/generated_pages/counters/C00030.md @@ -4,7 +4,7 @@ * **Playbooks**: -* **Metatechnique**: M002 - diversion +* **Metatechnique**: M002 - Diversion * **Resources needed:** @@ -24,8 +24,6 @@ | Counters these Techniques | | ------------------------- | | [T0002 Facilitate State Propaganda](../../generated_pages/techniques/T0002.md) | -| [T0003 Leverage Existing Narratives](../../generated_pages/techniques/T0003.md) | -| [T0022 Leverage Conspiracy Theory Narratives](../../generated_pages/techniques/T0022.md) | diff --git a/generated_pages/counters/C00031.md b/generated_pages/counters/C00031.md index 2bbc8c1..43c6d45 100644 --- a/generated_pages/counters/C00031.md +++ b/generated_pages/counters/C00031.md @@ -4,7 +4,7 @@ * **Playbooks**: -* **Metatechnique**: M009 - dilution +* **Metatechnique**: M009 - Dilution * **Resources needed:** @@ -24,7 +24,6 @@ | Counters these Techniques | | ------------------------- | | [T0002 Facilitate State Propaganda](../../generated_pages/techniques/T0002.md) | -| [T0003 Leverage Existing Narratives](../../generated_pages/techniques/T0003.md) | diff --git a/generated_pages/counters/C00032.md b/generated_pages/counters/C00032.md index 99c05ab..1d025d0 100644 --- a/generated_pages/counters/C00032.md +++ b/generated_pages/counters/C00032.md @@ -4,7 +4,7 @@ * **Playbooks**: -* **Metatechnique**: M002 - diversion +* **Metatechnique**: M002 - Diversion * **Resources needed:** diff --git a/generated_pages/counters/C00034.md b/generated_pages/counters/C00034.md index 5303035..5486c55 100644 --- a/generated_pages/counters/C00034.md +++ b/generated_pages/counters/C00034.md @@ -4,7 +4,7 @@ * **Playbooks**: -* **Metatechnique**: M004 - friction +* **Metatechnique**: M004 - Friction * **Resources needed:** @@ -24,7 +24,6 @@ | Counters these Techniques | | ------------------------- | | [T0009 Create Fake Experts](../../generated_pages/techniques/T0009.md) | -| [T0007 Create Inauthentic Social Media Pages and Groups](../../generated_pages/techniques/T0007.md) | diff --git a/generated_pages/counters/C00036.md b/generated_pages/counters/C00036.md index 3916fa2..f061307 100644 --- a/generated_pages/counters/C00036.md +++ b/generated_pages/counters/C00036.md @@ -4,7 +4,7 @@ * **Playbooks**: -* 
**Metatechnique**: M013 - targeting +* **Metatechnique**: M013 - Targeting * **Resources needed:** @@ -23,10 +23,6 @@ | Counters these Techniques | | ------------------------- | -| [T0007 Create Inauthentic Social Media Pages and Groups](../../generated_pages/techniques/T0007.md) | -| [T0010 Cultivate Ignorant Agents](../../generated_pages/techniques/T0010.md) | -| [T0045 Use Fake Experts](../../generated_pages/techniques/T0045.md) | -| [T0057 Organize Events](../../generated_pages/techniques/T0057.md) | diff --git a/generated_pages/counters/C00040.md b/generated_pages/counters/C00040.md index 59c6979..22906a2 100644 --- a/generated_pages/counters/C00040.md +++ b/generated_pages/counters/C00040.md @@ -4,7 +4,7 @@ * **Playbooks**: -* **Metatechnique**: M011 - verification +* **Metatechnique**: M011 - Verification * **Resources needed:** @@ -24,7 +24,6 @@ | Counters these Techniques | | ------------------------- | | [T0007 Create Inauthentic Social Media Pages and Groups](../../generated_pages/techniques/T0007.md) | -| [T0009 Create Fake Experts](../../generated_pages/techniques/T0009.md) | diff --git a/generated_pages/counters/C00042.md b/generated_pages/counters/C00042.md index 49637ea..f5dc5ae 100644 --- a/generated_pages/counters/C00042.md +++ b/generated_pages/counters/C00042.md @@ -1,10 +1,10 @@ # Counter C00042: Address truth contained in narratives -* **Summary**: Focus on and boost truths in misinformation narratives, removing misinformation from them. +* **Summary**: Focus on and boost truths in misinformation narratives, removing misinformation from them. * **Playbooks**: -* **Metatechnique**: M010 - countermessaging +* **Metatechnique**: M010 - Countermessaging * **Resources needed:** @@ -24,11 +24,6 @@ | Counters these Techniques | | ------------------------- | | [T0004 Develop Competing Narratives](../../generated_pages/techniques/T0004.md) | -| [T0019 Generate Information Pollution](../../generated_pages/techniques/T0019.md) | -| [T0022 Leverage Conspiracy Theory Narratives](../../generated_pages/techniques/T0022.md) | -| [T0042 Seed Kernel of Truth](../../generated_pages/techniques/T0042.md) | -| [T0044 Seed Distortions](../../generated_pages/techniques/T0044.md) | -| [T0059 Play the Long Game](../../generated_pages/techniques/T0059.md) | diff --git a/generated_pages/counters/C00044.md b/generated_pages/counters/C00044.md index 439548c..4290b78 100644 --- a/generated_pages/counters/C00044.md +++ b/generated_pages/counters/C00044.md @@ -4,7 +4,7 @@ * **Playbooks**: -* **Metatechnique**: M004 - friction +* **Metatechnique**: M004 - Friction * **Resources needed:** R004 - platform algorithms @@ -25,7 +25,6 @@ | Counters these Techniques | | ------------------------- | | [T0029 Online Polls](../../generated_pages/techniques/T0029.md) | -| [T0049 Flooding the Information Space](../../generated_pages/techniques/T0049.md) | diff --git a/generated_pages/counters/C00046.md b/generated_pages/counters/C00046.md index 7f56f33..a25ee0d 100644 --- a/generated_pages/counters/C00046.md +++ b/generated_pages/counters/C00046.md @@ -4,7 +4,7 @@ * **Playbooks**: -* **Metatechnique**: M013 - targeting +* **Metatechnique**: M013 - Targeting * **Resources needed:** @@ -24,14 +24,6 @@ | Counters these Techniques | | ------------------------- | | [T0010 Cultivate Ignorant Agents](../../generated_pages/techniques/T0010.md) | -| [T0044 Seed Distortions](../../generated_pages/techniques/T0044.md) | -| [T0022 Leverage Conspiracy Theory Narratives](../../generated_pages/techniques/T0022.md) | -| [T0023 
Distort Facts](../../generated_pages/techniques/T0023.md) | -| [T0045 Use Fake Experts](../../generated_pages/techniques/T0045.md) | -| [T0048 Harass](../../generated_pages/techniques/T0048.md) | -| [T0057 Organize Events](../../generated_pages/techniques/T0057.md) | -| [T0060 Continue to Amplify](../../generated_pages/techniques/T0060.md) | -| [T0061 Sell Merchandise](../../generated_pages/techniques/T0061.md) | diff --git a/generated_pages/counters/C00047.md b/generated_pages/counters/C00047.md index 638d42a..a21481f 100644 --- a/generated_pages/counters/C00047.md +++ b/generated_pages/counters/C00047.md @@ -1,10 +1,10 @@ # Counter C00047: Honeypot with coordinated inauthentics -* **Summary**: Flood disinformation spaces with obviously fake content, to dilute core misinformation narratives in them. +* **Summary**: Flood disinformation spaces with obviously fake content, to dilute core misinformation narratives in them. * **Playbooks**: -* **Metatechnique**: M008 - data pollution +* **Metatechnique**: M008 - Data Pollution * **Resources needed:** diff --git a/generated_pages/counters/C00048.md b/generated_pages/counters/C00048.md index 9b91119..90fd526 100644 --- a/generated_pages/counters/C00048.md +++ b/generated_pages/counters/C00048.md @@ -1,10 +1,10 @@ # Counter C00048: Name and Shame Influencers -* **Summary**: Think about the different levels: individual vs state-sponsored account. Includes “call them out” and “name and shame”. Identify social media accounts as sources of propaganda—“calling them out”— might be helpful to prevent the spread of their message to audiences that otherwise would consider them factual. Identify, monitor, and, if necessary, target externally-based nonattributed social media accounts. Impact of and Dealing with Trolls - "Chatham House has observed that trolls also sometimes function as decoys, as a way of “keeping the infantry busy” that “aims to wear down the other side” (Lough et al., 2014). Another type of troll involves “false accounts posing as authoritative information sources on social media”. +* **Summary**: Think about the different levels: individual vs state-sponsored account. Includes “call them out” and “name and shame”. Identify social media accounts as sources of propaganda—“calling them out”— might be helpful to prevent the spread of their message to audiences that otherwise would consider them factual. Identify, monitor, and, if necessary, target externally-based nonattributed social media accounts. Impact of and Dealing with Trolls - "Chatham House has observed that trolls also sometimes function as decoys, as a way of “keeping the infantry busy” that “aims to wear down the other side” (Lough et al., 2014). Another type of troll involves “false accounts posing as authoritative information sources on social media”. 
* **Playbooks**: -* **Metatechnique**: M003 - daylight +* **Metatechnique**: M003 - Daylight * **Resources needed:** @@ -24,11 +24,6 @@ | Counters these Techniques | | ------------------------- | | [T0010 Cultivate Ignorant Agents](../../generated_pages/techniques/T0010.md) | -| [T0045 Use Fake Experts](../../generated_pages/techniques/T0045.md) | -| [T0048 Harass](../../generated_pages/techniques/T0048.md) | -| [T0057 Organize Events](../../generated_pages/techniques/T0057.md) | -| [T0060 Continue to Amplify](../../generated_pages/techniques/T0060.md) | -| [T0061 Sell Merchandise](../../generated_pages/techniques/T0061.md) | diff --git a/generated_pages/counters/C00051.md b/generated_pages/counters/C00051.md index ed069ec..f7fab44 100644 --- a/generated_pages/counters/C00051.md +++ b/generated_pages/counters/C00051.md @@ -1,10 +1,10 @@ # Counter C00051: Counter social engineering training -* **Summary**: Includes anti-elicitation training, phishing prevention education. +* **Summary**: Includes anti-elicitation training, phishing prevention education. * **Playbooks**: -* **Metatechnique**: M001 - resilience +* **Metatechnique**: M001 - Resilience * **Resources needed:** diff --git a/generated_pages/counters/C00052.md b/generated_pages/counters/C00052.md index 42b379c..12d1636 100644 --- a/generated_pages/counters/C00052.md +++ b/generated_pages/counters/C00052.md @@ -4,7 +4,7 @@ * **Playbooks**: -* **Metatechnique**: M013 - targeting +* **Metatechnique**: M013 - Targeting * **Resources needed:** diff --git a/generated_pages/counters/C00053.md b/generated_pages/counters/C00053.md index 249b9a5..b2bdef6 100644 --- a/generated_pages/counters/C00053.md +++ b/generated_pages/counters/C00053.md @@ -1,10 +1,10 @@ # Counter C00053: Delete old accounts / Remove unused social media accounts -* **Summary**: remove or remove access to (e.g. stop the ability to update) old social media accounts, to reduce the pool of accounts available for takeover, botnets etc. +* **Summary**: remove or remove access to (e.g. stop the ability to update) old social media accounts, to reduce the pool of accounts available for takeover, botnets etc. * **Playbooks**: -* **Metatechnique**: M012 - cleaning +* **Metatechnique**: M012 - Cleaning * **Resources needed:** diff --git a/generated_pages/counters/C00056.md b/generated_pages/counters/C00056.md index 624a4b1..8a27c63 100644 --- a/generated_pages/counters/C00056.md +++ b/generated_pages/counters/C00056.md @@ -1,10 +1,10 @@ # Counter C00056: Encourage people to leave social media -* **Summary**: Encourage people to leave spcial media. We don't expect this to work +* **Summary**: Encourage people to leave spcial media. We don't expect this to work * **Playbooks**: -* **Metatechnique**: M004 - friction +* **Metatechnique**: M004 - Friction * **Resources needed:** diff --git a/generated_pages/counters/C00058.md b/generated_pages/counters/C00058.md index dc707fd..8a27998 100644 --- a/generated_pages/counters/C00058.md +++ b/generated_pages/counters/C00058.md @@ -1,10 +1,10 @@ # Counter C00058: Report crowdfunder as violator -* **Summary**: counters crowdfunding. Includes ‘Expose online funding as fake”. +* **Summary**: counters crowdfunding. Includes ‘Expose online funding as fake”. 
* **Playbooks**: -* **Metatechnique**: M005 - removal +* **Metatechnique**: M005 - Removal * **Resources needed:** @@ -24,7 +24,6 @@ | Counters these Techniques | | ------------------------- | | [T0017 Conduct Fundraising](../../generated_pages/techniques/T0017.md) | -| [T0061 Sell Merchandise](../../generated_pages/techniques/T0061.md) | diff --git a/generated_pages/counters/C00059.md b/generated_pages/counters/C00059.md index 05dd633..6486d18 100644 --- a/generated_pages/counters/C00059.md +++ b/generated_pages/counters/C00059.md @@ -1,10 +1,10 @@ # Counter C00059: Verification of project before posting fund requests -* **Summary**: third-party verification of projects posting funding campaigns before those campaigns can be posted. +* **Summary**: third-party verification of projects posting funding campaigns before those campaigns can be posted. * **Playbooks**: -* **Metatechnique**: M011 - verification +* **Metatechnique**: M011 - Verification * **Resources needed:** diff --git a/generated_pages/counters/C00060.md b/generated_pages/counters/C00060.md index 7cd1ca0..161900b 100644 --- a/generated_pages/counters/C00060.md +++ b/generated_pages/counters/C00060.md @@ -1,10 +1,10 @@ # Counter C00060: Legal action against for-profit engagement factories -* **Summary**: Take legal action against for-profit "factories" creating misinformation. +* **Summary**: Take legal action against for-profit "factories" creating misinformation. * **Playbooks**: -* **Metatechnique**: M013 - targeting +* **Metatechnique**: M013 - Targeting * **Resources needed:** @@ -24,7 +24,6 @@ | Counters these Techniques | | ------------------------- | -| [T0047 Censor Social Media as a Political Force](../../generated_pages/techniques/T0047.md) | diff --git a/generated_pages/counters/C00062.md b/generated_pages/counters/C00062.md index e297510..072f032 100644 --- a/generated_pages/counters/C00062.md +++ b/generated_pages/counters/C00062.md @@ -4,7 +4,7 @@ * **Playbooks**: -* **Metatechnique**: M010 - countermessaging +* **Metatechnique**: M010 - Countermessaging * **Resources needed:** diff --git a/generated_pages/counters/C00065.md b/generated_pages/counters/C00065.md index cd475cf..adc9527 100644 --- a/generated_pages/counters/C00065.md +++ b/generated_pages/counters/C00065.md @@ -4,7 +4,7 @@ * **Playbooks**: -* **Metatechnique**: M005 - removal +* **Metatechnique**: M005 - Removal * **Resources needed:** diff --git a/generated_pages/counters/C00066.md b/generated_pages/counters/C00066.md index ee895fe..4c5cf0f 100644 --- a/generated_pages/counters/C00066.md +++ b/generated_pages/counters/C00066.md @@ -1,10 +1,10 @@ # Counter C00066: Co-opt a hashtag and drown it out (hijack it back) -* **Summary**: Flood a disinformation-related hashtag with other content. +* **Summary**: Flood a disinformation-related hashtag with other content. 
* **Playbooks**: -* **Metatechnique**: M009 - dilution +* **Metatechnique**: M009 - Dilution * **Resources needed:** @@ -23,7 +23,7 @@ | Counters these Techniques | | ------------------------- | -| [T0015 Create Hashtags and Search Artifacts](../../generated_pages/techniques/T0015.md) | +| [T0015 Create Hashtags and Search Artefacts](../../generated_pages/techniques/T0015.md) | diff --git a/generated_pages/counters/C00067.md b/generated_pages/counters/C00067.md index 163c043..50bb312 100644 --- a/generated_pages/counters/C00067.md +++ b/generated_pages/counters/C00067.md @@ -1,10 +1,10 @@ # Counter C00067: Denigrate the recipient/ project (of online funding) -* **Summary**: Reduce the credibility of groups behind misinformation-linked funding campaigns. +* **Summary**: Reduce the credibility of groups behind misinformation-linked funding campaigns. * **Playbooks**: -* **Metatechnique**: M013 - targeting +* **Metatechnique**: M013 - Targeting * **Resources needed:** @@ -24,7 +24,6 @@ | Counters these Techniques | | ------------------------- | | [T0017 Conduct Fundraising](../../generated_pages/techniques/T0017.md) | -| [T0061 Sell Merchandise](../../generated_pages/techniques/T0061.md) | diff --git a/generated_pages/counters/C00070.md b/generated_pages/counters/C00070.md index dc600d5..d3db622 100644 --- a/generated_pages/counters/C00070.md +++ b/generated_pages/counters/C00070.md @@ -1,18 +1,10 @@ # Counter C00070: Block access to disinformation resources -* **Summary**: Resources = accounts, channels etc. Block access to platform. DDOS an attacker. - -TA02*: DDOS at the critical time, to deny an adversary's time-bound objective. - -T0008: A quick response to a proto-viral story will affect it's ability to spread and raise questions about their legitimacy. - -Hashtag: Against the platform, by drowning the hashtag. - -T0046 - Search Engine Optimization: Sub-optimal website performance affect its search engine rank, which I interpret as "blocking access to a platform". +* **Summary**: Resources = accounts, channels etc. Block access to platform. DDOS an attacker. TA02*: DDOS at the critical time, to deny an adversary's time-bound objective. T0008: A quick response to a proto-viral story will affect it's ability to spread and raise questions about their legitimacy. Hashtag: Against the platform, by drowning the hashtag. T0046 - Search Engine Optimisation: Sub-optimal website performance affect its search engine rank, which I interpret as "blocking access to a platform". 
* **Playbooks**: -* **Metatechnique**: M005 - removal +* **Metatechnique**: M005 - Removal * **Resources needed:** @@ -31,11 +23,6 @@ T0046 - Search Engine Optimization: Sub-optimal website performance affect its s | Counters these Techniques | | ------------------------- | -| [T0014 Prepare Fundraising Campaigns](../../generated_pages/techniques/T0014.md) | -| [T0015 Create Hashtags and Search Artifacts](../../generated_pages/techniques/T0015.md) | -| [T0017 Conduct Fundraising](../../generated_pages/techniques/T0017.md) | -| [T0046 Use Search Engine Optimization](../../generated_pages/techniques/T0046.md) | -| [T0057 Organize Events](../../generated_pages/techniques/T0057.md) | diff --git a/generated_pages/counters/C00071.md b/generated_pages/counters/C00071.md index 85fcc02..4a05f34 100644 --- a/generated_pages/counters/C00071.md +++ b/generated_pages/counters/C00071.md @@ -1,10 +1,10 @@ # Counter C00071: Block source of pollution -* **Summary**: Block websites, accounts, groups etc connected to misinformation and other information pollution. +* **Summary**: Block websites, accounts, groups etc connected to misinformation and other information pollution. * **Playbooks**: -* **Metatechnique**: M005 - removal +* **Metatechnique**: M005 - Removal * **Resources needed:** diff --git a/generated_pages/counters/C00072.md b/generated_pages/counters/C00072.md index 91cc9fe..03c5b07 100644 --- a/generated_pages/counters/C00072.md +++ b/generated_pages/counters/C00072.md @@ -1,10 +1,10 @@ # Counter C00072: Remove non-relevant content from special interest groups - not recommended -* **Summary**: Check special-interest groups (e.g. medical, knitting) for unrelated and misinformation-linked content, and remove it. +* **Summary**: Check special-interest groups (e.g. medical, knitting) for unrelated and misinformation-linked content, and remove it. * **Playbooks**: -* **Metatechnique**: M005 - removal +* **Metatechnique**: M005 - Removal * **Resources needed:** @@ -24,11 +24,6 @@ | Counters these Techniques | | ------------------------- | | [T0019 Generate Information Pollution](../../generated_pages/techniques/T0019.md) | -| [T0010 Cultivate Ignorant Agents](../../generated_pages/techniques/T0010.md) | -| [T0044 Seed Distortions](../../generated_pages/techniques/T0044.md) | -| [T0022 Leverage Conspiracy Theory Narratives](../../generated_pages/techniques/T0022.md) | -| [T0023 Distort Facts](../../generated_pages/techniques/T0023.md) | -| [T0049 Flooding the Information Space](../../generated_pages/techniques/T0049.md) | diff --git a/generated_pages/counters/C00073.md b/generated_pages/counters/C00073.md index 5911ab8..53f6c82 100644 --- a/generated_pages/counters/C00073.md +++ b/generated_pages/counters/C00073.md @@ -1,10 +1,10 @@ # Counter C00073: Inoculate populations through media literacy training -* **Summary**: Use training to build the resilience of at-risk populations. Educate on how to handle info pollution. Push out targeted education on why it's pollution. Build cultural resistance to false content, e.g. cultural resistance to bullshit. Influence literacy training, to inoculate against “cult” recruiting. Media literacy training: leverage librarians / library for media literacy training. Inoculate at language. Strategic planning included as inoculating population has strategic value. Concepts of media literacy to a mass audience that authorities launch a public information campaign that teaches the program will take time to develop and establish impact, recommends curriculum-based training. 
Covers detect, deny, and degrade. +* **Summary**: Use training to build the resilience of at-risk populations. Educate on how to handle info pollution. Push out targeted education on why it's pollution. Build cultural resistance to false content, e.g. cultural resistance to bullshit. Influence literacy training, to inoculate against “cult” recruiting. Media literacy training: leverage librarians / library for media literacy training. Inoculate at language. Strategic planning included as inoculating population has strategic value. Concepts of media literacy to a mass audience that authorities launch a public information campaign that teaches the programme will take time to develop and establish impact, recommends curriculum-based training. Covers detect, deny, and degrade. * **Playbooks**: -* **Metatechnique**: M001 - resilience +* **Metatechnique**: M001 - Resilience * **Resources needed:** @@ -17,10 +17,10 @@ | [A006 educator](../../generated_pages/actortypes/A006.md) | S002 | | [A008 library](../../generated_pages/actortypes/A008.md) | S002 | | [A009 NGO](../../generated_pages/actortypes/A009.md) | S002 | -| [A010 religious organisation ](../../generated_pages/actortypes/A010.md) | S002 | +| [A010 religious organisation](../../generated_pages/actortypes/A010.md) | S002 | | [A017 coordinating body](../../generated_pages/actortypes/A017.md) | S003 | | [A021 media organisation](../../generated_pages/actortypes/A021.md) | S010 | -| [A032 social media platform outreach ](../../generated_pages/actortypes/A032.md) | S007 | +| [A032 social media platform outreach](../../generated_pages/actortypes/A032.md) | S007 | @@ -32,15 +32,6 @@ | Counters these Techniques | | ------------------------- | | [T0016 Create Clickbait](../../generated_pages/techniques/T0016.md) | -| [T0019 Generate Information Pollution](../../generated_pages/techniques/T0019.md) | -| [T0010 Cultivate Ignorant Agents](../../generated_pages/techniques/T0010.md) | -| [T0022 Leverage Conspiracy Theory Narratives](../../generated_pages/techniques/T0022.md) | -| [T0023 Distort Facts](../../generated_pages/techniques/T0023.md) | -| [T0040 Demand Insurmountable Proof](../../generated_pages/techniques/T0040.md) | -| [T0044 Seed Distortions](../../generated_pages/techniques/T0044.md) | -| [T0045 Use Fake Experts](../../generated_pages/techniques/T0045.md) | -| [T0048 Harass](../../generated_pages/techniques/T0048.md) | -| [T0060 Continue to Amplify](../../generated_pages/techniques/T0060.md) | diff --git a/generated_pages/counters/C00074.md b/generated_pages/counters/C00074.md index 8614dd3..50a408b 100644 --- a/generated_pages/counters/C00074.md +++ b/generated_pages/counters/C00074.md @@ -4,7 +4,7 @@ * **Playbooks**: -* **Metatechnique**: M012 - cleaning +* **Metatechnique**: M012 - Cleaning * **Resources needed:** @@ -26,12 +26,6 @@ | Counters these Techniques | | ------------------------- | | [T0019 Generate Information Pollution](../../generated_pages/techniques/T0019.md) | -| [T0022 Leverage Conspiracy Theory Narratives](../../generated_pages/techniques/T0022.md) | -| [T0043 Chat Apps](../../generated_pages/techniques/T0043.md) | -| [T0049 Flooding the Information Space](../../generated_pages/techniques/T0049.md) | -| [T0057 Organize Events](../../generated_pages/techniques/T0057.md) | -| [T0061 Sell Merchandise](../../generated_pages/techniques/T0061.md) | -| [T0060 Continue to Amplify](../../generated_pages/techniques/T0060.md) | diff --git a/generated_pages/counters/C00075.md b/generated_pages/counters/C00075.md index 
7f57891..1d4d7db 100644 --- a/generated_pages/counters/C00075.md +++ b/generated_pages/counters/C00075.md @@ -1,10 +1,10 @@ # Counter C00075: normalise language -* **Summary**: normalise the language around disinformation and misinformation; give people the words for artifact and effect types. +* **Summary**: normalise the language around disinformation and misinformation; give people the words for artefact and effect types. * **Playbooks**: -* **Metatechnique**: M010 - countermessaging +* **Metatechnique**: M010 - Countermessaging * **Resources needed:** diff --git a/generated_pages/counters/C00076.md b/generated_pages/counters/C00076.md index 7493d83..560e786 100644 --- a/generated_pages/counters/C00076.md +++ b/generated_pages/counters/C00076.md @@ -1,10 +1,10 @@ # Counter C00076: Prohibit images in political discourse channels -* **Summary**: Make political discussion channels text-only. +* **Summary**: Make political discussion channels text-only. * **Playbooks**: -* **Metatechnique**: M005 - removal +* **Metatechnique**: M005 - Removal * **Resources needed:** @@ -24,8 +24,6 @@ | Counters these Techniques | | ------------------------- | | [T0016 Create Clickbait](../../generated_pages/techniques/T0016.md) | -| [T0018 Purchase Targeted Advertisements](../../generated_pages/techniques/T0018.md) | -| [T0044 Seed Distortions](../../generated_pages/techniques/T0044.md) | diff --git a/generated_pages/counters/C00077.md b/generated_pages/counters/C00077.md index c3d36ff..8a26dc8 100644 --- a/generated_pages/counters/C00077.md +++ b/generated_pages/counters/C00077.md @@ -1,10 +1,10 @@ # Counter C00077: Active defence: run TA15 "develop people” - not recommended -* **Summary**: Develop networks of communities and influencers around counter-misinformation. Match them to misinformation creators +* **Summary**: Develop networks of communities and influencers around counter-misinformation. Match them to misinformation creators * **Playbooks**: -* **Metatechnique**: M013 - targeting +* **Metatechnique**: M013 - Targeting * **Resources needed:** diff --git a/generated_pages/counters/C00078.md b/generated_pages/counters/C00078.md index 889d077..75f3c4c 100644 --- a/generated_pages/counters/C00078.md +++ b/generated_pages/counters/C00078.md @@ -4,7 +4,7 @@ * **Playbooks**: -* **Metatechnique**: M002 - diversion +* **Metatechnique**: M002 - Diversion * **Resources needed:** @@ -23,9 +23,6 @@ | Counters these Techniques | | ------------------------- | -| [T0044 Seed Distortions](../../generated_pages/techniques/T0044.md) | -| [T0046 Use Search Engine Optimization](../../generated_pages/techniques/T0046.md) | -| [T0060 Continue to Amplify](../../generated_pages/techniques/T0060.md) | diff --git a/generated_pages/counters/C00080.md b/generated_pages/counters/C00080.md index cd3693b..93ba828 100644 --- a/generated_pages/counters/C00080.md +++ b/generated_pages/counters/C00080.md @@ -1,10 +1,10 @@ # Counter C00080: Create competing narrative -* **Summary**: Create counternarratives, or narratives that compete in the same spaces as misinformation narratives. Could also be degrade +* **Summary**: Create counternarratives, or narratives that compete in the same spaces as misinformation narratives. 
Could also be degrade * **Playbooks**: -* **Metatechnique**: M002 - diversion +* **Metatechnique**: M002 - Diversion * **Resources needed:** @@ -24,8 +24,6 @@ | Counters these Techniques | | ------------------------- | | [T0003 Leverage Existing Narratives](../../generated_pages/techniques/T0003.md) | -| [T0004 Develop Competing Narratives](../../generated_pages/techniques/T0004.md) | -| [T0022 Leverage Conspiracy Theory Narratives](../../generated_pages/techniques/T0022.md) | diff --git a/generated_pages/counters/C00081.md b/generated_pages/counters/C00081.md index 6c77b2f..c8e7ab2 100644 --- a/generated_pages/counters/C00081.md +++ b/generated_pages/counters/C00081.md @@ -4,7 +4,7 @@ * **Playbooks**: -* **Metatechnique**: M003 - daylight +* **Metatechnique**: M003 - Daylight * **Resources needed:** @@ -24,10 +24,6 @@ | Counters these Techniques | | ------------------------- | | [T0003 Leverage Existing Narratives](../../generated_pages/techniques/T0003.md) | -| [T0004 Develop Competing Narratives](../../generated_pages/techniques/T0004.md) | -| [T0022 Leverage Conspiracy Theory Narratives](../../generated_pages/techniques/T0022.md) | -| [T0023 Distort Facts](../../generated_pages/techniques/T0023.md) | -| [T0044 Seed Distortions](../../generated_pages/techniques/T0044.md) | diff --git a/generated_pages/counters/C00082.md b/generated_pages/counters/C00082.md index 36affe1..b197919 100644 --- a/generated_pages/counters/C00082.md +++ b/generated_pages/counters/C00082.md @@ -4,7 +4,7 @@ * **Playbooks**: -* **Metatechnique**: M010 - countermessaging +* **Metatechnique**: M010 - Countermessaging * **Resources needed:** @@ -24,11 +24,6 @@ | Counters these Techniques | | ------------------------- | | [T0002 Facilitate State Propaganda](../../generated_pages/techniques/T0002.md) | -| [T0003 Leverage Existing Narratives](../../generated_pages/techniques/T0003.md) | -| [T0004 Develop Competing Narratives](../../generated_pages/techniques/T0004.md) | -| [T0022 Leverage Conspiracy Theory Narratives](../../generated_pages/techniques/T0022.md) | -| [T0023 Distort Facts](../../generated_pages/techniques/T0023.md) | -| [T0044 Seed Distortions](../../generated_pages/techniques/T0044.md) | diff --git a/generated_pages/counters/C00084.md b/generated_pages/counters/C00084.md index b9f272b..c4d18c0 100644 --- a/generated_pages/counters/C00084.md +++ b/generated_pages/counters/C00084.md @@ -1,10 +1,10 @@ # Counter C00084: Modify disinformation narratives, and rebroadcast them -* **Summary**: Includes “poison pill recasting of message” and “steal their truths”. Many techniques involve promotion which could be manipulated. For example, online fundings or rallies could be advertised, through compromised or fake channels, as being associated with "far-up/down/left/right" actors. "Long Game" narratives could be subjected in a similar way with negative connotations. Can also replay technique T0003. +* **Summary**: Includes “poison pill recasting of message” and “steal their truths”. Many techniques involve promotion which could be manipulated. For example, online fundings or rallies could be advertised, through compromised or fake channels, as being associated with "far-up/down/left/right" actors. "Long Game" narratives could be subjected in a similar way with negative connotations. Can also replay technique T0003. 
* **Playbooks**: -* **Metatechnique**: M002 - diversion +* **Metatechnique**: M002 - Diversion * **Resources needed:** @@ -24,10 +24,6 @@ | Counters these Techniques | | ------------------------- | | [T0002 Facilitate State Propaganda](../../generated_pages/techniques/T0002.md) | -| [T0003 Leverage Existing Narratives](../../generated_pages/techniques/T0003.md) | -| [T0004 Develop Competing Narratives](../../generated_pages/techniques/T0004.md) | -| [T0022 Leverage Conspiracy Theory Narratives](../../generated_pages/techniques/T0022.md) | -| [T0059 Play the Long Game](../../generated_pages/techniques/T0059.md) | diff --git a/generated_pages/counters/C00085.md b/generated_pages/counters/C00085.md index 978e9cb..274c9bc 100644 --- a/generated_pages/counters/C00085.md +++ b/generated_pages/counters/C00085.md @@ -1,12 +1,10 @@ # Counter C00085: Mute content -* **Summary**: Rate-limit disinformation content. Reduces its effects, whilst not running afoul of censorship concerns. - -Online archives of content (archives of websites, social media profiles, media, copies of published advertisements; or archives of comments attributed to bad actors, as well as anonymized metadata about users who interacted with them and analysis of the effect) is useful for intelligence analysis and public transparency, but will need similar muting or tagging/ shaming as associated with bad actors. +* **Summary**: Rate-limit disinformation content. Reduces its effects, whilst not running afoul of censorship concerns. Online archives of content (archives of websites, social media profiles, media, copies of published advertisements; or archives of comments attributed to bad actors, as well as anonymized metadata about users who interacted with them and analysis of the effect) is useful for intelligence analysis and public transparency, but will need similar muting or tagging/ shaming as associated with bad actors. * **Playbooks**: -* **Metatechnique**: M003 - daylight +* **Metatechnique**: M003 - Daylight * **Resources needed:** @@ -25,15 +23,6 @@ Online archives of content (archives of websites, social media profiles, media, | Counters these Techniques | | ------------------------- | -| [T0007 Create Inauthentic Social Media Pages and Groups](../../generated_pages/techniques/T0007.md) | -| [T0017 Conduct Fundraising](../../generated_pages/techniques/T0017.md) | -| [T0022 Leverage Conspiracy Theory Narratives](../../generated_pages/techniques/T0022.md) | -| [T0045 Use Fake Experts](../../generated_pages/techniques/T0045.md) | -| [T0044 Seed Distortions](../../generated_pages/techniques/T0044.md) | -| [T0049 Flooding the Information Space](../../generated_pages/techniques/T0049.md) | -| [T0057 Organize Events](../../generated_pages/techniques/T0057.md) | -| [T0061 Sell Merchandise](../../generated_pages/techniques/T0061.md) | -| [T0060 Continue to Amplify](../../generated_pages/techniques/T0060.md) | diff --git a/generated_pages/counters/C00086.md b/generated_pages/counters/C00086.md index 131d267..2b45086 100644 --- a/generated_pages/counters/C00086.md +++ b/generated_pages/counters/C00086.md @@ -1,10 +1,10 @@ # Counter C00086: Distract from noise with addictive content -* **Summary**: Example: Interject addictive links or contents into discussions of disinformation materials and measure a "conversion rate" of users who engage with your content and away from the social media channel's "information bubble" around the disinformation item. Use bots to amplify and upvote the addictive content. 
+* **Summary**: Example: Interject addictive links or contents into discussions of disinformation materials and measure a "conversion rate" of users who engage with your content and away from the social media channel's "information bubble" around the disinformation item. Use bots to amplify and upvote the addictive content. * **Playbooks**: -* **Metatechnique**: M002 - diversion +* **Metatechnique**: M002 - Diversion * **Resources needed:** @@ -24,7 +24,6 @@ | Counters these Techniques | | ------------------------- | | [T0044 Seed Distortions](../../generated_pages/techniques/T0044.md) | -| [T0049 Flooding the Information Space](../../generated_pages/techniques/T0049.md) | diff --git a/generated_pages/counters/C00087.md b/generated_pages/counters/C00087.md index 91d4652..ff486a9 100644 --- a/generated_pages/counters/C00087.md +++ b/generated_pages/counters/C00087.md @@ -4,7 +4,7 @@ * **Playbooks**: -* **Metatechnique**: M009 - dilution +* **Metatechnique**: M009 - Dilution * **Resources needed:** @@ -23,9 +23,7 @@ | Counters these Techniques | | ------------------------- | -| [T0044 Seed Distortions](../../generated_pages/techniques/T0044.md) | -| [T0048 Harass](../../generated_pages/techniques/T0048.md) | -| [T0049 Flooding the Information Space](../../generated_pages/techniques/T0049.md) | +| [T0039 Bait Legitimate Influencers](../../generated_pages/techniques/T0039.md) | diff --git a/generated_pages/counters/C00090.md b/generated_pages/counters/C00090.md index d6fde45..8f70469 100644 --- a/generated_pages/counters/C00090.md +++ b/generated_pages/counters/C00090.md @@ -1,10 +1,10 @@ # Counter C00090: Fake engagement system -* **Summary**: Create honeypots for misinformation creators to engage with, and reduce the resources they have available for misinformation campaigns. +* **Summary**: Create honeypots for misinformation creators to engage with, and reduce the resources they have available for misinformation campaigns. * **Playbooks**: -* **Metatechnique**: M002 - diversion +* **Metatechnique**: M002 - Diversion * **Resources needed:** diff --git a/generated_pages/counters/C00091.md b/generated_pages/counters/C00091.md index 83c6e1c..5c9c8b5 100644 --- a/generated_pages/counters/C00091.md +++ b/generated_pages/counters/C00091.md @@ -1,10 +1,10 @@ # Counter C00091: Honeypot social community -* **Summary**: Set honeypots, e.g. communities, in networks likely to be used for disinformation. +* **Summary**: Set honeypots, e.g. communities, in networks likely to be used for disinformation. * **Playbooks**: -* **Metatechnique**: M002 - diversion +* **Metatechnique**: M002 - Diversion * **Resources needed:** @@ -23,7 +23,6 @@ | Counters these Techniques | | ------------------------- | -| [T0049 Flooding the Information Space](../../generated_pages/techniques/T0049.md) | diff --git a/generated_pages/counters/C00092.md b/generated_pages/counters/C00092.md index 8b1c26c..5714342 100644 --- a/generated_pages/counters/C00092.md +++ b/generated_pages/counters/C00092.md @@ -1,10 +1,10 @@ # Counter C00092: Establish a truth teller reputation score for influencers -* **Summary**: Includes "Establish a truth teller reputation score for influencers” and “Reputation scores for social media users”. Influencers are individuals or accounts with many followers. +* **Summary**: Includes "Establish a truth teller reputation score for influencers” and “Reputation scores for social media users”. Influencers are individuals or accounts with many followers. 
* **Playbooks**: -* **Metatechnique**: M006 - scoring +* **Metatechnique**: M006 - Scoring * **Resources needed:** R001 - datastreams @@ -13,7 +13,7 @@ | Actor types | Sectors | | ----------- | ------- | -| [A001 data scientist ](../../generated_pages/actortypes/A001.md) | S001, S002, S003, S004, S005, S006, S007, S008, S009, S010 | +| [A001 data scientist](../../generated_pages/actortypes/A001.md) | S001, S002, S003, S004, S005, S006, S007, S008, S009, S010 | @@ -24,10 +24,6 @@ | Counters these Techniques | | ------------------------- | -| [T0010 Cultivate Ignorant Agents](../../generated_pages/techniques/T0010.md) | -| [T0023 Distort Facts](../../generated_pages/techniques/T0023.md) | -| [T0045 Use Fake Experts](../../generated_pages/techniques/T0045.md) | -| [T0044 Seed Distortions](../../generated_pages/techniques/T0044.md) | diff --git a/generated_pages/counters/C00093.md b/generated_pages/counters/C00093.md index 5fbf887..33d01af 100644 --- a/generated_pages/counters/C00093.md +++ b/generated_pages/counters/C00093.md @@ -1,10 +1,10 @@ # Counter C00093: Influencer code of conduct -* **Summary**: Establish tailored code of conduct for individuals with many followers. Can be platform code of conduct; can also be community code. +* **Summary**: Establish tailored code of conduct for individuals with many followers. Can be platform code of conduct; can also be community code. * **Playbooks**: -* **Metatechnique**: M001 - resilience +* **Metatechnique**: M001 - Resilience * **Resources needed:** @@ -23,10 +23,6 @@ | Counters these Techniques | | ------------------------- | -| [T0010 Cultivate Ignorant Agents](../../generated_pages/techniques/T0010.md) | -| [T0017 Conduct Fundraising](../../generated_pages/techniques/T0017.md) | -| [T0047 Censor Social Media as a Political Force](../../generated_pages/techniques/T0047.md) | -| [T0048 Harass](../../generated_pages/techniques/T0048.md) | diff --git a/generated_pages/counters/C00094.md b/generated_pages/counters/C00094.md index 34ff7c4..f486f1c 100644 --- a/generated_pages/counters/C00094.md +++ b/generated_pages/counters/C00094.md @@ -1,10 +1,10 @@ # Counter C00094: Force full disclosure on corporate sponsor of research -* **Summary**: Accountability move: make sure research is published with its funding sources. +* **Summary**: Accountability move: make sure research is published with its funding sources. * **Playbooks**: -* **Metatechnique**: M003 - daylight +* **Metatechnique**: M003 - Daylight * **Resources needed:** diff --git a/generated_pages/counters/C00096.md b/generated_pages/counters/C00096.md index e68b39a..6b124d9 100644 --- a/generated_pages/counters/C00096.md +++ b/generated_pages/counters/C00096.md @@ -1,10 +1,10 @@ # Counter C00096: Strengthen institutions that are always truth tellers -* **Summary**: Increase credibility, visibility, and reach of positive influencers in the information space. +* **Summary**: Increase credibility, visibility, and reach of positive influencers in the information space. 
* **Playbooks**: -* **Metatechnique**: M006 - scoring +* **Metatechnique**: M006 - Scoring * **Resources needed:** @@ -24,7 +24,6 @@ | Counters these Techniques | | ------------------------- | | [T0022 Leverage Conspiracy Theory Narratives](../../generated_pages/techniques/T0022.md) | -| [T0023 Distort Facts](../../generated_pages/techniques/T0023.md) | diff --git a/generated_pages/counters/C00097.md b/generated_pages/counters/C00097.md index fb51ce0..c682585 100644 --- a/generated_pages/counters/C00097.md +++ b/generated_pages/counters/C00097.md @@ -1,10 +1,10 @@ # Counter C00097: Require use of verified identities to contribute to poll or comment -* **Summary**: Reduce poll flooding by online taking comments or poll entries from verified accounts. +* **Summary**: Reduce poll flooding by online taking comments or poll entries from verified accounts. * **Playbooks**: -* **Metatechnique**: M004 - friction +* **Metatechnique**: M004 - Friction * **Resources needed:** R004 - platform algorithms @@ -25,9 +25,6 @@ | Counters these Techniques | | ------------------------- | | [T0029 Online Polls](../../generated_pages/techniques/T0029.md) | -| [T0045 Use Fake Experts](../../generated_pages/techniques/T0045.md) | -| [T0009 Create Fake Experts](../../generated_pages/techniques/T0009.md) | -| [T0007 Create Inauthentic Social Media Pages and Groups](../../generated_pages/techniques/T0007.md) | diff --git a/generated_pages/counters/C00098.md b/generated_pages/counters/C00098.md index 1b5b94b..8a63784 100644 --- a/generated_pages/counters/C00098.md +++ b/generated_pages/counters/C00098.md @@ -1,10 +1,10 @@ # Counter C00098: Revocation of allowlisted or "verified" status -* **Summary**: remove blue checkmarks etc from known misinformation accounts. +* **Summary**: remove blue checkmarks etc from known misinformation accounts. * **Playbooks**: -* **Metatechnique**: M004 - friction +* **Metatechnique**: M004 - Friction * **Resources needed:** @@ -24,7 +24,6 @@ | Counters these Techniques | | ------------------------- | -| [T0011 Compromise Legitimate Accounts](../../generated_pages/techniques/T0011.md) | diff --git a/generated_pages/counters/C00099.md b/generated_pages/counters/C00099.md index 38c4ed4..209c6ab 100644 --- a/generated_pages/counters/C00099.md +++ b/generated_pages/counters/C00099.md @@ -1,10 +1,10 @@ # Counter C00099: Strengthen verification methods -* **Summary**: Improve content veerification methods available to groups, individuals etc. +* **Summary**: Improve content veerification methods available to groups, individuals etc. 
* **Playbooks**: -* **Metatechnique**: M004 - friction +* **Metatechnique**: M004 - Friction * **Resources needed:** R004 - platform algorithms @@ -24,9 +24,6 @@ | Counters these Techniques | | ------------------------- | -| [T0045 Use Fake Experts](../../generated_pages/techniques/T0045.md) | -| [T0009 Create Fake Experts](../../generated_pages/techniques/T0009.md) | -| [T0007 Create Inauthentic Social Media Pages and Groups](../../generated_pages/techniques/T0007.md) | diff --git a/generated_pages/counters/C00100.md b/generated_pages/counters/C00100.md index d6e1893..684f533 100644 --- a/generated_pages/counters/C00100.md +++ b/generated_pages/counters/C00100.md @@ -1,10 +1,10 @@ # Counter C00100: Hashtag jacking -* **Summary**: Post large volumes of unrelated content on known misinformation hashtags +* **Summary**: Post large volumes of unrelated content on known misinformation hashtags * **Playbooks**: -* **Metatechnique**: M002 - diversion +* **Metatechnique**: M002 - Diversion * **Resources needed:** diff --git a/generated_pages/counters/C00101.md b/generated_pages/counters/C00101.md index 04f3b78..1308899 100644 --- a/generated_pages/counters/C00101.md +++ b/generated_pages/counters/C00101.md @@ -1,10 +1,10 @@ # Counter C00101: Create friction by rate-limiting engagement -* **Summary**: Create participant friction. Includes Make repeat voting hard, and throttle number of forwards. +* **Summary**: Create participant friction. Includes Make repeat voting hard, and throttle number of forwards. * **Playbooks**: -* **Metatechnique**: M004 - friction +* **Metatechnique**: M004 - Friction * **Resources needed:** R004 - platform algorithms @@ -25,7 +25,6 @@ | Counters these Techniques | | ------------------------- | | [T0029 Online Polls](../../generated_pages/techniques/T0029.md) | -| [T0049 Flooding the Information Space](../../generated_pages/techniques/T0049.md) | diff --git a/generated_pages/counters/C00103.md b/generated_pages/counters/C00103.md index bb292a3..4102be1 100644 --- a/generated_pages/counters/C00103.md +++ b/generated_pages/counters/C00103.md @@ -1,10 +1,10 @@ # Counter C00103: Create a bot that engages / distract trolls -* **Summary**: This is reactive, not active measure (honeypots are active). It's a platform controlled measure. +* **Summary**: This is reactive, not active measure (honeypots are active). It's a platform controlled measure. * **Playbooks**: -* **Metatechnique**: M002 - diversion +* **Metatechnique**: M002 - Diversion * **Resources needed:** diff --git a/generated_pages/counters/C00105.md b/generated_pages/counters/C00105.md index 5e3e583..d71480a 100644 --- a/generated_pages/counters/C00105.md +++ b/generated_pages/counters/C00105.md @@ -1,10 +1,10 @@ # Counter C00105: Buy more advertising than misinformation creators -* **Summary**: Shift influence and algorithms by posting more adverts into spaces than misinformation creators. +* **Summary**: Shift influence and algorithms by posting more adverts into spaces than misinformation creators. 
* **Playbooks**: -* **Metatechnique**: M009 - dilution +* **Metatechnique**: M009 - Dilution * **Resources needed:** R003 - money @@ -25,7 +25,6 @@ | Counters these Techniques | | ------------------------- | | [T0016 Create Clickbait](../../generated_pages/techniques/T0016.md) | -| [T0018 Purchase Targeted Advertisements](../../generated_pages/techniques/T0018.md) | diff --git a/generated_pages/counters/C00106.md b/generated_pages/counters/C00106.md index c96ffea..fee380f 100644 --- a/generated_pages/counters/C00106.md +++ b/generated_pages/counters/C00106.md @@ -4,7 +4,7 @@ * **Playbooks**: -* **Metatechnique**: M002 - diversion +* **Metatechnique**: M002 - Diversion * **Resources needed:** diff --git a/generated_pages/counters/C00107.md b/generated_pages/counters/C00107.md index 43b9fb3..fb3dc56 100644 --- a/generated_pages/counters/C00107.md +++ b/generated_pages/counters/C00107.md @@ -4,7 +4,7 @@ * **Playbooks**: -* **Metatechnique**: M006 - scoring, M005 - removal +* **Metatechnique**: M006 - Scoring, M005 - Removal * **Resources needed:** @@ -25,17 +25,6 @@ | Counters these Techniques | | ------------------------- | | [T0009 Create Fake Experts](../../generated_pages/techniques/T0009.md) | -| [T0007 Create Inauthentic Social Media Pages and Groups](../../generated_pages/techniques/T0007.md) | -| [T0013 Create Inauthentic Websites](../../generated_pages/techniques/T0013.md) | -| [T0014 Prepare Fundraising Campaigns](../../generated_pages/techniques/T0014.md) | -| [T0015 Create Hashtags and Search Artifacts](../../generated_pages/techniques/T0015.md) | -| [T0016 Create Clickbait](../../generated_pages/techniques/T0016.md) | -| [T0017 Conduct Fundraising](../../generated_pages/techniques/T0017.md) | -| [T0018 Purchase Targeted Advertisements](../../generated_pages/techniques/T0018.md) | -| [T0045 Use Fake Experts](../../generated_pages/techniques/T0045.md) | -| [T0057 Organize Events](../../generated_pages/techniques/T0057.md) | -| [T0022 Leverage Conspiracy Theory Narratives](../../generated_pages/techniques/T0022.md) | -| [T0061 Sell Merchandise](../../generated_pages/techniques/T0061.md) | diff --git a/generated_pages/counters/C00109.md b/generated_pages/counters/C00109.md index 9f4da49..7fa0e2b 100644 --- a/generated_pages/counters/C00109.md +++ b/generated_pages/counters/C00109.md @@ -1,10 +1,10 @@ # Counter C00109: Dampen Emotional Reaction -* **Summary**: Reduce emotional responses to misinformation through calming messages, etc. +* **Summary**: Reduce emotional responses to misinformation through calming messages, etc. 
* **Playbooks**: -* **Metatechnique**: M001 - resilience +* **Metatechnique**: M001 - Resilience * **Resources needed:** diff --git a/generated_pages/counters/C00111.md b/generated_pages/counters/C00111.md index 59c0934..4210fa1 100644 --- a/generated_pages/counters/C00111.md +++ b/generated_pages/counters/C00111.md @@ -4,7 +4,7 @@ * **Playbooks**: -* **Metatechnique**: M001 - resilience +* **Metatechnique**: M001 - Resilience * **Resources needed:** @@ -13,7 +13,7 @@ | Actor types | Sectors | | ----------- | ------- | -| [A013 content creator ](../../generated_pages/actortypes/A013.md) | S006 | +| [A013 content creator](../../generated_pages/actortypes/A013.md) | S006 | | [A021 media organisation](../../generated_pages/actortypes/A021.md) | S010 | diff --git a/generated_pages/counters/C00112.md b/generated_pages/counters/C00112.md index fd6a8a0..63bb126 100644 --- a/generated_pages/counters/C00112.md +++ b/generated_pages/counters/C00112.md @@ -1,10 +1,10 @@ # Counter C00112: "Prove they are not an op!" -* **Summary**: Challenge misinformation creators to prove they're not an information operation. +* **Summary**: Challenge misinformation creators to prove they're not an information operation. * **Playbooks**: -* **Metatechnique**: M004 - friction +* **Metatechnique**: M004 - Friction * **Resources needed:** @@ -24,7 +24,6 @@ | Counters these Techniques | | ------------------------- | | [T0040 Demand Insurmountable Proof](../../generated_pages/techniques/T0040.md) | -| [T0042 Seed Kernel of Truth](../../generated_pages/techniques/T0042.md) | diff --git a/generated_pages/counters/C00113.md b/generated_pages/counters/C00113.md index f22fef0..35505dd 100644 --- a/generated_pages/counters/C00113.md +++ b/generated_pages/counters/C00113.md @@ -1,10 +1,10 @@ # Counter C00113: Debunk and defuse a fake expert / credentials. -* **Summary**: Debunk fake experts, their credentials, and potentially also their audience quality +* **Summary**: Debunk fake experts, their credentials, and potentially also their audience quality * **Playbooks**: -* **Metatechnique**: M003 - daylight +* **Metatechnique**: M003 - Daylight * **Resources needed:** diff --git a/generated_pages/counters/C00114.md b/generated_pages/counters/C00114.md index cd8e3ff..16f11a7 100644 --- a/generated_pages/counters/C00114.md +++ b/generated_pages/counters/C00114.md @@ -4,7 +4,7 @@ * **Playbooks**: -* **Metatechnique**: M004 - friction +* **Metatechnique**: M004 - Friction * **Resources needed:** @@ -24,7 +24,7 @@ | Counters these Techniques | | ------------------------- | -| [T0048 Harass](../../generated_pages/techniques/T0048.md) | +| [T0039 Bait Legitimate Influencers](../../generated_pages/techniques/T0039.md) | diff --git a/generated_pages/counters/C00115.md b/generated_pages/counters/C00115.md index 936dabf..de49899 100644 --- a/generated_pages/counters/C00115.md +++ b/generated_pages/counters/C00115.md @@ -1,10 +1,10 @@ # Counter C00115: Expose actor and intentions -* **Summary**: Debunk misinformation creators and posters. +* **Summary**: Debunk misinformation creators and posters. 
* **Playbooks**: -* **Metatechnique**: M003 - daylight +* **Metatechnique**: M003 - Daylight * **Resources needed:** @@ -23,7 +23,6 @@ | Counters these Techniques | | ------------------------- | -| [T0048 Harass](../../generated_pages/techniques/T0048.md) | diff --git a/generated_pages/counters/C00116.md b/generated_pages/counters/C00116.md index 88d07f7..74be335 100644 --- a/generated_pages/counters/C00116.md +++ b/generated_pages/counters/C00116.md @@ -1,10 +1,10 @@ # Counter C00116: Provide proof of involvement -* **Summary**: Build and post information about groups etc's involvement in misinformation incidents. +* **Summary**: Build and post information about groups etc's involvement in misinformation incidents. * **Playbooks**: -* **Metatechnique**: M003 - daylight +* **Metatechnique**: M003 - Daylight * **Resources needed:** diff --git a/generated_pages/counters/C00117.md b/generated_pages/counters/C00117.md index 4505337..81d7958 100644 --- a/generated_pages/counters/C00117.md +++ b/generated_pages/counters/C00117.md @@ -4,7 +4,7 @@ * **Playbooks**: -* **Metatechnique**: M010 - countermessaging +* **Metatechnique**: M010 - Countermessaging * **Resources needed:** @@ -23,11 +23,7 @@ | Counters these Techniques | | ------------------------- | -| [T0046 Use Search Engine Optimization](../../generated_pages/techniques/T0046.md) | -| [T0022 Leverage Conspiracy Theory Narratives](../../generated_pages/techniques/T0022.md) | -| [T0045 Use Fake Experts](../../generated_pages/techniques/T0045.md) | -| [T0044 Seed Distortions](../../generated_pages/techniques/T0044.md) | -| [T0060 Continue to Amplify](../../generated_pages/techniques/T0060.md) | +| [T0046 Use Search Engine Optimisation](../../generated_pages/techniques/T0046.md) | diff --git a/generated_pages/counters/C00118.md b/generated_pages/counters/C00118.md index a230758..9fc4977 100644 --- a/generated_pages/counters/C00118.md +++ b/generated_pages/counters/C00118.md @@ -1,10 +1,10 @@ # Counter C00118: Repurpose images with new text -* **Summary**: Add countermessage text to iamges used in misinformation incidents. +* **Summary**: Add countermessage text to iamges used in misinformation incidents. * **Playbooks**: -* **Metatechnique**: M010 - countermessaging +* **Metatechnique**: M010 - Countermessaging * **Resources needed:** diff --git a/generated_pages/counters/C00119.md b/generated_pages/counters/C00119.md index 3c066d3..199468f 100644 --- a/generated_pages/counters/C00119.md +++ b/generated_pages/counters/C00119.md @@ -1,10 +1,10 @@ # Counter C00119: Engage payload and debunk. -* **Summary**: debunk misinformation content. Provide link to facts. +* **Summary**: debunk misinformation content. Provide link to facts. 
* **Playbooks**: -* **Metatechnique**: M010 - countermessaging +* **Metatechnique**: M010 - Countermessaging * **Resources needed:** @@ -24,9 +24,6 @@ | Counters these Techniques | | ------------------------- | | [T0022 Leverage Conspiracy Theory Narratives](../../generated_pages/techniques/T0022.md) | -| [T0045 Use Fake Experts](../../generated_pages/techniques/T0045.md) | -| [T0044 Seed Distortions](../../generated_pages/techniques/T0044.md) | -| [T0060 Continue to Amplify](../../generated_pages/techniques/T0060.md) | diff --git a/generated_pages/counters/C00120.md b/generated_pages/counters/C00120.md index 9dc5e79..01337af 100644 --- a/generated_pages/counters/C00120.md +++ b/generated_pages/counters/C00120.md @@ -4,7 +4,7 @@ * **Playbooks**: -* **Metatechnique**: M007 - metatechnique +* **Metatechnique**: M007 - Metatechnique * **Resources needed:** diff --git a/generated_pages/counters/C00121.md b/generated_pages/counters/C00121.md index 7e8dda0..d507975 100644 --- a/generated_pages/counters/C00121.md +++ b/generated_pages/counters/C00121.md @@ -1,10 +1,10 @@ -# Counter C00121: Tool transparency and literacy for channels people follow. +# Counter C00121: Tool transparency and literacy for channels people follow. -* **Summary**: Make algorithms in platforms explainable, and visible to people using those platforms. +* **Summary**: Make algorithms in platforms explainable, and visible to people using those platforms. * **Playbooks**: -* **Metatechnique**: M001 - resilience +* **Metatechnique**: M001 - Resilience * **Resources needed:** diff --git a/generated_pages/counters/C00122.md b/generated_pages/counters/C00122.md index 7315a5a..db15eb0 100644 --- a/generated_pages/counters/C00122.md +++ b/generated_pages/counters/C00122.md @@ -1,10 +1,10 @@ # Counter C00122: Content moderation -* **Summary**: Beware: content moderation misused becomes censorship. +* **Summary**: Beware: content moderation misused becomes censorship. * **Playbooks**: -* **Metatechnique**: M004 - friction +* **Metatechnique**: M004 - Friction * **Resources needed:** @@ -24,19 +24,6 @@ | Counters these Techniques | | ------------------------- | -| [T0007 Create Inauthentic Social Media Pages and Groups](../../generated_pages/techniques/T0007.md) | -| [T0014 Prepare Fundraising Campaigns](../../generated_pages/techniques/T0014.md) | -| [T0015 Create Hashtags and Search Artifacts](../../generated_pages/techniques/T0015.md) | -| [T0016 Create Clickbait](../../generated_pages/techniques/T0016.md) | -| [T0017 Conduct Fundraising](../../generated_pages/techniques/T0017.md) | -| [T0018 Purchase Targeted Advertisements](../../generated_pages/techniques/T0018.md) | -| [T0022 Leverage Conspiracy Theory Narratives](../../generated_pages/techniques/T0022.md) | -| [T0045 Use Fake Experts](../../generated_pages/techniques/T0045.md) | -| [T0044 Seed Distortions](../../generated_pages/techniques/T0044.md) | -| [T0049 Flooding the Information Space](../../generated_pages/techniques/T0049.md) | -| [T0057 Organize Events](../../generated_pages/techniques/T0057.md) | -| [T0061 Sell Merchandise](../../generated_pages/techniques/T0061.md) | -| [T0060 Continue to Amplify](../../generated_pages/techniques/T0060.md) | diff --git a/generated_pages/counters/C00123.md b/generated_pages/counters/C00123.md index 887ae12..117fe96 100644 --- a/generated_pages/counters/C00123.md +++ b/generated_pages/counters/C00123.md @@ -1,10 +1,10 @@ # Counter C00123: Remove or rate limit botnets -* **Summary**: reduce the visibility of known botnets online. 
+* **Summary**: reduce the visibility of known botnets online. * **Playbooks**: -* **Metatechnique**: M004 - friction +* **Metatechnique**: M004 - Friction * **Resources needed:** @@ -24,8 +24,6 @@ | Counters these Techniques | | ------------------------- | | [T0029 Online Polls](../../generated_pages/techniques/T0029.md) | -| [T0049 Flooding the Information Space](../../generated_pages/techniques/T0049.md) | -| [T0060 Continue to Amplify](../../generated_pages/techniques/T0060.md) | diff --git a/generated_pages/counters/C00124.md b/generated_pages/counters/C00124.md index 12a341d..977248f 100644 --- a/generated_pages/counters/C00124.md +++ b/generated_pages/counters/C00124.md @@ -1,10 +1,10 @@ # Counter C00124: Don't feed the trolls -* **Summary**: Don't engage with individuals relaying misinformation. +* **Summary**: Don't engage with individuals relaying misinformation. * **Playbooks**: -* **Metatechnique**: M004 - friction +* **Metatechnique**: M004 - Friction * **Resources needed:** diff --git a/generated_pages/counters/C00125.md b/generated_pages/counters/C00125.md index 630613b..df07dc3 100644 --- a/generated_pages/counters/C00125.md +++ b/generated_pages/counters/C00125.md @@ -1,10 +1,10 @@ # Counter C00125: Prebunking -* **Summary**: Produce material in advance of misinformation incidents, by anticipating the narratives used in them, and debunking them. +* **Summary**: Produce material in advance of misinformation incidents, by anticipating the narratives used in them, and debunking them. * **Playbooks**: -* **Metatechnique**: M001 - resilience +* **Metatechnique**: M001 - Resilience * **Resources needed:** @@ -23,12 +23,6 @@ | Counters these Techniques | | ------------------------- | -| [T0045 Use Fake Experts](../../generated_pages/techniques/T0045.md) | -| [T0022 Leverage Conspiracy Theory Narratives](../../generated_pages/techniques/T0022.md) | -| [T0002 Facilitate State Propaganda](../../generated_pages/techniques/T0002.md) | -| [T0003 Leverage Existing Narratives](../../generated_pages/techniques/T0003.md) | -| [T0004 Develop Competing Narratives](../../generated_pages/techniques/T0004.md) | -| [T0010 Cultivate Ignorant Agents](../../generated_pages/techniques/T0010.md) | diff --git a/generated_pages/counters/C00126.md b/generated_pages/counters/C00126.md index 3407b2f..08e3639 100644 --- a/generated_pages/counters/C00126.md +++ b/generated_pages/counters/C00126.md @@ -1,10 +1,10 @@ # Counter C00126: Social media amber alert -* **Summary**: Create an alert system around disinformation and misinformation artifacts, narratives, and incidents +* **Summary**: Create an alert system around disinformation and misinformation artefacts, narratives, and incidents * **Playbooks**: -* **Metatechnique**: M003 - daylight +* **Metatechnique**: M003 - Daylight * **Resources needed:** @@ -23,13 +23,6 @@ | Counters these Techniques | | ------------------------- | -| [T0045 Use Fake Experts](../../generated_pages/techniques/T0045.md) | -| [T0022 Leverage Conspiracy Theory Narratives](../../generated_pages/techniques/T0022.md) | -| [T0002 Facilitate State Propaganda](../../generated_pages/techniques/T0002.md) | -| [T0003 Leverage Existing Narratives](../../generated_pages/techniques/T0003.md) | -| [T0004 Develop Competing Narratives](../../generated_pages/techniques/T0004.md) | -| [T0010 Cultivate Ignorant Agents](../../generated_pages/techniques/T0010.md) | -| [T0057 Organize Events](../../generated_pages/techniques/T0057.md) | diff --git a/generated_pages/counters/C00128.md 
b/generated_pages/counters/C00128.md index dac9b32..146ee00 100644 --- a/generated_pages/counters/C00128.md +++ b/generated_pages/counters/C00128.md @@ -1,10 +1,10 @@ # Counter C00128: Create friction by marking content with ridicule or other "decelerants" -* **Summary**: Repost or comment on misinformation artifacts, using ridicule or other content to reduce the likelihood of reposting. +* **Summary**: Repost or comment on misinformation artefacts, using ridicule or other content to reduce the likelihood of reposting. * **Playbooks**: -* **Metatechnique**: M009 - dilution +* **Metatechnique**: M009 - Dilution * **Resources needed:** @@ -13,7 +13,7 @@ | Actor types | Sectors | | ----------- | ------- | -| [A003 trusted authority ](../../generated_pages/actortypes/A003.md) | S001, S002, S003, S004, S005, S006, S007, S008, S009, S010 | +| [A003 trusted authority](../../generated_pages/actortypes/A003.md) | S001, S002, S003, S004, S005, S006, S007, S008, S009, S010 | @@ -24,9 +24,6 @@ | Counters these Techniques | | ------------------------- | -| [T0049 Flooding the Information Space](../../generated_pages/techniques/T0049.md) | -| [T0057 Organize Events](../../generated_pages/techniques/T0057.md) | -| [T0061 Sell Merchandise](../../generated_pages/techniques/T0061.md) | diff --git a/generated_pages/counters/C00129.md b/generated_pages/counters/C00129.md index 360e4bd..5fc3576 100644 --- a/generated_pages/counters/C00129.md +++ b/generated_pages/counters/C00129.md @@ -1,10 +1,10 @@ -# Counter C00129: Use banking to cut off access +# Counter C00129: Use banking to cut off access * **Summary**: fiscal sanctions; parallel to counter terrorism * **Playbooks**: -* **Metatechnique**: M014 - reduce resources +* **Metatechnique**: M014 - Reduce Resources * **Resources needed:** @@ -23,11 +23,7 @@ | Counters these Techniques | | ------------------------- | -| [T0057 Organize Events](../../generated_pages/techniques/T0057.md) | -| [T0061 Sell Merchandise](../../generated_pages/techniques/T0061.md) | -| [T0014 Prepare Fundraising Campaigns](../../generated_pages/techniques/T0014.md) | -| [T0017 Conduct Fundraising](../../generated_pages/techniques/T0017.md) | -| [T0018 Purchase Targeted Advertisements](../../generated_pages/techniques/T0018.md) | +| [T0057 Organise Events](../../generated_pages/techniques/T0057.md) | diff --git a/generated_pages/counters/C00130.md b/generated_pages/counters/C00130.md index c2cbecb..ee54db2 100644 --- a/generated_pages/counters/C00130.md +++ b/generated_pages/counters/C00130.md @@ -1,10 +1,10 @@ # Counter C00130: Mentorship: elders, youth, credit. Learn vicariously. -* **Summary**: Train local influencers in countering misinformation. +* **Summary**: Train local influencers in countering misinformation. * **Playbooks**: -* **Metatechnique**: M001 - resilience +* **Metatechnique**: M001 - Resilience * **Resources needed:** diff --git a/generated_pages/counters/C00131.md b/generated_pages/counters/C00131.md index 5af1bb6..bb673b3 100644 --- a/generated_pages/counters/C00131.md +++ b/generated_pages/counters/C00131.md @@ -1,10 +1,10 @@ # Counter C00131: Seize and analyse botnet servers -* **Summary**: Take botnet servers offline by seizing them. +* **Summary**: Take botnet servers offline by seizing them. 
* **Playbooks**: -* **Metatechnique**: M005 - removal +* **Metatechnique**: M005 - Removal * **Resources needed:** @@ -13,7 +13,7 @@ | Actor types | Sectors | | ----------- | ------- | -| [A029 server admininistrator ](../../generated_pages/actortypes/A029.md) | S008 | +| [A029 server admininistrator](../../generated_pages/actortypes/A029.md) | S008 | @@ -25,7 +25,6 @@ | Counters these Techniques | | ------------------------- | | [T0049 Flooding the Information Space](../../generated_pages/techniques/T0049.md) | -| [T0060 Continue to Amplify](../../generated_pages/techniques/T0060.md) | diff --git a/generated_pages/counters/C00133.md b/generated_pages/counters/C00133.md index 7c5cc2f..a5aa87c 100644 --- a/generated_pages/counters/C00133.md +++ b/generated_pages/counters/C00133.md @@ -4,7 +4,7 @@ * **Playbooks**: -* **Metatechnique**: M005 - removal +* **Metatechnique**: M005 - Removal * **Resources needed:** @@ -24,11 +24,6 @@ | Counters these Techniques | | ------------------------- | -| [T0007 Create Inauthentic Social Media Pages and Groups](../../generated_pages/techniques/T0007.md) | -| [T0009 Create Fake Experts](../../generated_pages/techniques/T0009.md) | -| [T0045 Use Fake Experts](../../generated_pages/techniques/T0045.md) | -| [T0045 Use Fake Experts](../../generated_pages/techniques/T0045.md) | -| [T0011 Compromise Legitimate Accounts](../../generated_pages/techniques/T0011.md) | diff --git a/generated_pages/counters/C00135.md b/generated_pages/counters/C00135.md index 6ec098b..d6357b1 100644 --- a/generated_pages/counters/C00135.md +++ b/generated_pages/counters/C00135.md @@ -1,10 +1,10 @@ # Counter C00135: Deplatform message groups and/or message boards -* **Summary**: Merged two rows here. +* **Summary**: Merged two rows here. * **Playbooks**: -* **Metatechnique**: M005 - removal +* **Metatechnique**: M005 - Removal * **Resources needed:** @@ -24,8 +24,6 @@ | Counters these Techniques | | ------------------------- | -| [T0007 Create Inauthentic Social Media Pages and Groups](../../generated_pages/techniques/T0007.md) | -| [T0043 Chat Apps](../../generated_pages/techniques/T0043.md) | diff --git a/generated_pages/counters/C00136.md b/generated_pages/counters/C00136.md index ff1f355..840ad6b 100644 --- a/generated_pages/counters/C00136.md +++ b/generated_pages/counters/C00136.md @@ -1,10 +1,10 @@ # Counter C00136: Microtarget most likely targets then send them countermessages -* **Summary**: Find communities likely to be targetted by misinformation campaigns, and send them countermessages or pointers to information sources. +* **Summary**: Find communities likely to be targetted by misinformation campaigns, and send them countermessages or pointers to information sources. * **Playbooks**: -* **Metatechnique**: M010 - countermessaging +* **Metatechnique**: M010 - Countermessaging * **Resources needed:** @@ -23,8 +23,6 @@ | Counters these Techniques | | ------------------------- | -| [T0010 Cultivate Ignorant Agents](../../generated_pages/techniques/T0010.md) | -| [T0020 Trial Content](../../generated_pages/techniques/T0020.md) | diff --git a/generated_pages/counters/C00138.md b/generated_pages/counters/C00138.md index efc6946..db0d485 100644 --- a/generated_pages/counters/C00138.md +++ b/generated_pages/counters/C00138.md @@ -1,10 +1,10 @@ # Counter C00138: Spam domestic actors with lawsuits -* **Summary**: File multiple lawsuits against known misinformation creators and posters, to distract them from disinformation creation. 
+* **Summary**: File multiple lawsuits against known misinformation creators and posters, to distract them from disinformation creation. * **Playbooks**: -* **Metatechnique**: M014 - reduce resources +* **Metatechnique**: M014 - Reduce Resources * **Resources needed:** diff --git a/generated_pages/counters/C00139.md b/generated_pages/counters/C00139.md index 0476a6c..d9d1f79 100644 --- a/generated_pages/counters/C00139.md +++ b/generated_pages/counters/C00139.md @@ -1,10 +1,10 @@ # Counter C00139: Weaponise youtube content matrices -* **Summary**: God knows what this is. Keeping temporarily in case we work it out. +* **Summary**: God knows what this is. Keeping temporarily in case we work it out. * **Playbooks**: -* **Metatechnique**: M004 - friction +* **Metatechnique**: M004 - Friction * **Resources needed:** diff --git a/generated_pages/counters/C00140.md b/generated_pages/counters/C00140.md index 603b05d..2b72b20 100644 --- a/generated_pages/counters/C00140.md +++ b/generated_pages/counters/C00140.md @@ -4,7 +4,7 @@ * **Playbooks**: -* **Metatechnique**: M008 - data pollution +* **Metatechnique**: M008 - Data Pollution * **Resources needed:** diff --git a/generated_pages/counters/C00142.md b/generated_pages/counters/C00142.md index c560137..05e8efa 100644 --- a/generated_pages/counters/C00142.md +++ b/generated_pages/counters/C00142.md @@ -1,10 +1,10 @@ # Counter C00142: Platform adds warning label and decision point when sharing content -* **Summary**: Includes “this has been disproved: do you want to forward it”. Includes “"Hey this story is old" popup when messaging with old URL” - this assumes that this technique is based on visits to an URL shortener or a captured news site that can publish a message of our choice. Includes “mark clickbait visually”. +* **Summary**: Includes “this has been disproved: do you want to forward it”. Includes “"Hey this story is old" popup when messaging with old URL” - this assumes that this technique is based on visits to an URL shortener or a captured news site that can publish a message of our choice. Includes “mark clickbait visually”. * **Playbooks**: -* **Metatechnique**: M004 - friction +* **Metatechnique**: M004 - Friction * **Resources needed:** R004 - platform algorithms @@ -24,7 +24,6 @@ | Counters these Techniques | | ------------------------- | -| [T0016 Create Clickbait](../../generated_pages/techniques/T0016.md) | diff --git a/generated_pages/counters/C00143.md b/generated_pages/counters/C00143.md index 1b6511a..d7b13c5 100644 --- a/generated_pages/counters/C00143.md +++ b/generated_pages/counters/C00143.md @@ -1,10 +1,10 @@ # Counter C00143: (botnet) DMCA takedown requests to waste group time -* **Summary**: Use copyright infringement claims to remove videos etc. +* **Summary**: Use copyright infringement claims to remove videos etc. * **Playbooks**: -* **Metatechnique**: M013 - targeting +* **Metatechnique**: M013 - Targeting * **Resources needed:** diff --git a/generated_pages/counters/C00144.md b/generated_pages/counters/C00144.md index ac11eae..802d989 100644 --- a/generated_pages/counters/C00144.md +++ b/generated_pages/counters/C00144.md @@ -1,10 +1,10 @@ # Counter C00144: Buy out troll farm employees / offer them jobs -* **Summary**: Degrade the infrastructure. Could e.g. pay to not act for 30 days. Not recommended +* **Summary**: Degrade the infrastructure. Could e.g. pay to not act for 30 days. 
Not recommended * **Playbooks**: -* **Metatechnique**: M014 - reduce resources +* **Metatechnique**: M014 - Reduce Resources * **Resources needed:** diff --git a/generated_pages/counters/C00147.md b/generated_pages/counters/C00147.md index 3c45dca..6344b70 100644 --- a/generated_pages/counters/C00147.md +++ b/generated_pages/counters/C00147.md @@ -1,10 +1,10 @@ # Counter C00147: Make amplification of social media posts expire (e.g. can't like/ retweet after n days) -* **Summary**: Stop new community activity (likes, comments) on old social media posts. +* **Summary**: Stop new community activity (likes, comments) on old social media posts. * **Playbooks**: -* **Metatechnique**: M004 - friction +* **Metatechnique**: M004 - Friction * **Resources needed:** R004 - platform algorithms diff --git a/generated_pages/counters/C00148.md b/generated_pages/counters/C00148.md index d894eb3..5d1f341 100644 --- a/generated_pages/counters/C00148.md +++ b/generated_pages/counters/C00148.md @@ -4,7 +4,7 @@ * **Playbooks**: -* **Metatechnique**: M008 - data pollution +* **Metatechnique**: M008 - Data Pollution * **Resources needed:** R004 - platform algorithms diff --git a/generated_pages/counters/C00149.md b/generated_pages/counters/C00149.md index 003a80c..58e6bce 100644 --- a/generated_pages/counters/C00149.md +++ b/generated_pages/counters/C00149.md @@ -4,7 +4,7 @@ * **Playbooks**: -* **Metatechnique**: M008 - data pollution +* **Metatechnique**: M008 - Data Pollution * **Resources needed:** @@ -23,9 +23,6 @@ | Counters these Techniques | | ------------------------- | -| [T0020 Trial Content](../../generated_pages/techniques/T0020.md) | -| [T0046 Use Search Engine Optimization](../../generated_pages/techniques/T0046.md) | -| [T0057 Organize Events](../../generated_pages/techniques/T0057.md) | diff --git a/generated_pages/counters/C00153.md b/generated_pages/counters/C00153.md index 9bb65e8..ca34171 100644 --- a/generated_pages/counters/C00153.md +++ b/generated_pages/counters/C00153.md @@ -4,7 +4,7 @@ * **Playbooks**: -* **Metatechnique**: M013 - targeting +* **Metatechnique**: M013 - Targeting * **Resources needed:** @@ -24,15 +24,6 @@ | Counters these Techniques | | ------------------------- | -| [T0007 Create Inauthentic Social Media Pages and Groups](../../generated_pages/techniques/T0007.md) | -| [T0011 Compromise Legitimate Accounts](../../generated_pages/techniques/T0011.md) | -| [T0013 Create Inauthentic Websites](../../generated_pages/techniques/T0013.md) | -| [T0014 Prepare Fundraising Campaigns](../../generated_pages/techniques/T0014.md) | -| [T0017 Conduct Fundraising](../../generated_pages/techniques/T0017.md) | -| [T0018 Purchase Targeted Advertisements](../../generated_pages/techniques/T0018.md) | -| [T0049 Flooding the Information Space](../../generated_pages/techniques/T0049.md) | -| [T0057 Organize Events](../../generated_pages/techniques/T0057.md) | -| [T0061 Sell Merchandise](../../generated_pages/techniques/T0061.md) | diff --git a/generated_pages/counters/C00154.md b/generated_pages/counters/C00154.md index 31aa172..f3f60c7 100644 --- a/generated_pages/counters/C00154.md +++ b/generated_pages/counters/C00154.md @@ -1,10 +1,10 @@ # Counter C00154: Ask media not to report false information -* **Summary**: Train media to spot and respond to misinformation, and ask them not to post or transmit misinformation they've found. +* **Summary**: Train media to spot and respond to misinformation, and ask them not to post or transmit misinformation they've found. 
* **Playbooks**: -* **Metatechnique**: M005 - removal +* **Metatechnique**: M005 - Removal * **Resources needed:** @@ -24,8 +24,7 @@ | Counters these Techniques | | ------------------------- | -| [T0048 Harass](../../generated_pages/techniques/T0048.md) | -| [T0045 Use Fake Experts](../../generated_pages/techniques/T0045.md) | +| [T0039 Bait Legitimate Influencers](../../generated_pages/techniques/T0039.md) | diff --git a/generated_pages/counters/C00155.md b/generated_pages/counters/C00155.md index e7a3128..902c659 100644 --- a/generated_pages/counters/C00155.md +++ b/generated_pages/counters/C00155.md @@ -4,7 +4,7 @@ * **Playbooks**: -* **Metatechnique**: M005 - removal +* **Metatechnique**: M005 - Removal * **Resources needed:** @@ -25,7 +25,6 @@ | Counters these Techniques | | ------------------------- | | [T0014 Prepare Fundraising Campaigns](../../generated_pages/techniques/T0014.md) | -| [T0017 Conduct Fundraising](../../generated_pages/techniques/T0017.md) | diff --git a/generated_pages/counters/C00156.md b/generated_pages/counters/C00156.md index 74a819a..a9702e2 100644 --- a/generated_pages/counters/C00156.md +++ b/generated_pages/counters/C00156.md @@ -1,10 +1,10 @@ -# Counter C00156: Better tell your country or organization story +# Counter C00156: Better tell your country or organisation story -* **Summary**: Civil engagement activities conducted on the part of EFP forces. NATO should likewise provide support and training, where needed, to local public affairs and other communication personnel. Local government and military public affairs personnel can play their part in creating and disseminating entertaining and sharable content that supports the EFP mission. +* **Summary**: Civil engagement activities conducted on the part of EFP forces. NATO should likewise provide support and training, where needed, to local public affairs and other communication personnel. Local government and military public affairs personnel can play their part in creating and disseminating entertaining and sharable content that supports the EFP mission. * **Playbooks**: -* **Metatechnique**: M010 - countermessaging +* **Metatechnique**: M010 - Countermessaging * **Resources needed:** @@ -13,8 +13,8 @@ | Actor types | Sectors | | ----------- | ------- | -| [A018 government ](../../generated_pages/actortypes/A018.md) | S003 | -| [A019 military ](../../generated_pages/actortypes/A019.md) | S003 | +| [A018 government](../../generated_pages/actortypes/A018.md) | S003 | +| [A019 military](../../generated_pages/actortypes/A019.md) | S003 | @@ -26,10 +26,6 @@ | Counters these Techniques | | ------------------------- | | [T0022 Leverage Conspiracy Theory Narratives](../../generated_pages/techniques/T0022.md) | -| [T0002 Facilitate State Propaganda](../../generated_pages/techniques/T0002.md) | -| [T0003 Leverage Existing Narratives](../../generated_pages/techniques/T0003.md) | -| [T0004 Develop Competing Narratives](../../generated_pages/techniques/T0004.md) | -| [T0010 Cultivate Ignorant Agents](../../generated_pages/techniques/T0010.md) | diff --git a/generated_pages/counters/C00159.md b/generated_pages/counters/C00159.md index 9c4f6cd..ce10c30 100644 --- a/generated_pages/counters/C00159.md +++ b/generated_pages/counters/C00159.md @@ -1,10 +1,10 @@ # Counter C00159: Have a disinformation response plan -* **Summary**: e.g. Create a campaign plan and toolkit for competition short of armed conflict (this used to be called “the grey zone”). 
The campaign plan should account for own vulnerabilities and strengths, and not over-rely on any one tool of statecraft or line of effort. It will identify and employ a broad spectrum of national power to deter, compete, and counter (where necessary) other countries’ approaches, and will include understanding of own capabilities, capabilities of disinformation creators, and international standards of conduct to compete in, shrink the size, and ultimately deter use of competition short of armed conflict. +* **Summary**: e.g. Create a campaign plan and toolkit for competition short of armed conflict (this used to be called “the grey zone”). The campaign plan should account for own vulnerabilities and strengths, and not over-rely on any one tool of statecraft or line of effort. It will identify and employ a broad spectrum of national power to deter, compete, and counter (where necessary) other countries’ approaches, and will include understanding of own capabilities, capabilities of disinformation creators, and international standards of conduct to compete in, shrink the size, and ultimately deter use of competition short of armed conflict. * **Playbooks**: -* **Metatechnique**: M007 - metatechnique +* **Metatechnique**: M007 - Metatechnique * **Resources needed:** diff --git a/generated_pages/counters/C00160.md b/generated_pages/counters/C00160.md index c6d039e..20d4d32 100644 --- a/generated_pages/counters/C00160.md +++ b/generated_pages/counters/C00160.md @@ -4,7 +4,7 @@ * **Playbooks**: -* **Metatechnique**: M001 - resilience +* **Metatechnique**: M001 - Resilience * **Resources needed:** @@ -13,7 +13,7 @@ | Actor types | Sectors | | ----------- | ------- | -| [A001 data scientist ](../../generated_pages/actortypes/A001.md) | S001, S002, S003, S004, S005, S006, S007, S008, S009, S010 | +| [A001 data scientist](../../generated_pages/actortypes/A001.md) | S001, S002, S003, S004, S005, S006, S007, S008, S009, S010 | | [A016 influencer](../../generated_pages/actortypes/A016.md) | S006 | @@ -25,8 +25,7 @@ | Counters these Techniques | | ------------------------- | -| [T0010 Cultivate Ignorant Agents](../../generated_pages/techniques/T0010.md) | -| [T0048 Harass](../../generated_pages/techniques/T0048.md) | +| [T0039 Bait Legitimate Influencers](../../generated_pages/techniques/T0039.md) | diff --git a/generated_pages/counters/C00161.md b/generated_pages/counters/C00161.md index d87ac0b..e92d397 100644 --- a/generated_pages/counters/C00161.md +++ b/generated_pages/counters/C00161.md @@ -4,7 +4,7 @@ * **Playbooks**: -* **Metatechnique**: M007 - metatechnique +* **Metatechnique**: M007 - Metatechnique * **Resources needed:** @@ -24,9 +24,6 @@ | Counters these Techniques | | ------------------------- | | [T0022 Leverage Conspiracy Theory Narratives](../../generated_pages/techniques/T0022.md) | -| [T0002 Facilitate State Propaganda](../../generated_pages/techniques/T0002.md) | -| [T0003 Leverage Existing Narratives](../../generated_pages/techniques/T0003.md) | -| [T0004 Develop Competing Narratives](../../generated_pages/techniques/T0004.md) | diff --git a/generated_pages/counters/C00162.md b/generated_pages/counters/C00162.md index ac4ec54..f54d1de 100644 --- a/generated_pages/counters/C00162.md +++ b/generated_pages/counters/C00162.md @@ -1,10 +1,10 @@ # Counter C00162: Unravel/target the Potemkin villages -* **Summary**: Kremlin’s narrative spin extends through constellations of “civil society” organizations, political parties, churches, and other actors. 
Moscow leverages think tanks, human rights groups, election observers, Eurasianist integration groups, and orthodox groups. A collection of Russian civil society organizations, such as the Federal Agency for the Commonwealth of Independent States Affairs, Compatriots Living Abroad, and International Humanitarian Cooperation, together receive at least US$100 million per year, in addition to government-organized nongovernmental organizations (NGOs), at least 150 of which are funded by Russian presidential grants totaling US$70 million per year. +* **Summary**: Kremlin’s narrative spin extends through constellations of “civil society” organisations, political parties, churches, and other actors. Moscow leverages think tanks, human rights groups, election observers, Eurasianist integration groups, and orthodox groups. A collection of Russian civil society organisations, such as the Federal Agency for the Commonwealth of Independent States Affairs, Compatriots Living Abroad, and International Humanitarian Cooperation, together receive at least US$100 million per year, in addition to government-organized nongovernmental organisations (NGOs), at least 150 of which are funded by Russian presidential grants totaling US$70 million per year. * **Playbooks**: -* **Metatechnique**: M013 - targeting +* **Metatechnique**: M013 - Targeting * **Resources needed:** @@ -24,10 +24,6 @@ | Counters these Techniques | | ------------------------- | | [T0010 Cultivate Ignorant Agents](../../generated_pages/techniques/T0010.md) | -| [T0022 Leverage Conspiracy Theory Narratives](../../generated_pages/techniques/T0022.md) | -| [T0002 Facilitate State Propaganda](../../generated_pages/techniques/T0002.md) | -| [T0003 Leverage Existing Narratives](../../generated_pages/techniques/T0003.md) | -| [T0004 Develop Competing Narratives](../../generated_pages/techniques/T0004.md) | diff --git a/generated_pages/counters/C00164.md b/generated_pages/counters/C00164.md index 7af967f..4bdec7e 100644 --- a/generated_pages/counters/C00164.md +++ b/generated_pages/counters/C00164.md @@ -1,10 +1,10 @@ # Counter C00164: compatriot policy -* **Summary**: protect the interests of this population and, more importantly, influence the population to support pro-Russia causes and effectively influence the politics of its neighbors +* **Summary**: protect the interests of this population and, more importantly, influence the population to support pro-Russia causes and effectively influence the politics of its neighbours * **Playbooks**: -* **Metatechnique**: M013 - targeting +* **Metatechnique**: M013 - Targeting * **Resources needed:** @@ -24,9 +24,6 @@ | Counters these Techniques | | ------------------------- | | [T0022 Leverage Conspiracy Theory Narratives](../../generated_pages/techniques/T0022.md) | -| [T0002 Facilitate State Propaganda](../../generated_pages/techniques/T0002.md) | -| [T0003 Leverage Existing Narratives](../../generated_pages/techniques/T0003.md) | -| [T0004 Develop Competing Narratives](../../generated_pages/techniques/T0004.md) | diff --git a/generated_pages/counters/C00165.md b/generated_pages/counters/C00165.md index d35c20e..6025b55 100644 --- a/generated_pages/counters/C00165.md +++ b/generated_pages/counters/C00165.md @@ -4,7 +4,7 @@ * **Playbooks**: -* **Metatechnique**: M004 - friction +* **Metatechnique**: M004 - Friction * **Resources needed:** diff --git a/generated_pages/counters/C00169.md b/generated_pages/counters/C00169.md index 04aaf4d..70f464c 100644 --- a/generated_pages/counters/C00169.md +++ 
b/generated_pages/counters/C00169.md @@ -4,7 +4,7 @@ * **Playbooks**: -* **Metatechnique**: M010 - countermessaging +* **Metatechnique**: M010 - Countermessaging * **Resources needed:** @@ -24,10 +24,6 @@ | Counters these Techniques | | ------------------------- | | [T0010 Cultivate Ignorant Agents](../../generated_pages/techniques/T0010.md) | -| [T0022 Leverage Conspiracy Theory Narratives](../../generated_pages/techniques/T0022.md) | -| [T0002 Facilitate State Propaganda](../../generated_pages/techniques/T0002.md) | -| [T0003 Leverage Existing Narratives](../../generated_pages/techniques/T0003.md) | -| [T0004 Develop Competing Narratives](../../generated_pages/techniques/T0004.md) | diff --git a/generated_pages/counters/C00170.md b/generated_pages/counters/C00170.md index fb8f610..b36066a 100644 --- a/generated_pages/counters/C00170.md +++ b/generated_pages/counters/C00170.md @@ -1,10 +1,10 @@ # Counter C00170: elevate information as a critical domain of statecraft -* **Summary**: Shift from reactive to proactive response, with priority on sharing relevant information with the public and mobilizing private-sector engagement. Recent advances in data-driven technologies have elevated information as a source of power to influence the political and economic environment, to foster economic growth, to enable a decision-making advantage over competitors, and to communicate securely and quickly. +* **Summary**: Shift from reactive to proactive response, with priority on sharing relevant information with the public and mobilising private-sector engagement. Recent advances in data-driven technologies have elevated information as a source of power to influence the political and economic environment, to foster economic growth, to enable a decision-making advantage over competitors, and to communicate securely and quickly. * **Playbooks**: -* **Metatechnique**: M007 - metatechnique +* **Metatechnique**: M007 - Metatechnique * **Resources needed:** diff --git a/generated_pages/counters/C00172.md b/generated_pages/counters/C00172.md index f640771..b913a57 100644 --- a/generated_pages/counters/C00172.md +++ b/generated_pages/counters/C00172.md @@ -4,7 +4,7 @@ * **Playbooks**: -* **Metatechnique**: M005 - removal +* **Metatechnique**: M005 - Removal * **Resources needed:** @@ -24,8 +24,6 @@ | Counters these Techniques | | ------------------------- | -| [T0007 Create Inauthentic Social Media Pages and Groups](../../generated_pages/techniques/T0007.md) | -| [T0013 Create Inauthentic Websites](../../generated_pages/techniques/T0013.md) | diff --git a/generated_pages/counters/C00174.md b/generated_pages/counters/C00174.md index b01fc3f..23ef084 100644 --- a/generated_pages/counters/C00174.md +++ b/generated_pages/counters/C00174.md @@ -1,10 +1,10 @@ # Counter C00174: Create a healthier news environment -* **Summary**: Free and fair press: create bipartisan, patriotic commitment to press freedom. Note difference between news and editorialising. Build alternative news sources: create alternative local-language news sources to counter local-language propaganda outlets. Delegitimize the 24 hour news cycle. includes Provide an alternative to disinformation content by expanding and improving local content: Develop content that can displace geopolitically-motivated narratives in the entire media environment, both new and old media alike. +* **Summary**: Free and fair press: create bipartisan, patriotic commitment to press freedom. Note difference between news and editorialising. 
Build alternative news sources: create alternative local-language news sources to counter local-language propaganda outlets. Delegitimize the 24 hour news cycle. includes Provide an alternative to disinformation content by expanding and improving local content: Develop content that can displace geopolitically-motivated narratives in the entire media environment, both new and old media alike. * **Playbooks**: -* **Metatechnique**: M007 - metatechnique, M002 - diversion +* **Metatechnique**: M007 - Metatechnique, M002 - Diversion * **Resources needed:** @@ -24,10 +24,6 @@ | Counters these Techniques | | ------------------------- | -| [T0045 Use Fake Experts](../../generated_pages/techniques/T0045.md) | -| [T0010 Cultivate Ignorant Agents](../../generated_pages/techniques/T0010.md) | -| [T0022 Leverage Conspiracy Theory Narratives](../../generated_pages/techniques/T0022.md) | -| [T0044 Seed Distortions](../../generated_pages/techniques/T0044.md) | diff --git a/generated_pages/counters/C00176.md b/generated_pages/counters/C00176.md index 229b536..f827633 100644 --- a/generated_pages/counters/C00176.md +++ b/generated_pages/counters/C00176.md @@ -1,10 +1,10 @@ # Counter C00176: Improve Coordination amongst stakeholders: public and private -* **Summary**: Coordinated disinformation challenges are increasingly multidisciplinary, there are few organizations within the national security structures that are equipped with the broad-spectrum capability to effectively counter large-scale conflict short of war tactics in real-time. Institutional hurdles currently impede diverse subject matter experts, hailing from outside of the traditional national security and foreign policy disciplines (e.g., physical science, engineering, media, legal, and economics fields), from contributing to the direct development of national security countermeasures to emerging conflict short of war threat vectors. A Cognitive Security Action Group (CSAG), akin to the Counterterrorism Security Group (CSG), could drive interagency alignment across equivalents of DHS, DoS, DoD, Intelligence Community, and other implementing agencies, in areas including strategic narrative, and the nexus of cyber and information operations. +* **Summary**: Coordinated disinformation challenges are increasingly multidisciplinary, there are few organisations within the national security structures that are equipped with the broad-spectrum capability to effectively counter large-scale conflict short of war tactics in real-time. Institutional hurdles currently impede diverse subject matter experts, hailing from outside of the traditional national security and foreign policy disciplines (e.g., physical science, engineering, media, legal, and economics fields), from contributing to the direct development of national security countermeasures to emerging conflict short of war threat vectors. A Cognitive Security Action Group (CSAG), akin to the Counterterrorism Security Group (CSG), could drive interagency alignment across equivalents of DHS, DoS, DoD, Intelligence Community, and other implementing agencies, in areas including strategic narrative, and the nexus of cyber and information operations. 
* **Playbooks**: -* **Metatechnique**: M007 - metatechnique +* **Metatechnique**: M007 - Metatechnique * **Resources needed:** @@ -24,21 +24,6 @@ | Counters these Techniques | | ------------------------- | | [T0009 Create Fake Experts](../../generated_pages/techniques/T0009.md) | -| [T0007 Create Inauthentic Social Media Pages and Groups](../../generated_pages/techniques/T0007.md) | -| [T0013 Create Inauthentic Websites](../../generated_pages/techniques/T0013.md) | -| [T0014 Prepare Fundraising Campaigns](../../generated_pages/techniques/T0014.md) | -| [T0015 Create Hashtags and Search Artifacts](../../generated_pages/techniques/T0015.md) | -| [T0016 Create Clickbait](../../generated_pages/techniques/T0016.md) | -| [T0017 Conduct Fundraising](../../generated_pages/techniques/T0017.md) | -| [T0018 Purchase Targeted Advertisements](../../generated_pages/techniques/T0018.md) | -| [T0045 Use Fake Experts](../../generated_pages/techniques/T0045.md) | -| [T0043 Chat Apps](../../generated_pages/techniques/T0043.md) | -| [T0049 Flooding the Information Space](../../generated_pages/techniques/T0049.md) | -| [T0057 Organize Events](../../generated_pages/techniques/T0057.md) | -| [T0003 Leverage Existing Narratives](../../generated_pages/techniques/T0003.md) | -| [T0010 Cultivate Ignorant Agents](../../generated_pages/techniques/T0010.md) | -| [T0022 Leverage Conspiracy Theory Narratives](../../generated_pages/techniques/T0022.md) | -| [T0061 Sell Merchandise](../../generated_pages/techniques/T0061.md) | diff --git a/generated_pages/counters/C00178.md b/generated_pages/counters/C00178.md index df634b9..ab26e68 100644 --- a/generated_pages/counters/C00178.md +++ b/generated_pages/counters/C00178.md @@ -1,10 +1,10 @@ # Counter C00178: Fill information voids with non-disinformation content -* **Summary**: 1) Pollute the data voids with wholesome content (Kittens! Babyshark!). 2) fill data voids with relevant information, e.g. increase Russian-language programming in areas subject to Russian disinformation. +* **Summary**: 1) Pollute the data voids with wholesome content (Kittens! Babyshark!). 2) Fill data voids with relevant information, e.g. increase Russian-language programming in areas subject to Russian disinformation. * **Playbooks**: -* **Metatechnique**: M009 - dilution, M008 - data pollution +* **Metatechnique**: M009 - Dilution, M008 - Data Pollution * **Resources needed:** @@ -24,9 +24,6 @@ | Counters these Techniques | | ------------------------- | | [T0016 Create Clickbait](../../generated_pages/techniques/T0016.md) | -| [T0017 Conduct Fundraising](../../generated_pages/techniques/T0017.md) | -| [T0018 Purchase Targeted Advertisements](../../generated_pages/techniques/T0018.md) | -| [T0020 Trial Content](../../generated_pages/techniques/T0020.md) | diff --git a/generated_pages/counters/C00182.md b/generated_pages/counters/C00182.md index c72713b..f1da153 100644 --- a/generated_pages/counters/C00182.md +++ b/generated_pages/counters/C00182.md @@ -1,10 +1,10 @@ # Counter C00182: Redirection / malware detection/ remediation -* **Summary**: Detect redirction or malware, then quarantine or delete. +* **Summary**: Detect redirection or malware, then quarantine or delete.
* **Playbooks**: -* **Metatechnique**: M005 - removal +* **Metatechnique**: M005 - Removal * **Resources needed:** diff --git a/generated_pages/counters/C00184.md b/generated_pages/counters/C00184.md index b3d7b49..ca23cb6 100644 --- a/generated_pages/counters/C00184.md +++ b/generated_pages/counters/C00184.md @@ -4,7 +4,7 @@ * **Playbooks**: -* **Metatechnique**: M003 - daylight +* **Metatechnique**: M003 - Daylight * **Resources needed:** @@ -24,9 +24,6 @@ | Counters these Techniques | | ------------------------- | | [T0045 Use Fake Experts](../../generated_pages/techniques/T0045.md) | -| [T0045 Use Fake Experts](../../generated_pages/techniques/T0045.md) | -| [T0044 Seed Distortions](../../generated_pages/techniques/T0044.md) | -| [T0010 Cultivate Ignorant Agents](../../generated_pages/techniques/T0010.md) | diff --git a/generated_pages/counters/C00188.md b/generated_pages/counters/C00188.md index bb0f1f4..88b86be 100644 --- a/generated_pages/counters/C00188.md +++ b/generated_pages/counters/C00188.md @@ -1,10 +1,10 @@ # Counter C00188: Newsroom/Journalist training to counter influence moves -* **Summary**: Includes SEO influence. Includes promotion of a “higher standard of journalism”: journalism training “would be helpful, especially for the online community. Includes Strengthen local media: Improve effectiveness of local media outlets. +* **Summary**: Includes SEO influence. Includes promotion of a “higher standard of journalism”: journalism training “would be helpful, especially for the online community. Includes Strengthen local media: Improve effectiveness of local media outlets. * **Playbooks**: -* **Metatechnique**: M001 - resilience +* **Metatechnique**: M001 - Resilience * **Resources needed:** @@ -25,11 +25,6 @@ | Counters these Techniques | | ------------------------- | -| [T0003 Leverage Existing Narratives](../../generated_pages/techniques/T0003.md) | -| [T0022 Leverage Conspiracy Theory Narratives](../../generated_pages/techniques/T0022.md) | -| [T0045 Use Fake Experts](../../generated_pages/techniques/T0045.md) | -| [T0010 Cultivate Ignorant Agents](../../generated_pages/techniques/T0010.md) | -| [T0046 Use Search Engine Optimization](../../generated_pages/techniques/T0046.md) | diff --git a/generated_pages/counters/C00189.md b/generated_pages/counters/C00189.md index a6fc77b..8209e04 100644 --- a/generated_pages/counters/C00189.md +++ b/generated_pages/counters/C00189.md @@ -1,10 +1,10 @@ # Counter C00189: Ensure that platforms are taking down flagged accounts -* **Summary**: Use ongoing analysis/monitoring of "flagged" profiles. Confirm whether platforms are actively removing flagged accounts, and raise pressure via e.g. government organizations to encourage removal +* **Summary**: Use ongoing analysis/monitoring of "flagged" profiles. Confirm whether platforms are actively removing flagged accounts, and raise pressure via e.g. 
government organisations to encourage removal * **Playbooks**: -* **Metatechnique**: M003 - daylight +* **Metatechnique**: M003 - Daylight * **Resources needed:** @@ -23,10 +23,6 @@ | Counters these Techniques | | ------------------------- | -| [T0007 Create Inauthentic Social Media Pages and Groups](../../generated_pages/techniques/T0007.md) | -| [T0011 Compromise Legitimate Accounts](../../generated_pages/techniques/T0011.md) | -| [T0014 Prepare Fundraising Campaigns](../../generated_pages/techniques/T0014.md) | -| [T0009 Create Fake Experts](../../generated_pages/techniques/T0009.md) | diff --git a/generated_pages/counters/C00190.md b/generated_pages/counters/C00190.md index 4718149..e1fe5c0 100644 --- a/generated_pages/counters/C00190.md +++ b/generated_pages/counters/C00190.md @@ -1,10 +1,10 @@ # Counter C00190: open engagement with civil society -* **Summary**: Government open engagement with civil society as an independent check on government action and messaging. Government seeks to coordinate and synchronize narrative themes with allies and partners while calibrating action in cases where elements in these countries may have been co-opted by competitor nations. Includes “fight in the light”: Use leadership in the arts, entertainment, and media to highlight and build on fundamental tenets of democracy. +* **Summary**: Government open engagement with civil society as an independent check on government action and messaging. Government seeks to coordinate and synchronise narrative themes with allies and partners while calibrating action in cases where elements in these countries may have been co-opted by competitor nations. Includes “fight in the light”: Use leadership in the arts, entertainment, and media to highlight and build on fundamental tenets of democracy. * **Playbooks**: -* **Metatechnique**: M001 - resilience +* **Metatechnique**: M001 - Resilience * **Resources needed:** @@ -24,9 +24,6 @@ | Counters these Techniques | | ------------------------- | -| [T0003 Leverage Existing Narratives](../../generated_pages/techniques/T0003.md) | -| [T0010 Cultivate Ignorant Agents](../../generated_pages/techniques/T0010.md) | -| [T0022 Leverage Conspiracy Theory Narratives](../../generated_pages/techniques/T0022.md) | diff --git a/generated_pages/counters/C00195.md b/generated_pages/counters/C00195.md index 05477fe..edf145e 100644 --- a/generated_pages/counters/C00195.md +++ b/generated_pages/counters/C00195.md @@ -1,10 +1,10 @@ -# Counter C00195: Redirect searches away from disinformation or extremist content +# Counter C00195: Redirect searches away from disinformation or extremist content -* **Summary**: Use Google AdWords to identify instances in which people search Google about particular fake-news stories or propaganda themes. Includes Monetize centrist SEO by subsidizing the difference in greater clicks towards extremist content. +* **Summary**: Use Google AdWords to identify instances in which people search Google about particular fake-news stories or propaganda themes. Includes Monetize centrist SEO by subsidising the difference in greater clicks towards extremist content. 
* **Playbooks**: -* **Metatechnique**: M002 - diversion +* **Metatechnique**: M002 - Diversion * **Resources needed:** R002 - funding @@ -24,11 +24,6 @@ | Counters these Techniques | | ------------------------- | | [T0010 Cultivate Ignorant Agents](../../generated_pages/techniques/T0010.md) | -| [T0016 Create Clickbait](../../generated_pages/techniques/T0016.md) | -| [T0018 Purchase Targeted Advertisements](../../generated_pages/techniques/T0018.md) | -| [T0022 Leverage Conspiracy Theory Narratives](../../generated_pages/techniques/T0022.md) | -| [T0045 Use Fake Experts](../../generated_pages/techniques/T0045.md) | -| [T0046 Use Search Engine Optimization](../../generated_pages/techniques/T0046.md) | diff --git a/generated_pages/counters/C00197.md b/generated_pages/counters/C00197.md index a35714e..894da2e 100644 --- a/generated_pages/counters/C00197.md +++ b/generated_pages/counters/C00197.md @@ -1,10 +1,10 @@ # Counter C00197: remove suspicious accounts -* **Summary**: Standard reporting for false profiles (identity issues). Includes detecting hijacked accounts and reallocating them - if possible, back to original owners. +* **Summary**: Standard reporting for false profiles (identity issues). Includes detecting hijacked accounts and reallocating them - if possible, back to original owners. * **Playbooks**: -* **Metatechnique**: M005 - removal +* **Metatechnique**: M005 - Removal * **Resources needed:** R003 - money @@ -26,8 +26,6 @@ | Counters these Techniques | | ------------------------- | | [T0009 Create Fake Experts](../../generated_pages/techniques/T0009.md) | -| [T0007 Create Inauthentic Social Media Pages and Groups](../../generated_pages/techniques/T0007.md) | -| [T0011 Compromise Legitimate Accounts](../../generated_pages/techniques/T0011.md) | diff --git a/generated_pages/counters/C00200.md b/generated_pages/counters/C00200.md index 6005c6b..40558f6 100644 --- a/generated_pages/counters/C00200.md +++ b/generated_pages/counters/C00200.md @@ -1,10 +1,10 @@ # Counter C00200: Respected figure (influencer) disavows misinfo -* **Summary**: FIXIT: standardize language used for influencer/ respected figure. +* **Summary**: FIXIT: standardise language used for influencer/ respected figure. * **Playbooks**: -* **Metatechnique**: M010 - countermessaging +* **Metatechnique**: M010 - Countermessaging * **Resources needed:** @@ -25,8 +25,6 @@ | Counters these Techniques | | ------------------------- | | [T0010 Cultivate Ignorant Agents](../../generated_pages/techniques/T0010.md) | -| [T0022 Leverage Conspiracy Theory Narratives](../../generated_pages/techniques/T0022.md) | -| [T0045 Use Fake Experts](../../generated_pages/techniques/T0045.md) | diff --git a/generated_pages/counters/C00202.md b/generated_pages/counters/C00202.md index 5fb87b8..1aca615 100644 --- a/generated_pages/counters/C00202.md +++ b/generated_pages/counters/C00202.md @@ -1,10 +1,10 @@ # Counter C00202: Set data 'honeytraps' -* **Summary**: Set honeytraps in content likely to be accessed for disinformation. +* **Summary**: Set honeytraps in content likely to be accessed for disinformation. 
* **Playbooks**: -* **Metatechnique**: M002 - diversion +* **Metatechnique**: M002 - Diversion * **Resources needed:** diff --git a/generated_pages/counters/C00203.md b/generated_pages/counters/C00203.md index 8ca2060..f9364d9 100644 --- a/generated_pages/counters/C00203.md +++ b/generated_pages/counters/C00203.md @@ -1,10 +1,10 @@ # Counter C00203: Stop offering press credentials to propaganda outlets -* **Summary**: Remove access to official press events from known misinformation actors. +* **Summary**: Remove access to official press events from known misinformation actors. * **Playbooks**: -* **Metatechnique**: M004 - friction +* **Metatechnique**: M004 - Friction * **Resources needed:** @@ -13,7 +13,7 @@ | Actor types | Sectors | | ----------- | ------- | -| [A018 government ](../../generated_pages/actortypes/A018.md) | S003 | +| [A018 government](../../generated_pages/actortypes/A018.md) | S003 | @@ -25,8 +25,6 @@ | Counters these Techniques | | ------------------------- | | [T0010 Cultivate Ignorant Agents](../../generated_pages/techniques/T0010.md) | -| [T0022 Leverage Conspiracy Theory Narratives](../../generated_pages/techniques/T0022.md) | -| [T0045 Use Fake Experts](../../generated_pages/techniques/T0045.md) | diff --git a/generated_pages/counters/C00205.md b/generated_pages/counters/C00205.md index 332650a..3e3a1f3 100644 --- a/generated_pages/counters/C00205.md +++ b/generated_pages/counters/C00205.md @@ -1,10 +1,10 @@ # Counter C00205: strong dialogue between the federal government and private sector to encourage better reporting -* **Summary**: Increase civic resilience by partnering with business community to combat gray zone threats and ensuring adequate reporting and enforcement mechanisms. +* **Summary**: Increase civic resilience by partnering with business community to combat grey zone threats and ensuring adequate reporting and enforcement mechanisms. 
* **Playbooks**: -* **Metatechnique**: M007 - metatechnique +* **Metatechnique**: M007 - Metatechnique * **Resources needed:** @@ -13,7 +13,7 @@ | Actor types | Sectors | | ----------- | ------- | -| [A018 government ](../../generated_pages/actortypes/A018.md) | S003 | +| [A018 government](../../generated_pages/actortypes/A018.md) | S003 | | [A033 social media platform owner](../../generated_pages/actortypes/A033.md) | S007 | @@ -25,8 +25,6 @@ | Counters these Techniques | | ------------------------- | -| [T0003 Leverage Existing Narratives](../../generated_pages/techniques/T0003.md) | -| [T0022 Leverage Conspiracy Theory Narratives](../../generated_pages/techniques/T0022.md) | diff --git a/generated_pages/counters/C00207.md b/generated_pages/counters/C00207.md index 4c7af68..19bad6d 100644 --- a/generated_pages/counters/C00207.md +++ b/generated_pages/counters/C00207.md @@ -4,7 +4,7 @@ * **Playbooks**: -* **Metatechnique**: M013 - targeting +* **Metatechnique**: M013 - Targeting * **Resources needed:** @@ -13,7 +13,7 @@ | Actor types | Sectors | | ----------- | ------- | -| [A018 government ](../../generated_pages/actortypes/A018.md) | S003 | +| [A018 government](../../generated_pages/actortypes/A018.md) | S003 | | [A033 social media platform owner](../../generated_pages/actortypes/A033.md) | S007 | diff --git a/generated_pages/counters/C00211.md b/generated_pages/counters/C00211.md index 7877a9d..220cded 100644 --- a/generated_pages/counters/C00211.md +++ b/generated_pages/counters/C00211.md @@ -4,7 +4,7 @@ * **Playbooks**: -* **Metatechnique**: M010 - countermessaging +* **Metatechnique**: M010 - Countermessaging * **Resources needed:** @@ -23,10 +23,6 @@ | Counters these Techniques | | ------------------------- | -| [T0022 Leverage Conspiracy Theory Narratives](../../generated_pages/techniques/T0022.md) | -| [T0020 Trial Content](../../generated_pages/techniques/T0020.md) | -| [T0045 Use Fake Experts](../../generated_pages/techniques/T0045.md) | -| [T0057 Organize Events](../../generated_pages/techniques/T0057.md) | diff --git a/generated_pages/counters/C00212.md b/generated_pages/counters/C00212.md index 2bfc4f0..179d9f9 100644 --- a/generated_pages/counters/C00212.md +++ b/generated_pages/counters/C00212.md @@ -4,7 +4,7 @@ * **Playbooks**: -* **Metatechnique**: M001 - resilience +* **Metatechnique**: M001 - Resilience * **Resources needed:** @@ -14,7 +14,7 @@ | Actor types | Sectors | | ----------- | ------- | | [A006 educator](../../generated_pages/actortypes/A006.md) | S002 | -| [A018 government ](../../generated_pages/actortypes/A018.md) | S003 | +| [A018 government](../../generated_pages/actortypes/A018.md) | S003 | @@ -25,9 +25,6 @@ | Counters these Techniques | | ------------------------- | -| [T0003 Leverage Existing Narratives](../../generated_pages/techniques/T0003.md) | -| [T0010 Cultivate Ignorant Agents](../../generated_pages/techniques/T0010.md) | -| [T0022 Leverage Conspiracy Theory Narratives](../../generated_pages/techniques/T0022.md) | diff --git a/generated_pages/counters/C00216.md b/generated_pages/counters/C00216.md index f1f40eb..866efd7 100644 --- a/generated_pages/counters/C00216.md +++ b/generated_pages/counters/C00216.md @@ -4,7 +4,7 @@ * **Playbooks**: -* **Metatechnique**: M014 - reduce resources +* **Metatechnique**: M014 - Reduce Resources * **Resources needed:** @@ -25,11 +25,6 @@ | Counters these Techniques | | ------------------------- | | [T0014 Prepare Fundraising Campaigns](../../generated_pages/techniques/T0014.md) | -| [T0016 Create 
Clickbait](../../generated_pages/techniques/T0016.md) | -| [T0017 Conduct Fundraising](../../generated_pages/techniques/T0017.md) | -| [T0061 Sell Merchandise](../../generated_pages/techniques/T0061.md) | -| [T0018 Purchase Targeted Advertisements](../../generated_pages/techniques/T0018.md) | -| [T0057 Organize Events](../../generated_pages/techniques/T0057.md) | diff --git a/generated_pages/counters/C00219.md b/generated_pages/counters/C00219.md index 17f9e75..703f328 100644 --- a/generated_pages/counters/C00219.md +++ b/generated_pages/counters/C00219.md @@ -1,10 +1,10 @@ # Counter C00219: Add metadata to content that’s out of the control of disinformation creators -* **Summary**: Steganography. Adding date, signatures etc to stop issue of photo relabelling etc. +* **Summary**: Steganography. Adding date, signatures etc to stop issue of photo relabelling etc. * **Playbooks**: -* **Metatechnique**: M003 - daylight +* **Metatechnique**: M003 - Daylight * **Resources needed:** diff --git a/generated_pages/counters/C00220.md b/generated_pages/counters/C00220.md index 7fff2c2..4b0420d 100644 --- a/generated_pages/counters/C00220.md +++ b/generated_pages/counters/C00220.md @@ -1,10 +1,10 @@ # Counter C00220: Develop a monitoring and intelligence plan -* **Summary**: Create a plan for misinformation and disinformation response, before it's needed. Include connections / contacts needed, expected counteremessages etc. +* **Summary**: Create a plan for misinformation and disinformation response, before it's needed. Include connections / contacts needed, expected countermessages etc. * **Playbooks**: -* **Metatechnique**: M007 - metatechnique +* **Metatechnique**: M007 - Metatechnique * **Resources needed:** diff --git a/generated_pages/counters/C00221.md b/generated_pages/counters/C00221.md index 0a0e665..8390032 100644 --- a/generated_pages/counters/C00221.md +++ b/generated_pages/counters/C00221.md @@ -4,7 +4,7 @@ * **Playbooks**: -* **Metatechnique**: M007 - metatechnique +* **Metatechnique**: M007 - Metatechnique * **Resources needed:** diff --git a/generated_pages/counters/C00222.md b/generated_pages/counters/C00222.md index d893627..70db921 100644 --- a/generated_pages/counters/C00222.md +++ b/generated_pages/counters/C00222.md @@ -1,10 +1,10 @@ # Counter C00222: Tabletop simulations -* **Summary**: Simulate misinformation and disinformation campaigns, and responses to them, before campaigns happen. +* **Summary**: Simulate misinformation and disinformation campaigns, and responses to them, before campaigns happen. * **Playbooks**: -* **Metatechnique**: M007 - metatechnique +* **Metatechnique**: M007 - Metatechnique * **Resources needed:** diff --git a/generated_pages/counters/C00223.md b/generated_pages/counters/C00223.md index fd9b855..ad41535 100644 --- a/generated_pages/counters/C00223.md +++ b/generated_pages/counters/C00223.md @@ -1,10 +1,10 @@ # Counter C00223: Strengthen Trust in social media platforms -* **Summary**: Improve trust in the misinformation responses from social media and other platforms. Examples include creating greater transparancy on their actions and algorithms. +* **Summary**: Improve trust in the misinformation responses from social media and other platforms. Examples include creating greater transparency on their actions and algorithms.
* **Playbooks**: -* **Metatechnique**: M001 - resilience +* **Metatechnique**: M001 - Resilience * **Resources needed:** diff --git a/generated_pages/counters_index.md b/generated_pages/counters_index.md index c3df912..87235ce 100644 --- a/generated_pages/counters_index.md +++ b/generated_pages/counters_index.md @@ -12,80 +12,80 @@ C00006 Charge for social media -Include a paid-for privacy option, e.g. pay Facebook for an option of them not collecting your personal information. There are examples of this not working, e.g. most people don’t use proton mail etc. -M004 - friction +Include a paid-for privacy option, e.g. pay Facebook for an option of them not collecting your personal information. There are examples of this not working, e.g. most people don’t use proton mail etc. +M004 - Friction TA01 Strategic Planning D02 C00008 Create shared fact-checking database -Share fact-checking resources - tips, responses, countermessages, across respose groups. -M006 - scoring +Share fact-checking resources - tips, responses, countermessages, across response groups. +M006 - Scoring TA01 Strategic Planning D04 C00009 Educate high profile influencers on best practices -Find online influencers. Provide training in the mechanisms of disinformation, how to spot campaigns, and/or how to contribute to responses by countermessaging, boosting information sites etc. -M001 - resilience +Find online influencers. Provide training in the mechanisms of disinformation, how to spot campaigns, and/or how to contribute to responses by countermessaging, boosting information sites etc. +M001 - Resilience TA02 Objective Planning D02 C00010 Enhanced privacy regulation for social media -Implement stronger privacy standards, to reduce the ability to microtarget community members. -M004 - friction +Implement stronger privacy standards, to reduce the ability to microtarget community members. +M004 - Friction TA01 Strategic Planning D02 C00011 Media literacy. Games to identify fake news -Create and use games to show people the mechanics of disinformation, and how to counter them. -M001 - resilience +Create and use games to show people the mechanics of disinformation, and how to counter them. +M001 - Resilience TA02 Objective Planning D02 C00012 Platform regulation -Empower existing regulators to govern social media. Also covers Destroy. Includes: Include the role of social media in the regulatory framework for media. The U.S. approach will need to be carefully crafted to protect First Amendment principles, create needed transparency, ensure liability, and impose costs for noncompliance. Includes Create policy that makes social media police disinformation. Includes: Use fraud legislation to clean up social media -M007 - metatechnique +Empower existing regulators to govern social media. Also covers Destroy. Includes: Include the role of social media in the regulatory framework for media. The U.S. approach will need to be carefully crafted to protect First Amendment principles, create needed transparency, ensure liability, and impose costs for noncompliance. Includes Create policy that makes social media police disinformation. Includes: Use fraud legislation to clean up social media +M007 - Metatechnique TA01 Strategic Planning D02 C00013 Rating framework for news -This is "strategic innoculation", raising the standards of what people expect in terms of evidence when consuming news. Example: journalistic ethics, or journalistic licensing body. Include full transcripts, link source, add items.
-M006 - scoring +This is "strategic inoculation", raising the standards of what people expect in terms of evidence when consuming news. Example: journalistic ethics, or journalistic licensing body. Include full transcripts, link source, add items. +M006 - Scoring TA01 Strategic Planning D02 C00014 Real-time updates to fact-checking database -Update fact-checking databases and resources in real time. Especially import for time-limited events like natural disasters. -M006 - scoring +Update fact-checking databases and resources in real time. Especially important for time-limited events like natural disasters. +M006 - Scoring TA06 Develop Content D04 C00016 Censorship -Alter and/or block the publication/dissemination of information controlled by disinformation creators. Not recommended. -M005 - removal +Alter and/or block the publication/dissemination of information controlled by disinformation creators. Not recommended. +M005 - Removal TA01 Strategic Planning D02 C00017 Repair broken social connections -For example, use a media campaign to promote in-group to out-group in person communication / activities . Technique could be in terms of forcing a reality-check by talking to people instead of reading about bogeymen. -M010 - countermessaging +For example, use a media campaign to promote in-group to out-group in person communication / activities. Technique could be in terms of forcing a reality-check by talking to people instead of reading about bogeymen. +M010 - Countermessaging TA01 Strategic Planning D03 @@ -93,7 +93,7 @@ C00019 Reduce effect of division-enablers includes Promote constructive communication by shaming division-enablers, and Promote playbooks to call out division-enablers -M003 - daylight +M003 - Daylight TA01 Strategic Planning D03 @@ -101,7 +101,7 @@ C00021 Encourage in-person communication Encourage offline communication -M001 - resilience +M001 - Resilience TA01 Strategic Planning D04 @@ -109,15 +109,15 @@ C00022 Innoculate. Positive campaign to promote feeling of safety Used to counter ability based and fear based attacks -M001 - resilience +M001 - Resilience TA01 Strategic Planning D04 C00024 Promote healthy narratives -Includes promoting constructive narratives i.e. not polarising (e.g. pro-life, pro-choice, pro-USA). Includes promoting identity neutral narratives. -M001 - resilience +Includes promoting constructive narratives i.e. not polarising (e.g. pro-life, pro-choice, pro-USA). Includes promoting identity neutral narratives. +M001 - Resilience TA01 Strategic Planning D04 @@ -125,33 +125,31 @@ C00026 Shore up democracy based messages Messages about e.g. peace, freedom. And make it sexy. Includes Deploy Information and Narrative-Building in Service of Statecraft: Promote a narrative of transparency, truthfulness, liberal values, and democracy. Implement a compelling narrative via effective mechanisms of communication. Continually reassess messages, mechanisms, and audiences over time. Counteract efforts to manipulate media, undermine free markets, and suppress political freedoms via public diplomacy -M010 - countermessaging +M010 - Countermessaging TA01 Strategic Planning D04 C00027 Create culture of civility -This is passive. Includes promoting civility as an identity that people will defend. -M001 - resilience +This is passive. Includes promoting civility as an identity that people will defend.
+M001 - Resilience TA01 Strategic Planning D07 C00028 Make information provenance available -Blockchain audit log and validation with collaborative decryption to post comments. Use blockchain technology to require collaborative validation before posts or comments are submitted. - -This could be used to adjust upvote weight via a trust factor of people and organisations you trust, or other criteria. -M011 - verification +Blockchain audit log and validation with collaborative decryption to post comments. Use blockchain technology to require collaborative validation before posts or comments are submitted. This could be used to adjust upvote weight via a trust factor of people and organisations you trust, or other criteria. +M011 - Verification TA02 Objective Planning D03 C00029 Create fake website to issue counter narrative and counter narrative through physical merchandise -Create websites in disinformation voids - spaces where people are looking for known disinformation. -M002 - diversion +Create websites in disinformation voids - spaces where people are looking for known disinformation. +M002 - Diversion TA02 Objective Planning D03 @@ -159,7 +157,7 @@ This could be used to adjust upvote weight via a trust factor of people and orga C00030 Develop a compelling counter narrative (truth based) -M002 - diversion +M002 - Diversion TA02 Objective Planning D03 @@ -167,7 +165,7 @@ This could be used to adjust upvote weight via a trust factor of people and orga C00031 Dilute the core narrative - create multiple permutations, target / amplify Create competing narratives. Included "Facilitate State Propaganda" as diluting the narrative could have an effect on the pro-state narrative used by volunteers, or lower their involvement. -M009 - dilution +M009 - Dilution TA02 Objective Planning D03 @@ -175,7 +173,7 @@ This could be used to adjust upvote weight via a trust factor of people and orga C00032 Hijack content and link to truth- based info Link to platform -M002 - diversion +M002 - Diversion TA06 Develop Content D03 @@ -183,7 +181,7 @@ This could be used to adjust upvote weight via a trust factor of people and orga C00034 Create more friction at account creation Counters fake account -M004 - friction +M004 - Friction TA15 - Establish Social Assets D04 @@ -191,7 +189,7 @@ This could be used to adjust upvote weight via a trust factor of people and orga C00036 Infiltrate the in-group to discredit leaders (divide) All of these would be highly affected by infiltration or false-claims of infiltration. -M013 - targeting +M013 - Targeting TA15 - Establish Social Assets D02 @@ -199,15 +197,15 @@ This could be used to adjust upvote weight via a trust factor of people and orga C00040 third party verification for people counters fake experts -M011 - verification +M011 - Verification TA15 - Establish Social Assets D02 C00042 Address truth contained in narratives -Focus on and boost truths in misinformation narratives, removing misinformation from them. -M010 - countermessaging +Focus on and boost truths in misinformation narratives, removing misinformation from them. +M010 - Countermessaging TA15 Establish Social Assets D04 @@ -215,7 +213,7 @@ This could be used to adjust upvote weight via a trust factor of people and orga C00044 Keep people from posting to social media immediately Platforms can introduce friction to slow down activities, force a small delay between posts, or replies to posts. 
-M004 - friction +M004 - Friction TA15 - Establish Social Assets D03 @@ -223,31 +221,31 @@ This could be used to adjust upvote weight via a trust factor of people and orga C00046 Marginalise and discredit extremist groups Reduce the credibility of extremist groups posting misinformation. -M013 - targeting +M013 - Targeting TA15 - Establish Social Assets D04 C00047 Honeypot with coordinated inauthentics -Flood disinformation spaces with obviously fake content, to dilute core misinformation narratives in them. -M008 - data pollution +Flood disinformation spaces with obviously fake content, to dilute core misinformation narratives in them. +M008 - Data Pollution TA15 Establish Social Assets D05 C00048 Name and Shame Influencers -Think about the different levels: individual vs state-sponsored account. Includes “call them out” and “name and shame”. Identify social media accounts as sources of propaganda—“calling them out”— might be helpful to prevent the spread of their message to audiences that otherwise would consider them factual. Identify, monitor, and, if necessary, target externally-based nonattributed social media accounts. Impact of and Dealing with Trolls - "Chatham House has observed that trolls also sometimes function as decoys, as a way of “keeping the infantry busy” that “aims to wear down the other side” (Lough et al., 2014). Another type of troll involves “false accounts posing as authoritative information sources on social media”. -M003 - daylight +Think about the different levels: individual vs state-sponsored account. Includes “call them out” and “name and shame”. Identify social media accounts as sources of propaganda—“calling them out”— might be helpful to prevent the spread of their message to audiences that otherwise would consider them factual. Identify, monitor, and, if necessary, target externally-based nonattributed social media accounts. Impact of and Dealing with Trolls - "Chatham House has observed that trolls also sometimes function as decoys, as a way of “keeping the infantry busy” that “aims to wear down the other side” (Lough et al., 2014). Another type of troll involves “false accounts posing as authoritative information sources on social media”. +M003 - Daylight TA15 - Establish Social Assets D07 C00051 Counter social engineering training -Includes anti-elicitation training, phishing prevention education. -M001 - resilience +Includes anti-elicitation training, phishing prevention education. +M001 - Resilience TA15 - Establish Social Assets D02 @@ -255,47 +253,47 @@ This could be used to adjust upvote weight via a trust factor of people and orga C00052 Infiltrate platforms Detect and degrade -M013 - targeting +M013 - Targeting TA15 Establish Social Assets D04 C00053 Delete old accounts / Remove unused social media accounts -remove or remove access to (e.g. stop the ability to update) old social media accounts, to reduce the pool of accounts available for takeover, botnets etc. -M012 - cleaning +remove or remove access to (e.g. stop the ability to update) old social media accounts, to reduce the pool of accounts available for takeover, botnets etc. +M012 - Cleaning TA15 Establish Social Assets D04 C00056 Encourage people to leave social media -Encourage people to leave spcial media. We don't expect this to work -M004 - friction +Encourage people to leave spcial media. We don't expect this to work +M004 - Friction TA15 Establish Social Assets D02 C00058 Report crowdfunder as violator -counters crowdfunding. Includes ‘Expose online funding as fake”. 
-M005 - removal +counters crowdfunding. Includes ‘Expose online funding as fake”. +M005 - Removal TA15 - Establish Social Assets D02 C00059 Verification of project before posting fund requests -third-party verification of projects posting funding campaigns before those campaigns can be posted. -M011 - verification +third-party verification of projects posting funding campaigns before those campaigns can be posted. +M011 - Verification TA15 Establish Social Assets D02 C00060 Legal action against for-profit engagement factories -Take legal action against for-profit "factories" creating misinformation. -M013 - targeting +Take legal action against for-profit "factories" creating misinformation. +M013 - Targeting TA02 Objective Planning D03 @@ -303,7 +301,7 @@ This could be used to adjust upvote weight via a trust factor of people and orga C00062 Free open library sources worldwide Open-source libraries could be created that aid in some way for each technique. Even for Strategic Planning, some open-source frameworks such as DISARM can be created to counter the adversarial efforts. -M010 - countermessaging +M010 - Countermessaging TA15 Establish Social Assets D04 @@ -311,63 +309,55 @@ This could be used to adjust upvote weight via a trust factor of people and orga C00065 Reduce political targeting Includes “ban political micro targeting” and “ban political ads” -M005 - removal +M005 - Removal TA05 Microtargeting D03 C00066 Co-opt a hashtag and drown it out (hijack it back) -Flood a disinformation-related hashtag with other content. -M009 - dilution +Flood a disinformation-related hashtag with other content. +M009 - Dilution TA05 Microtargeting D03 C00067 Denigrate the recipient/ project (of online funding) -Reduce the credibility of groups behind misinformation-linked funding campaigns. -M013 - targeting +Reduce the credibility of groups behind misinformation-linked funding campaigns. +M013 - Targeting TA15 Establish Social Assets D03 C00070 Block access to disinformation resources -Resources = accounts, channels etc. Block access to platform. DDOS an attacker. - -TA02*: DDOS at the critical time, to deny an adversary's time-bound objective. - -T0008: A quick response to a proto-viral story will affect it's ability to spread and raise questions about their legitimacy. - -Hashtag: Against the platform, by drowning the hashtag. - -T0046 - Search Engine Optimization: Sub-optimal website performance affect its search engine rank, which I interpret as "blocking access to a platform". -M005 - removal +Resources = accounts, channels etc. Block access to platform. DDOS an attacker. TA02*: DDOS at the critical time, to deny an adversary's time-bound objective. T0008: A quick response to a proto-viral story will affect it's ability to spread and raise questions about their legitimacy. Hashtag: Against the platform, by drowning the hashtag. T0046 - Search Engine Optimisation: Sub-optimal website performance affect its search engine rank, which I interpret as "blocking access to a platform". +M005 - Removal TA02 Objective Planning D02 C00071 Block source of pollution -Block websites, accounts, groups etc connected to misinformation and other information pollution. -M005 - removal +Block websites, accounts, groups etc connected to misinformation and other information pollution. +M005 - Removal TA06 Develop Content D02 C00072 Remove non-relevant content from special interest groups - not recommended -Check special-interest groups (e.g. 
medical, knitting) for unrelated and misinformation-linked content, and remove it. -M005 - removal +Check special-interest groups (e.g. medical, knitting) for unrelated and misinformation-linked content, and remove it. +M005 - Removal TA06 Develop Content D02 C00073 Inoculate populations through media literacy training -Use training to build the resilience of at-risk populations. Educate on how to handle info pollution. Push out targeted education on why it's pollution. Build cultural resistance to false content, e.g. cultural resistance to bullshit. Influence literacy training, to inoculate against “cult” recruiting. Media literacy training: leverage librarians / library for media literacy training. Inoculate at language. Strategic planning included as inoculating population has strategic value. Concepts of media literacy to a mass audience that authorities launch a public information campaign that teaches the program will take time to develop and establish impact, recommends curriculum-based training. Covers detect, deny, and degrade. -M001 - resilience +Use training to build the resilience of at-risk populations. Educate on how to handle info pollution. Push out targeted education on why it's pollution. Build cultural resistance to false content, e.g. cultural resistance to bullshit. Influence literacy training, to inoculate against “cult” recruiting. Media literacy training: leverage librarians / library for media literacy training. Inoculate at language. Strategic planning included as inoculating population has strategic value. Concepts of media literacy to a mass audience that authorities launch a public information campaign that teaches the programme will take time to develop and establish impact, recommends curriculum-based training. Covers detect, deny, and degrade. +M001 - Resilience TA01 Strategic Planning D02 @@ -375,31 +365,31 @@ T0046 - Search Engine Optimization: Sub-optimal website performance affect its s C00074 Identify and delete or rate limit identical content C00000 -M012 - cleaning +M012 - Cleaning TA06 Develop Content D02 C00075 normalise language -normalise the language around disinformation and misinformation; give people the words for artifact and effect types. -M010 - countermessaging +normalise the language around disinformation and misinformation; give people the words for artefact and effect types. +M010 - Countermessaging TA06 Develop Content D02 C00076 Prohibit images in political discourse channels -Make political discussion channels text-only. -M005 - removal +Make political discussion channels text-only. +M005 - Removal TA06 Develop Content D02 C00077 Active defence: run TA15 "develop people” - not recommended -Develop networks of communities and influencers around counter-misinformation. Match them to misinformation creators -M013 - targeting +Develop networks of communities and influencers around counter-misinformation. Match them to misinformation creators +M013 - Targeting TA15 - Establish Social Assets D03 @@ -407,15 +397,15 @@ T0046 - Search Engine Optimization: Sub-optimal website performance affect its s C00078 Change Search Algorithms for Disinformation Content Includes “change image search algorithms for hate groups and extremists” and “Change search algorithms for hate and extremist queries to show content sympathetic to opposite side” -M002 - diversion +M002 - Diversion TA06 Develop Content D03 C00080 Create competing narrative -Create counternarratives, or narratives that compete in the same spaces as misinformation narratives. 
Could also be degrade -M002 - diversion +Create counternarratives, or narratives that compete in the same spaces as misinformation narratives. Could also be degrade +M002 - Diversion TA06 Develop Content D03 @@ -423,7 +413,7 @@ T0046 - Search Engine Optimization: Sub-optimal website performance affect its s C00081 Highlight flooding and noise, and explain motivations Discredit by pointing out the "noise" and informing public that "flooding" is a technique of disinformation campaigns; point out intended objective of "noise" -M003 - daylight +M003 - Daylight TA06 Develop Content D03 @@ -431,33 +421,31 @@ T0046 - Search Engine Optimization: Sub-optimal website performance affect its s C00082 Ground truthing as automated response to pollution Also inoculation. -M010 - countermessaging +M010 - Countermessaging TA06 Develop Content D03 C00084 Modify disinformation narratives, and rebroadcast them -Includes “poison pill recasting of message” and “steal their truths”. Many techniques involve promotion which could be manipulated. For example, online fundings or rallies could be advertised, through compromised or fake channels, as being associated with "far-up/down/left/right" actors. "Long Game" narratives could be subjected in a similar way with negative connotations. Can also replay technique T0003. -M002 - diversion +Includes “poison pill recasting of message” and “steal their truths”. Many techniques involve promotion which could be manipulated. For example, online fundings or rallies could be advertised, through compromised or fake channels, as being associated with "far-up/down/left/right" actors. "Long Game" narratives could be subjected in a similar way with negative connotations. Can also replay technique T0003. +M002 - Diversion TA06 Develop Content D03 C00085 Mute content -Rate-limit disinformation content. Reduces its effects, whilst not running afoul of censorship concerns. - -Online archives of content (archives of websites, social media profiles, media, copies of published advertisements; or archives of comments attributed to bad actors, as well as anonymized metadata about users who interacted with them and analysis of the effect) is useful for intelligence analysis and public transparency, but will need similar muting or tagging/ shaming as associated with bad actors. -M003 - daylight +Rate-limit disinformation content. Reduces its effects, whilst not running afoul of censorship concerns. Online archives of content (archives of websites, social media profiles, media, copies of published advertisements; or archives of comments attributed to bad actors, as well as anonymized metadata about users who interacted with them and analysis of the effect) is useful for intelligence analysis and public transparency, but will need similar muting or tagging/ shaming as associated with bad actors. +M003 - Daylight TA06 Develop Content D03 C00086 Distract from noise with addictive content -Example: Interject addictive links or contents into discussions of disinformation materials and measure a "conversion rate" of users who engage with your content and away from the social media channel's "information bubble" around the disinformation item. Use bots to amplify and upvote the addictive content. -M002 - diversion +Example: Interject addictive links or contents into discussions of disinformation materials and measure a "conversion rate" of users who engage with your content and away from the social media channel's "information bubble" around the disinformation item. 
Use bots to amplify and upvote the addictive content. +M002 - Diversion TA06 Develop Content D04 @@ -465,111 +453,111 @@ Online archives of content (archives of websites, social media profiles, media, C00087 Make more noise than the disinformation -M009 - dilution +M009 - Dilution TA06 Develop Content D04 C00090 Fake engagement system -Create honeypots for misinformation creators to engage with, and reduce the resources they have available for misinformation campaigns. -M002 - diversion +Create honeypots for misinformation creators to engage with, and reduce the resources they have available for misinformation campaigns. +M002 - Diversion TA07 Channel Selection D05 C00091 Honeypot social community -Set honeypots, e.g. communities, in networks likely to be used for disinformation. -M002 - diversion +Set honeypots, e.g. communities, in networks likely to be used for disinformation. +M002 - Diversion TA06 Develop Content D05 C00092 Establish a truth teller reputation score for influencers -Includes "Establish a truth teller reputation score for influencers” and “Reputation scores for social media users”. Influencers are individuals or accounts with many followers. -M006 - scoring +Includes "Establish a truth teller reputation score for influencers” and “Reputation scores for social media users”. Influencers are individuals or accounts with many followers. +M006 - Scoring TA02 Objective Planning D07 C00093 Influencer code of conduct -Establish tailored code of conduct for individuals with many followers. Can be platform code of conduct; can also be community code. -M001 - resilience +Establish tailored code of conduct for individuals with many followers. Can be platform code of conduct; can also be community code. +M001 - Resilience TA15 - Establish Social Assets D07 C00094 Force full disclosure on corporate sponsor of research -Accountability move: make sure research is published with its funding sources. -M003 - daylight +Accountability move: make sure research is published with its funding sources. +M003 - Daylight TA06 Develop Content D04 C00096 Strengthen institutions that are always truth tellers -Increase credibility, visibility, and reach of positive influencers in the information space. -M006 - scoring +Increase credibility, visibility, and reach of positive influencers in the information space. +M006 - Scoring TA01 Strategic Planning D07 C00097 Require use of verified identities to contribute to poll or comment -Reduce poll flooding by online taking comments or poll entries from verified accounts. -M004 - friction +Reduce poll flooding by only taking comments or poll entries from verified accounts. +M004 - Friction TA07 Channel Selection D02 C00098 Revocation of allowlisted or "verified" status -remove blue checkmarks etc from known misinformation accounts. -M004 - friction +remove blue checkmarks etc from known misinformation accounts. +M004 - Friction TA07 Channel Selection D02 C00099 Strengthen verification methods -Improve content veerification methods available to groups, individuals etc. -M004 - friction +Improve content verification methods available to groups, individuals etc. +M004 - Friction TA07 Channel Selection D02 C00100 Hashtag jacking -Post large volumes of unrelated content on known misinformation hashtags -M002 - diversion +Post large volumes of unrelated content on known misinformation hashtags +M002 - Diversion TA08 Pump Priming D03 C00101 Create friction by rate-limiting engagement -Create participant friction. 
Includes Make repeat voting hard, and throttle number of forwards. -M004 - friction +Create participant friction. Includes Make repeat voting hard, and throttle number of forwards. +M004 - Friction TA07 Channel Selection D04 C00103 Create a bot that engages / distract trolls -This is reactive, not active measure (honeypots are active). It's a platform controlled measure. -M002 - diversion +This is reactive, not active measure (honeypots are active). It's a platform controlled measure. +M002 - Diversion TA07 Channel Selection D05 C00105 Buy more advertising than misinformation creators -Shift influence and algorithms by posting more adverts into spaces than misinformation creators. -M009 - dilution +Shift influence and algorithms by posting more adverts into spaces than misinformation creators. +M009 - Dilution TA07 Channel Selection D03 @@ -577,7 +565,7 @@ Online archives of content (archives of websites, social media profiles, media, C00106 Click-bait centrist content Create emotive centrist content that gets more clicks -M002 - diversion +M002 - Diversion TA06 Develop Content D03 @@ -585,15 +573,15 @@ Online archives of content (archives of websites, social media profiles, media, C00107 Content moderation includes social media content take-downs, e.g. facebook or Twitter content take-downs -M006 - scoring, M005 - removal +M006 - Scoring, M005 - Removal TA06 Develop Content D02 C00109 Dampen Emotional Reaction -Reduce emotional responses to misinformation through calming messages, etc. -M001 - resilience +Reduce emotional responses to misinformation through calming messages, etc. +M001 - Resilience TA09 Exposure D03 @@ -601,23 +589,23 @@ Online archives of content (archives of websites, social media profiles, media, C00111 Reduce polarisation by connecting and presenting sympathetic renditions of opposite views -M001 - resilience +M001 - Resilience TA01 Strategic Planning D04 C00112 "Prove they are not an op!" -Challenge misinformation creators to prove they're not an information operation. -M004 - friction +Challenge misinformation creators to prove they're not an information operation. +M004 - Friction TA08 Pump Priming D02 C00113 Debunk and defuse a fake expert / credentials. -Debunk fake experts, their credentials, and potentially also their audience quality -M003 - daylight +Debunk fake experts, their credentials, and potentially also their audience quality +M003 - Daylight TA08 Pump Priming D02 @@ -625,23 +613,23 @@ Online archives of content (archives of websites, social media profiles, media, C00114 Don't engage with payloads Stop passing on misinformation -M004 - friction +M004 - Friction TA08 Pump Priming D02 C00115 Expose actor and intentions -Debunk misinformation creators and posters. -M003 - daylight +Debunk misinformation creators and posters. +M003 - Daylight TA08 Pump Priming D02 C00116 Provide proof of involvement -Build and post information about groups etc's involvement in misinformation incidents. -M003 - daylight +Build and post information about groups etc's involvement in misinformation incidents. +M003 - Daylight TA08 Pump Priming D02 @@ -649,23 +637,23 @@ Online archives of content (archives of websites, social media profiles, media, C00117 Downgrade / de-amplify so message is seen by fewer people Label promote counter to disinformation -M010 - countermessaging +M010 - Countermessaging TA08 Pump Priming D04 C00118 Repurpose images with new text -Add countermessage text to iamges used in misinformation incidents. 
-M010 - countermessaging +Add countermessage text to images used in misinformation incidents. +M010 - Countermessaging TA08 Pump Priming D04 C00119 Engage payload and debunk. -debunk misinformation content. Provide link to facts. -M010 - countermessaging +debunk misinformation content. Provide link to facts. +M010 - Countermessaging TA08 Pump Priming D07 @@ -673,87 +661,87 @@ Online archives of content (archives of websites, social media profiles, media, C00120 Open dialogue about design of platforms to produce different outcomes Redesign platforms and algorithms to reduce the effectiveness of disinformation -M007 - metatechnique +M007 - Metatechnique TA08 Pump Priming D07 C00121 -Tool transparency and literacy for channels people follow. -Make algorithms in platforms explainable, and visible to people using those platforms. -M001 - resilience +Tool transparency and literacy for channels people follow. +Make algorithms in platforms explainable, and visible to people using those platforms. +M001 - Resilience TA08 Pump Priming D07 C00122 Content moderation -Beware: content moderation misused becomes censorship. -M004 - friction +Beware: content moderation misused becomes censorship. +M004 - Friction TA09 Exposure D02 C00123 Remove or rate limit botnets -reduce the visibility of known botnets online. -M004 - friction +reduce the visibility of known botnets online. +M004 - Friction TA09 Exposure D03 C00124 Don't feed the trolls -Don't engage with individuals relaying misinformation. -M004 - friction +Don't engage with individuals relaying misinformation. +M004 - Friction TA09 Exposure D03 C00125 Prebunking -Produce material in advance of misinformation incidents, by anticipating the narratives used in them, and debunking them. -M001 - resilience +Produce material in advance of misinformation incidents, by anticipating the narratives used in them, and debunking them. +M001 - Resilience TA09 Exposure D03 C00126 Social media amber alert -Create an alert system around disinformation and misinformation artifacts, narratives, and incidents -M003 - daylight +Create an alert system around disinformation and misinformation artefacts, narratives, and incidents +M003 - Daylight TA09 Exposure D03 C00128 Create friction by marking content with ridicule or other "decelerants" -Repost or comment on misinformation artifacts, using ridicule or other content to reduce the likelihood of reposting. -M009 - dilution +Repost or comment on misinformation artefacts, using ridicule or other content to reduce the likelihood of reposting. +M009 - Dilution TA09 Exposure D03 C00129 -Use banking to cut off access +Use banking to cut off access fiscal sanctions; parallel to counter terrorism -M014 - reduce resources +M014 - Reduce Resources TA09 Exposure D02 C00130 Mentorship: elders, youth, credit. Learn vicariously. -Train local influencers in countering misinformation. -M001 - resilience +Train local influencers in countering misinformation. +M001 - Resilience TA05 Microtargeting D07 C00131 Seize and analyse botnet servers -Take botnet servers offline by seizing them. -M005 - removal +Take botnet servers offline by seizing them. +M005 - Removal TA11 Persistence D02 @@ -761,39 +749,39 @@ Online archives of content (archives of websites, social media profiles, media, C00133 Deplatform Account* Note: Similar to Deplatform People but less generic. Perhaps both should be left. -M005 - removal +M005 - Removal TA15 - Establish Social Assets D03 C00135 Deplatform message groups and/or message boards -Merged two rows here. 
-M005 - removal +Merged two rows here. +M005 - Removal TA15 Establish Social Assets D03 C00136 Microtarget most likely targets then send them countermessages -Find communities likely to be targetted by misinformation campaigns, and send them countermessages or pointers to information sources. -M010 - countermessaging +Find communities likely to be targetted by misinformation campaigns, and send them countermessages or pointers to information sources. +M010 - Countermessaging TA08 Pump Priming D03 C00138 Spam domestic actors with lawsuits -File multiple lawsuits against known misinformation creators and posters, to distract them from disinformation creation. -M014 - reduce resources +File multiple lawsuits against known misinformation creators and posters, to distract them from disinformation creation. +M014 - Reduce Resources TA11 Persistence D03 C00139 Weaponise youtube content matrices -God knows what this is. Keeping temporarily in case we work it out. -M004 - friction +God knows what this is. Keeping temporarily in case we work it out. +M004 - Friction TA11 Persistence D03 @@ -801,39 +789,39 @@ Online archives of content (archives of websites, social media profiles, media, C00140 "Bomb" link shorteners with lots of calls Applies to most of the content used by exposure techniques except "T0055 - Use hashtag”. Applies to analytics -M008 - data pollution +M008 - Data Pollution TA12 Measure Effectiveness D03 C00142 Platform adds warning label and decision point when sharing content -Includes “this has been disproved: do you want to forward it”. Includes “"Hey this story is old" popup when messaging with old URL” - this assumes that this technique is based on visits to an URL shortener or a captured news site that can publish a message of our choice. Includes “mark clickbait visually”. -M004 - friction +Includes “this has been disproved: do you want to forward it”. Includes “"Hey this story is old" popup when messaging with old URL” - this assumes that this technique is based on visits to an URL shortener or a captured news site that can publish a message of our choice. Includes “mark clickbait visually”. +M004 - Friction TA06 Develop Content D04 C00143 (botnet) DMCA takedown requests to waste group time -Use copyright infringement claims to remove videos etc. -M013 - targeting +Use copyright infringement claims to remove videos etc. +M013 - Targeting TA11 Persistence D04 C00144 Buy out troll farm employees / offer them jobs -Degrade the infrastructure. Could e.g. pay to not act for 30 days. Not recommended -M014 - reduce resources +Degrade the infrastructure. Could e.g. pay to not act for 30 days. Not recommended +M014 - Reduce Resources TA02 Objective Planning D04 C00147 Make amplification of social media posts expire (e.g. can't like/ retweet after n days) -Stop new community activity (likes, comments) on old social media posts. -M004 - friction +Stop new community activity (likes, comments) on old social media posts. +M004 - Friction TA09 Exposure D03 @@ -841,7 +829,7 @@ Online archives of content (archives of websites, social media profiles, media, C00148 Add random links to network graphs If creators are using network analysis to determine how to attack networks, then adding random extra links to those networks might throw that analysis out enough to change attack outcomes. Unsure which DISARM techniques. 
-M008 - data pollution +M008 - Data Pollution TA12 Measure Effectiveness D04 @@ -849,7 +837,7 @@ Online archives of content (archives of websites, social media profiles, media, C00149 Poison the monitoring & evaluation data Includes Pollute the AB-testing data feeds: Polluting A/B testing requires knowledge of MOEs and MOPs. A/B testing must be caught early when there is relatively little data available so infiltration of TAs and understanding of how content is migrated from testing to larger audiences is fundamental. -M008 - data pollution +M008 - Data Pollution TA12 Measure Effectiveness D04 @@ -857,15 +845,15 @@ Online archives of content (archives of websites, social media profiles, media, C00153 Take pre-emptive action against actors' infrastructure Align offensive cyber action with information operations and counter disinformation approaches, where appropriate. -M013 - targeting +M013 - Targeting TA01 Strategic Planning D03 C00154 Ask media not to report false information -Train media to spot and respond to misinformation, and ask them not to post or transmit misinformation they've found. -M005 - removal +Train media to spot and respond to misinformation, and ask them not to post or transmit misinformation they've found. +M005 - Removal TA08 Pump Priming D02 @@ -873,23 +861,23 @@ Online archives of content (archives of websites, social media profiles, media, C00155 Ban incident actors from funding sites Ban misinformation creators and posters from funding sites -M005 - removal +M005 - Removal TA15 - Establish Social Assets D02 C00156 -Better tell your country or organization story -Civil engagement activities conducted on the part of EFP forces. NATO should likewise provide support and training, where needed, to local public affairs and other communication personnel. Local government and military public affairs personnel can play their part in creating and disseminating entertaining and sharable content that supports the EFP mission. -M010 - countermessaging +Better tell your country or organisation story +Civil engagement activities conducted on the part of EFP forces. NATO should likewise provide support and training, where needed, to local public affairs and other communication personnel. Local government and military public affairs personnel can play their part in creating and disseminating entertaining and sharable content that supports the EFP mission. +M010 - Countermessaging TA02 Objective Planning D03 C00159 Have a disinformation response plan -e.g. Create a campaign plan and toolkit for competition short of armed conflict (this used to be called “the grey zone”). The campaign plan should account for own vulnerabilities and strengths, and not over-rely on any one tool of statecraft or line of effort. It will identify and employ a broad spectrum of national power to deter, compete, and counter (where necessary) other countries’ approaches, and will include understanding of own capabilities, capabilities of disinformation creators, and international standards of conduct to compete in, shrink the size, and ultimately deter use of competition short of armed conflict. -M007 - metatechnique +e.g. Create a campaign plan and toolkit for competition short of armed conflict (this used to be called “the grey zone”). The campaign plan should account for own vulnerabilities and strengths, and not over-rely on any one tool of statecraft or line of effort. 
It will identify and employ a broad spectrum of national power to deter, compete, and counter (where necessary) other countries’ approaches, and will include understanding of own capabilities, capabilities of disinformation creators, and international standards of conduct to compete in, shrink the size, and ultimately deter use of competition short of armed conflict. +M007 - Metatechnique TA01 Strategic Planning D03 @@ -897,7 +885,7 @@ Online archives of content (archives of websites, social media profiles, media, C00160 find and train influencers Identify key influencers (e.g. use network analysis), then reach out to identified users and offer support, through either training or resources. -M001 - resilience +M001 - Resilience TA15 - Establish Social Assets D02 @@ -905,23 +893,23 @@ Online archives of content (archives of websites, social media profiles, media, C00161 Coalition Building with stakeholders and Third-Party Inducements Advance coalitions across borders and sectors, spanning public and private, as well as foreign and domestic, divides. Improve mechanisms to collaborate, share information, and develop coordinated approaches with the private sector at home and allies and partners abroad. -M007 - metatechnique +M007 - Metatechnique TA01 Strategic Planning D07 C00162 Unravel/target the Potemkin villages -Kremlin’s narrative spin extends through constellations of “civil society” organizations, political parties, churches, and other actors. Moscow leverages think tanks, human rights groups, election observers, Eurasianist integration groups, and orthodox groups. A collection of Russian civil society organizations, such as the Federal Agency for the Commonwealth of Independent States Affairs, Compatriots Living Abroad, and International Humanitarian Cooperation, together receive at least US$100 million per year, in addition to government-organized nongovernmental organizations (NGOs), at least 150 of which are funded by Russian presidential grants totaling US$70 million per year. -M013 - targeting +Kremlin’s narrative spin extends through constellations of “civil society” organisations, political parties, churches, and other actors. Moscow leverages think tanks, human rights groups, election observers, Eurasianist integration groups, and orthodox groups. A collection of Russian civil society organisations, such as the Federal Agency for the Commonwealth of Independent States Affairs, Compatriots Living Abroad, and International Humanitarian Cooperation, together receive at least US$100 million per year, in addition to government-organized nongovernmental organisations (NGOs), at least 150 of which are funded by Russian presidential grants totaling US$70 million per year. +M013 - Targeting TA15 Establish Social Assets D03 C00164 compatriot policy -protect the interests of this population and, more importantly, influence the population to support pro-Russia causes and effectively influence the politics of its neighbors -M013 - targeting +protect the interests of this population and, more importantly, influence the population to support pro-Russia causes and effectively influence the politics of its neighbours +M013 - Targeting TA02 Objective Planning D03 @@ -929,7 +917,7 @@ Online archives of content (archives of websites, social media profiles, media, C00165 Ensure integrity of official documents e.g. 
for leaked legal documents, use court motions to limit future discovery actions -M004 - friction +M004 - Friction TA06 Develop Content D02 @@ -937,15 +925,15 @@ Online archives of content (archives of websites, social media profiles, media, C00169 develop a creative content hub international donors will donate to a basket fund that will pay a committee of local experts who will, in turn, manage and distribute the money to Russian-language producers and broadcasters that pitch various projects. -M010 - countermessaging +M010 - Countermessaging TA02 Objective Planning D03 C00170 elevate information as a critical domain of statecraft -Shift from reactive to proactive response, with priority on sharing relevant information with the public and mobilizing private-sector engagement. Recent advances in data-driven technologies have elevated information as a source of power to influence the political and economic environment, to foster economic growth, to enable a decision-making advantage over competitors, and to communicate securely and quickly. -M007 - metatechnique +Shift from reactive to proactive response, with priority on sharing relevant information with the public and mobilising private-sector engagement. Recent advances in data-driven technologies have elevated information as a source of power to influence the political and economic environment, to foster economic growth, to enable a decision-making advantage over competitors, and to communicate securely and quickly. +M007 - Metatechnique TA01 Strategic Planning D03 @@ -953,39 +941,39 @@ Online archives of content (archives of websites, social media profiles, media, C00172 social media source removal Removing accounts, pages, groups, e.g. facebook page removal -M005 - removal +M005 - Removal TA15 Establish Social Assets D02 C00174 Create a healthier news environment -Free and fair press: create bipartisan, patriotic commitment to press freedom. Note difference between news and editorialising. Build alternative news sources: create alternative local-language news sources to counter local-language propaganda outlets. Delegitimize the 24 hour news cycle. includes Provide an alternative to disinformation content by expanding and improving local content: Develop content that can displace geopolitically-motivated narratives in the entire media environment, both new and old media alike. -M007 - metatechnique, M002 - diversion +Free and fair press: create bipartisan, patriotic commitment to press freedom. Note difference between news and editorialising. Build alternative news sources: create alternative local-language news sources to counter local-language propaganda outlets. Delegitimize the 24 hour news cycle. includes Provide an alternative to disinformation content by expanding and improving local content: Develop content that can displace geopolitically-motivated narratives in the entire media environment, both new and old media alike. +M007 - Metatechnique, M002 - Diversion TA01 Strategic Planning D02 C00176 Improve Coordination amongst stakeholders: public and private -Coordinated disinformation challenges are increasingly multidisciplinary, there are few organizations within the national security structures that are equipped with the broad-spectrum capability to effectively counter large-scale conflict short of war tactics in real-time. 
Institutional hurdles currently impede diverse subject matter experts, hailing from outside of the traditional national security and foreign policy disciplines (e.g., physical science, engineering, media, legal, and economics fields), from contributing to the direct development of national security countermeasures to emerging conflict short of war threat vectors. A Cognitive Security Action Group (CSAG), akin to the Counterterrorism Security Group (CSG), could drive interagency alignment across equivalents of DHS, DoS, DoD, Intelligence Community, and other implementing agencies, in areas including strategic narrative, and the nexus of cyber and information operations. -M007 - metatechnique +Coordinated disinformation challenges are increasingly multidisciplinary, there are few organisations within the national security structures that are equipped with the broad-spectrum capability to effectively counter large-scale conflict short of war tactics in real-time. Institutional hurdles currently impede diverse subject matter experts, hailing from outside of the traditional national security and foreign policy disciplines (e.g., physical science, engineering, media, legal, and economics fields), from contributing to the direct development of national security countermeasures to emerging conflict short of war threat vectors. A Cognitive Security Action Group (CSAG), akin to the Counterterrorism Security Group (CSG), could drive interagency alignment across equivalents of DHS, DoS, DoD, Intelligence Community, and other implementing agencies, in areas including strategic narrative, and the nexus of cyber and information operations. +M007 - Metatechnique TA01 Strategic Planning D07 C00178 Fill information voids with non-disinformation content -1) Pollute the data voids with wholesome content (Kittens! Babyshark!). 2) fill data voids with relevant information, e.g. increase Russian-language programming in areas subject to Russian disinformation. -M009 - dilution, M008 - data pollution +1) Pollute the data voids with wholesome content (Kittens! Babyshark!). 2) fill data voids with relevant information, e.g. increase Russian-language programming in areas subject to Russian disinformation. +M009 - Dilution, M008 - Data Pollution TA05 Microtargeting D04 C00182 Redirection / malware detection/ remediation -Detect redirction or malware, then quarantine or delete. -M005 - removal +Detect redirction or malware, then quarantine or delete. +M005 - Removal TA09 Exposure D02 @@ -993,79 +981,79 @@ Online archives of content (archives of websites, social media profiles, media, C00184 Media exposure highlight misinformation activities and actors in media -M003 - daylight +M003 - Daylight TA08 Pump Priming D04 C00188 Newsroom/Journalist training to counter influence moves -Includes SEO influence. Includes promotion of a “higher standard of journalism”: journalism training “would be helpful, especially for the online community. Includes Strengthen local media: Improve effectiveness of local media outlets. -M001 - resilience +Includes SEO influence. Includes promotion of a “higher standard of journalism”: journalism training “would be helpful, especially for the online community. Includes Strengthen local media: Improve effectiveness of local media outlets. +M001 - Resilience TA08 Pump Priming D03 C00189 Ensure that platforms are taking down flagged accounts -Use ongoing analysis/monitoring of "flagged" profiles. Confirm whether platforms are actively removing flagged accounts, and raise pressure via e.g. 
government organizations to encourage removal -M003 - daylight +Use ongoing analysis/monitoring of "flagged" profiles. Confirm whether platforms are actively removing flagged accounts, and raise pressure via e.g. government organisations to encourage removal +M003 - Daylight TA15 - Establish Social Assets D06 C00190 open engagement with civil society -Government open engagement with civil society as an independent check on government action and messaging. Government seeks to coordinate and synchronize narrative themes with allies and partners while calibrating action in cases where elements in these countries may have been co-opted by competitor nations. Includes “fight in the light”: Use leadership in the arts, entertainment, and media to highlight and build on fundamental tenets of democracy. -M001 - resilience +Government open engagement with civil society as an independent check on government action and messaging. Government seeks to coordinate and synchronise narrative themes with allies and partners while calibrating action in cases where elements in these countries may have been co-opted by competitor nations. Includes “fight in the light”: Use leadership in the arts, entertainment, and media to highlight and build on fundamental tenets of democracy. +M001 - Resilience TA01 Strategic Planning D03 C00195 -Redirect searches away from disinformation or extremist content -Use Google AdWords to identify instances in which people search Google about particular fake-news stories or propaganda themes. Includes Monetize centrist SEO by subsidizing the difference in greater clicks towards extremist content. -M002 - diversion +Redirect searches away from disinformation or extremist content +Use Google AdWords to identify instances in which people search Google about particular fake-news stories or propaganda themes. Includes Monetize centrist SEO by subsidising the difference in greater clicks towards extremist content. +M002 - Diversion TA07 Channel Selection D02 C00197 remove suspicious accounts -Standard reporting for false profiles (identity issues). Includes detecting hijacked accounts and reallocating them - if possible, back to original owners. -M005 - removal +Standard reporting for false profiles (identity issues). Includes detecting hijacked accounts and reallocating them - if possible, back to original owners. +M005 - Removal TA15 - Establish Social Assets D02 C00200 Respected figure (influencer) disavows misinfo -FIXIT: standardize language used for influencer/ respected figure. -M010 - countermessaging +FIXIT: standardise language used for influencer/ respected figure. +M010 - Countermessaging TA09 Exposure D03 C00202 Set data 'honeytraps' -Set honeytraps in content likely to be accessed for disinformation. -M002 - diversion +Set honeytraps in content likely to be accessed for disinformation. +M002 - Diversion TA06 Develop Content D02 C00203 Stop offering press credentials to propaganda outlets -Remove access to official press events from known misinformation actors. -M004 - friction +Remove access to official press events from known misinformation actors. +M004 - Friction TA15 Establish Social Assets D03 C00205 strong dialogue between the federal government and private sector to encourage better reporting -Increase civic resilience by partnering with business community to combat gray zone threats and ensuring adequate reporting and enforcement mechanisms. 
-M007 - metatechnique +Increase civic resilience by partnering with business community to combat grey zone threats and ensuring adequate reporting and enforcement mechanisms. +M007 - Metatechnique TA01 Strategic Planning D03 @@ -1073,7 +1061,7 @@ Online archives of content (archives of websites, social media profiles, media, C00207 Run a competing disinformation campaign - not recommended -M013 - targeting +M013 - Targeting TA02 Objective Planning D07 @@ -1081,7 +1069,7 @@ Online archives of content (archives of websites, social media profiles, media, C00211 Use humorous counter-narratives -M010 - countermessaging +M010 - Countermessaging TA09 Exposure D03 @@ -1089,7 +1077,7 @@ Online archives of content (archives of websites, social media profiles, media, C00212 build public resilience by making civil society more vibrant Increase public service experience, and support wider civics and history education. -M001 - resilience +M001 - Resilience TA01 Strategic Planning D03 @@ -1097,23 +1085,23 @@ Online archives of content (archives of websites, social media profiles, media, C00216 Use advertiser controls to stem flow of funds to bad actors Prevent ad revenue going to disinformation domains -M014 - reduce resources +M014 - Reduce Resources TA05 Microtargeting D02 C00219 Add metadata to content that’s out of the control of disinformation creators -Steganography. Adding date, signatures etc to stop issue of photo relabelling etc. -M003 - daylight +Steganography. Adding date, signatures etc to stop issue of photo relabelling etc. +M003 - Daylight TA06 Develop Content D04 C00220 Develop a monitoring and intelligence plan -Create a plan for misinformation and disinformation response, before it's needed. Include connections / contacts needed, expected counteremessages etc. -M007 - metatechnique +Create a plan for misinformation and disinformation response, before it's needed. Include connections / contacts needed, expected countermessages etc. +M007 - Metatechnique TA01 Strategic Planning D03 @@ -1121,23 +1109,23 @@ Online archives of content (archives of websites, social media profiles, media, C00221 Run a disinformation red team, and design mitigation factors Include PACE plans - Primary, Alternate, Contingency, Emergency -M007 - metatechnique +M007 - Metatechnique TA01 Strategic Planning D03 C00222 Tabletop simulations -Simulate misinformation and disinformation campaigns, and responses to them, before campaigns happen. -M007 - metatechnique +Simulate misinformation and disinformation campaigns, and responses to them, before campaigns happen. +M007 - Metatechnique TA02 Objective Planning D03 C00223 Strengthen Trust in social media platforms -Improve trust in the misinformation responses from social media and other platforms. Examples include creating greater transparancy on their actions and algorithms. -M001 - resilience +Improve trust in the misinformation responses from social media and other platforms. Examples include creating greater transparency on their actions and algorithms. +M001 - Resilience TA01 Strategic Planning D03 diff --git a/generated_pages/detections_index.md b/generated_pages/detections_index.md index ed1d0fd..12b9765 100644 --- a/generated_pages/detections_index.md +++ b/generated_pages/detections_index.md @@ -12,7 +12,7 @@ F00001 Analyse aborted / failed campaigns -Examine failed campaigns. How did they fail? Can we create useful activities that increase these failures? +Examine failed campaigns. How did they fail? 
Can we create useful activities that increase these failures? TA01 Strategic Planning D01 @@ -20,7 +20,7 @@ F00002 Analyse viral fizzle -We have no idea what this means. Is it something to do with the way a viral story spreads? +We have no idea what this means. Is it something to do with the way a viral story spreads? TA01 Strategic Planning D01 @@ -35,7 +35,7 @@ F00004 -Recruit like-minded converts "people who used to be in-group" +Recruit like-minded converts "people who used to be in-group" TA01 Strategic Planning @@ -44,7 +44,7 @@ F00005 SWOT Analysis of Cognition in Various Groups -Strengths, Weaknesses, Opportunities, Threats analysis of groups and audience segments. +Strengths, Weaknesses, Opportunities, Threats analysis of groups and audience segments. TA01 Strategic Planning D01 @@ -132,9 +132,7 @@ F00016 Identify fence-sitters -Note: In each case, depending on the platform there may be a way to identify a fence-sitter. For example, online polls may have a neutral option or a "somewhat this-or-that" option, and may reveal who voted for that to all visitors. This information could be of use to data analysts. - -In TA08-11, the engagement level of victims could be identified to detect and respond to increasing engagement. +Note: In each case, depending on the platform there may be a way to identify a fence-sitter. For example, online polls may have a neutral option or a "somewhat this-or-that" option, and may reveal who voted for that to all visitors. This information could be of use to data analysts. In TA08-11, the engagement level of victims could be identified to detect and respond to increasing engagement. TA15 Establish Social Assets D01 @@ -262,7 +260,7 @@ In TA08-11, the engagement level of victims could be identified to detect and re F00032 Educate on how to identify to pollution -DUPLICATE - DELETE +DUPLICATE - DELETE TA06 Develop Content D01 @@ -301,10 +299,8 @@ In TA08-11, the engagement level of victims could be identified to detect and re F00037 -News content provenance certification. -Original Comment: Shortcomings: intentional falsehood. Doesn't solve accuracy. Can't be mandatory. - -Technique should be in terms of "strategic innoculation", raising the standards of what people expect in terms of evidence when consuming news. +News content provenance certification. +Original Comment: Shortcomings: intentional falsehood. Doesn't solve accuracy. Can't be mandatory. Technique should be in terms of "strategic innoculation", raising the standards of what people expect in terms of evidence when consuming news. TA06 Develop Content D01 @@ -343,7 +339,7 @@ Technique should be in terms of "strategic innoculation", raising the standards F00042 -Categorize polls by intent +Categorise polls by intent Use T00029, but against the creators TA07 Channel Selection @@ -352,7 +348,7 @@ Technique should be in terms of "strategic innoculation", raising the standards F00043 Monitor for creation of fake known personas -Platform companies and some information security companies (e.g. ZeroFox) do this. +Platform companies and some information security companies (e.g. ZeroFox) do this. TA07 Channel Selection D01 @@ -472,9 +468,7 @@ Technique should be in terms of "strategic innoculation", raising the standards F00058 Deplatform (cancel culture) -*Deplatform People: This technique needs to be a bit more specific to distinguish it from "account removal" or DDOS and other techniques that get more specific when applied to content. 
- -For example, other ways of deplatforming people include attacking their sources of funds, their allies, their followers, etc. +*Deplatform People: This technique needs to be a bit more specific to distinguish it from "account removal" or DDOS and other techniques that get more specific when applied to content. For example, other ways of deplatforming people include attacking their sources of funds, their allies, their followers, etc. TA10 Go Physical D01 @@ -490,7 +484,7 @@ For example, other ways of deplatforming people include attacking their sources F00060 Identify susceptible influencers -I assume this was a transcript error. Otherwise, "Identify Susceptible Influences" as in the various methods of influences that may work against a victim could also be a technique. Nope, wasn't a transcript error: original note says influencers, as in find people of influence that might be targetted. +I assume this was a transcript error. Otherwise, "Identify Susceptible Influences" as in the various methods of influences that may work against a victim could also be a technique. Nope, wasn't a transcript error: original note says influencers, as in find people of influence that might be targetted. TA10 Go Physical D01 @@ -554,7 +548,7 @@ For example, other ways of deplatforming people include attacking their sources F00068 Resonance analysis -a developing methodology for identifying statistical differences in how social groups use language and quantifying how common those statistical differences are within a larger population. In essence, it hypothesizes how much affinity might exist for a specific group within a general population, based on the language its members employ +a developing methodology for identifying statistical differences in how social groups use language and quantifying how common those statistical differences are within a larger population. In essence, it hypothesises how much affinity might exist for a specific group within a general population, based on the language its members employ D01 @@ -594,7 +588,7 @@ For example, other ways of deplatforming people include attacking their sources F00073 collect intel/recon on black/covert content creators/manipulators -Players at the level of covert attribution, referred to as “black” in the grayscale of deniability, produce content on user-generated media, such as YouTube, but also add fear-mongering commentary to and amplify content produced by others and supply exploitable content to data dump websites. These activities are conducted by a network of trolls, bots, honeypots, and hackers. +Players at the level of covert attribution, referred to as “black” in the grayscale of deniability, produce content on user-generated media, such as YouTube, but also add fear-mongering commentary to and amplify content produced by others and supply exploitable content to data dump websites. These activities are conducted by a network of trolls, bots, honeypots, and hackers. D01 @@ -602,7 +596,7 @@ For example, other ways of deplatforming people include attacking their sources F00074 identify relevant fence-sitter communities -brand ambassador programs could be used with influencers across a variety of social media channels. It could also target other prominent experts, such as academics, business leaders, and other potentially prominent people. Authorities must ultimately take care in implementing such a program given the risk that contact with U.S. or NATO authorities might damage influencer reputations. 
Engagements must consequently be made with care, and, if possible, government interlocutors should work through local NGOs. +brand ambassador programmes could be used with influencers across a variety of social media channels. It could also target other prominent experts, such as academics, business leaders, and other potentially prominent people. Authorities must ultimately take care in implementing such a programme given the risk that contact with U.S. or NATO authorities might damage influencer reputations. Engagements must consequently be made with care, and, if possible, government interlocutors should work through local NGOs. D01 @@ -610,7 +604,7 @@ For example, other ways of deplatforming people include attacking their sources F00075 leverage open-source information -significant amounts of quality open-source information are now available and should be leveraged to build products and analysis prior to problem prioritization in the areas of observation, attribution, and intent. Successfully distinguishing the gray zone campaign signal through the global noise requires action through the entirety of the national security community. Policy, process, and tools must all adapt and evolve to detect, discern, and act upon a new type of signal +significant amounts of quality open-source information are now available and should be leveraged to build products and analysis prior to problem prioritisation in the areas of observation, attribution, and intent. Successfully distinguishing the grey zone campaign signal through the global noise requires action through the entirety of the national security community. Policy, process, and tools must all adapt and evolve to detect, discern, and act upon a new type of signal D01 @@ -618,15 +612,14 @@ For example, other ways of deplatforming people include attacking their sources F00076 Monitor/collect audience engagement data connected to “useful idiots” -Target audience connected to "useful idiots rather than the specific profiles because - The active presence of such sources complicates targeting of Russian propaganda, given that it is often difficult to discriminate between authentic views and opinions on the internet and those disseminated by the Russian state. - +Target audience connected to "useful idiots rather than the specific profiles because - The active presence of such sources complicates targeting of Russian propaganda, given that it is often difficult to discriminate between authentic views and opinions on the internet and those disseminated by the Russian state. D01 F00077 -Model for bot account behavior +Model for bot account behaviour Bot account: action based, people. Unsure which DISARM techniques. TA15 - Establish Social Assets @@ -691,10 +684,7 @@ For example, other ways of deplatforming people include attacking their sources F00085 detection of a weak signal through global noise -Gray zone threats are challenging given that warning requires detection of a weak signal through global noise and across threat vectors and regional boundaries.Three interconnected gray zone elements characterize the nature of the activity: -Temporality: The nature of gray zone threats truly requires a “big picture view” over long timescales and across regions and functional topics. -Attribution: requiring an “almost certain” or “nearly certain analytic assessment before acting costs time and analytic effort -Intent: judgement of adversarial intent to conduct gray zone activity. 
Indeed, the purpose of countering gray zone threats is to deter adversaries from fulfilling their intent to act. While attribution is one piece of the puzzle, closing the space around intent often means synthesizing multiple relevant indicators and warnings, including the state’s geopolitical ambitions, military ties, trade and investment, level of corruption, and media landscape, among others. +Grey zone threats are challenging given that warning requires detection of a weak signal through global noise and across threat vectors and regional boundaries.Three interconnected grey zone elements characterise the nature of the activity: Temporality: The nature of grey zone threats truly requires a “big picture view” over long timescales and across regions and functional topics. Attribution: requiring an “almost certain” or “nearly certain analytic assessment before acting costs time and analytic effort Intent: judgement of adversarial intent to conduct grey zone activity. Indeed, the purpose of countering grey zone threats is to deter adversaries from fulfilling their intent to act. While attribution is one piece of the puzzle, closing the space around intent often means synthesising multiple relevant indicators and warnings, including the state’s geopolitical ambitions, military ties, trade and investment, level of corruption, and media landscape, among others. @@ -710,15 +700,15 @@ Intent: judgement of adversarial intent to conduct gray zone activity. Indeed, t F00087 Improve Indications and Warning -United States has not adequately adapted its information indicators and thresholds for warning policymakers to account for gray zone tactics. Competitors have undertaken a marked shift to slow-burn, deceptive, non-military, and indirect challenges to U.S. interests. Relative to traditional security indicators and warnings, these are more numerous and harder to detect and make it difficult for analysts to infer intent. +United States has not adequately adapted its information indicators and thresholds for warning policymakers to account for grey zone tactics. Competitors have undertaken a marked shift to slow-burn, deceptive, non-military, and indirect challenges to U.S. interests. Relative to traditional security indicators and warnings, these are more numerous and harder to detect and make it difficult for analysts to infer intent. D01 F00088 -Revitalize an “active measures working group,” -Recognize campaigns from weak signals, including rivals’ intent, capability, impact, interactive effects, and impact on U.S. interests... focus on adversarial covert action aspects of campaigning. +Revitalise an “active measures working group,” +Recognise campaigns from weak signals, including rivals’ intent, capability, impact, interactive effects, and impact on U.S. interests... focus on adversarial covert action aspects of campaigning. D01 @@ -726,7 +716,7 @@ Intent: judgement of adversarial intent to conduct gray zone activity. Indeed, t F00089 target/name/flag "grey zone" website content -"Gray zone" is second level of content producers and circulators, composed of outlets with uncertain attribution. This category covers conspiracy websites, far-right or far-left websites, news aggregators, and data dump websites +"Grey zone" is second level of content producers and circulators, composed of outlets with uncertain attribution. 
This category covers conspiracy websites, far-right or far-left websites, news aggregators, and data dump websites TA15 Establish Social Assets D01 @@ -742,7 +732,7 @@ Intent: judgement of adversarial intent to conduct gray zone activity. Indeed, t F00091 Partner to develop analytic methods & tools -This might include working with relevant technology firms to ensure that contracted analytic support is available. Contracted support is reportedly valuable because technology to monitor social media data is continually evolving, and such firms can provide the expertise to help identify and analyze trends, and they can more effectively stay abreast of the changing systems and develop new models as they are required +This might include working with relevant technology firms to ensure that contracted analytic support is available. Contracted support is reportedly valuable because technology to monitor social media data is continually evolving, and such firms can provide the expertise to help identify and analyse trends, and they can more effectively stay abreast of the changing systems and develop new models as they are required TA01 Strategic Planning D01 @@ -750,7 +740,7 @@ Intent: judgement of adversarial intent to conduct gray zone activity. Indeed, t F00092 daylight -Warn social media companies about an ongoing campaign (e.g. antivax sites). Anyone with datasets or data summaries can help with this +Warn social media companies about an ongoing campaign (e.g. antivax sites). Anyone with datasets or data summaries can help with this TA09 Exposure D01 @@ -758,8 +748,8 @@ Intent: judgement of adversarial intent to conduct gray zone activity. Indeed, t F00093 S4d detection and re-allocation approaches -S4D is a way to separate out different speakers in text, audio. -M004 - friction +S4D is a way to separate out different speakers in text, audio. +M004 - Friction TA15 - Establish Social Assets D01 @@ -767,14 +757,14 @@ Intent: judgement of adversarial intent to conduct gray zone activity. Indeed, t F00094 Registries alert when large batches of newsy URLs get registered together -M003 - daylight +M003 - Daylight TA07 Channel Selection D01 F00095 Fact checking -Process suspicious artifacts, narratives, and incidents +Process suspicious artefacts, narratives, and incidents TA09 Exposure D01 diff --git a/generated_pages/disarm_blue_framework.md b/generated_pages/disarm_blue_framework.md index 4f31824..ddb2207 100644 --- a/generated_pages/disarm_blue_framework.md +++ b/generated_pages/disarm_blue_framework.md @@ -17,7 +17,7 @@ TA14 Develop Narratives TA15 Establish Social Assets TA16 Establish Legitimacy -TA17 Maximize Exposure +TA17 Maximise Exposure TA18 Drive Online Harms @@ -151,9 +151,9 @@ C00070 Block access to disinformation resources C00078 Change Search Algorithms for Disinformation Content -C00195 Redirect searches away from disinformation or extremist content +C00195 Redirect searches away from disinformation or extremist content C00118 Repurpose images with new text -C00129 Use banking to cut off access +C00129 Use banking to cut off access @@ -202,11 +202,11 @@ C00024 Promote healthy narratives -C00156 Better tell your country or organization story +C00156 Better tell your country or organisation story C00082 Ground truthing as automated response to pollution -C00121 Tool transparency and literacy for channels people follow. +C00121 Tool transparency and literacy for channels people follow. 
C00200 Respected figure (influencer) disavows misinfo diff --git a/generated_pages/disarm_red_framework.md b/generated_pages/disarm_red_framework.md index 88ef243..656ddf6 100644 --- a/generated_pages/disarm_red_framework.md +++ b/generated_pages/disarm_red_framework.md @@ -17,14 +17,14 @@ TA14 Develop Narratives TA15 Establish Social Assets TA16 Establish Legitimacy -TA17 Maximize Exposure +TA17 Maximise Exposure TA18 Drive Online Harms T0073 Determine Target Audiences T0002 Facilitate State Propaganda T0016 Create Clickbait -T0015 Create Hashtags and Search Artifacts +T0015 Create Hashtags and Search Artefacts T0029 Online Polls T0020 Trial Content T0114 Deliver Ads @@ -44,7 +44,7 @@ T0018 Purchase Targeted Advertisements T0019 Generate Information Pollution T0043 Chat Apps -T0039 Bait Legitimate Influencers +T0039 Bait Legitimate Influencers T0114.001 Social Media T0017.001 Conduct Crowdfunding Campaigns T0060 Continue to Amplify @@ -52,19 +52,19 @@ T0072.001 Geographic Segmentation T0004 Develop Competing Narratives T0010 Cultivate Ignorant Agents -T0009.001 Utilize Academic/Pseudoscientific Justifications +T0009.001 Utilise Academic/Pseudoscientific Justifications T0049.001 Trolls Amplify and Manipulate T0048 Harass T0075 Dismiss -T0101 Create Localized Content +T0101 Create Localised Content T0019.001 Create Fake Research T0043.001 Use Encrypted Chat Apps T0042 Seed Kernel of Truth T0114.002 Traditional Media -T0057 Organize Events +T0057 Organise Events T0128 Conceal People T0132.002 Content Focused T0072.002 Demographic Segmentation @@ -107,7 +107,7 @@ T0022.002 Develop Original Conspiracy Theory Narratives T0014.001 Raise Funds from Malign Actors T0097.001 Backstop Personas -T0049.004 Utilize Spamoflauge +T0049.004 Utilise Spamoflauge T0048.003 Threaten to Dox @@ -116,11 +116,11 @@ T0102.002 Create Echo Chambers/Filter Bubbles T0023.001 Reframe Context T0103.001 Video Livestream -T0046 Use Search Engine Optimization +T0046 Use Search Engine Optimisation T0115.002 Post Violative Content to Provoke Takedown and Backlash T0061 Sell Merchandise T0128.003 Distance Reputable Individuals from Operation -T0133.001 Behavior Changes +T0133.001 Behaviour Changes T0072.005 Political Segmentation T0040 Demand Insurmountable Proof T0014.002 Raise Funds from Ignorant Agents @@ -186,7 +186,7 @@ -T0084.002 Plagiarize Content +T0084.002 Plagiarise Content T0104.002 Dating Apps T0117 Attract Traditional Media @@ -204,7 +204,7 @@ -T0084.003 Deceptively Labeled or Translated +T0084.003 Deceptively Labelled or Translated T0104.003 Private/Closed Social Networks @@ -287,7 +287,7 @@ T0091.002 Recruit Partisans T0100.003 Co-Opt Influencers -T0120.001 Use Affiliate Marketing Programs +T0120.001 Use Affiliate Marketing Programmes T0124.003 Exploit Platform TOS/Content Moderation @@ -339,7 +339,7 @@ T0081.005 Identify Existing Conspiracy Narratives/Suspicions -T0092.001 Create Organizations +T0092.001 Create Organisations T0121.001 Bypass Content Blocking @@ -407,7 +407,7 @@ -T0130.002 Utilize Bulletproof Hosting +T0130.002 Utilise Bulletproof Hosting @@ -425,7 +425,7 @@ -T0130.003 Use Shell Organizations +T0130.003 Use Shell Organisations @@ -483,7 +483,7 @@ -T0094.002 Utilize Butterfly Attacks +T0094.002 Utilise Butterfly Attacks @@ -555,7 +555,7 @@ -T0096.002 Outsource Content Creation to External Organizations +T0096.002 Outsource Content Creation to External Organisations diff --git a/generated_pages/incidents/I00002.md b/generated_pages/incidents/I00002.md index 1795569..b561271 100644 --- 
a/generated_pages/incidents/I00002.md +++ b/generated_pages/incidents/I00002.md @@ -18,7 +18,7 @@ | [T0017 Conduct Fundraising](../../generated_pages/techniques/T0017.md) | IT00000002 Promote "funding" campaign | | [T0018 Purchase Targeted Advertisements](../../generated_pages/techniques/T0018.md) | IT00000001 buy FB targeted ads | | [T0019 Generate Information Pollution](../../generated_pages/techniques/T0019.md) | IT00000003 create web-site - information pollution | -| [T0046 Use Search Engine Optimization](../../generated_pages/techniques/T0046.md) | IT00000005 SEO optimisation/manipulation ("key words") | +| [T0046 Use Search Engine Optimisation](../../generated_pages/techniques/T0046.md) | IT00000005 SEO optimisation/manipulation ("key words") | DO NOT EDIT ABOVE THIS LINE - PLEASE ADD NOTES BELOW \ No newline at end of file diff --git a/generated_pages/incidents/I00005.md b/generated_pages/incidents/I00005.md index efb15a2..ad71146 100644 --- a/generated_pages/incidents/I00005.md +++ b/generated_pages/incidents/I00005.md @@ -29,8 +29,8 @@ The report adds that although officially the Russian government asserted its neu | [T0018 Purchase Targeted Advertisements](../../generated_pages/techniques/T0018.md) | IT00000010 Targeted FB paid ads | | [T0019 Generate Information Pollution](../../generated_pages/techniques/T0019.md) | IT00000014 RT & Sputnik generate information pollution | | [T0029 Online Polls](../../generated_pages/techniques/T0029.md) | IT00000013 manipulate social media "online polls"? | -| [T0046 Use Search Engine Optimization](../../generated_pages/techniques/T0046.md) | IT00000022 SEO optimisation/manipulation ("key words") | -| [T0057 Organize Events](../../generated_pages/techniques/T0057.md) | IT00000012 Digital to physical "organize+promote" rallies & events? | +| [T0046 Use Search Engine Optimisation](../../generated_pages/techniques/T0046.md) | IT00000022 SEO optimisation/manipulation ("key words") | +| [T0057 Organise Events](../../generated_pages/techniques/T0057.md) | IT00000012 Digital to physical "organize+promote" rallies & events? 
| DO NOT EDIT ABOVE THIS LINE - PLEASE ADD NOTES BELOW \ No newline at end of file diff --git a/generated_pages/incidents/I00006.md b/generated_pages/incidents/I00006.md index 328e789..a4ce671 100644 --- a/generated_pages/incidents/I00006.md +++ b/generated_pages/incidents/I00006.md @@ -16,7 +16,8 @@ | Technique | Description given for this incident | | --------- | ------------------------- | | [T0007 Create Inauthentic Social Media Pages and Groups](../../generated_pages/techniques/T0007.md) | IT00000029 Fake twitter profiles to amplify | -| [T0015 Create Hashtags and Search Artifacts](../../generated_pages/techniques/T0015.md) | IT00000027 Create and use hashtag | +| [T0015 Create Hashtags and Search Artefacts](../../generated_pages/techniques/T0015.md) | IT00000027 Create and use hashtag | +| [T0039 Bait Legitimate Influencers](../../generated_pages/techniques/T0039.md) | IT00000030 bait journalists/media/politicians | | [T0043 Chat Apps](../../generated_pages/techniques/T0043.md) | IT00000025 Use SMS/text messages | diff --git a/generated_pages/incidents/I00010.md b/generated_pages/incidents/I00010.md index 4bf5bc8..52dd8f3 100644 --- a/generated_pages/incidents/I00010.md +++ b/generated_pages/incidents/I00010.md @@ -18,7 +18,8 @@ | [T0007 Create Inauthentic Social Media Pages and Groups](../../generated_pages/techniques/T0007.md) | IT00000045 FB pages/groups/profiles | | [T0010 Cultivate Ignorant Agents](../../generated_pages/techniques/T0010.md) | IT00000044 cultivate, manipulate, exploit useful idiots (Alex Jones... drives conspiracy theories; false flags, crisis actors) | | [T0020 Trial Content](../../generated_pages/techniques/T0020.md) | IT00000048 4Chan/8Chan - trial content | -| [T0046 Use Search Engine Optimization](../../generated_pages/techniques/T0046.md) | IT00000043 SEO optimisation/manipulation ("key words") | +| [T0039 Bait Legitimate Influencers](../../generated_pages/techniques/T0039.md) | IT00000049 journalist/media baiting | +| [T0046 Use Search Engine Optimisation](../../generated_pages/techniques/T0046.md) | IT00000043 SEO optimisation/manipulation ("key words") | DO NOT EDIT ABOVE THIS LINE - PLEASE ADD NOTES BELOW \ No newline at end of file diff --git a/generated_pages/incidents/I00015.md b/generated_pages/incidents/I00015.md index 9e88bd1..e0a2e79 100644 --- a/generated_pages/incidents/I00015.md +++ b/generated_pages/incidents/I00015.md @@ -15,6 +15,7 @@ | Technique | Description given for this incident | | --------- | ------------------------- | +| [T0039 Bait Legitimate Influencers](../../generated_pages/techniques/T0039.md) | IT00000053 journalist/media baiting | | [T0044 Seed Distortions](../../generated_pages/techniques/T0044.md) | IT00000052 Circulate to media via DM, then release publicly | diff --git a/generated_pages/incidents/I00017.md b/generated_pages/incidents/I00017.md index 39d4be1..9918ee4 100644 --- a/generated_pages/incidents/I00017.md +++ b/generated_pages/incidents/I00017.md @@ -22,8 +22,8 @@ | [T0019 Generate Information Pollution](../../generated_pages/techniques/T0019.md) | IT00000061 RT & Sputnik generate information pollution | | [T0020 Trial Content](../../generated_pages/techniques/T0020.md) | IT00000070 4Chan/8Chan - trial content | | [T0029 Online Polls](../../generated_pages/techniques/T0029.md) | IT00000060 manipulate social media "online polls"? 
| -| [T0046 Use Search Engine Optimization](../../generated_pages/techniques/T0046.md) | IT00000071 SEO optimisation/manipulation ("key words") | -| [T0057 Organize Events](../../generated_pages/techniques/T0057.md) | IT00000059 Digital to physical "organize+promote" rallies & events | +| [T0046 Use Search Engine Optimisation](../../generated_pages/techniques/T0046.md) | IT00000071 SEO optimisation/manipulation ("key words") | +| [T0057 Organise Events](../../generated_pages/techniques/T0057.md) | IT00000059 Digital to physical "organize+promote" rallies & events | DO NOT EDIT ABOVE THIS LINE - PLEASE ADD NOTES BELOW \ No newline at end of file diff --git a/generated_pages/incidents/I00029.md b/generated_pages/incidents/I00029.md index 922a42d..7ae112e 100644 --- a/generated_pages/incidents/I00029.md +++ b/generated_pages/incidents/I00029.md @@ -18,7 +18,7 @@ | [T0010 Cultivate Ignorant Agents](../../generated_pages/techniques/T0010.md) | IT00000084 cultivate, manipulate, exploit useful idiots | | [T0019 Generate Information Pollution](../../generated_pages/techniques/T0019.md) | IT00000082 RT & Sputnik generate information pollution (synthetic media) | | [T0040 Demand Insurmountable Proof](../../generated_pages/techniques/T0040.md) | IT00000089 Demand insurmountable proof | -| [T0046 Use Search Engine Optimization](../../generated_pages/techniques/T0046.md) | IT00000085 SEO optimisation/manipulation ("key words") | +| [T0046 Use Search Engine Optimisation](../../generated_pages/techniques/T0046.md) | IT00000085 SEO optimisation/manipulation ("key words") | DO NOT EDIT ABOVE THIS LINE - PLEASE ADD NOTES BELOW \ No newline at end of file diff --git a/generated_pages/incidents/I00032.md b/generated_pages/incidents/I00032.md index 9663c21..19abd9c 100644 --- a/generated_pages/incidents/I00032.md +++ b/generated_pages/incidents/I00032.md @@ -19,8 +19,8 @@ | [T0010 Cultivate Ignorant Agents](../../generated_pages/techniques/T0010.md) | IT00000104 cultivate, manipulate, exploit useful idiots (Alex Jones... drives conspiracy theories) | | [T0019 Generate Information Pollution](../../generated_pages/techniques/T0019.md) | IT00000094 RT & Sputnik generate information pollution | | [T0020 Trial Content](../../generated_pages/techniques/T0020.md) | IT00000102 4Chan/8Chan - trial content | -| [T0046 Use Search Engine Optimization](../../generated_pages/techniques/T0046.md) | IT00000103 SEO optimisation/manipulation ("key words") | -| [T0057 Organize Events](../../generated_pages/techniques/T0057.md) | IT00000093 Digital to physical "organize+promote" rallies & events? | +| [T0046 Use Search Engine Optimisation](../../generated_pages/techniques/T0046.md) | IT00000103 SEO optimisation/manipulation ("key words") | +| [T0057 Organise Events](../../generated_pages/techniques/T0057.md) | IT00000093 Digital to physical "organize+promote" rallies & events? | DO NOT EDIT ABOVE THIS LINE - PLEASE ADD NOTES BELOW \ No newline at end of file diff --git a/generated_pages/incidents/I00044.md b/generated_pages/incidents/I00044.md index 204f6d8..f659844 100644 --- a/generated_pages/incidents/I00044.md +++ b/generated_pages/incidents/I00044.md @@ -19,7 +19,7 @@ | [T0010 Cultivate Ignorant Agents](../../generated_pages/techniques/T0010.md) | IT00000126 cultivate, manipulate, exploit useful idiots (Alex Jones... 
drives conspiracy theories) | | [T0019 Generate Information Pollution](../../generated_pages/techniques/T0019.md) | IT00000120 RT & Sputnik generate information pollution | | [T0020 Trial Content](../../generated_pages/techniques/T0020.md) | IT00000124 4Chan/8Chan - trial content | -| [T0046 Use Search Engine Optimization](../../generated_pages/techniques/T0046.md) | IT00000125 SEO optimisation/manipulation ("key words") | +| [T0046 Use Search Engine Optimisation](../../generated_pages/techniques/T0046.md) | IT00000125 SEO optimisation/manipulation ("key words") | DO NOT EDIT ABOVE THIS LINE - PLEASE ADD NOTES BELOW \ No newline at end of file diff --git a/generated_pages/incidents/I00049.md b/generated_pages/incidents/I00049.md index e95d9c7..80081ef 100644 --- a/generated_pages/incidents/I00049.md +++ b/generated_pages/incidents/I00049.md @@ -17,7 +17,7 @@ | --------- | ------------------------- | | [T0010 Cultivate Ignorant Agents](../../generated_pages/techniques/T0010.md) | IT00000139 cultivate, manipulate, exploit useful idiots (Roger Waters; Venessa Beeley...) | | [T0019 Generate Information Pollution](../../generated_pages/techniques/T0019.md) | IT00000137 RT & Sputnik generate information pollution (synthetic media) | -| [T0046 Use Search Engine Optimization](../../generated_pages/techniques/T0046.md) | IT00000140 SEO optimisation/manipulation ("key words") | +| [T0046 Use Search Engine Optimisation](../../generated_pages/techniques/T0046.md) | IT00000140 SEO optimisation/manipulation ("key words") | DO NOT EDIT ABOVE THIS LINE - PLEASE ADD NOTES BELOW \ No newline at end of file diff --git a/generated_pages/incidents/I00050.md b/generated_pages/incidents/I00050.md index bb1fa2c..8f859e1 100644 --- a/generated_pages/incidents/I00050.md +++ b/generated_pages/incidents/I00050.md @@ -20,7 +20,7 @@ Maduro has remained defiant in the face of domestic and international pressure, | --------- | ------------------------- | | [T0010 Cultivate Ignorant Agents](../../generated_pages/techniques/T0010.md) | IT00000148 cultivate, manipulate, exploit useful idiots (Roger Waters) | | [T0019 Generate Information Pollution](../../generated_pages/techniques/T0019.md) | IT00000146 RT & Sputnik generate information pollution (synthetic media) | -| [T0046 Use Search Engine Optimization](../../generated_pages/techniques/T0046.md) | IT00000149 SEO optimisation/manipulation ("key words") | +| [T0046 Use Search Engine Optimisation](../../generated_pages/techniques/T0046.md) | IT00000149 SEO optimisation/manipulation ("key words") | DO NOT EDIT ABOVE THIS LINE - PLEASE ADD NOTES BELOW \ No newline at end of file diff --git a/generated_pages/incidents/I00051.md b/generated_pages/incidents/I00051.md index 071de42..1d866b6 100644 --- a/generated_pages/incidents/I00051.md +++ b/generated_pages/incidents/I00051.md @@ -18,7 +18,7 @@ The FCO comments on the IfS were issued after a news report said the group had r | --------- | ------------------------- | | [T0010 Cultivate Ignorant Agents](../../generated_pages/techniques/T0010.md) | IT00000158 cultivate, manipulate, exploit useful idiots | | [T0019 Generate Information Pollution](../../generated_pages/techniques/T0019.md) | IT00000156 RT & Sputnik generate information pollution | -| [T0046 Use Search Engine Optimization](../../generated_pages/techniques/T0046.md) | IT00000161 SEO optimisation/manipulation ("key words") | +| [T0046 Use Search Engine Optimisation](../../generated_pages/techniques/T0046.md) | IT00000161 SEO optimisation/manipulation ("key words") | 
DO NOT EDIT ABOVE THIS LINE - PLEASE ADD NOTES BELOW \ No newline at end of file diff --git a/generated_pages/incidents/I00053.md b/generated_pages/incidents/I00053.md index 6e18f2d..07906cd 100644 --- a/generated_pages/incidents/I00053.md +++ b/generated_pages/incidents/I00053.md @@ -17,8 +17,8 @@ Geopolitically complex issue combines US/China trade; Security concerns/issues r | Technique | Description given for this incident | | --------- | ------------------------- | | [T0023 Distort Facts](../../generated_pages/techniques/T0023.md) | IT00000163 Distorted, saccharine “news” about the Chinese State and Party | -| [T0057 Organize Events](../../generated_pages/techniques/T0057.md) | IT00000164 Events coordinated and promoted across media platforms | -| [T0057 Organize Events](../../generated_pages/techniques/T0057.md) | IT00000166 Extend digital the physical space… gatherings ie: support for Meng outside courthouse | +| [T0057 Organise Events](../../generated_pages/techniques/T0057.md) | IT00000164 Events coordinated and promoted across media platforms | +| [T0057 Organise Events](../../generated_pages/techniques/T0057.md) | IT00000166 Extend digital the physical space… gatherings ie: support for Meng outside courthouse | DO NOT EDIT ABOVE THIS LINE - PLEASE ADD NOTES BELOW \ No newline at end of file diff --git a/generated_pages/incidents/I00056.md b/generated_pages/incidents/I00056.md index 2a9f1bd..919ad52 100644 --- a/generated_pages/incidents/I00056.md +++ b/generated_pages/incidents/I00056.md @@ -18,7 +18,7 @@ While there is history to Iran’s information/influence operations, starting wi | --------- | ------------------------- | | [T0007 Create Inauthentic Social Media Pages and Groups](../../generated_pages/techniques/T0007.md) | IT00000171 Fake FB groups/pages/profiles + dark content (non-paid advertising) | | [T0022 Leverage Conspiracy Theory Narratives](../../generated_pages/techniques/T0022.md) | IT00000174 Memes... 
anti-Isreal/USA/West, conspiracy narratives | -| [T0046 Use Search Engine Optimization](../../generated_pages/techniques/T0046.md) | IT00000172 SEO optimisation/manipulation ("key words") | +| [T0046 Use Search Engine Optimisation](../../generated_pages/techniques/T0046.md) | IT00000172 SEO optimisation/manipulation ("key words") | DO NOT EDIT ABOVE THIS LINE - PLEASE ADD NOTES BELOW \ No newline at end of file diff --git a/generated_pages/incidents/I00063.md b/generated_pages/incidents/I00063.md index f8799f4..78d8c99 100644 --- a/generated_pages/incidents/I00063.md +++ b/generated_pages/incidents/I00063.md @@ -19,7 +19,7 @@ The investigation found corroborating evidence after conducting witness intervie | --------- | ------------------------- | | [T0010 Cultivate Ignorant Agents](../../generated_pages/techniques/T0010.md) | IT00000182 cultivate, manipulate, exploit useful idiots | | [T0019 Generate Information Pollution](../../generated_pages/techniques/T0019.md) | IT00000180 RT & Sputnik generate information pollution (synthetic media) | -| [T0046 Use Search Engine Optimization](../../generated_pages/techniques/T0046.md) | IT00000183 SEO optimisation/manipulation ("key words") | +| [T0046 Use Search Engine Optimisation](../../generated_pages/techniques/T0046.md) | IT00000183 SEO optimisation/manipulation ("key words") | DO NOT EDIT ABOVE THIS LINE - PLEASE ADD NOTES BELOW \ No newline at end of file diff --git a/generated_pages/metatechniques/M001.md b/generated_pages/metatechniques/M001.md index bbf8816..c840af8 100644 --- a/generated_pages/metatechniques/M001.md +++ b/generated_pages/metatechniques/M001.md @@ -1,4 +1,4 @@ -# Metatechnique M001: resilience +# Metatechnique M001: Resilience * **Summary:** Increase the resilience to disinformation of the end subjects or other parts of the underlying system @@ -22,7 +22,7 @@ | [C00111 Reduce polarisation by connecting and presenting sympathetic renditions of opposite views](../../generated_pages/counters/C00111.md) | D04 | | [C00027 Create culture of civility](../../generated_pages/counters/C00027.md) | D07 | | [C00093 Influencer code of conduct](../../generated_pages/counters/C00093.md) | D07 | -| [C00121 Tool transparency and literacy for channels people follow. ](../../generated_pages/counters/C00121.md) | D07 | +| [C00121 Tool transparency and literacy for channels people follow.](../../generated_pages/counters/C00121.md) | D07 | | [C00130 Mentorship: elders, youth, credit. 
Learn vicariously.](../../generated_pages/counters/C00130.md) | D07 | diff --git a/generated_pages/metatechniques/M002.md b/generated_pages/metatechniques/M002.md index 16605ef..116b6c6 100644 --- a/generated_pages/metatechniques/M002.md +++ b/generated_pages/metatechniques/M002.md @@ -1,11 +1,11 @@ -# Metatechnique M002: diversion +# Metatechnique M002: Diversion * **Summary:** Create alternative channels, messages etc in disinformation-prone systems | Counters | Response types | | -------- | -------------- | -| [C00195 Redirect searches away from disinformation or extremist content ](../../generated_pages/counters/C00195.md) | D02 | +| [C00195 Redirect searches away from disinformation or extremist content](../../generated_pages/counters/C00195.md) | D02 | | [C00202 Set data 'honeytraps'](../../generated_pages/counters/C00202.md) | D02 | | [C00029 Create fake website to issue counter narrative and counter narrative through physical merchandise](../../generated_pages/counters/C00029.md) | D03 | | [C00030 Develop a compelling counter narrative (truth based)](../../generated_pages/counters/C00030.md) | D03 | diff --git a/generated_pages/metatechniques/M003.md b/generated_pages/metatechniques/M003.md index fd58720..709ffa5 100644 --- a/generated_pages/metatechniques/M003.md +++ b/generated_pages/metatechniques/M003.md @@ -1,4 +1,4 @@ -# Metatechnique M003: daylight +# Metatechnique M003: Daylight * **Summary:** Make disinformation objects, mechanisms, messaging etc visible diff --git a/generated_pages/metatechniques/M004.md b/generated_pages/metatechniques/M004.md index 5d76f4f..803faff 100644 --- a/generated_pages/metatechniques/M004.md +++ b/generated_pages/metatechniques/M004.md @@ -1,4 +1,4 @@ -# Metatechnique M004: friction +# Metatechnique M004: Friction * **Summary:** Slow down transmission or uptake of disinformation objects, messaging etc diff --git a/generated_pages/metatechniques/M005.md b/generated_pages/metatechniques/M005.md index aa1704f..50e7f33 100644 --- a/generated_pages/metatechniques/M005.md +++ b/generated_pages/metatechniques/M005.md @@ -1,4 +1,4 @@ -# Metatechnique M005: removal +# Metatechnique M005: Removal * **Summary:** Remove disinformation objects from the system diff --git a/generated_pages/metatechniques/M006.md b/generated_pages/metatechniques/M006.md index b3cd2e3..81e6175 100644 --- a/generated_pages/metatechniques/M006.md +++ b/generated_pages/metatechniques/M006.md @@ -1,4 +1,4 @@ -# Metatechnique M006: scoring +# Metatechnique M006: Scoring * **Summary:** Use a rating system diff --git a/generated_pages/metatechniques/M007.md b/generated_pages/metatechniques/M007.md index 40476fa..a2eb0a4 100644 --- a/generated_pages/metatechniques/M007.md +++ b/generated_pages/metatechniques/M007.md @@ -1,4 +1,4 @@ -# Metatechnique M007: metatechnique +# Metatechnique M007: Metatechnique * **Summary:** diff --git a/generated_pages/metatechniques/M008.md b/generated_pages/metatechniques/M008.md index af4a500..f5f8ceb 100644 --- a/generated_pages/metatechniques/M008.md +++ b/generated_pages/metatechniques/M008.md @@ -1,4 +1,4 @@ -# Metatechnique M008: data pollution +# Metatechnique M008: Data Pollution * **Summary:** Add artefacts to the underlying system that deliberately confound disinformation monitoring diff --git a/generated_pages/metatechniques/M009.md b/generated_pages/metatechniques/M009.md index acd86c3..b7141df 100644 --- a/generated_pages/metatechniques/M009.md +++ b/generated_pages/metatechniques/M009.md @@ -1,4 +1,4 @@ -# Metatechnique M009: dilution +# 
Metatechnique M009: Dilution * **Summary:** Dilute disinformation artefacts and messaging with other content (kittens!) diff --git a/generated_pages/metatechniques/M010.md b/generated_pages/metatechniques/M010.md index 8102a02..0819660 100644 --- a/generated_pages/metatechniques/M010.md +++ b/generated_pages/metatechniques/M010.md @@ -1,4 +1,4 @@ -# Metatechnique M010: countermessaging +# Metatechnique M010: Countermessaging * **Summary:** Create and distribute alternative messages to disinformation @@ -9,7 +9,7 @@ | [C00017 Repair broken social connections](../../generated_pages/counters/C00017.md) | D03 | | [C00082 Ground truthing as automated response to pollution](../../generated_pages/counters/C00082.md) | D03 | | [C00136 Microtarget most likely targets then send them countermessages](../../generated_pages/counters/C00136.md) | D03 | -| [C00156 Better tell your country or organization story](../../generated_pages/counters/C00156.md) | D03 | +| [C00156 Better tell your country or organisation story](../../generated_pages/counters/C00156.md) | D03 | | [C00169 develop a creative content hub](../../generated_pages/counters/C00169.md) | D03 | | [C00200 Respected figure (influencer) disavows misinfo](../../generated_pages/counters/C00200.md) | D03 | | [C00211 Use humorous counter-narratives](../../generated_pages/counters/C00211.md) | D03 | diff --git a/generated_pages/metatechniques/M011.md b/generated_pages/metatechniques/M011.md index 9c42c16..b18810d 100644 --- a/generated_pages/metatechniques/M011.md +++ b/generated_pages/metatechniques/M011.md @@ -1,4 +1,4 @@ -# Metatechnique M011: verification +# Metatechnique M011: Verification * **Summary:** Verify objects, content, connections etc. Includes fact-checking diff --git a/generated_pages/metatechniques/M012.md b/generated_pages/metatechniques/M012.md index 11ee86d..8dcb96d 100644 --- a/generated_pages/metatechniques/M012.md +++ b/generated_pages/metatechniques/M012.md @@ -1,4 +1,4 @@ -# Metatechnique M012: cleaning +# Metatechnique M012: Cleaning * **Summary:** Clean unneeded resources (accounts etc) from the underlying system so they can't be used in disinformation diff --git a/generated_pages/metatechniques/M013.md b/generated_pages/metatechniques/M013.md index c3bb582..c20ae4d 100644 --- a/generated_pages/metatechniques/M013.md +++ b/generated_pages/metatechniques/M013.md @@ -1,4 +1,4 @@ -# Metatechnique M013: targeting +# Metatechnique M013: Targeting * **Summary:** Target the components of a disinformation campaign diff --git a/generated_pages/metatechniques/M014.md b/generated_pages/metatechniques/M014.md index 5a7d1f0..03ea6a4 100644 --- a/generated_pages/metatechniques/M014.md +++ b/generated_pages/metatechniques/M014.md @@ -1,11 +1,11 @@ -# Metatechnique M014: reduce resources +# Metatechnique M014: Reduce Resources * **Summary:** Reduce the resources available to disinformation creators | Counters | Response types | | -------- | -------------- | -| [C00129 Use banking to cut off access ](../../generated_pages/counters/C00129.md) | D02 | +| [C00129 Use banking to cut off access](../../generated_pages/counters/C00129.md) | D02 | | [C00216 Use advertiser controls to stem flow of funds to bad actors](../../generated_pages/counters/C00216.md) | D02 | | [C00138 Spam domestic actors with lawsuits](../../generated_pages/counters/C00138.md) | D03 | | [C00144 Buy out troll farm employees / offer them jobs](../../generated_pages/counters/C00144.md) | D04 | diff --git a/generated_pages/metatechniques_by_responsetype_table.md 
b/generated_pages/metatechniques_by_responsetype_table.md index 8028146..d597159 100644 --- a/generated_pages/metatechniques_by_responsetype_table.md +++ b/generated_pages/metatechniques_by_responsetype_table.md @@ -10,7 +10,7 @@ D06 D07 TOTALS -M001 resilience +M001 Resilience 5 6 4 @@ -20,7 +20,7 @@ 19 -M002 diversion +M002 Diversion 2 8 1 @@ -30,7 +30,7 @@ 14 -M003 daylight +M003 Daylight 3 4 3 @@ -40,7 +40,7 @@ 12 -M004 friction +M004 Friction 10 6 3 @@ -50,7 +50,7 @@ 19 -M005 removal +M005 Removal 12 3 0 @@ -60,7 +60,7 @@ 15 -M006 scoring +M006 Scoring 2 0 2 @@ -70,7 +70,7 @@ 6 -M007 metatechnique +M007 Metatechnique 2 6 0 @@ -80,7 +80,7 @@ 11 -M008 data pollution +M008 Data Pollution 0 1 2 @@ -90,7 +90,7 @@ 4 -M009 dilution +M009 Dilution 0 4 2 @@ -100,7 +100,7 @@ 6 -M010 countermessaging +M010 Countermessaging 1 7 5 @@ -110,7 +110,7 @@ 14 -M011 verification +M011 Verification 2 1 0 @@ -120,7 +120,7 @@ 3 -M012 cleaning +M012 Cleaning 1 0 1 @@ -130,7 +130,7 @@ 2 -M013 targeting +M013 Targeting 1 6 3 @@ -140,7 +140,7 @@ 11 -M014 reduce resources +M014 Reduce Resources 2 1 1 diff --git a/generated_pages/metatechniques_index.md b/generated_pages/metatechniques_index.md index 18ca0a9..bc703be 100644 --- a/generated_pages/metatechniques_index.md +++ b/generated_pages/metatechniques_index.md @@ -8,72 +8,72 @@ M001 -resilience +Resilience Increase the resilience to disinformation of the end subjects or other parts of the underlying system M002 -diversion +Diversion Create alternative channels, messages etc in disinformation-prone systems M003 -daylight +Daylight Make disinformation objects, mechanisms, messaging etc visible M004 -friction +Friction Slow down transmission or uptake of disinformation objects, messaging etc M005 -removal +Removal Remove disinformation objects from the system M006 -scoring +Scoring Use a rating system M007 -metatechnique +Metatechnique M008 -data pollution +Data Pollution Add artefacts to the underlying system that deliberately confound disinformation monitoring M009 -dilution +Dilution Dilute disinformation artefacts and messaging with other content (kittens!) M010 -countermessaging +Countermessaging Create and distribute alternative messages to disinformation M011 -verification +Verification Verify objects, content, connections etc. 
Includes fact-checking M012 -cleaning +Cleaning Clean unneeded resources (accounts etc) from the underlying system so they can't be used in disinformation M013 -targeting +Targeting Target the components of a disinformation campaign M014 -reduce resources +Reduce Resources Reduce the resources available to disinformation creators diff --git a/generated_pages/phases/P04.md b/generated_pages/phases/P04.md index 20023ce..23ec520 100644 --- a/generated_pages/phases/P04.md +++ b/generated_pages/phases/P04.md @@ -1,5 +1,5 @@ # Phase P04: Assess -* **Summary:** Evaluate effectiveness of action, for use in future plans +* **Summary:** Evaluate effectiveness of action, for use in future plans DO NOT EDIT ABOVE THIS LINE - PLEASE ADD NOTES BELOW \ No newline at end of file diff --git a/generated_pages/phases_index.md b/generated_pages/phases_index.md index 73f20c9..2b27a95 100644 --- a/generated_pages/phases_index.md +++ b/generated_pages/phases_index.md @@ -24,6 +24,6 @@ P04 Assess -Evaluate effectiveness of action, for use in future plans +Evaluate effectiveness of action, for use in future plans diff --git a/generated_pages/responsetype_index.md b/generated_pages/responsetype_index.md index e083f90..6111d97 100644 --- a/generated_pages/responsetype_index.md +++ b/generated_pages/responsetype_index.md @@ -14,17 +14,17 @@ D02 Deny -Prevent disinformation creators from accessing and using critical information, systems, and services. Deny is for an indefinite time period. +Prevent disinformation creators from accessing and using critical information, systems, and services. Deny is for an indefinite time period. D03 Disrupt -Completely break or interrupt the flow of information, for a fixed amount of time. (Deny, for a limited time period). Not allowing any efficacy, for a short amount of time. +Completely break or interrupt the flow of information, for a fixed amount of time. (Deny, for a limited time period). Not allowing any efficacy, for a short amount of time. D04 Degrade -Reduce the effectiveness or efficiency of disinformation creators’ command and control or communications systems, and information collection efforts or means, either indefinitely, or for a limited time period. +Reduce the effectiveness or efficiency of disinformation creators’ command and control or communications systems, and information collection efforts or means, either indefinitely, or for a limited time period. D05 @@ -34,7 +34,7 @@ D06 Destroy -Damage a system or entity so badly that it cannot perform any function or be restored to a usable condition without being entirely rebuilt. Destroy is permanent, e.g. you can rebuild a website, but it’s not the same website. +Damage a system or entity so badly that it cannot perform any function or be restored to a usable condition without being entirely rebuilt. Destroy is permanent, e.g. you can rebuild a website, but it’s not the same website. D07 diff --git a/generated_pages/tactics/TA02.md b/generated_pages/tactics/TA02.md index afb28ca..9c2db32 100644 --- a/generated_pages/tactics/TA02.md +++ b/generated_pages/tactics/TA02.md @@ -1,10 +1,6 @@ # Tactic TA02: Plan Objectives -* **Summary:** Set clearly defined, measurable, and achievable objectives. Achieving objectives ties execution of tactical tasks to reaching the desired end state. 
There are four primary considerations: -- Each desired effect should link directly to one or more objectives -- The effect should be measurable -- The objective statement should not specify the way and means of accomplishment -- The effect should be distinguishable from the objective it supports as a condition for success, not as another objective or task. +* **Summary:** Set clearly defined, measurable, and achievable objectives. Achieving objectives ties execution of tactical tasks to reaching the desired end state. There are four primary considerations: - Each desired effect should link directly to one or more objectives - The effect should be measurable - The objective statement should not specify the way and means of accomplishment - The effect should be distinguishable from the objective it supports as a condition for success, not as another objective or task. * **Belongs to phase:** P01 @@ -45,7 +41,7 @@ | [C00030 Develop a compelling counter narrative (truth based)](../../generated_pages/counters/C00030.md) | D03 | | [C00031 Dilute the core narrative - create multiple permutations, target / amplify](../../generated_pages/counters/C00031.md) | D03 | | [C00060 Legal action against for-profit engagement factories](../../generated_pages/counters/C00060.md) | D03 | -| [C00156 Better tell your country or organization story](../../generated_pages/counters/C00156.md) | D03 | +| [C00156 Better tell your country or organisation story](../../generated_pages/counters/C00156.md) | D03 | | [C00164 compatriot policy](../../generated_pages/counters/C00164.md) | D03 | | [C00169 develop a creative content hub](../../generated_pages/counters/C00169.md) | D03 | | [C00222 Tabletop simulations](../../generated_pages/counters/C00222.md) | D03 | diff --git a/generated_pages/tactics/TA05.md b/generated_pages/tactics/TA05.md index 94ec184..080375a 100644 --- a/generated_pages/tactics/TA05.md +++ b/generated_pages/tactics/TA05.md @@ -16,7 +16,7 @@ | ---------- | | [T0016 Create Clickbait](../../generated_pages/techniques/T0016.md) | | [T0018 Purchase Targeted Advertisements](../../generated_pages/techniques/T0018.md) | -| [T0101 Create Localized Content](../../generated_pages/techniques/T0101.md) | +| [T0101 Create Localised Content](../../generated_pages/techniques/T0101.md) | | [T0102 Leverage Echo Chambers/Filter Bubbles](../../generated_pages/techniques/T0102.md) | | [T0102.001 Use Existing Echo Chambers/Filter Bubbles](../../generated_pages/techniques/T0102.001.md) | | [T0102.002 Create Echo Chambers/Filter Bubbles](../../generated_pages/techniques/T0102.002.md) | diff --git a/generated_pages/tactics/TA06.md b/generated_pages/tactics/TA06.md index ff0e209..da64537 100644 --- a/generated_pages/tactics/TA06.md +++ b/generated_pages/tactics/TA06.md @@ -16,7 +16,7 @@ | Techniques | | ---------- | -| [T0015 Create Hashtags and Search Artifacts](../../generated_pages/techniques/T0015.md) | +| [T0015 Create Hashtags and Search Artefacts](../../generated_pages/techniques/T0015.md) | | [T0019 Generate Information Pollution](../../generated_pages/techniques/T0019.md) | | [T0019.001 Create Fake Research](../../generated_pages/techniques/T0019.001.md) | | [T0019.002 Hijack Hashtags](../../generated_pages/techniques/T0019.002.md) | @@ -25,8 +25,8 @@ | [T0023.002 Edit Open-Source Content](../../generated_pages/techniques/T0023.002.md) | | [T0084 Reuse Existing Content](../../generated_pages/techniques/T0084.md) | | [T0084.001 Use Copypasta](../../generated_pages/techniques/T0084.001.md) | -| [T0084.002 Plagiarize 
Content](../../generated_pages/techniques/T0084.002.md) | -| [T0084.003 Deceptively Labeled or Translated](../../generated_pages/techniques/T0084.003.md) | +| [T0084.002 Plagiarise Content](../../generated_pages/techniques/T0084.002.md) | +| [T0084.003 Deceptively Labelled or Translated](../../generated_pages/techniques/T0084.003.md) | | [T0084.004 Appropriate Content](../../generated_pages/techniques/T0084.004.md) | | [T0085 Develop Text-Based Content](../../generated_pages/techniques/T0085.md) | | [T0085.001 Develop AI-Generated Text](../../generated_pages/techniques/T0085.001.md) | diff --git a/generated_pages/tactics/TA07.md b/generated_pages/tactics/TA07.md index 97086b0..244444a 100644 --- a/generated_pages/tactics/TA07.md +++ b/generated_pages/tactics/TA07.md @@ -1,6 +1,6 @@ # Tactic TA07: Select Channels and Affordances -* **Summary:** Selecting platforms and affordances assesses which online or offline platforms and their associated affordances maximize an influence operation’s ability to reach its target audience. To select the most appropriate platform(s), an operation may assess the technological affordances including platform algorithms, terms of service, permitted content types, or other attributes that determine platform usability and accessibility. Selecting platforms includes both choosing platforms on which the operation will publish its own content and platforms on which the operation will attempt to restrict adversarial content. +* **Summary:** Selecting platforms and affordances assesses which online or offline platforms and their associated affordances maximise an influence operation’s ability to reach its target audience. To select the most appropriate platform(s), an operation may assess the technological affordances including platform algorithms, terms of service, permitted content types, or other attributes that determine platform usability and accessibility. Selecting platforms includes both choosing platforms on which the operation will publish its own content and platforms on which the operation will attempt to restrict adversarial content. * **Belongs to phase:** P02 @@ -51,7 +51,7 @@ | [C00097 Require use of verified identities to contribute to poll or comment](../../generated_pages/counters/C00097.md) | D02 | | [C00098 Revocation of allowlisted or "verified" status](../../generated_pages/counters/C00098.md) | D02 | | [C00099 Strengthen verification methods](../../generated_pages/counters/C00099.md) | D02 | -| [C00195 Redirect searches away from disinformation or extremist content ](../../generated_pages/counters/C00195.md) | D02 | +| [C00195 Redirect searches away from disinformation or extremist content](../../generated_pages/counters/C00195.md) | D02 | | [C00105 Buy more advertising than misinformation creators](../../generated_pages/counters/C00105.md) | D03 | | [C00101 Create friction by rate-limiting engagement](../../generated_pages/counters/C00101.md) | D04 | | [C00090 Fake engagement system](../../generated_pages/counters/C00090.md) | D05 | diff --git a/generated_pages/tactics/TA08.md b/generated_pages/tactics/TA08.md index 497d3d3..39c73cc 100644 --- a/generated_pages/tactics/TA08.md +++ b/generated_pages/tactics/TA08.md @@ -1,6 +1,6 @@ # Tactic TA08: Conduct Pump Priming -* **Summary:** Release content on a targetted small scale, prior to general release, including releasing seed. Used for preparation before broader release, and as message honing. Used for preparation before broader release, and as message honing. 
+* **Summary:** Release content on a targetted small scale, prior to general release, including releasing seed. Used for preparation before broader release, and as message honing. Used for preparation before broader release, and as message honing. * **Belongs to phase:** P03 @@ -17,11 +17,11 @@ | Techniques | | ---------- | | [T0020 Trial Content](../../generated_pages/techniques/T0020.md) | -| [T0039 Bait Legitimate Influencers](../../generated_pages/techniques/T0039 .md) | +| [T0039 Bait Legitimate Influencers](../../generated_pages/techniques/T0039.md) | | [T0042 Seed Kernel of Truth](../../generated_pages/techniques/T0042.md) | | [T0044 Seed Distortions](../../generated_pages/techniques/T0044.md) | | [T0045 Use Fake Experts](../../generated_pages/techniques/T0045.md) | -| [T0046 Use Search Engine Optimization](../../generated_pages/techniques/T0046.md) | +| [T0046 Use Search Engine Optimisation](../../generated_pages/techniques/T0046.md) | | [T0113 Employ Commercial Analytic Firms](../../generated_pages/techniques/T0113.md) | @@ -42,7 +42,7 @@ | [C00184 Media exposure](../../generated_pages/counters/C00184.md) | D04 | | [C00119 Engage payload and debunk.](../../generated_pages/counters/C00119.md) | D07 | | [C00120 Open dialogue about design of platforms to produce different outcomes](../../generated_pages/counters/C00120.md) | D07 | -| [C00121 Tool transparency and literacy for channels people follow. ](../../generated_pages/counters/C00121.md) | D07 | +| [C00121 Tool transparency and literacy for channels people follow.](../../generated_pages/counters/C00121.md) | D07 | DO NOT EDIT ABOVE THIS LINE - PLEASE ADD NOTES BELOW \ No newline at end of file diff --git a/generated_pages/tactics/TA09.md b/generated_pages/tactics/TA09.md index 077a2f3..8cfda70 100644 --- a/generated_pages/tactics/TA09.md +++ b/generated_pages/tactics/TA09.md @@ -32,7 +32,7 @@ | Counters | Response types | | -------- | -------------- | | [C00122 Content moderation](../../generated_pages/counters/C00122.md) | D02 | -| [C00129 Use banking to cut off access ](../../generated_pages/counters/C00129.md) | D02 | +| [C00129 Use banking to cut off access](../../generated_pages/counters/C00129.md) | D02 | | [C00182 Redirection / malware detection/ remediation](../../generated_pages/counters/C00182.md) | D02 | | [C00109 Dampen Emotional Reaction](../../generated_pages/counters/C00109.md) | D03 | | [C00123 Remove or rate limit botnets](../../generated_pages/counters/C00123.md) | D03 | diff --git a/generated_pages/tactics/TA10.md b/generated_pages/tactics/TA10.md index d30b9c5..a1ebf8a 100644 --- a/generated_pages/tactics/TA10.md +++ b/generated_pages/tactics/TA10.md @@ -1,6 +1,6 @@ # Tactic TA10: Drive Offline Activity -* **Summary:** Move incident/campaign from online to offline. Encouraging users to move from the platform on which they initially viewed operation content and engage in the physical information space or offline world. This may include operation-aligned rallies or protests, radio, newspaper, or billboards. An influence operation may drive to physical forums to diversify its information channels and facilitate spaces where the target audience can engage with both operation content and like-minded individuals offline. +* **Summary:** Move incident/campaign from online to offline. Encouraging users to move from the platform on which they initially viewed operation content and engage in the physical information space or offline world. 
This may include operation-aligned rallies or protests, radio, newspaper, or billboards. An influence operation may drive to physical forums to diversify its information channels and facilitate spaces where the target audience can engage with both operation content and like-minded individuals offline. * **Belongs to phase:** P03 @@ -16,7 +16,7 @@ | ---------- | | [T0017 Conduct Fundraising](../../generated_pages/techniques/T0017.md) | | [T0017.001 Conduct Crowdfunding Campaigns](../../generated_pages/techniques/T0017.001.md) | -| [T0057 Organize Events](../../generated_pages/techniques/T0057.md) | +| [T0057 Organise Events](../../generated_pages/techniques/T0057.md) | | [T0057.001 Pay for Physical Action](../../generated_pages/techniques/T0057.001.md) | | [T0057.002 Conduct Symbolic Action](../../generated_pages/techniques/T0057.002.md) | | [T0061 Sell Merchandise](../../generated_pages/techniques/T0061.md) | diff --git a/generated_pages/tactics/TA11.md b/generated_pages/tactics/TA11.md index 909e9f8..e6aa785 100644 --- a/generated_pages/tactics/TA11.md +++ b/generated_pages/tactics/TA11.md @@ -1,6 +1,6 @@ # Tactic TA11: Persist in the Information Environment -* **Summary:** Persist in the Information Space refers to taking measures that allow an operation to maintain its presence and avoid takedown by an external entity. Techniques in Persist in the Information Space help campaigns operate without detection and appear legitimate to the target audience and platform monitoring services. Influence operations on social media often persist online by varying the type of information assets and platforms used throughout the campaign. +* **Summary:** Persist in the Information Space refers to taking measures that allow an operation to maintain its presence and avoid takedown by an external entity. Techniques in Persist in the Information Space help campaigns operate without detection and appear legitimate to the target audience and platform monitoring services. Influence operations on social media often persist online by varying the type of information assets and platforms used throughout the campaign. 
* **Belongs to phase:** P03 @@ -40,8 +40,8 @@ | [T0129.010 Misattribute Activity](../../generated_pages/techniques/T0129.010.md) | | [T0130 Conceal Infrastructure](../../generated_pages/techniques/T0130.md) | | [T0130.001 Conceal Sponsorship](../../generated_pages/techniques/T0130.001.md) | -| [T0130.002 Utilize Bulletproof Hosting](../../generated_pages/techniques/T0130.002.md) | -| [T0130.003 Use Shell Organizations](../../generated_pages/techniques/T0130.003.md) | +| [T0130.002 Utilise Bulletproof Hosting](../../generated_pages/techniques/T0130.002.md) | +| [T0130.003 Use Shell Organisations](../../generated_pages/techniques/T0130.003.md) | | [T0130.004 Use Cryptocurrency](../../generated_pages/techniques/T0130.004.md) | | [T0130.005 Obfuscate Payment](../../generated_pages/techniques/T0130.005.md) | | [T0131 Exploit TOS/Content Moderation](../../generated_pages/techniques/T0131.md) | diff --git a/generated_pages/tactics/TA12.md b/generated_pages/tactics/TA12.md index 2b9c3d8..9983ec3 100644 --- a/generated_pages/tactics/TA12.md +++ b/generated_pages/tactics/TA12.md @@ -1,6 +1,6 @@ # Tactic TA12: Assess Effectiveness -* **Summary:** Assess effectiveness of action, for use in future plans +* **Summary:** Assess effectiveness of action, for use in future plans * **Belongs to phase:** P04 @@ -22,7 +22,7 @@ | [T0132.002 Content Focused](../../generated_pages/techniques/T0132.002.md) | | [T0132.003 View Focused](../../generated_pages/techniques/T0132.003.md) | | [T0133 Measure Effectiveness](../../generated_pages/techniques/T0133.md) | -| [T0133.001 Behavior Changes](../../generated_pages/techniques/T0133.001.md) | +| [T0133.001 Behaviour Changes](../../generated_pages/techniques/T0133.001.md) | | [T0133.002 Content](../../generated_pages/techniques/T0133.002.md) | | [T0133.003 Awareness](../../generated_pages/techniques/T0133.003.md) | | [T0133.004 Knowledge](../../generated_pages/techniques/T0133.004.md) | diff --git a/generated_pages/tactics/TA13.md b/generated_pages/tactics/TA13.md index 2c9f7f5..fba78fe 100644 --- a/generated_pages/tactics/TA13.md +++ b/generated_pages/tactics/TA13.md @@ -1,6 +1,6 @@ # Tactic TA13: Target Audience Analysis -* **Summary:** Identifying and analyzing the target audience examines target audience member locations, political affiliations, financial situations, and other attributes that an influence operation may incorporate into its messaging strategy. During this tactic, influence operations may also identify existing similarities and differences between target audience members to unite like groups and divide opposing groups. Identifying and analyzing target audience members allows influence operations to tailor operation strategy and content to their analysis. +* **Summary:** Identifying and analysing the target audience examines target audience member locations, political affiliations, financial situations, and other attributes that an influence operation may incorporate into its messaging strategy. During this tactic, influence operations may also identify existing similarities and differences between target audience members to unite like groups and divide opposing groups. Identifying and analysing target audience members allows influence operations to tailor operation strategy and content to their analysis. 
* **Belongs to phase:** P01 diff --git a/generated_pages/tactics/TA14.md b/generated_pages/tactics/TA14.md index 5d1af02..a724cab 100644 --- a/generated_pages/tactics/TA14.md +++ b/generated_pages/tactics/TA14.md @@ -1,6 +1,6 @@ # Tactic TA14: Develop Narratives -* **Summary:** The promotion of beneficial master narratives is perhaps the most effective method for achieving long-term strategic narrative dominance. From a ""whole of society"" perspective the promotion of the society's core master narratives should occupy a central strategic role. From a misinformation campaign / cognitive security perpectve the tactics around master narratives center more precisely on the day-to-day promotion and reinforcement of this messaging. In other words, beneficial, high-coverage master narratives are a central strategic goal and their promotion constitutes an ongoing tactical struggle carried out at a whole-of-society level. Tactically, their promotion covers a broad spectrum of activities both on- and offline. +* **Summary:** The promotion of beneficial master narratives is perhaps the most effective method for achieving long-term strategic narrative dominance. From a ""whole of society"" perspective the promotion of the society's core master narratives should occupy a central strategic role. From a misinformation campaign / cognitive security perpectve the tactics around master narratives centre more precisely on the day-to-day promotion and reinforcement of this messaging. In other words, beneficial, high-coverage master narratives are a central strategic goal and their promotion constitutes an ongoing tactical struggle carried out at a whole-of-society level. Tactically, their promotion covers a broad spectrum of activities both on- and offline. * **Belongs to phase:** P02 diff --git a/generated_pages/tactics/TA15.md b/generated_pages/tactics/TA15.md index 2c96a17..3c36e21 100644 --- a/generated_pages/tactics/TA15.md +++ b/generated_pages/tactics/TA15.md @@ -1,7 +1,6 @@ # Tactic TA15: Establish Social Assets -* **Summary:** Establishing information assets generates messaging tools, including social media accounts, operation personnel, and organizations, including directly and indirectly managed assets. For assets under their direct control, the operation can add, change, or remove these assets at will. -Establishing information assets allows an influence operation to promote messaging directly to the target audience without navigating through external entities. Many online influence operations create or compromise social media accounts as a primary vector of information dissemination. +* **Summary:** Establishing information assets generates messaging tools, including social media accounts, operation personnel, and organisations, including directly and indirectly managed assets. For assets under their direct control, the operation can add, change, or remove these assets at will. Establishing information assets allows an influence operation to promote messaging directly to the target audience without navigating through external entities. Many online influence operations create or compromise social media accounts as a primary vector of information dissemination. 
* **Belongs to phase:** P02 @@ -40,7 +39,7 @@ Establishing information assets allows an influence operation to promote messagi | [T0091.002 Recruit Partisans](../../generated_pages/techniques/T0091.002.md) | | [T0091.003 Enlist Troll Accounts](../../generated_pages/techniques/T0091.003.md) | | [T0092 Build Network](../../generated_pages/techniques/T0092.md) | -| [T0092.001 Create Organizations](../../generated_pages/techniques/T0092.001.md) | +| [T0092.001 Create Organisations](../../generated_pages/techniques/T0092.001.md) | | [T0092.002 Use Follow Trains](../../generated_pages/techniques/T0092.002.md) | | [T0092.003 Create Community or Sub-Group](../../generated_pages/techniques/T0092.003.md) | | [T0093 Acquire/Recruit Network](../../generated_pages/techniques/T0093.md) | @@ -48,11 +47,11 @@ Establishing information assets allows an influence operation to promote messagi | [T0093.002 Acquire Botnets](../../generated_pages/techniques/T0093.002.md) | | [T0094 Infiltrate Existing Networks](../../generated_pages/techniques/T0094.md) | | [T0094.001 Identify Susceptible Targets in Networks](../../generated_pages/techniques/T0094.001.md) | -| [T0094.002 Utilize Butterfly Attacks](../../generated_pages/techniques/T0094.002.md) | +| [T0094.002 Utilise Butterfly Attacks](../../generated_pages/techniques/T0094.002.md) | | [T0095 Develop Owned Media Assets](../../generated_pages/techniques/T0095.md) | | [T0096 Leverage Content Farms](../../generated_pages/techniques/T0096.md) | | [T0096.001 Create Content Farms](../../generated_pages/techniques/T0096.001.md) | -| [T0096.002 Outsource Content Creation to External Organizations](../../generated_pages/techniques/T0096.002.md) | +| [T0096.002 Outsource Content Creation to External Organisations](../../generated_pages/techniques/T0096.002.md) | diff --git a/generated_pages/tactics/TA16.md b/generated_pages/tactics/TA16.md index f5f611e..14c8663 100644 --- a/generated_pages/tactics/TA16.md +++ b/generated_pages/tactics/TA16.md @@ -14,7 +14,7 @@ | Techniques | | ---------- | | [T0009 Create Fake Experts](../../generated_pages/techniques/T0009.md) | -| [T0009.001 Utilize Academic/Pseudoscientific Justifications](../../generated_pages/techniques/T0009.001.md) | +| [T0009.001 Utilise Academic/Pseudoscientific Justifications](../../generated_pages/techniques/T0009.001.md) | | [T0011 Compromise Legitimate Accounts](../../generated_pages/techniques/T0011.md) | | [T0097 Create Personas](../../generated_pages/techniques/T0097.md) | | [T0097.001 Backstop Personas](../../generated_pages/techniques/T0097.001.md) | diff --git a/generated_pages/tactics/TA17.md b/generated_pages/tactics/TA17.md index 540bf4b..080336e 100644 --- a/generated_pages/tactics/TA17.md +++ b/generated_pages/tactics/TA17.md @@ -1,6 +1,6 @@ -# Tactic TA17: Maximize Exposure +# Tactic TA17: Maximise Exposure -* **Summary:** Maximize exposure of the target audience to incident/campaign content via flooding, amplifying, and cross-posting. +* **Summary:** Maximise exposure of the target audience to incident/campaign content via flooding, amplifying, and cross-posting. 
* **Belongs to phase:** P03 @@ -17,7 +17,7 @@ | [T0049.001 Trolls Amplify and Manipulate](../../generated_pages/techniques/T0049.001.md) | | [T0049.002 Hijack Existing Hashtag](../../generated_pages/techniques/T0049.002.md) | | [T0049.003 Bots Amplify via Automated Forwarding and Reposting](../../generated_pages/techniques/T0049.003.md) | -| [T0049.004 Utilize Spamoflauge](../../generated_pages/techniques/T0049.004.md) | +| [T0049.004 Utilise Spamoflauge](../../generated_pages/techniques/T0049.004.md) | | [T0049.005 Conduct Swarming](../../generated_pages/techniques/T0049.005.md) | | [T0049.006 Conduct Keyword Squatting](../../generated_pages/techniques/T0049.006.md) | | [T0049.007 Inauthentic Sites Amplify News and Narratives](../../generated_pages/techniques/T0049.007.md) | @@ -27,7 +27,7 @@ | [T0119.002 Post across Platform](../../generated_pages/techniques/T0119.002.md) | | [T0119.003 Post across Disciplines](../../generated_pages/techniques/T0119.003.md) | | [T0120 Incentivize Sharing](../../generated_pages/techniques/T0120.md) | -| [T0120.001 Use Affiliate Marketing Programs](../../generated_pages/techniques/T0120.001.md) | +| [T0120.001 Use Affiliate Marketing Programmes](../../generated_pages/techniques/T0120.001.md) | | [T0120.002 Use Contests and Prizes](../../generated_pages/techniques/T0120.002.md) | | [T0121 Manipulate Platform Algorithm](../../generated_pages/techniques/T0121.md) | | [T0121.001 Bypass Content Blocking](../../generated_pages/techniques/T0121.001.md) | diff --git a/generated_pages/tactics/TA18.md b/generated_pages/tactics/TA18.md index 221de45..20e0223 100644 --- a/generated_pages/tactics/TA18.md +++ b/generated_pages/tactics/TA18.md @@ -1,6 +1,6 @@ # Tactic TA18: Drive Online Harms -* **Summary:** Actions taken by an influence operation to harm their opponents in online spaces through harassment, suppression, releasing private information, and controlling the information space through offensive cyberspace operations. +* **Summary:** Actions taken by an influence operation to harm their opponents in online spaces through harassment, suppression, releasing private information, and controlling the information space through offensive cyberspace operations. * **Belongs to phase:** P03 diff --git a/generated_pages/tactics_index.md b/generated_pages/tactics_index.md index 46d8ebe..3a81bdd 100644 --- a/generated_pages/tactics_index.md +++ b/generated_pages/tactics_index.md @@ -16,11 +16,7 @@ TA02 Plan Objectives -Set clearly defined, measurable, and achievable objectives. Achieving objectives ties execution of tactical tasks to reaching the desired end state. There are four primary considerations: -- Each desired effect should link directly to one or more objectives -- The effect should be measurable -- The objective statement should not specify the way and means of accomplishment -- The effect should be distinguishable from the objective it supports as a condition for success, not as another objective or task. +Set clearly defined, measurable, and achievable objectives. Achieving objectives ties execution of tactical tasks to reaching the desired end state. There are four primary considerations: - Each desired effect should link directly to one or more objectives - The effect should be measurable - The objective statement should not specify the way and means of accomplishment - The effect should be distinguishable from the objective it supports as a condition for success, not as another objective or task. 
P01 @@ -38,13 +34,13 @@ TA07 Select Channels and Affordances -Selecting platforms and affordances assesses which online or offline platforms and their associated affordances maximize an influence operation’s ability to reach its target audience. To select the most appropriate platform(s), an operation may assess the technological affordances including platform algorithms, terms of service, permitted content types, or other attributes that determine platform usability and accessibility. Selecting platforms includes both choosing platforms on which the operation will publish its own content and platforms on which the operation will attempt to restrict adversarial content. +Selecting platforms and affordances assesses which online or offline platforms and their associated affordances maximise an influence operation’s ability to reach its target audience. To select the most appropriate platform(s), an operation may assess the technological affordances including platform algorithms, terms of service, permitted content types, or other attributes that determine platform usability and accessibility. Selecting platforms includes both choosing platforms on which the operation will publish its own content and platforms on which the operation will attempt to restrict adversarial content. P02 TA08 Conduct Pump Priming -Release content on a targetted small scale, prior to general release, including releasing seed. Used for preparation before broader release, and as message honing. Used for preparation before broader release, and as message honing. +Release content on a targetted small scale, prior to general release, including releasing seed. Used for preparation before broader release, and as message honing. Used for preparation before broader release, and as message honing. P03 @@ -56,38 +52,37 @@ TA10 Drive Offline Activity -Move incident/campaign from online to offline. Encouraging users to move from the platform on which they initially viewed operation content and engage in the physical information space or offline world. This may include operation-aligned rallies or protests, radio, newspaper, or billboards. An influence operation may drive to physical forums to diversify its information channels and facilitate spaces where the target audience can engage with both operation content and like-minded individuals offline. +Move incident/campaign from online to offline. Encouraging users to move from the platform on which they initially viewed operation content and engage in the physical information space or offline world. This may include operation-aligned rallies or protests, radio, newspaper, or billboards. An influence operation may drive to physical forums to diversify its information channels and facilitate spaces where the target audience can engage with both operation content and like-minded individuals offline. P03 TA11 Persist in the Information Environment -Persist in the Information Space refers to taking measures that allow an operation to maintain its presence and avoid takedown by an external entity. Techniques in Persist in the Information Space help campaigns operate without detection and appear legitimate to the target audience and platform monitoring services. Influence operations on social media often persist online by varying the type of information assets and platforms used throughout the campaign. +Persist in the Information Space refers to taking measures that allow an operation to maintain its presence and avoid takedown by an external entity. 
Techniques in Persist in the Information Space help campaigns operate without detection and appear legitimate to the target audience and platform monitoring services. Influence operations on social media often persist online by varying the type of information assets and platforms used throughout the campaign. P03 TA12 Assess Effectiveness -Assess effectiveness of action, for use in future plans +Assess effectiveness of action, for use in future plans P04 TA13 Target Audience Analysis -Identifying and analyzing the target audience examines target audience member locations, political affiliations, financial situations, and other attributes that an influence operation may incorporate into its messaging strategy. During this tactic, influence operations may also identify existing similarities and differences between target audience members to unite like groups and divide opposing groups. Identifying and analyzing target audience members allows influence operations to tailor operation strategy and content to their analysis. +Identifying and analysing the target audience examines target audience member locations, political affiliations, financial situations, and other attributes that an influence operation may incorporate into its messaging strategy. During this tactic, influence operations may also identify existing similarities and differences between target audience members to unite like groups and divide opposing groups. Identifying and analysing target audience members allows influence operations to tailor operation strategy and content to their analysis. P01 TA14 Develop Narratives -The promotion of beneficial master narratives is perhaps the most effective method for achieving long-term strategic narrative dominance. From a ""whole of society"" perspective the promotion of the society's core master narratives should occupy a central strategic role. From a misinformation campaign / cognitive security perpectve the tactics around master narratives center more precisely on the day-to-day promotion and reinforcement of this messaging. In other words, beneficial, high-coverage master narratives are a central strategic goal and their promotion constitutes an ongoing tactical struggle carried out at a whole-of-society level. Tactically, their promotion covers a broad spectrum of activities both on- and offline. +The promotion of beneficial master narratives is perhaps the most effective method for achieving long-term strategic narrative dominance. From a ""whole of society"" perspective the promotion of the society's core master narratives should occupy a central strategic role. From a misinformation campaign / cognitive security perpectve the tactics around master narratives centre more precisely on the day-to-day promotion and reinforcement of this messaging. In other words, beneficial, high-coverage master narratives are a central strategic goal and their promotion constitutes an ongoing tactical struggle carried out at a whole-of-society level. Tactically, their promotion covers a broad spectrum of activities both on- and offline. P02 TA15 Establish Social Assets -Establishing information assets generates messaging tools, including social media accounts, operation personnel, and organizations, including directly and indirectly managed assets. For assets under their direct control, the operation can add, change, or remove these assets at will. 
-Establishing information assets allows an influence operation to promote messaging directly to the target audience without navigating through external entities. Many online influence operations create or compromise social media accounts as a primary vector of information dissemination. +Establishing information assets generates messaging tools, including social media accounts, operation personnel, and organisations, including directly and indirectly managed assets. For assets under their direct control, the operation can add, change, or remove these assets at will. Establishing information assets allows an influence operation to promote messaging directly to the target audience without navigating through external entities. Many online influence operations create or compromise social media accounts as a primary vector of information dissemination. P02 @@ -98,14 +93,14 @@ Establishing information assets allows an influence operation to promote messagi TA17 -Maximize Exposure -Maximize exposure of the target audience to incident/campaign content via flooding, amplifying, and cross-posting. +Maximise Exposure +Maximise exposure of the target audience to incident/campaign content via flooding, amplifying, and cross-posting. P03 TA18 Drive Online Harms -Actions taken by an influence operation to harm their opponents in online spaces through harassment, suppression, releasing private information, and controlling the information space through offensive cyberspace operations. +Actions taken by an influence operation to harm their opponents in online spaces through harassment, suppression, releasing private information, and controlling the information space through offensive cyberspace operations. P03 diff --git a/generated_pages/tasks/TK0001.md b/generated_pages/tasks/TK0001.md index 38b0f93..8bd9e4d 100644 --- a/generated_pages/tasks/TK0001.md +++ b/generated_pages/tasks/TK0001.md @@ -1,6 +1,6 @@ # Task TK0001: Goal setting -* **Summary:** Set the goals for this incident. +* **Summary:** Set the goals for this incident. * **Belongs to tactic stage:** TA01 diff --git a/generated_pages/tasks/TK0002.md b/generated_pages/tasks/TK0002.md index 52e9e12..895bd33 100644 --- a/generated_pages/tasks/TK0002.md +++ b/generated_pages/tasks/TK0002.md @@ -1,6 +1,6 @@ # Task TK0002: Population research / audience analysis (centre of gravity) -* **Summary:** Research intended audience. Includes audience segmentation, hot-button issues etc. +* **Summary:** Research intended audience. Includes audience segmentation, hot-button issues etc. * **Belongs to tactic stage:** TA01 diff --git a/generated_pages/tasks_index.md b/generated_pages/tasks_index.md index 5965080..c6de61c 100644 --- a/generated_pages/tasks_index.md +++ b/generated_pages/tasks_index.md @@ -10,13 +10,13 @@ TK0001 Goal setting -Set the goals for this incident. +Set the goals for this incident. TA01 TK0002 Population research / audience analysis (centre of gravity) -Research intended audience. Includes audience segmentation, hot-button issues etc. +Research intended audience. Includes audience segmentation, hot-button issues etc. TA01 diff --git a/generated_pages/techniques/T0002.md b/generated_pages/techniques/T0002.md index 482733e..ba0220c 100644 --- a/generated_pages/techniques/T0002.md +++ b/generated_pages/techniques/T0002.md @@ -1,6 +1,6 @@ # Technique T0002: Facilitate State Propaganda -* **Summary**: Organize citizens around pro-state messaging. Coordinate paid or volunteer groups to push state propaganda. 
+* **Summary**: Organise citizens around pro-state messaging. Coordinate paid or volunteer groups to push state propaganda. * **Belongs to tactic stage**: TA02 @@ -14,31 +14,11 @@ | Counters | Response types | | -------- | -------------- | -| [C00008 Create shared fact-checking database](../../generated_pages/counters/C00008.md) | D04 | -| [C00011 Media literacy. Games to identify fake news](../../generated_pages/counters/C00011.md) | D02 | -| [C00012 Platform regulation](../../generated_pages/counters/C00012.md) | D02 | -| [C00013 Rating framework for news](../../generated_pages/counters/C00013.md) | D02 | -| [C00014 Real-time updates to fact-checking database](../../generated_pages/counters/C00014.md) | D04 | -| [C00016 Censorship](../../generated_pages/counters/C00016.md) | D02 | -| [C00017 Repair broken social connections](../../generated_pages/counters/C00017.md) | D03 | -| [C00019 Reduce effect of division-enablers](../../generated_pages/counters/C00019.md) | D03 | -| [C00021 Encourage in-person communication](../../generated_pages/counters/C00021.md) | D04 | -| [C00022 Innoculate. Positive campaign to promote feeling of safety](../../generated_pages/counters/C00022.md) | D04 | -| [C00024 Promote healthy narratives](../../generated_pages/counters/C00024.md) | D04 | -| [C00026 Shore up democracy based messages](../../generated_pages/counters/C00026.md) | D04 | -| [C00027 Create culture of civility](../../generated_pages/counters/C00027.md) | D07 | | [C00029 Create fake website to issue counter narrative and counter narrative through physical merchandise](../../generated_pages/counters/C00029.md) | D03 | | [C00030 Develop a compelling counter narrative (truth based)](../../generated_pages/counters/C00030.md) | D03 | | [C00031 Dilute the core narrative - create multiple permutations, target / amplify](../../generated_pages/counters/C00031.md) | D03 | | [C00082 Ground truthing as automated response to pollution](../../generated_pages/counters/C00082.md) | D03 | | [C00084 Modify disinformation narratives, and rebroadcast them](../../generated_pages/counters/C00084.md) | D03 | -| [C00125 Prebunking](../../generated_pages/counters/C00125.md) | D03 | -| [C00126 Social media amber alert](../../generated_pages/counters/C00126.md) | D03 | -| [C00156 Better tell your country or organization story](../../generated_pages/counters/C00156.md) | D03 | -| [C00161 Coalition Building with stakeholders and Third-Party Inducements](../../generated_pages/counters/C00161.md) | D07 | -| [C00162 Unravel/target the Potemkin villages](../../generated_pages/counters/C00162.md) | D03 | -| [C00164 compatriot policy](../../generated_pages/counters/C00164.md) | D03 | -| [C00169 develop a creative content hub](../../generated_pages/counters/C00169.md) | D03 | DO NOT EDIT ABOVE THIS LINE - PLEASE ADD NOTES BELOW \ No newline at end of file diff --git a/generated_pages/techniques/T0003.md b/generated_pages/techniques/T0003.md index 5d6034a..5b576d2 100644 --- a/generated_pages/techniques/T0003.md +++ b/generated_pages/techniques/T0003.md @@ -1,6 +1,6 @@ # Technique T0003: Leverage Existing Narratives -* **Summary**: Use or adapt existing narrative themes, where narratives are the baseline stories of a target audience. Narratives form the bedrock of our worldviews. New information is understood through a process firmly grounded in this bedrock. If new information is not consitent with the prevailing narratives of an audience, it will be ignored. 
Effective campaigns will frame their misinformation in the context of these narratives. Highly effective campaigns will make extensive use of audience-appropriate archetypes and meta-narratives throughout their content creation and amplifiction practices. +* **Summary**: Use or adapt existing narrative themes, where narratives are the baseline stories of a target audience. Narratives form the bedrock of our worldviews. New information is understood through a process firmly grounded in this bedrock. If new information is not consistent with the prevailing narratives of an audience, it will be ignored. Effective campaigns will frame their misinformation in the context of these narratives. Highly effective campaigns will make extensive use of audience-appropriate archetypes and meta-narratives throughout their content creation and amplification practices. * **Belongs to tactic stage**: TA14 @@ -12,37 +12,8 @@ | Counters | Response types | | -------- | -------------- | -| [C00008 Create shared fact-checking database](../../generated_pages/counters/C00008.md) | D04 | -| [C00011 Media literacy. Games to identify fake news](../../generated_pages/counters/C00011.md) | D02 | -| [C00012 Platform regulation](../../generated_pages/counters/C00012.md) | D02 | -| [C00013 Rating framework for news](../../generated_pages/counters/C00013.md) | D02 | -| [C00014 Real-time updates to fact-checking database](../../generated_pages/counters/C00014.md) | D04 | -| [C00016 Censorship](../../generated_pages/counters/C00016.md) | D02 | -| [C00017 Repair broken social connections](../../generated_pages/counters/C00017.md) | D03 | -| [C00019 Reduce effect of division-enablers](../../generated_pages/counters/C00019.md) | D03 | -| [C00021 Encourage in-person communication](../../generated_pages/counters/C00021.md) | D04 | -| [C00022 Innoculate.
Positive campaign to promote feeling of safety](../../generated_pages/counters/C00022.md) | D04 | -| [C00024 Promote healthy narratives](../../generated_pages/counters/C00024.md) | D04 | -| [C00027 Create culture of civility](../../generated_pages/counters/C00027.md) | D07 | -| [C00029 Create fake website to issue counter narrative and counter narrative through physical merchandise](../../generated_pages/counters/C00029.md) | D03 | -| [C00030 Develop a compelling counter narrative (truth based)](../../generated_pages/counters/C00030.md) | D03 | -| [C00031 Dilute the core narrative - create multiple permutations, target / amplify](../../generated_pages/counters/C00031.md) | D03 | | [C00080 Create competing narrative](../../generated_pages/counters/C00080.md) | D03 | | [C00081 Highlight flooding and noise, and explain motivations](../../generated_pages/counters/C00081.md) | D03 | -| [C00082 Ground truthing as automated response to pollution](../../generated_pages/counters/C00082.md) | D03 | -| [C00084 Modify disinformation narratives, and rebroadcast them](../../generated_pages/counters/C00084.md) | D03 | -| [C00125 Prebunking](../../generated_pages/counters/C00125.md) | D03 | -| [C00126 Social media amber alert](../../generated_pages/counters/C00126.md) | D03 | -| [C00156 Better tell your country or organization story](../../generated_pages/counters/C00156.md) | D03 | -| [C00161 Coalition Building with stakeholders and Third-Party Inducements](../../generated_pages/counters/C00161.md) | D07 | -| [C00162 Unravel/target the Potemkin villages](../../generated_pages/counters/C00162.md) | D03 | -| [C00164 compatriot policy](../../generated_pages/counters/C00164.md) | D03 | -| [C00169 develop a creative content hub](../../generated_pages/counters/C00169.md) | D03 | -| [C00176 Improve Coordination amongst stakeholders: public and private](../../generated_pages/counters/C00176.md) | D07 | -| [C00188 Newsroom/Journalist training to counter influence moves](../../generated_pages/counters/C00188.md) | D03 | -| [C00190 open engagement with civil society](../../generated_pages/counters/C00190.md) | D03 | -| [C00205 strong dialogue between the federal government and private sector to encourage better reporting](../../generated_pages/counters/C00205.md) | D03 | -| [C00212 build public resilience by making civil society more vibrant](../../generated_pages/counters/C00212.md) | D03 | DO NOT EDIT ABOVE THIS LINE - PLEASE ADD NOTES BELOW \ No newline at end of file diff --git a/generated_pages/techniques/T0004.md b/generated_pages/techniques/T0004.md index bf64de0..02be11e 100644 --- a/generated_pages/techniques/T0004.md +++ b/generated_pages/techniques/T0004.md @@ -1,6 +1,6 @@ # Technique T0004: Develop Competing Narratives -* **Summary**: Advance competing narratives connected to same issue ie: on one hand deny incident while at same time expresses dismiss. Suppressing or discouraging narratives already spreading requires an alternative. The most simple set of narrative techniques in response would be the construction and promotion of contradictory alternatives centered on denial, deflection, dismissal, counter-charges, excessive standards of proof, bias in prohibition or enforcement, and so on. These competing narratives allow loyalists cover, but are less compelling to opponents and fence-sitters than campaigns built around existing narratives or highly explanatory master narratives. Competing narratives, as such, are especially useful in the "firehose of misinformation" approach. 
+* **Summary**: Advance competing narratives connected to the same issue, i.e. on one hand deny the incident while at the same time expressing dismissal. Suppressing or discouraging narratives already spreading requires an alternative. The simplest set of narrative techniques in response would be the construction and promotion of contradictory alternatives centred on denial, deflection, dismissal, counter-charges, excessive standards of proof, bias in prohibition or enforcement, and so on. These competing narratives allow loyalists cover, but are less compelling to opponents and fence-sitters than campaigns built around existing narratives or highly explanatory master narratives. Competing narratives, as such, are especially useful in the "firehose of misinformation" approach. * **Belongs to tactic stage**: TA14 @@ -13,17 +13,6 @@ | Counters | Response types | | -------- | -------------- | | [C00042 Address truth contained in narratives](../../generated_pages/counters/C00042.md) | D04 | -| [C00080 Create competing narrative](../../generated_pages/counters/C00080.md) | D03 | -| [C00081 Highlight flooding and noise, and explain motivations](../../generated_pages/counters/C00081.md) | D03 | -| [C00082 Ground truthing as automated response to pollution](../../generated_pages/counters/C00082.md) | D03 | -| [C00084 Modify disinformation narratives, and rebroadcast them](../../generated_pages/counters/C00084.md) | D03 | -| [C00125 Prebunking](../../generated_pages/counters/C00125.md) | D03 | -| [C00126 Social media amber alert](../../generated_pages/counters/C00126.md) | D03 | -| [C00156 Better tell your country or organization story](../../generated_pages/counters/C00156.md) | D03 | -| [C00161 Coalition Building with stakeholders and Third-Party Inducements](../../generated_pages/counters/C00161.md) | D07 | -| [C00162 Unravel/target the Potemkin villages](../../generated_pages/counters/C00162.md) | D03 | -| [C00164 compatriot policy](../../generated_pages/counters/C00164.md) | D03 | -| [C00169 develop a creative content hub](../../generated_pages/counters/C00169.md) | D03 | DO NOT EDIT ABOVE THIS LINE - PLEASE ADD NOTES BELOW \ No newline at end of file diff --git a/generated_pages/techniques/T0007.md b/generated_pages/techniques/T0007.md index 5760ddb..ee72108 100644 --- a/generated_pages/techniques/T0007.md +++ b/generated_pages/techniques/T0007.md @@ -21,23 +21,7 @@ | Counters | Response types | | -------- | -------------- | | [C00006 Charge for social media](../../generated_pages/counters/C00006.md) | D02 | -| [C00012 Platform regulation](../../generated_pages/counters/C00012.md) | D02 | -| [C00016 Censorship](../../generated_pages/counters/C00016.md) | D02 | -| [C00034 Create more friction at account creation](../../generated_pages/counters/C00034.md) | D04 | -| [C00036 Infiltrate the in-group to discredit leaders (divide)](../../generated_pages/counters/C00036.md) | D02 | | [C00040 third party verification for people](../../generated_pages/counters/C00040.md) | D02 | -| [C00085 Mute content](../../generated_pages/counters/C00085.md) | D03 | -| [C00097 Require use of verified identities to contribute to poll or comment](../../generated_pages/counters/C00097.md) | D02 | -| [C00099 Strengthen verification methods](../../generated_pages/counters/C00099.md) | D02 | -| [C00107 Content moderation](../../generated_pages/counters/C00107.md) | D02 | -| [C00122 Content moderation](../../generated_pages/counters/C00122.md) | D02 | -| [C00133 Deplatform Account*](../../generated_pages/counters/C00133.md) | D03 | -|
[C00135 Deplatform message groups and/or message boards](../../generated_pages/counters/C00135.md) | D03 | -| [C00153 Take pre-emptive action against actors' infrastructure](../../generated_pages/counters/C00153.md) | D03 | -| [C00172 social media source removal](../../generated_pages/counters/C00172.md) | D02 | -| [C00176 Improve Coordination amongst stakeholders: public and private](../../generated_pages/counters/C00176.md) | D07 | -| [C00189 Ensure that platforms are taking down flagged accounts](../../generated_pages/counters/C00189.md) | D06 | -| [C00197 remove suspicious accounts](../../generated_pages/counters/C00197.md) | D02 | DO NOT EDIT ABOVE THIS LINE - PLEASE ADD NOTES BELOW \ No newline at end of file diff --git a/generated_pages/techniques/T0009.001.md b/generated_pages/techniques/T0009.001.md index 9fbc45e..2058c3f 100644 --- a/generated_pages/techniques/T0009.001.md +++ b/generated_pages/techniques/T0009.001.md @@ -1,6 +1,6 @@ -# Technique T0009.001: Utilize Academic/Pseudoscientific Justifications +# Technique T0009.001: Utilise Academic/Pseudoscientific Justifications -* **Summary**: Utilize Academic/Pseudoscientific Justifications +* **Summary**: Utilise Academic/Pseudoscientific Justifications * **Belongs to tactic stage**: TA16 diff --git a/generated_pages/techniques/T0009.md b/generated_pages/techniques/T0009.md index f25485f..e87f35f 100644 --- a/generated_pages/techniques/T0009.md +++ b/generated_pages/techniques/T0009.md @@ -1,6 +1,6 @@ # Technique T0009: Create Fake Experts -* **Summary**: Stories planted or promoted in computational propaganda operations often make use of experts fabricated from whole cloth, sometimes specifically for the story itself. +* **Summary**: Stories planted or promoted in computational propaganda operations often make use of experts fabricated from whole cloth, sometimes specifically for the story itself. * **Belongs to tactic stage**: TA16 @@ -13,19 +13,9 @@ | Counters | Response types | | -------- | -------------- | -| [C00008 Create shared fact-checking database](../../generated_pages/counters/C00008.md) | D04 | -| [C00011 Media literacy. 
Games to identify fake news](../../generated_pages/counters/C00011.md) | D02 | -| [C00012 Platform regulation](../../generated_pages/counters/C00012.md) | D02 | -| [C00014 Real-time updates to fact-checking database](../../generated_pages/counters/C00014.md) | D04 | -| [C00016 Censorship](../../generated_pages/counters/C00016.md) | D02 | | [C00034 Create more friction at account creation](../../generated_pages/counters/C00034.md) | D04 | -| [C00040 third party verification for people](../../generated_pages/counters/C00040.md) | D02 | -| [C00097 Require use of verified identities to contribute to poll or comment](../../generated_pages/counters/C00097.md) | D02 | -| [C00099 Strengthen verification methods](../../generated_pages/counters/C00099.md) | D02 | | [C00107 Content moderation](../../generated_pages/counters/C00107.md) | D02 | -| [C00133 Deplatform Account*](../../generated_pages/counters/C00133.md) | D03 | | [C00176 Improve Coordination amongst stakeholders: public and private](../../generated_pages/counters/C00176.md) | D07 | -| [C00189 Ensure that platforms are taking down flagged accounts](../../generated_pages/counters/C00189.md) | D06 | | [C00197 remove suspicious accounts](../../generated_pages/counters/C00197.md) | D02 | diff --git a/generated_pages/techniques/T0010.md b/generated_pages/techniques/T0010.md index 11ffdc3..9dd47de 100644 --- a/generated_pages/techniques/T0010.md +++ b/generated_pages/techniques/T0010.md @@ -24,32 +24,16 @@ | Counters | Response types | | -------- | -------------- | | [C00009 Educate high profile influencers on best practices](../../generated_pages/counters/C00009.md) | D02 | -| [C00036 Infiltrate the in-group to discredit leaders (divide)](../../generated_pages/counters/C00036.md) | D02 | | [C00046 Marginalise and discredit extremist groups](../../generated_pages/counters/C00046.md) | D04 | | [C00048 Name and Shame Influencers](../../generated_pages/counters/C00048.md) | D07 | | [C00051 Counter social engineering training](../../generated_pages/counters/C00051.md) | D02 | -| [C00072 Remove non-relevant content from special interest groups - not recommended](../../generated_pages/counters/C00072.md) | D02 | -| [C00073 Inoculate populations through media literacy training](../../generated_pages/counters/C00073.md) | D02 | -| [C00092 Establish a truth teller reputation score for influencers](../../generated_pages/counters/C00092.md) | D07 | -| [C00093 Influencer code of conduct](../../generated_pages/counters/C00093.md) | D07 | | [C00111 Reduce polarisation by connecting and presenting sympathetic renditions of opposite views](../../generated_pages/counters/C00111.md) | D04 | -| [C00125 Prebunking](../../generated_pages/counters/C00125.md) | D03 | -| [C00126 Social media amber alert](../../generated_pages/counters/C00126.md) | D03 | | [C00130 Mentorship: elders, youth, credit. 
Learn vicariously.](../../generated_pages/counters/C00130.md) | D07 | -| [C00136 Microtarget most likely targets then send them countermessages](../../generated_pages/counters/C00136.md) | D03 | -| [C00156 Better tell your country or organization story](../../generated_pages/counters/C00156.md) | D03 | -| [C00160 find and train influencers](../../generated_pages/counters/C00160.md) | D02 | | [C00162 Unravel/target the Potemkin villages](../../generated_pages/counters/C00162.md) | D03 | | [C00169 develop a creative content hub](../../generated_pages/counters/C00169.md) | D03 | -| [C00174 Create a healthier news environment](../../generated_pages/counters/C00174.md) | D02 | -| [C00176 Improve Coordination amongst stakeholders: public and private](../../generated_pages/counters/C00176.md) | D07 | -| [C00184 Media exposure](../../generated_pages/counters/C00184.md) | D04 | -| [C00188 Newsroom/Journalist training to counter influence moves](../../generated_pages/counters/C00188.md) | D03 | -| [C00190 open engagement with civil society](../../generated_pages/counters/C00190.md) | D03 | -| [C00195 Redirect searches away from disinformation or extremist content ](../../generated_pages/counters/C00195.md) | D02 | +| [C00195 Redirect searches away from disinformation or extremist content](../../generated_pages/counters/C00195.md) | D02 | | [C00200 Respected figure (influencer) disavows misinfo](../../generated_pages/counters/C00200.md) | D03 | | [C00203 Stop offering press credentials to propaganda outlets](../../generated_pages/counters/C00203.md) | D03 | -| [C00212 build public resilience by making civil society more vibrant](../../generated_pages/counters/C00212.md) | D03 | DO NOT EDIT ABOVE THIS LINE - PLEASE ADD NOTES BELOW \ No newline at end of file diff --git a/generated_pages/techniques/T0011.md b/generated_pages/techniques/T0011.md index 3e1a69b..d33f5d4 100644 --- a/generated_pages/techniques/T0011.md +++ b/generated_pages/techniques/T0011.md @@ -14,12 +14,7 @@ | Counters | Response types | | -------- | -------------- | | [C00053 Delete old accounts / Remove unused social media accounts](../../generated_pages/counters/C00053.md) | D04 | -| [C00098 Revocation of allowlisted or "verified" status](../../generated_pages/counters/C00098.md) | D02 | -| [C00133 Deplatform Account*](../../generated_pages/counters/C00133.md) | D03 | -| [C00153 Take pre-emptive action against actors' infrastructure](../../generated_pages/counters/C00153.md) | D03 | | [C00182 Redirection / malware detection/ remediation](../../generated_pages/counters/C00182.md) | D02 | -| [C00189 Ensure that platforms are taking down flagged accounts](../../generated_pages/counters/C00189.md) | D06 | -| [C00197 remove suspicious accounts](../../generated_pages/counters/C00197.md) | D02 | DO NOT EDIT ABOVE THIS LINE - PLEASE ADD NOTES BELOW \ No newline at end of file diff --git a/generated_pages/techniques/T0013.md b/generated_pages/techniques/T0013.md index b18986a..b2b8272 100644 --- a/generated_pages/techniques/T0013.md +++ b/generated_pages/techniques/T0013.md @@ -1,6 +1,6 @@ # Technique T0013: Create Inauthentic Websites -* **Summary**: Create media assets to support inauthentic organizations (e.g. think tank), people (e.g. experts) and/or serve as sites to distribute malware/launch phishing operations. +* **Summary**: Create media assets to support inauthentic organisations (e.g. think tank), people (e.g. experts) and/or serve as sites to distribute malware/launch phishing operations. 
* **Belongs to tactic stage**: TA15 @@ -12,15 +12,6 @@ | Counters | Response types | | -------- | -------------- | -| [C00008 Create shared fact-checking database](../../generated_pages/counters/C00008.md) | D04 | -| [C00011 Media literacy. Games to identify fake news](../../generated_pages/counters/C00011.md) | D02 | -| [C00012 Platform regulation](../../generated_pages/counters/C00012.md) | D02 | -| [C00014 Real-time updates to fact-checking database](../../generated_pages/counters/C00014.md) | D04 | -| [C00016 Censorship](../../generated_pages/counters/C00016.md) | D02 | -| [C00107 Content moderation](../../generated_pages/counters/C00107.md) | D02 | -| [C00153 Take pre-emptive action against actors' infrastructure](../../generated_pages/counters/C00153.md) | D03 | -| [C00172 social media source removal](../../generated_pages/counters/C00172.md) | D02 | -| [C00176 Improve Coordination amongst stakeholders: public and private](../../generated_pages/counters/C00176.md) | D07 | DO NOT EDIT ABOVE THIS LINE - PLEASE ADD NOTES BELOW \ No newline at end of file diff --git a/generated_pages/techniques/T0014.001.md b/generated_pages/techniques/T0014.001.md index 573cff4..a2414e7 100644 --- a/generated_pages/techniques/T0014.001.md +++ b/generated_pages/techniques/T0014.001.md @@ -1,6 +1,6 @@ # Technique T0014.001: Raise Funds from Malign Actors -* **Summary**: Raising funds from malign actors may include contributions from foreign agents, cutouts or proxies, shell companies, dark money groups, etc. +* **Summary**: Raising funds from malign actors may include contributions from foreign agents, cutouts or proxies, shell companies, dark money groups, etc. * **Belongs to tactic stage**: TA15 diff --git a/generated_pages/techniques/T0014.002.md b/generated_pages/techniques/T0014.002.md index 3dc51e5..3e9b6c4 100644 --- a/generated_pages/techniques/T0014.002.md +++ b/generated_pages/techniques/T0014.002.md @@ -1,6 +1,6 @@ # Technique T0014.002: Raise Funds from Ignorant Agents -* **Summary**: Raising funds from ignorant agents may include scams, donations intended for one stated purpose but then used for another, etc. +* **Summary**: Raising funds from ignorant agents may include scams, donations intended for one stated purpose but then used for another, etc. * **Belongs to tactic stage**: TA15 diff --git a/generated_pages/techniques/T0014.md b/generated_pages/techniques/T0014.md index 6e524af..9329053 100644 --- a/generated_pages/techniques/T0014.md +++ b/generated_pages/techniques/T0014.md @@ -1,6 +1,6 @@ # Technique T0014: Prepare Fundraising Campaigns -* **Summary**: Fundraising campaigns refer to an influence operation’s systematic effort to seek financial support for a charity, cause, or other enterprise using online activities that further promote operation information pathways while raising a profit. Many influence operations have engaged in crowdfunding services on platforms including Tipee, Patreon, and GoFundMe. An operation may use its previously prepared fundraising campaigns (see: Develop Information Pathways) to promote operation messaging while raising money to support its activities. +* **Summary**: Fundraising campaigns refer to an influence operation’s systematic effort to seek financial support for a charity, cause, or other enterprise using online activities that further promote operation information pathways while raising a profit. Many influence operations have engaged in crowdfunding services on platforms including Tipee, Patreon, and GoFundMe. 
An operation may use its previously prepared fundraising campaigns (see: Develop Information Pathways) to promote operation messaging while raising money to support its activities. * **Belongs to tactic stage**: TA15 @@ -12,19 +12,8 @@ | Counters | Response types | | -------- | -------------- | -| [C00008 Create shared fact-checking database](../../generated_pages/counters/C00008.md) | D04 | -| [C00012 Platform regulation](../../generated_pages/counters/C00012.md) | D02 | -| [C00014 Real-time updates to fact-checking database](../../generated_pages/counters/C00014.md) | D04 | -| [C00016 Censorship](../../generated_pages/counters/C00016.md) | D02 | | [C00059 Verification of project before posting fund requests](../../generated_pages/counters/C00059.md) | D02 | -| [C00070 Block access to disinformation resources](../../generated_pages/counters/C00070.md) | D02 | -| [C00107 Content moderation](../../generated_pages/counters/C00107.md) | D02 | -| [C00122 Content moderation](../../generated_pages/counters/C00122.md) | D02 | -| [C00129 Use banking to cut off access ](../../generated_pages/counters/C00129.md) | D02 | -| [C00153 Take pre-emptive action against actors' infrastructure](../../generated_pages/counters/C00153.md) | D03 | | [C00155 Ban incident actors from funding sites](../../generated_pages/counters/C00155.md) | D02 | -| [C00176 Improve Coordination amongst stakeholders: public and private](../../generated_pages/counters/C00176.md) | D07 | -| [C00189 Ensure that platforms are taking down flagged accounts](../../generated_pages/counters/C00189.md) | D06 | | [C00216 Use advertiser controls to stem flow of funds to bad actors](../../generated_pages/counters/C00216.md) | D02 | diff --git a/generated_pages/techniques/T0015.md b/generated_pages/techniques/T0015.md index 973ccaa..aceddf4 100644 --- a/generated_pages/techniques/T0015.md +++ b/generated_pages/techniques/T0015.md @@ -1,6 +1,6 @@ -# Technique T0015: Create Hashtags and Search Artifacts +# Technique T0015: Create Hashtags and Search Artefacts -* **Summary**: Create one or more hashtags and/or hashtag groups. Many incident-based campaigns will create hashtags to promote their fabricated event. Creating a hashtag for an incident can have two important effects: 1. Create a perception of reality around an event. Certainly only "real" events would be discussed in a hashtag. After all, the event has a name!, and 2. Publicize the story more widely through trending lists and search behavior. Asset needed to direct/control/manage "conversation" connected to launching new incident/campaign with new hashtag for applicable social media sites). +* **Summary**: Create one or more hashtags and/or hashtag groups. Many incident-based campaigns will create hashtags to promote their fabricated event. Creating a hashtag for an incident can have two important effects: 1. It creates a perception of reality around an event (certainly only "real" events would be discussed in a hashtag; after all, the event has a name), and 2. It publicises the story more widely through trending lists and search behaviour. An asset is needed to direct/control/manage the "conversation" connected to launching a new incident/campaign with a new hashtag for applicable social media sites.
* **Belongs to tactic stage**: TA06 @@ -13,14 +13,7 @@ | Counters | Response types | | -------- | -------------- | -| [C00006 Charge for social media](../../generated_pages/counters/C00006.md) | D02 | -| [C00012 Platform regulation](../../generated_pages/counters/C00012.md) | D02 | -| [C00016 Censorship](../../generated_pages/counters/C00016.md) | D02 | | [C00066 Co-opt a hashtag and drown it out (hijack it back)](../../generated_pages/counters/C00066.md) | D03 | -| [C00070 Block access to disinformation resources](../../generated_pages/counters/C00070.md) | D02 | -| [C00107 Content moderation](../../generated_pages/counters/C00107.md) | D02 | -| [C00122 Content moderation](../../generated_pages/counters/C00122.md) | D02 | -| [C00176 Improve Coordination amongst stakeholders: public and private](../../generated_pages/counters/C00176.md) | D07 | DO NOT EDIT ABOVE THIS LINE - PLEASE ADD NOTES BELOW \ No newline at end of file diff --git a/generated_pages/techniques/T0016.md b/generated_pages/techniques/T0016.md index ff73576..efef607 100644 --- a/generated_pages/techniques/T0016.md +++ b/generated_pages/techniques/T0016.md @@ -1,6 +1,6 @@ # Technique T0016: Create Clickbait -* **Summary**: Create attention grabbing headlines (outrage, doubt, humor) required to drive traffic & engagement. This is a key asset. +* **Summary**: Create attention grabbing headlines (outrage, doubt, humour) required to drive traffic & engagement. This is a key asset. * **Belongs to tactic stage**: TA05 @@ -13,19 +13,11 @@ | Counters | Response types | | -------- | -------------- | -| [C00012 Platform regulation](../../generated_pages/counters/C00012.md) | D02 | -| [C00016 Censorship](../../generated_pages/counters/C00016.md) | D02 | | [C00073 Inoculate populations through media literacy training](../../generated_pages/counters/C00073.md) | D02 | | [C00076 Prohibit images in political discourse channels](../../generated_pages/counters/C00076.md) | D02 | | [C00105 Buy more advertising than misinformation creators](../../generated_pages/counters/C00105.md) | D03 | | [C00106 Click-bait centrist content](../../generated_pages/counters/C00106.md) | D03 | -| [C00107 Content moderation](../../generated_pages/counters/C00107.md) | D02 | -| [C00122 Content moderation](../../generated_pages/counters/C00122.md) | D02 | -| [C00142 Platform adds warning label and decision point when sharing content](../../generated_pages/counters/C00142.md) | D04 | -| [C00176 Improve Coordination amongst stakeholders: public and private](../../generated_pages/counters/C00176.md) | D07 | | [C00178 Fill information voids with non-disinformation content](../../generated_pages/counters/C00178.md) | D04 | -| [C00195 Redirect searches away from disinformation or extremist content ](../../generated_pages/counters/C00195.md) | D02 | -| [C00216 Use advertiser controls to stem flow of funds to bad actors](../../generated_pages/counters/C00216.md) | D02 | DO NOT EDIT ABOVE THIS LINE - PLEASE ADD NOTES BELOW \ No newline at end of file diff --git a/generated_pages/techniques/T0017.md b/generated_pages/techniques/T0017.md index 46d65b8..f145cc9 100644 --- a/generated_pages/techniques/T0017.md +++ b/generated_pages/techniques/T0017.md @@ -1,6 +1,6 @@ # Technique T0017: Conduct Fundraising -* **Summary**: Fundraising campaigns refer to an influence operation’s systematic effort to seek financial support for a charity, cause, or other enterprise using online activities that further promote operation information pathways while raising a profit. 
Many influence operations have engaged in crowdfunding services166 on platforms including Tipee, Patreon, and GoFundMe. An operation may use its previously prepared fundraising campaigns to promote operation messaging while raising money to support its activities. +* **Summary**: Fundraising campaigns refer to an influence operation’s systematic effort to seek financial support for a charity, cause, or other enterprise using online activities that further promote operation information pathways while raising a profit. Many influence operations have engaged in crowdfunding services on platforms including Tipee, Patreon, and GoFundMe. An operation may use its previously prepared fundraising campaigns to promote operation messaging while raising money to support its activities. * **Belongs to tactic stage**: TA10 @@ -13,21 +13,8 @@ | Counters | Response types | | -------- | -------------- | -| [C00012 Platform regulation](../../generated_pages/counters/C00012.md) | D02 | -| [C00016 Censorship](../../generated_pages/counters/C00016.md) | D02 | | [C00058 Report crowdfunder as violator](../../generated_pages/counters/C00058.md) | D02 | | [C00067 Denigrate the recipient/ project (of online funding)](../../generated_pages/counters/C00067.md) | D03 | -| [C00070 Block access to disinformation resources](../../generated_pages/counters/C00070.md) | D02 | -| [C00085 Mute content](../../generated_pages/counters/C00085.md) | D03 | -| [C00093 Influencer code of conduct](../../generated_pages/counters/C00093.md) | D07 | -| [C00107 Content moderation](../../generated_pages/counters/C00107.md) | D02 | -| [C00122 Content moderation](../../generated_pages/counters/C00122.md) | D02 | -| [C00129 Use banking to cut off access ](../../generated_pages/counters/C00129.md) | D02 | -| [C00153 Take pre-emptive action against actors' infrastructure](../../generated_pages/counters/C00153.md) | D03 | -| [C00155 Ban incident actors from funding sites](../../generated_pages/counters/C00155.md) | D02 | -| [C00176 Improve Coordination amongst stakeholders: public and private](../../generated_pages/counters/C00176.md) | D07 | -| [C00178 Fill information voids with non-disinformation content](../../generated_pages/counters/C00178.md) | D04 | -| [C00216 Use advertiser controls to stem flow of funds to bad actors](../../generated_pages/counters/C00216.md) | D02 | DO NOT EDIT ABOVE THIS LINE - PLEASE ADD NOTES BELOW \ No newline at end of file diff --git a/generated_pages/techniques/T0018.md b/generated_pages/techniques/T0018.md index 1a532f9..a280ffa 100644 --- a/generated_pages/techniques/T0018.md +++ b/generated_pages/techniques/T0018.md @@ -15,21 +15,7 @@ | Counters | Response types | | -------- | -------------- | -| [C00006 Charge for social media](../../generated_pages/counters/C00006.md) | D02 | -| [C00010 Enhanced privacy regulation for social media](../../generated_pages/counters/C00010.md) | D02 | -| [C00012 Platform regulation](../../generated_pages/counters/C00012.md) | D02 | -| [C00016 Censorship](../../generated_pages/counters/C00016.md) | D02 | | [C00065 Reduce political targeting](../../generated_pages/counters/C00065.md) | D03 | -| [C00076 Prohibit images in political discourse channels](../../generated_pages/counters/C00076.md) | D02 | -| [C00105 Buy more advertising than misinformation creators](../../generated_pages/counters/C00105.md) | D03 | -| [C00107 Content moderation](../../generated_pages/counters/C00107.md) | D02 | -| [C00122 Content moderation](../../generated_pages/counters/C00122.md) | D02 | -|
[C00129 Use banking to cut off access ](../../generated_pages/counters/C00129.md) | D02 | -| [C00153 Take pre-emptive action against actors' infrastructure](../../generated_pages/counters/C00153.md) | D03 | -| [C00176 Improve Coordination amongst stakeholders: public and private](../../generated_pages/counters/C00176.md) | D07 | -| [C00178 Fill information voids with non-disinformation content](../../generated_pages/counters/C00178.md) | D04 | -| [C00195 Redirect searches away from disinformation or extremist content ](../../generated_pages/counters/C00195.md) | D02 | -| [C00216 Use advertiser controls to stem flow of funds to bad actors](../../generated_pages/counters/C00216.md) | D02 | DO NOT EDIT ABOVE THIS LINE - PLEASE ADD NOTES BELOW \ No newline at end of file diff --git a/generated_pages/techniques/T0019.md b/generated_pages/techniques/T0019.md index c2d3bf6..ab68f37 100644 --- a/generated_pages/techniques/T0019.md +++ b/generated_pages/techniques/T0019.md @@ -24,10 +24,8 @@ | Counters | Response types | | -------- | -------------- | -| [C00042 Address truth contained in narratives](../../generated_pages/counters/C00042.md) | D04 | | [C00071 Block source of pollution](../../generated_pages/counters/C00071.md) | D02 | | [C00072 Remove non-relevant content from special interest groups - not recommended](../../generated_pages/counters/C00072.md) | D02 | -| [C00073 Inoculate populations through media literacy training](../../generated_pages/counters/C00073.md) | D02 | | [C00074 Identify and delete or rate limit identical content](../../generated_pages/counters/C00074.md) | D02 | diff --git a/generated_pages/techniques/T0020.md b/generated_pages/techniques/T0020.md index 0391f3a..895f401 100644 --- a/generated_pages/techniques/T0020.md +++ b/generated_pages/techniques/T0020.md @@ -17,10 +17,6 @@ | Counters | Response types | | -------- | -------------- | | [C00090 Fake engagement system](../../generated_pages/counters/C00090.md) | D05 | -| [C00136 Microtarget most likely targets then send them countermessages](../../generated_pages/counters/C00136.md) | D03 | -| [C00149 Poison the monitoring & evaluation data](../../generated_pages/counters/C00149.md) | D04 | -| [C00178 Fill information voids with non-disinformation content](../../generated_pages/counters/C00178.md) | D04 | -| [C00211 Use humorous counter-narratives](../../generated_pages/counters/C00211.md) | D03 | DO NOT EDIT ABOVE THIS LINE - PLEASE ADD NOTES BELOW \ No newline at end of file diff --git a/generated_pages/techniques/T0022.001.md b/generated_pages/techniques/T0022.001.md index f9f3744..97c3c35 100644 --- a/generated_pages/techniques/T0022.001.md +++ b/generated_pages/techniques/T0022.001.md @@ -1,6 +1,6 @@ # Technique T0022.001: Amplify Existing Conspiracy Theory Narratives -* **Summary**: An influence operation may amplify an existing conspiracy theory narrative that aligns with its incident or campaign goals. By amplifying existing conspiracy theory narratives, operators can leverage the power of the existing communities that support and propagate those theories without needing to expend resources creating new narratives or building momentum and buy in around new narratives. +* **Summary**: An influence operation may amplify an existing conspiracy theory narrative that aligns with its incident or campaign goals. 
By amplifying existing conspiracy theory narratives, operators can leverage the power of the existing communities that support and propagate those theories without needing to expend resources creating new narratives or building momentum and buy-in around new narratives. * **Belongs to tactic stage**: TA14 diff --git a/generated_pages/techniques/T0022.002.md b/generated_pages/techniques/T0022.002.md index b1a355d..f6d43c6 100644 --- a/generated_pages/techniques/T0022.002.md +++ b/generated_pages/techniques/T0022.002.md @@ -1,6 +1,6 @@ # Technique T0022.002: Develop Original Conspiracy Theory Narratives -* **Summary**: While this requires more resources than amplifying existing conspiracy theory narratives, an influence operation may develop original conspiracy theory narratives in order to achieve greater control and alignment over the narrative and their campaign goals. Prominent examples include the USSR's Operation INFEKTION disinformation campaign run by the KGB in the 1980s to plant the idea that the United States had invented HIV/AIDS as part of a biological weapons research project at Fort Detrick, Maryland. More recently, Fort Detrick featured prominently in a new conspiracy theory narratives around the origins of the COVID-19 outbreak and pandemic. +* **Summary**: While this requires more resources than amplifying existing conspiracy theory narratives, an influence operation may develop original conspiracy theory narratives in order to achieve greater control and alignment over the narrative and their campaign goals. Prominent examples include the USSR's Operation INFEKTION disinformation campaign run by the KGB in the 1980s to plant the idea that the United States had invented HIV/AIDS as part of a biological weapons research project at Fort Detrick, Maryland. More recently, Fort Detrick featured prominently in new conspiracy theory narratives around the origins of the COVID-19 outbreak and pandemic. * **Belongs to tactic stage**: TA14 diff --git a/generated_pages/techniques/T0022.md b/generated_pages/techniques/T0022.md index 2e53182..c604c98 100644 --- a/generated_pages/techniques/T0022.md +++ b/generated_pages/techniques/T0022.md @@ -1,6 +1,6 @@ # Technique T0022: Leverage Conspiracy Theory Narratives -* **Summary**: "Conspiracy narratives" appeal to the human desire for explanatory order, by invoking the participation of poweful (often sinister) actors in pursuit of their own political goals. These narratives are especially appealing when an audience is low-information, marginalized or otherwise inclined to reject the prevailing explanation. Conspiracy narratives are an important component of the "firehose of falsehoods" model. +* **Summary**: "Conspiracy narratives" appeal to the human desire for explanatory order, by invoking the participation of powerful (often sinister) actors in pursuit of their own political goals. These narratives are especially appealing when an audience is low-information, marginalised or otherwise inclined to reject the prevailing explanation. Conspiracy narratives are an important component of the "firehose of falsehoods" model.
* **Belongs to tactic stage**: TA14 @@ -13,48 +13,11 @@ | Counters | Response types | | -------- | -------------- | -| [C00012 Platform regulation](../../generated_pages/counters/C00012.md) | D02 | -| [C00016 Censorship](../../generated_pages/counters/C00016.md) | D02 | -| [C00017 Repair broken social connections](../../generated_pages/counters/C00017.md) | D03 | -| [C00019 Reduce effect of division-enablers](../../generated_pages/counters/C00019.md) | D03 | -| [C00021 Encourage in-person communication](../../generated_pages/counters/C00021.md) | D04 | -| [C00022 Innoculate. Positive campaign to promote feeling of safety](../../generated_pages/counters/C00022.md) | D04 | -| [C00024 Promote healthy narratives](../../generated_pages/counters/C00024.md) | D04 | -| [C00027 Create culture of civility](../../generated_pages/counters/C00027.md) | D07 | -| [C00029 Create fake website to issue counter narrative and counter narrative through physical merchandise](../../generated_pages/counters/C00029.md) | D03 | -| [C00030 Develop a compelling counter narrative (truth based)](../../generated_pages/counters/C00030.md) | D03 | -| [C00042 Address truth contained in narratives](../../generated_pages/counters/C00042.md) | D04 | -| [C00046 Marginalise and discredit extremist groups](../../generated_pages/counters/C00046.md) | D04 | -| [C00072 Remove non-relevant content from special interest groups - not recommended](../../generated_pages/counters/C00072.md) | D02 | -| [C00073 Inoculate populations through media literacy training](../../generated_pages/counters/C00073.md) | D02 | -| [C00074 Identify and delete or rate limit identical content](../../generated_pages/counters/C00074.md) | D02 | -| [C00080 Create competing narrative](../../generated_pages/counters/C00080.md) | D03 | -| [C00081 Highlight flooding and noise, and explain motivations](../../generated_pages/counters/C00081.md) | D03 | -| [C00082 Ground truthing as automated response to pollution](../../generated_pages/counters/C00082.md) | D03 | -| [C00084 Modify disinformation narratives, and rebroadcast them](../../generated_pages/counters/C00084.md) | D03 | -| [C00085 Mute content](../../generated_pages/counters/C00085.md) | D03 | | [C00096 Strengthen institutions that are always truth tellers](../../generated_pages/counters/C00096.md) | D07 | -| [C00107 Content moderation](../../generated_pages/counters/C00107.md) | D02 | -| [C00117 Downgrade / de-amplify so message is seen by fewer people](../../generated_pages/counters/C00117.md) | D04 | | [C00119 Engage payload and debunk.](../../generated_pages/counters/C00119.md) | D07 | -| [C00122 Content moderation](../../generated_pages/counters/C00122.md) | D02 | -| [C00125 Prebunking](../../generated_pages/counters/C00125.md) | D03 | -| [C00126 Social media amber alert](../../generated_pages/counters/C00126.md) | D03 | -| [C00156 Better tell your country or organization story](../../generated_pages/counters/C00156.md) | D03 | +| [C00156 Better tell your country or organisation story](../../generated_pages/counters/C00156.md) | D03 | | [C00161 Coalition Building with stakeholders and Third-Party Inducements](../../generated_pages/counters/C00161.md) | D07 | -| [C00162 Unravel/target the Potemkin villages](../../generated_pages/counters/C00162.md) | D03 | | [C00164 compatriot policy](../../generated_pages/counters/C00164.md) | D03 | -| [C00169 develop a creative content hub](../../generated_pages/counters/C00169.md) | D03 | -| [C00174 Create a healthier news 
environment](../../generated_pages/counters/C00174.md) | D02 | -| [C00176 Improve Coordination amongst stakeholders: public and private](../../generated_pages/counters/C00176.md) | D07 | -| [C00188 Newsroom/Journalist training to counter influence moves](../../generated_pages/counters/C00188.md) | D03 | -| [C00190 open engagement with civil society](../../generated_pages/counters/C00190.md) | D03 | -| [C00195 Redirect searches away from disinformation or extremist content ](../../generated_pages/counters/C00195.md) | D02 | -| [C00200 Respected figure (influencer) disavows misinfo](../../generated_pages/counters/C00200.md) | D03 | -| [C00203 Stop offering press credentials to propaganda outlets](../../generated_pages/counters/C00203.md) | D03 | -| [C00205 strong dialogue between the federal government and private sector to encourage better reporting](../../generated_pages/counters/C00205.md) | D03 | -| [C00211 Use humorous counter-narratives](../../generated_pages/counters/C00211.md) | D03 | -| [C00212 build public resilience by making civil society more vibrant](../../generated_pages/counters/C00212.md) | D03 | DO NOT EDIT ABOVE THIS LINE - PLEASE ADD NOTES BELOW \ No newline at end of file diff --git a/generated_pages/techniques/T0023.001.md b/generated_pages/techniques/T0023.001.md index 745c0af..b4f7b31 100644 --- a/generated_pages/techniques/T0023.001.md +++ b/generated_pages/techniques/T0023.001.md @@ -1,6 +1,6 @@ # Technique T0023.001: Reframe Context -* **Summary**: Reframing context refers to removing an event from its surrounding context to distort its intended meaning. Rather than deny that an event occurred, reframing context frames an event in a manner that may lead the target audience to draw a different conclusion about its intentions. +* **Summary**: Reframing context refers to removing an event from its surrounding context to distort its intended meaning. Rather than deny that an event occurred, reframing context frames an event in a manner that may lead the target audience to draw a different conclusion about its intentions. * **Belongs to tactic stage**: TA06 diff --git a/generated_pages/techniques/T0023.002.md b/generated_pages/techniques/T0023.002.md index e804563..86fbba7 100644 --- a/generated_pages/techniques/T0023.002.md +++ b/generated_pages/techniques/T0023.002.md @@ -1,6 +1,6 @@ # Technique T0023.002: Edit Open-Source Content -* **Summary**: An influence operation may edit open-source content, such as collaborative blogs or encyclopedias, to promote its narratives on outlets with existing credibility and audiences. Editing open-source content may allow an operation to post content on platforms without dedicating resources to the creation and maintenance of its own assets. +* **Summary**: An influence operation may edit open-source content, such as collaborative blogs or encyclopaedias, to promote its narratives on outlets with existing credibility and audiences. Editing open-source content may allow an operation to post content on platforms without dedicating resources to the creation and maintenance of its own assets. 
* **Belongs to tactic stage**: TA06 diff --git a/generated_pages/techniques/T0023.md b/generated_pages/techniques/T0023.md index 54c7028..04d4124 100644 --- a/generated_pages/techniques/T0023.md +++ b/generated_pages/techniques/T0023.md @@ -14,19 +14,6 @@ | Counters | Response types | | -------- | -------------- | -| [C00017 Repair broken social connections](../../generated_pages/counters/C00017.md) | D03 | -| [C00019 Reduce effect of division-enablers](../../generated_pages/counters/C00019.md) | D03 | -| [C00021 Encourage in-person communication](../../generated_pages/counters/C00021.md) | D04 | -| [C00022 Innoculate. Positive campaign to promote feeling of safety](../../generated_pages/counters/C00022.md) | D04 | -| [C00024 Promote healthy narratives](../../generated_pages/counters/C00024.md) | D04 | -| [C00027 Create culture of civility](../../generated_pages/counters/C00027.md) | D07 | -| [C00046 Marginalise and discredit extremist groups](../../generated_pages/counters/C00046.md) | D04 | -| [C00072 Remove non-relevant content from special interest groups - not recommended](../../generated_pages/counters/C00072.md) | D02 | -| [C00073 Inoculate populations through media literacy training](../../generated_pages/counters/C00073.md) | D02 | -| [C00081 Highlight flooding and noise, and explain motivations](../../generated_pages/counters/C00081.md) | D03 | -| [C00082 Ground truthing as automated response to pollution](../../generated_pages/counters/C00082.md) | D03 | -| [C00092 Establish a truth teller reputation score for influencers](../../generated_pages/counters/C00092.md) | D07 | -| [C00096 Strengthen institutions that are always truth tellers](../../generated_pages/counters/C00096.md) | D07 | DO NOT EDIT ABOVE THIS LINE - PLEASE ADD NOTES BELOW \ No newline at end of file diff --git a/generated_pages/techniques/T0039.md b/generated_pages/techniques/T0039.md index b6c8aa2..c1c3b11 100644 --- a/generated_pages/techniques/T0039.md +++ b/generated_pages/techniques/T0039.md @@ -1,17 +1,24 @@ -# Technique T0039 : Bait Legitimate Influencers +# Technique T0039: Bait Legitimate Influencers -* **Summary**: Credibility in a social media environment is often a function of the size of a user's network. "Influencers" are so-called because of their reach, typically understood as: 1) the size of their network (i.e. the number of followers, perhaps weighted by their own influence); and 2) The rate at which their comments are re-circulated (these two metrics are related). Add traditional media players at all levels of credibility and professionalism to this, and the number of potential influencial carriers available for unwitting amplification becomes substantial. By targeting high-influence people and organizations in all types of media with narratives and content engineered to appeal their emotional or ideological drivers, influence campaigns are able to add perceived credibility to their messaging via saturation and adoption by trusted agents such as celebrities, journalists and local leaders. +* **Summary**: Credibility in a social media environment is often a function of the size of a user's network. "Influencers" are so-called because of their reach, typically understood as: 1) the size of their network (i.e. the number of followers, perhaps weighted by their own influence); and 2) The rate at which their comments are re-circulated (these two metrics are related). 
Add traditional media players at all levels of credibility and professionalism to this, and the number of potential influential carriers available for unwitting amplification becomes substantial. By targeting high-influence people and organisations in all types of media with narratives and content engineered to appeal to their emotional or ideological drivers, influence campaigns are able to add perceived credibility to their messaging via saturation and adoption by trusted agents such as celebrities, journalists and local leaders. * **Belongs to tactic stage**: TA08 | Incident | Descriptions given for this incident | | -------- | -------------------- | +| [I00006 Columbian Chemicals](../../generated_pages/incidents/I00006.md) | bait journalists/media/politicians | +| [I00010 ParklandTeens](../../generated_pages/incidents/I00010.md) | journalist/media baiting | +| [I00015 ConcordDiscovery](../../generated_pages/incidents/I00015.md) | journalist/media baiting | | Counters | Response types | | -------- | -------------- | +| [C00087 Make more noise than the disinformation](../../generated_pages/counters/C00087.md) | D04 | +| [C00114 Don't engage with payloads](../../generated_pages/counters/C00114.md) | D02 | +| [C00154 Ask media not to report false information](../../generated_pages/counters/C00154.md) | D02 | +| [C00160 find and train influencers](../../generated_pages/counters/C00160.md) | D02 | DO NOT EDIT ABOVE THIS LINE - PLEASE ADD NOTES BELOW \ No newline at end of file diff --git a/generated_pages/techniques/T0040.md b/generated_pages/techniques/T0040.md index cad11af..df5588c 100644 --- a/generated_pages/techniques/T0040.md +++ b/generated_pages/techniques/T0040.md @@ -14,7 +14,6 @@ | Counters | Response types | | -------- | -------------- | -| [C00073 Inoculate populations through media literacy training](../../generated_pages/counters/C00073.md) | D02 | | [C00112 "Prove they are not an op!"](../../generated_pages/counters/C00112.md) | D02 | DO NOT EDIT ABOVE THIS LINE - PLEASE ADD NOTES BELOW \ No newline at end of file diff --git a/generated_pages/techniques/T0042.md b/generated_pages/techniques/T0042.md index 3d34fa0..113dd14 100644 --- a/generated_pages/techniques/T0042.md +++ b/generated_pages/techniques/T0042.md @@ -12,9 +12,6 @@ | Counters | Response types | | -------- | -------------- | -| [C00009 Educate high profile influencers on best practices](../../generated_pages/counters/C00009.md) | D02 | -| [C00042 Address truth contained in narratives](../../generated_pages/counters/C00042.md) | D04 | -| [C00112 "Prove they are not an op!"](../../generated_pages/counters/C00112.md) | D02 | DO NOT EDIT ABOVE THIS LINE - PLEASE ADD NOTES BELOW \ No newline at end of file diff --git a/generated_pages/techniques/T0043.md b/generated_pages/techniques/T0043.md index 3857e3d..f2e1553 100644 --- a/generated_pages/techniques/T0043.md +++ b/generated_pages/techniques/T0043.md @@ -13,13 +13,7 @@ | Counters | Response types | | -------- | -------------- | -| [C00006 Charge for social media](../../generated_pages/counters/C00006.md) | D02 | -| [C00012 Platform regulation](../../generated_pages/counters/C00012.md) | D02 | -| [C00016 Censorship](../../generated_pages/counters/C00016.md) | D02 | -| [C00074 Identify and delete or rate limit identical content](../../generated_pages/counters/C00074.md) | D02 | -| [C00121 Tool transparency and literacy for channels people follow.
](../../generated_pages/counters/C00121.md) | D07 | -| [C00135 Deplatform message groups and/or message boards](../../generated_pages/counters/C00135.md) | D03 | -| [C00176 Improve Coordination amongst stakeholders: public and private](../../generated_pages/counters/C00176.md) | D07 | +| [C00121 Tool transparency and literacy for channels people follow.](../../generated_pages/counters/C00121.md) | D07 | DO NOT EDIT ABOVE THIS LINE - PLEASE ADD NOTES BELOW \ No newline at end of file diff --git a/generated_pages/techniques/T0044.md b/generated_pages/techniques/T0044.md index c9a889e..149918c 100644 --- a/generated_pages/techniques/T0044.md +++ b/generated_pages/techniques/T0044.md @@ -1,6 +1,6 @@ # Technique T0044: Seed Distortions -* **Summary**: Try a wide variety of messages in the early hours surrounding an incident or event, to give a misleading account or impression. +* **Summary**: Try a wide variety of messages in the early hours surrounding an incident or event, to give a misleading account or impression. * **Belongs to tactic stage**: TA08 @@ -13,34 +13,8 @@ | Counters | Response types | | -------- | -------------- | -| [C00008 Create shared fact-checking database](../../generated_pages/counters/C00008.md) | D04 | -| [C00009 Educate high profile influencers on best practices](../../generated_pages/counters/C00009.md) | D02 | -| [C00011 Media literacy. Games to identify fake news](../../generated_pages/counters/C00011.md) | D02 | -| [C00014 Real-time updates to fact-checking database](../../generated_pages/counters/C00014.md) | D04 | -| [C00016 Censorship](../../generated_pages/counters/C00016.md) | D02 | -| [C00019 Reduce effect of division-enablers](../../generated_pages/counters/C00019.md) | D03 | -| [C00021 Encourage in-person communication](../../generated_pages/counters/C00021.md) | D04 | -| [C00022 Innoculate. 
Positive campaign to promote feeling of safety](../../generated_pages/counters/C00022.md) | D04 | -| [C00024 Promote healthy narratives](../../generated_pages/counters/C00024.md) | D04 | -| [C00027 Create culture of civility](../../generated_pages/counters/C00027.md) | D07 | -| [C00042 Address truth contained in narratives](../../generated_pages/counters/C00042.md) | D04 | -| [C00046 Marginalise and discredit extremist groups](../../generated_pages/counters/C00046.md) | D04 | -| [C00072 Remove non-relevant content from special interest groups - not recommended](../../generated_pages/counters/C00072.md) | D02 | -| [C00073 Inoculate populations through media literacy training](../../generated_pages/counters/C00073.md) | D02 | -| [C00076 Prohibit images in political discourse channels](../../generated_pages/counters/C00076.md) | D02 | -| [C00078 Change Search Algorithms for Disinformation Content](../../generated_pages/counters/C00078.md) | D03 | -| [C00081 Highlight flooding and noise, and explain motivations](../../generated_pages/counters/C00081.md) | D03 | -| [C00082 Ground truthing as automated response to pollution](../../generated_pages/counters/C00082.md) | D03 | -| [C00085 Mute content](../../generated_pages/counters/C00085.md) | D03 | | [C00086 Distract from noise with addictive content](../../generated_pages/counters/C00086.md) | D04 | -| [C00087 Make more noise than the disinformation](../../generated_pages/counters/C00087.md) | D04 | -| [C00092 Establish a truth teller reputation score for influencers](../../generated_pages/counters/C00092.md) | D07 | -| [C00117 Downgrade / de-amplify so message is seen by fewer people](../../generated_pages/counters/C00117.md) | D04 | | [C00118 Repurpose images with new text](../../generated_pages/counters/C00118.md) | D04 | -| [C00119 Engage payload and debunk.](../../generated_pages/counters/C00119.md) | D07 | -| [C00122 Content moderation](../../generated_pages/counters/C00122.md) | D02 | -| [C00174 Create a healthier news environment](../../generated_pages/counters/C00174.md) | D02 | -| [C00184 Media exposure](../../generated_pages/counters/C00184.md) | D04 | DO NOT EDIT ABOVE THIS LINE - PLEASE ADD NOTES BELOW \ No newline at end of file diff --git a/generated_pages/techniques/T0045.md b/generated_pages/techniques/T0045.md index bb58687..a6ee98e 100644 --- a/generated_pages/techniques/T0045.md +++ b/generated_pages/techniques/T0045.md @@ -13,38 +13,8 @@ | Counters | Response types | | -------- | -------------- | -| [C00008 Create shared fact-checking database](../../generated_pages/counters/C00008.md) | D04 | -| [C00011 Media literacy. 
Games to identify fake news](../../generated_pages/counters/C00011.md) | D02 | -| [C00012 Platform regulation](../../generated_pages/counters/C00012.md) | D02 | -| [C00014 Real-time updates to fact-checking database](../../generated_pages/counters/C00014.md) | D04 | -| [C00016 Censorship](../../generated_pages/counters/C00016.md) | D02 | -| [C00036 Infiltrate the in-group to discredit leaders (divide)](../../generated_pages/counters/C00036.md) | D02 | -| [C00046 Marginalise and discredit extremist groups](../../generated_pages/counters/C00046.md) | D04 | -| [C00048 Name and Shame Influencers](../../generated_pages/counters/C00048.md) | D07 | -| [C00073 Inoculate populations through media literacy training](../../generated_pages/counters/C00073.md) | D02 | -| [C00085 Mute content](../../generated_pages/counters/C00085.md) | D03 | -| [C00092 Establish a truth teller reputation score for influencers](../../generated_pages/counters/C00092.md) | D07 | -| [C00097 Require use of verified identities to contribute to poll or comment](../../generated_pages/counters/C00097.md) | D02 | -| [C00099 Strengthen verification methods](../../generated_pages/counters/C00099.md) | D02 | -| [C00107 Content moderation](../../generated_pages/counters/C00107.md) | D02 | | [C00113 Debunk and defuse a fake expert / credentials.](../../generated_pages/counters/C00113.md) | D02 | -| [C00117 Downgrade / de-amplify so message is seen by fewer people](../../generated_pages/counters/C00117.md) | D04 | -| [C00119 Engage payload and debunk.](../../generated_pages/counters/C00119.md) | D07 | -| [C00122 Content moderation](../../generated_pages/counters/C00122.md) | D02 | -| [C00125 Prebunking](../../generated_pages/counters/C00125.md) | D03 | -| [C00126 Social media amber alert](../../generated_pages/counters/C00126.md) | D03 | -| [C00133 Deplatform Account*](../../generated_pages/counters/C00133.md) | D03 | -| [C00133 Deplatform Account*](../../generated_pages/counters/C00133.md) | D03 | -| [C00154 Ask media not to report false information](../../generated_pages/counters/C00154.md) | D02 | -| [C00174 Create a healthier news environment](../../generated_pages/counters/C00174.md) | D02 | -| [C00176 Improve Coordination amongst stakeholders: public and private](../../generated_pages/counters/C00176.md) | D07 | | [C00184 Media exposure](../../generated_pages/counters/C00184.md) | D04 | -| [C00184 Media exposure](../../generated_pages/counters/C00184.md) | D04 | -| [C00188 Newsroom/Journalist training to counter influence moves](../../generated_pages/counters/C00188.md) | D03 | -| [C00195 Redirect searches away from disinformation or extremist content ](../../generated_pages/counters/C00195.md) | D02 | -| [C00200 Respected figure (influencer) disavows misinfo](../../generated_pages/counters/C00200.md) | D03 | -| [C00203 Stop offering press credentials to propaganda outlets](../../generated_pages/counters/C00203.md) | D03 | -| [C00211 Use humorous counter-narratives](../../generated_pages/counters/C00211.md) | D03 | DO NOT EDIT ABOVE THIS LINE - PLEASE ADD NOTES BELOW \ No newline at end of file diff --git a/generated_pages/techniques/T0046.md b/generated_pages/techniques/T0046.md index 9bc4d05..ab36822 100644 --- a/generated_pages/techniques/T0046.md +++ b/generated_pages/techniques/T0046.md @@ -1,6 +1,6 @@ -# Technique T0046: Use Search Engine Optimization +# Technique T0046: Use Search Engine Optimisation -* **Summary**: Manipulate content engagement metrics (ie: Reddit & Twitter) to influence/impact news search results (e.g. 
Google), also elevates RT & Sputnik headline into Google news alert emails. aka "Black-hat SEO" +* **Summary**: Manipulate content engagement metrics (i.e. Reddit & Twitter) to influence/impact news search results (e.g. Google), also elevates RT & Sputnik headlines into Google news alert emails. aka "Black-hat SEO" * **Belongs to tactic stage**: TA08 @@ -24,13 +24,7 @@ | Counters | Response types | | -------- | -------------- | -| [C00012 Platform regulation](../../generated_pages/counters/C00012.md) | D02 | -| [C00070 Block access to disinformation resources](../../generated_pages/counters/C00070.md) | D02 | -| [C00078 Change Search Algorithms for Disinformation Content](../../generated_pages/counters/C00078.md) | D03 | | [C00117 Downgrade / de-amplify so message is seen by fewer people](../../generated_pages/counters/C00117.md) | D04 | -| [C00149 Poison the monitoring & evaluation data](../../generated_pages/counters/C00149.md) | D04 | -| [C00188 Newsroom/Journalist training to counter influence moves](../../generated_pages/counters/C00188.md) | D03 | -| [C00195 Redirect searches away from disinformation or extremist content ](../../generated_pages/counters/C00195.md) | D02 | DO NOT EDIT ABOVE THIS LINE - PLEASE ADD NOTES BELOW \ No newline at end of file diff --git a/generated_pages/techniques/T0047.md b/generated_pages/techniques/T0047.md index b035d19..f87aeef 100644 --- a/generated_pages/techniques/T0047.md +++ b/generated_pages/techniques/T0047.md @@ -13,9 +13,6 @@ | Counters | Response types | | -------- | -------------- | -| [C00012 Platform regulation](../../generated_pages/counters/C00012.md) | D02 | -| [C00060 Legal action against for-profit engagement factories](../../generated_pages/counters/C00060.md) | D03 | -| [C00093 Influencer code of conduct](../../generated_pages/counters/C00093.md) | D07 | | [C00120 Open dialogue about design of platforms to produce different outcomes](../../generated_pages/counters/C00120.md) | D07 | diff --git a/generated_pages/techniques/T0048.001.md b/generated_pages/techniques/T0048.001.md index 2068424..60486ab 100644 --- a/generated_pages/techniques/T0048.001.md +++ b/generated_pages/techniques/T0048.001.md @@ -1,6 +1,6 @@ # Technique T0048.001: Boycott/"Cancel" Opponents -* **Summary**: Cancel culture refers to the phenomenon in which individuals collectively refrain from supporting an individual, organization, business, or other entity, usually following a real or falsified controversy. An influence operation may exploit cancel culture by emphasizing an adversary’s problematic or disputed behavior and presenting its own content as an alternative. +* **Summary**: Cancel culture refers to the phenomenon in which individuals collectively refrain from supporting an individual, organisation, business, or other entity, usually following a real or falsified controversy. An influence operation may exploit cancel culture by emphasising an adversary’s problematic or disputed behaviour and presenting its own content as an alternative. 
* **Belongs to tactic stage**: TA18 diff --git a/generated_pages/techniques/T0048.003.md b/generated_pages/techniques/T0048.003.md index b1046b9..fc50358 100644 --- a/generated_pages/techniques/T0048.003.md +++ b/generated_pages/techniques/T0048.003.md @@ -1,6 +1,6 @@ # Technique T0048.003: Threaten to Dox -* **Summary**: Doxing refers to online harassment in which individuals publicly release private information about another individual, including names, addresses, employment information, pictures, family members, and other sensitive information. An influence operation may dox its opposition to encourage individuals aligned with operation narratives to harass the doxed individuals themselves or otherwise discourage the doxed individuals from posting or proliferating conflicting content. +* **Summary**: Doxing refers to online harassment in which individuals publicly release private information about another individual, including names, addresses, employment information, pictures, family members, and other sensitive information. An influence operation may dox its opposition to encourage individuals aligned with operation narratives to harass the doxed individuals themselves or otherwise discourage the doxed individuals from posting or proliferating conflicting content. * **Belongs to tactic stage**: TA18 diff --git a/generated_pages/techniques/T0048.004.md b/generated_pages/techniques/T0048.004.md index 6cc6b23..731fcf8 100644 --- a/generated_pages/techniques/T0048.004.md +++ b/generated_pages/techniques/T0048.004.md @@ -1,6 +1,6 @@ # Technique T0048.004: Dox -* **Summary**: Doxing refers to online harassment in which individuals publicly release private information about another individual, including names, addresses, employment information, pictures, family members, and other sensitive information. An influence operation may dox its opposition to encourage individuals aligned with operation narratives to harass the doxed individuals themselves or otherwise discourage the doxed individuals from posting or proliferating conflicting content. +* **Summary**: Doxing refers to online harassment in which individuals publicly release private information about another individual, including names, addresses, employment information, pictures, family members, and other sensitive information. An influence operation may dox its opposition to encourage individuals aligned with operation narratives to harass the doxed individuals themselves or otherwise discourage the doxed individuals from posting or proliferating conflicting content. * **Belongs to tactic stage**: TA18 diff --git a/generated_pages/techniques/T0048.md b/generated_pages/techniques/T0048.md index a374048..c4dcd34 100644 --- a/generated_pages/techniques/T0048.md +++ b/generated_pages/techniques/T0048.md @@ -1,6 +1,6 @@ # Technique T0048: Harass -* **Summary**: Threatening or harassing believers of opposing narratives refers to the use of intimidation techniques, including cyberbullying and doxing, to discourage opponents from voicing their dissent. An influence operation may threaten or harass believers of the opposing narratives to deter individuals from posting or proliferating conflicting content. +* **Summary**: Threatening or harassing believers of opposing narratives refers to the use of intimidation techniques, including cyberbullying and doxing, to discourage opponents from voicing their dissent. 
An influence operation may threaten or harass believers of the opposing narratives to deter individuals from posting or proliferating conflicting content. * **Belongs to tactic stage**: TA18 @@ -13,18 +13,6 @@ | Counters | Response types | | -------- | -------------- | -| [C00009 Educate high profile influencers on best practices](../../generated_pages/counters/C00009.md) | D02 | -| [C00012 Platform regulation](../../generated_pages/counters/C00012.md) | D02 | -| [C00027 Create culture of civility](../../generated_pages/counters/C00027.md) | D07 | -| [C00046 Marginalise and discredit extremist groups](../../generated_pages/counters/C00046.md) | D04 | -| [C00048 Name and Shame Influencers](../../generated_pages/counters/C00048.md) | D07 | -| [C00073 Inoculate populations through media literacy training](../../generated_pages/counters/C00073.md) | D02 | -| [C00087 Make more noise than the disinformation](../../generated_pages/counters/C00087.md) | D04 | -| [C00093 Influencer code of conduct](../../generated_pages/counters/C00093.md) | D07 | -| [C00114 Don't engage with payloads](../../generated_pages/counters/C00114.md) | D02 | -| [C00115 Expose actor and intentions](../../generated_pages/counters/C00115.md) | D02 | -| [C00154 Ask media not to report false information](../../generated_pages/counters/C00154.md) | D02 | -| [C00160 find and train influencers](../../generated_pages/counters/C00160.md) | D02 | DO NOT EDIT ABOVE THIS LINE - PLEASE ADD NOTES BELOW \ No newline at end of file diff --git a/generated_pages/techniques/T0049.003.md b/generated_pages/techniques/T0049.003.md index 4bfec7c..944d37d 100644 --- a/generated_pages/techniques/T0049.003.md +++ b/generated_pages/techniques/T0049.003.md @@ -1,7 +1,6 @@ # Technique T0049.003: Bots Amplify via Automated Forwarding and Reposting -* **Summary**: Automated forwarding and reposting refer to the proliferation of operation content using automated means, such as artificial intelligence or social media bots. An influence operation may use automated activity to increase content exposure without dedicating the resources, including personnel and time, traditionally required to forward and repost content. -Use bots to amplify narratives above algorithm thresholds. Bots are automated/programmed profiles designed to amplify content (ie: automatically retweet or like) and give appearance it's more "popular" than it is. They can operate as a network, to function in a coordinated/orchestrated manner. In some cases (more so now) they are an inexpensive/disposable assets used for minimal deployment as bot detection tools improve and platforms are more responsive. +* **Summary**: Automated forwarding and reposting refer to the proliferation of operation content using automated means, such as artificial intelligence or social media bots. An influence operation may use automated activity to increase content exposure without dedicating the resources, including personnel and time, traditionally required to forward and repost content. Use bots to amplify narratives above algorithm thresholds. Bots are automated/programmed profiles designed to amplify content (i.e. automatically retweet or like) and give the appearance that it's more "popular" than it is. They can operate as a network, to function in a coordinated/orchestrated manner. In some cases (more so now) they are inexpensive/disposable assets used for minimal deployment as bot detection tools improve and platforms are more responsive. 
* **Belongs to tactic stage**: TA17 diff --git a/generated_pages/techniques/T0049.004.md b/generated_pages/techniques/T0049.004.md index a574d11..fac68ff 100644 --- a/generated_pages/techniques/T0049.004.md +++ b/generated_pages/techniques/T0049.004.md @@ -1,6 +1,6 @@ -# Technique T0049.004: Utilize Spamoflauge +# Technique T0049.004: Utilise Spamoflauge -* **Summary**: Spamoflauge refers to the practice of disguising spam messages as legitimate. Spam refers to the use of electronic messaging systems to send out unrequested or unwanted messages in bulk. Simple methods of spamoflauge include replacing letters with numbers to fool keyword-based email spam filters, for example, "you've w0n our jackp0t!". Spamoflauge may extend to more complex techniques such as modifying the grammar or word choice of the language, casting messages as images which spam detectors cannot automatically read, or encapsulating messages in password protected attachments, such as .pdf or .zip files. Influence operations may use spamoflauge to avoid spam filtering systems and increase the likelihood of the target audience receiving operation messaging. +* **Summary**: Spamoflauge refers to the practice of disguising spam messages as legitimate. Spam refers to the use of electronic messaging systems to send out unrequested or unwanted messages in bulk. Simple methods of spamoflauge include replacing letters with numbers to fool keyword-based email spam filters, for example, "you've w0n our jackp0t!". Spamoflauge may extend to more complex techniques such as modifying the grammar or word choice of the language, casting messages as images which spam detectors cannot automatically read, or encapsulating messages in password protected attachments, such as .pdf or .zip files. Influence operations may use spamoflauge to avoid spam filtering systems and increase the likelihood of the target audience receiving operation messaging. * **Belongs to tactic stage**: TA17 diff --git a/generated_pages/techniques/T0049.005.md b/generated_pages/techniques/T0049.005.md index e3a547b..699d738 100644 --- a/generated_pages/techniques/T0049.005.md +++ b/generated_pages/techniques/T0049.005.md @@ -1,6 +1,6 @@ # Technique T0049.005: Conduct Swarming -* **Summary**: Swarming refers to the coordinated use of accounts to overwhelm the information space with operation content. Unlike information flooding, swarming centers exclusively around a specific event or actor rather than a general narrative. Swarming relies on “horizontal communication” between information assets rather than a top-down, vertical command-and-control approach. +* **Summary**: Swarming refers to the coordinated use of accounts to overwhelm the information space with operation content. Unlike information flooding, swarming centres exclusively around a specific event or actor rather than a general narrative. Swarming relies on “horizontal communication” between information assets rather than a top-down, vertical command-and-control approach. * **Belongs to tactic stage**: TA17 diff --git a/generated_pages/techniques/T0049.006.md b/generated_pages/techniques/T0049.006.md index 3a46735..068f583 100644 --- a/generated_pages/techniques/T0049.006.md +++ b/generated_pages/techniques/T0049.006.md @@ -1,6 +1,6 @@ # Technique T0049.006: Conduct Keyword Squatting -* **Summary**: Keyword squatting refers to the creation of online content, such as websites, articles, or social media accounts, around a specific search engine-optimized term to overwhelm the search results of that term. 
An influence may keyword squat to increase content exposure to target audience members who query the exploited term in a search engine and manipulate the narrative around the term. +* **Summary**: Keyword squatting refers to the creation of online content, such as websites, articles, or social media accounts, around a specific search engine-optimised term to overwhelm the search results of that term. An influence operation may keyword squat to increase content exposure to target audience members who query the exploited term in a search engine and manipulate the narrative around the term. * **Belongs to tactic stage**: TA17 diff --git a/generated_pages/techniques/T0049.md b/generated_pages/techniques/T0049.md index 82c2548..d5f0a90 100644 --- a/generated_pages/techniques/T0049.md +++ b/generated_pages/techniques/T0049.md @@ -14,22 +14,7 @@ | Counters | Response types | | -------- | -------------- | -| [C00012 Platform regulation](../../generated_pages/counters/C00012.md) | D02 | -| [C00016 Censorship](../../generated_pages/counters/C00016.md) | D02 | -| [C00044 Keep people from posting to social media immediately](../../generated_pages/counters/C00044.md) | D03 | -| [C00072 Remove non-relevant content from special interest groups - not recommended](../../generated_pages/counters/C00072.md) | D02 | -| [C00074 Identify and delete or rate limit identical content](../../generated_pages/counters/C00074.md) | D02 | -| [C00085 Mute content](../../generated_pages/counters/C00085.md) | D03 | -| [C00086 Distract from noise with addictive content](../../generated_pages/counters/C00086.md) | D04 | -| [C00087 Make more noise than the disinformation](../../generated_pages/counters/C00087.md) | D04 | -| [C00091 Honeypot social community](../../generated_pages/counters/C00091.md) | D05 | -| [C00101 Create friction by rate-limiting engagement](../../generated_pages/counters/C00101.md) | D04 | -| [C00122 Content moderation](../../generated_pages/counters/C00122.md) | D02 | -| [C00123 Remove or rate limit botnets](../../generated_pages/counters/C00123.md) | D03 | -| [C00128 Create friction by marking content with ridicule or other "decelerants"](../../generated_pages/counters/C00128.md) | D03 | | [C00131 Seize and analyse botnet servers](../../generated_pages/counters/C00131.md) | D02 | -| [C00153 Take pre-emptive action against actors' infrastructure](../../generated_pages/counters/C00153.md) | D03 | -| [C00176 Improve Coordination amongst stakeholders: public and private](../../generated_pages/counters/C00176.md) | D07 | DO NOT EDIT ABOVE THIS LINE - PLEASE ADD NOTES BELOW \ No newline at end of file diff --git a/generated_pages/techniques/T0057.001.md b/generated_pages/techniques/T0057.001.md index d88af6a..fdc79a5 100644 --- a/generated_pages/techniques/T0057.001.md +++ b/generated_pages/techniques/T0057.001.md @@ -1,6 +1,6 @@ # Technique T0057.001: Pay for Physical Action -* **Summary**: Paying for physical action occurs when an influence operation pays individuals to act in the physical realm. An influence operation may pay for physical action to create specific situations and frame them in a way that supports operation narratives, for example, paying a group of people to burn a car to later post an image of the burning car and frame it as an act of protest. +* **Summary**: Paying for physical action occurs when an influence operation pays individuals to act in the physical realm. 
An influence operation may pay for physical action to create specific situations and frame them in a way that supports operation narratives, for example, paying a group of people to burn a car to later post an image of the burning car and frame it as an act of protest. * **Belongs to tactic stage**: TA10 diff --git a/generated_pages/techniques/T0057.002.md b/generated_pages/techniques/T0057.002.md index c8108bb..4a2f0ab 100644 --- a/generated_pages/techniques/T0057.002.md +++ b/generated_pages/techniques/T0057.002.md @@ -1,6 +1,6 @@ # Technique T0057.002: Conduct Symbolic Action -* **Summary**: Symbolic action refers to activities specifically intended to advance an operation’s narrative by signaling something to the audience, for example, a military parade supporting a state’s narrative of military superiority. An influence operation may use symbolic action to create falsified evidence supporting operation narratives in the physical information space. +* **Summary**: Symbolic action refers to activities specifically intended to advance an operation’s narrative by signalling something to the audience, for example, a military parade supporting a state’s narrative of military superiority. An influence operation may use symbolic action to create falsified evidence supporting operation narratives in the physical information space. * **Belongs to tactic stage**: TA10 diff --git a/generated_pages/techniques/T0057.md b/generated_pages/techniques/T0057.md index b3dd95a..908eb81 100644 --- a/generated_pages/techniques/T0057.md +++ b/generated_pages/techniques/T0057.md @@ -1,4 +1,4 @@ -# Technique T0057: Organize Events +# Technique T0057: Organise Events * **Summary**: Coordinate and promote real-world events across media platforms, e.g. rallies, protests, gatherings in support of incident narratives. 
@@ -16,24 +16,7 @@ | Counters | Response types | | -------- | -------------- | -| [C00012 Platform regulation](../../generated_pages/counters/C00012.md) | D02 | -| [C00016 Censorship](../../generated_pages/counters/C00016.md) | D02 | -| [C00036 Infiltrate the in-group to discredit leaders (divide)](../../generated_pages/counters/C00036.md) | D02 | -| [C00046 Marginalise and discredit extremist groups](../../generated_pages/counters/C00046.md) | D04 | -| [C00048 Name and Shame Influencers](../../generated_pages/counters/C00048.md) | D07 | -| [C00070 Block access to disinformation resources](../../generated_pages/counters/C00070.md) | D02 | -| [C00074 Identify and delete or rate limit identical content](../../generated_pages/counters/C00074.md) | D02 | -| [C00085 Mute content](../../generated_pages/counters/C00085.md) | D03 | -| [C00107 Content moderation](../../generated_pages/counters/C00107.md) | D02 | -| [C00122 Content moderation](../../generated_pages/counters/C00122.md) | D02 | -| [C00126 Social media amber alert](../../generated_pages/counters/C00126.md) | D03 | -| [C00128 Create friction by marking content with ridicule or other "decelerants"](../../generated_pages/counters/C00128.md) | D03 | -| [C00129 Use banking to cut off access ](../../generated_pages/counters/C00129.md) | D02 | -| [C00149 Poison the monitoring & evaluation data](../../generated_pages/counters/C00149.md) | D04 | -| [C00153 Take pre-emptive action against actors' infrastructure](../../generated_pages/counters/C00153.md) | D03 | -| [C00176 Improve Coordination amongst stakeholders: public and private](../../generated_pages/counters/C00176.md) | D07 | -| [C00211 Use humorous counter-narratives](../../generated_pages/counters/C00211.md) | D03 | -| [C00216 Use advertiser controls to stem flow of funds to bad actors](../../generated_pages/counters/C00216.md) | D02 | +| [C00129 Use banking to cut off access](../../generated_pages/counters/C00129.md) | D02 | DO NOT EDIT ABOVE THIS LINE - PLEASE ADD NOTES BELOW \ No newline at end of file diff --git a/generated_pages/techniques/T0059.md b/generated_pages/techniques/T0059.md index b8cd911..5c2140b 100644 --- a/generated_pages/techniques/T0059.md +++ b/generated_pages/techniques/T0059.md @@ -12,8 +12,6 @@ | Counters | Response types | | -------- | -------------- | -| [C00042 Address truth contained in narratives](../../generated_pages/counters/C00042.md) | D04 | -| [C00084 Modify disinformation narratives, and rebroadcast them](../../generated_pages/counters/C00084.md) | D03 | DO NOT EDIT ABOVE THIS LINE - PLEASE ADD NOTES BELOW \ No newline at end of file diff --git a/generated_pages/techniques/T0060.md b/generated_pages/techniques/T0060.md index 076414b..b51991b 100644 --- a/generated_pages/techniques/T0060.md +++ b/generated_pages/techniques/T0060.md @@ -12,17 +12,6 @@ | Counters | Response types | | -------- | -------------- | -| [C00046 Marginalise and discredit extremist groups](../../generated_pages/counters/C00046.md) | D04 | -| [C00048 Name and Shame Influencers](../../generated_pages/counters/C00048.md) | D07 | -| [C00073 Inoculate populations through media literacy training](../../generated_pages/counters/C00073.md) | D02 | -| [C00074 Identify and delete or rate limit identical content](../../generated_pages/counters/C00074.md) | D02 | -| [C00078 Change Search Algorithms for Disinformation Content](../../generated_pages/counters/C00078.md) | D03 | -| [C00085 Mute content](../../generated_pages/counters/C00085.md) | D03 | -| [C00117 Downgrade / de-amplify so 
message is seen by fewer people](../../generated_pages/counters/C00117.md) | D04 | -| [C00119 Engage payload and debunk.](../../generated_pages/counters/C00119.md) | D07 | -| [C00122 Content moderation](../../generated_pages/counters/C00122.md) | D02 | -| [C00123 Remove or rate limit botnets](../../generated_pages/counters/C00123.md) | D03 | -| [C00131 Seize and analyse botnet servers](../../generated_pages/counters/C00131.md) | D02 | | [C00138 Spam domestic actors with lawsuits](../../generated_pages/counters/C00138.md) | D03 | | [C00143 (botnet) DMCA takedown requests to waste group time](../../generated_pages/counters/C00143.md) | D04 | | [C00147 Make amplification of social media posts expire (e.g. can't like/ retweet after n days)](../../generated_pages/counters/C00147.md) | D03 | diff --git a/generated_pages/techniques/T0061.md b/generated_pages/techniques/T0061.md index d804f01..b859aab 100644 --- a/generated_pages/techniques/T0061.md +++ b/generated_pages/techniques/T0061.md @@ -12,20 +12,6 @@ | Counters | Response types | | -------- | -------------- | -| [C00012 Platform regulation](../../generated_pages/counters/C00012.md) | D02 | -| [C00046 Marginalise and discredit extremist groups](../../generated_pages/counters/C00046.md) | D04 | -| [C00048 Name and Shame Influencers](../../generated_pages/counters/C00048.md) | D07 | -| [C00058 Report crowdfunder as violator](../../generated_pages/counters/C00058.md) | D02 | -| [C00067 Denigrate the recipient/ project (of online funding)](../../generated_pages/counters/C00067.md) | D03 | -| [C00074 Identify and delete or rate limit identical content](../../generated_pages/counters/C00074.md) | D02 | -| [C00085 Mute content](../../generated_pages/counters/C00085.md) | D03 | -| [C00107 Content moderation](../../generated_pages/counters/C00107.md) | D02 | -| [C00122 Content moderation](../../generated_pages/counters/C00122.md) | D02 | -| [C00128 Create friction by marking content with ridicule or other "decelerants"](../../generated_pages/counters/C00128.md) | D03 | -| [C00129 Use banking to cut off access ](../../generated_pages/counters/C00129.md) | D02 | -| [C00153 Take pre-emptive action against actors' infrastructure](../../generated_pages/counters/C00153.md) | D03 | -| [C00176 Improve Coordination amongst stakeholders: public and private](../../generated_pages/counters/C00176.md) | D07 | -| [C00216 Use advertiser controls to stem flow of funds to bad actors](../../generated_pages/counters/C00216.md) | D02 | DO NOT EDIT ABOVE THIS LINE - PLEASE ADD NOTES BELOW \ No newline at end of file diff --git a/generated_pages/techniques/T0068.md b/generated_pages/techniques/T0068.md index 3afdeaf..235f391 100644 --- a/generated_pages/techniques/T0068.md +++ b/generated_pages/techniques/T0068.md @@ -1,6 +1,6 @@ # Technique T0068: Respond to Breaking News Event or Active Crisis -* **Summary**: Media attention on a story or event is heightened during a breaking news event, where unclear facts and incomplete information increase speculation, rumors, and conspiracy theories, which are all vulnerable to manipulation. +* **Summary**: Media attention on a story or event is heightened during a breaking news event, where unclear facts and incomplete information increase speculation, rumours, and conspiracy theories, which are all vulnerable to manipulation. 
* **Belongs to tactic stage**: TA14 diff --git a/generated_pages/techniques/T0072.001.md b/generated_pages/techniques/T0072.001.md index 366e630..7f5732f 100644 --- a/generated_pages/techniques/T0072.001.md +++ b/generated_pages/techniques/T0072.001.md @@ -1,6 +1,6 @@ # Technique T0072.001: Geographic Segmentation -* **Summary**: An influence operation may target populations in a specific geographic location, such as a region, state, or city. An influence operation may use geographic segmentation to Create Localized Content (see: Establish Legitimacy). +* **Summary**: An influence operation may target populations in a specific geographic location, such as a region, state, or city. An influence operation may use geographic segmentation to Create Localised Content (see: Establish Legitimacy). * **Belongs to tactic stage**: TA13 diff --git a/generated_pages/techniques/T0072.002.md b/generated_pages/techniques/T0072.002.md index 5cf2870..8163ce7 100644 --- a/generated_pages/techniques/T0072.002.md +++ b/generated_pages/techniques/T0072.002.md @@ -1,6 +1,6 @@ # Technique T0072.002: Demographic Segmentation -* **Summary**: An influence operation may target populations based on demographic segmentation, including age, gender, and income. Demographic segmentation may be useful for influence operations aiming to change state policies that affect a specific population sector. For example, an influence operation attempting to influence Medicare funding in the United States would likely target U.S. voters over 65 years of age. +* **Summary**: An influence operation may target populations based on demographic segmentation, including age, gender, and income. Demographic segmentation may be useful for influence operations aiming to change state policies that affect a specific population sector. For example, an influence operation attempting to influence Medicare funding in the United States would likely target U.S. voters over 65 years of age. * **Belongs to tactic stage**: TA13 diff --git a/generated_pages/techniques/T0072.003.md b/generated_pages/techniques/T0072.003.md index c5e5cdb..ab8aa20 100644 --- a/generated_pages/techniques/T0072.003.md +++ b/generated_pages/techniques/T0072.003.md @@ -1,6 +1,6 @@ # Technique T0072.003: Economic Segmentation -* **Summary**: An influence operation may target populations based on their income bracket, wealth, or other financial or economic division. +* **Summary**: An influence operation may target populations based on their income bracket, wealth, or other financial or economic division. * **Belongs to tactic stage**: TA13 diff --git a/generated_pages/techniques/T0072.004.md b/generated_pages/techniques/T0072.004.md index 9351bc2..8f8c01b 100644 --- a/generated_pages/techniques/T0072.004.md +++ b/generated_pages/techniques/T0072.004.md @@ -1,6 +1,6 @@ # Technique T0072.004: Psychographic Segmentation -* **Summary**: An influence operation may target populations based on psychographic segmentation, which uses audience values and decision-making processes. An operation may individually gather psychographic data with its own surveys or collection tools or externally purchase data from social media companies or online surveys, such as personality quizzes. +* **Summary**: An influence operation may target populations based on psychographic segmentation, which uses audience values and decision-making processes. 
An operation may individually gather psychographic data with its own surveys or collection tools or externally purchase data from social media companies or online surveys, such as personality quizzes. * **Belongs to tactic stage**: TA13 diff --git a/generated_pages/techniques/T0072.005.md b/generated_pages/techniques/T0072.005.md index 03e6fbe..f5a4fcf 100644 --- a/generated_pages/techniques/T0072.005.md +++ b/generated_pages/techniques/T0072.005.md @@ -1,6 +1,6 @@ # Technique T0072.005: Political Segmentation -* **Summary**: An influence operation may target populations based on their political affiliations, especially when aiming to manipulate voting or change policy. +* **Summary**: An influence operation may target populations based on their political affiliations, especially when aiming to manipulate voting or change policy. * **Belongs to tactic stage**: TA13 diff --git a/generated_pages/techniques/T0072.md b/generated_pages/techniques/T0072.md index 3d8760b..97a8b5b 100644 --- a/generated_pages/techniques/T0072.md +++ b/generated_pages/techniques/T0072.md @@ -1,6 +1,6 @@ # Technique T0072: Segment Audiences -* **Summary**: Create audience segmentations by features of interest to the influence campaign, including political affiliation, geographic location, income, demographics, and psychographics. +* **Summary**: Create audience segmentations by features of interest to the influence campaign, including political affiliation, geographic location, income, demographics, and psychographics. * **Belongs to tactic stage**: TA13 diff --git a/generated_pages/techniques/T0073.md b/generated_pages/techniques/T0073.md index 19bc2fd..a97d932 100644 --- a/generated_pages/techniques/T0073.md +++ b/generated_pages/techniques/T0073.md @@ -1,6 +1,6 @@ # Technique T0073: Determine Target Audiences -* **Summary**: Determining the target audiences (segments of the population) who will receive campaign narratives and artifacts intended to achieve the strategic ends. +* **Summary**: Determining the target audiences (segments of the population) who will receive campaign narratives and artefacts intended to achieve the strategic ends. * **Belongs to tactic stage**: TA01 diff --git a/generated_pages/techniques/T0074.md b/generated_pages/techniques/T0074.md index 65649f9..6790db4 100644 --- a/generated_pages/techniques/T0074.md +++ b/generated_pages/techniques/T0074.md @@ -1,6 +1,6 @@ # Technique T0074: Determine Strategic Ends -* **Summary**: Determining the campaigns goals or objectives. Examples include achieving achieving geopolitical advantage like undermining trust in an adversary, gaining domestic political advantage, achieving financial gain, or attaining a policy change, +* **Summary**: Determining the campaign's goals or objectives. Examples include achieving geopolitical advantage like undermining trust in an adversary, gaining domestic political advantage, achieving financial gain, or attaining a policy change. * **Belongs to tactic stage**: TA01 diff --git a/generated_pages/techniques/T0075.md b/generated_pages/techniques/T0075.md index ff55257..9b4f458 100644 --- a/generated_pages/techniques/T0075.md +++ b/generated_pages/techniques/T0075.md @@ -1,6 +1,6 @@ # Technique T0075: Dismiss -* **Summary**: Push back against criticism by dismissing your critics. This might be arguing that the critics use a different standard for you than with other actors or themselves; or arguing that their criticism is biased. +* **Summary**: Push back against criticism by dismissing your critics. 
This might be arguing that the critics use a different standard for you than with other actors or themselves; or arguing that their criticism is biased. * **Belongs to tactic stage**: TA02 diff --git a/generated_pages/techniques/T0076.md b/generated_pages/techniques/T0076.md index b391342..38ed0d6 100644 --- a/generated_pages/techniques/T0076.md +++ b/generated_pages/techniques/T0076.md @@ -1,6 +1,6 @@ # Technique T0076: Distort -* **Summary**: Twist the narrative. Take information, or artifacts like images, and change the framing around them. +* **Summary**: Twist the narrative. Take information, or artefacts like images, and change the framing around them. * **Belongs to tactic stage**: TA02 diff --git a/generated_pages/techniques/T0080.001.md b/generated_pages/techniques/T0080.001.md index b43c5a5..b0987ee 100644 --- a/generated_pages/techniques/T0080.001.md +++ b/generated_pages/techniques/T0080.001.md @@ -1,6 +1,6 @@ # Technique T0080.001: Monitor Social Media Analytics -* **Summary**: An influence operation may use social media analytics to determine which factors will increase the operation content’s exposure to its target audience on social media platforms, including views, interactions, and sentiment relating to topics and content types. The social media platform itself or a third-party tool may collect the metrics. +* **Summary**: An influence operation may use social media analytics to determine which factors will increase the operation content’s exposure to its target audience on social media platforms, including views, interactions, and sentiment relating to topics and content types. The social media platform itself or a third-party tool may collect the metrics. * **Belongs to tactic stage**: TA13 diff --git a/generated_pages/techniques/T0080.002.md b/generated_pages/techniques/T0080.002.md index 7120a99..713c2f6 100644 --- a/generated_pages/techniques/T0080.002.md +++ b/generated_pages/techniques/T0080.002.md @@ -1,6 +1,6 @@ # Technique T0080.002: Evaluate Media Surveys -* **Summary**: An influence operation may evaluate its own or third-party media surveys to determine what type of content appeals to its target audience. Media surveys may provide insight into an audience’s political views, social class, general interests, or other indicators used to tailor operation messaging to its target audience. +* **Summary**: An influence operation may evaluate its own or third-party media surveys to determine what type of content appeals to its target audience. Media surveys may provide insight into an audience’s political views, social class, general interests, or other indicators used to tailor operation messaging to its target audience. * **Belongs to tactic stage**: TA13 diff --git a/generated_pages/techniques/T0080.003.md b/generated_pages/techniques/T0080.003.md index 9923af4..6afed42 100644 --- a/generated_pages/techniques/T0080.003.md +++ b/generated_pages/techniques/T0080.003.md @@ -1,6 +1,6 @@ # Technique T0080.003: Identify Trending Topics/Hashtags -* **Summary**: An influence operation may identify trending hashtags on social media platforms for later use in boosting operation content. A hashtag40 refers to a word or phrase preceded by the hash symbol (#) on social media used to identify messages and posts relating to a specific topic. All public posts that use the same hashtag are aggregated onto a centralized page dedicated to the word or phrase and sorted either chronologically or by popularity. 
+* **Summary**: An influence operation may identify trending hashtags on social media platforms for later use in boosting operation content. A hashtag refers to a word or phrase preceded by the hash symbol (#) on social media used to identify messages and posts relating to a specific topic. All public posts that use the same hashtag are aggregated onto a centralised page dedicated to the word or phrase and sorted either chronologically or by popularity. * **Belongs to tactic stage**: TA13 diff --git a/generated_pages/techniques/T0080.004.md b/generated_pages/techniques/T0080.004.md index cd86e6d..b77b6e5 100644 --- a/generated_pages/techniques/T0080.004.md +++ b/generated_pages/techniques/T0080.004.md @@ -1,6 +1,6 @@ # Technique T0080.004: Conduct Web Traffic Analysis -* **Summary**: An influence operation may conduct web traffic analysis to determine which search engines, keywords, websites, and advertisements gain the most traction with its target audience. +* **Summary**: An influence operation may conduct web traffic analysis to determine which search engines, keywords, websites, and advertisements gain the most traction with its target audience. * **Belongs to tactic stage**: TA13 diff --git a/generated_pages/techniques/T0080.005.md b/generated_pages/techniques/T0080.005.md index 1ce7359..637792b 100644 --- a/generated_pages/techniques/T0080.005.md +++ b/generated_pages/techniques/T0080.005.md @@ -1,6 +1,6 @@ # Technique T0080.005: Assess Degree/Type of Media Access -* **Summary**: An influence operation may survey a target audience’s Internet availability and degree of media freedom to determine which target audience members will have access to operation content and on which platforms. An operation may face more difficulty targeting an information environment with heavy restrictions and media control than an environment with independent media, freedom of speech and of the press, and individual liberties. +* **Summary**: An influence operation may survey a target audience’s Internet availability and degree of media freedom to determine which target audience members will have access to operation content and on which platforms. An operation may face more difficulty targeting an information environment with heavy restrictions and media control than an environment with independent media, freedom of speech and of the press, and individual liberties. * **Belongs to tactic stage**: TA13 diff --git a/generated_pages/techniques/T0080.md b/generated_pages/techniques/T0080.md index 6e40cde..b11fe98 100644 --- a/generated_pages/techniques/T0080.md +++ b/generated_pages/techniques/T0080.md @@ -1,7 +1,6 @@ # Technique T0080: Map Target Audience Information Environment -* **Summary**: Mapping the target audience information environment analyzes the information space itself, including social media analytics, web traffic, and media surveys. Mapping the information environment may help the influence operation determine the most realistic and popular information channels to reach its target audience. -Mapping the target audience information environment aids influence operations in determining the most vulnerable areas of the information space to target with messaging. +* **Summary**: Mapping the target audience information environment analyses the information space itself, including social media analytics, web traffic, and media surveys. Mapping the information environment may help the influence operation determine the most realistic and popular information channels to reach its target audience. 
Mapping the target audience information environment aids influence operations in determining the most vulnerable areas of the information space to target with messaging. * **Belongs to tactic stage**: TA13 diff --git a/generated_pages/techniques/T0081.001.md b/generated_pages/techniques/T0081.001.md index a9af349..53ccc20 100644 --- a/generated_pages/techniques/T0081.001.md +++ b/generated_pages/techniques/T0081.001.md @@ -1,6 +1,6 @@ # Technique T0081.001: Find Echo Chambers -* **Summary**: Find or plan to create areas (social media groups, search term groups, hashtag groups etc) where individuals only engage with people they agree with. +* **Summary**: Find or plan to create areas (social media groups, search term groups, hashtag groups etc) where individuals only engage with people they agree with. * **Belongs to tactic stage**: TA13 diff --git a/generated_pages/techniques/T0081.002.md b/generated_pages/techniques/T0081.002.md index 5247e57..e9d2c8b 100644 --- a/generated_pages/techniques/T0081.002.md +++ b/generated_pages/techniques/T0081.002.md @@ -1,7 +1,6 @@ # Technique T0081.002: Identify Data Voids -* **Summary**: A data void refers to a word or phrase that results in little, manipulative, or low-quality search engine data. Data voids are hard to detect and relatively harmless until exploited by an entity aiming to quickly proliferate false or misleading information during a phenomenon that causes a high number of individuals to query the term or phrase. In the Plan phase, an influence operation may identify data voids for later exploitation in the operation. -A 2019 report by Michael Golebiewski identifies five types of data voids. (1) “Breaking news” data voids occur when a keyword gains popularity during a short period of time, allowing an influence operation to publish false content before legitimate news outlets have an opportunity to publish relevant information. (2) An influence operation may create a “strategic new terms” data void by creating their own terms and publishing information online before promoting their keyword to the target audience. (3) An influence operation may publish content on “outdated terms” that have decreased in popularity, capitalizing on most search engines’ preferences for recency. (4) “Fragmented concepts” data voids separate connections between similar ideas, isolating segment queries to distinct search engine results. (5) An influence operation may use “problematic queries” that previously resulted in disturbing or inappropriate content to promote messaging until mainstream media recontextualizes the term. +* **Summary**: A data void refers to a word or phrase that results in little, manipulative, or low-quality search engine data. Data voids are hard to detect and relatively harmless until exploited by an entity aiming to quickly proliferate false or misleading information during a phenomenon that causes a high number of individuals to query the term or phrase. In the Plan phase, an influence operation may identify data voids for later exploitation in the operation. A 2019 report by Michael Golebiewski identifies five types of data voids. (1) “Breaking news” data voids occur when a keyword gains popularity during a short period of time, allowing an influence operation to publish false content before legitimate news outlets have an opportunity to publish relevant information. 
(2) An influence operation may create a “strategic new terms” data void by creating its own terms and publishing information online before promoting its keyword to the target audience. (3) An influence operation may publish content on “outdated terms” that have decreased in popularity, capitalising on most search engines’ preferences for recency. (4) “Fragmented concepts” data voids separate connections between similar ideas, isolating segment queries to distinct search engine results. (5) An influence operation may use “problematic queries” that previously resulted in disturbing or inappropriate content to promote messaging until mainstream media recontextualises the term. * **Belongs to tactic stage**: TA13 diff --git a/generated_pages/techniques/T0081.003.md b/generated_pages/techniques/T0081.003.md index 3a8168d..e81d50d 100644 --- a/generated_pages/techniques/T0081.003.md +++ b/generated_pages/techniques/T0081.003.md @@ -1,6 +1,6 @@ # Technique T0081.003: Identify Existing Prejudices -* **Summary**: An influence operation may exploit existing racial, religious, demographic, or social prejudices to further polarize its target audience from the rest of the public. +* **Summary**: An influence operation may exploit existing racial, religious, demographic, or social prejudices to further polarise its target audience from the rest of the public. * **Belongs to tactic stage**: TA13 diff --git a/generated_pages/techniques/T0081.004.md b/generated_pages/techniques/T0081.004.md index 627da38..a76001d 100644 --- a/generated_pages/techniques/T0081.004.md +++ b/generated_pages/techniques/T0081.004.md @@ -1,6 +1,6 @@ # Technique T0081.004: Identify Existing Fissures -* **Summary**: An influence operation may identify existing fissures to pit target populations against one another or facilitate a “divide-and-conquer" approach to tailor operation narratives along the divides. +* **Summary**: An influence operation may identify existing fissures to pit target populations against one another or facilitate a “divide-and-conquer" approach to tailor operation narratives along the divides. * **Belongs to tactic stage**: TA13 diff --git a/generated_pages/techniques/T0081.005.md b/generated_pages/techniques/T0081.005.md index ca663d8..17a206b 100644 --- a/generated_pages/techniques/T0081.005.md +++ b/generated_pages/techniques/T0081.005.md @@ -1,6 +1,6 @@ # Technique T0081.005: Identify Existing Conspiracy Narratives/Suspicions -* **Summary**: An influence operation may assess preexisting conspiracy theories or suspicions in a population to identify existing narratives that support operational objectives. +* **Summary**: An influence operation may assess preexisting conspiracy theories or suspicions in a population to identify existing narratives that support operational objectives. 
+* **Summary**: A wedge issue is a divisive political issue, usually concerning a social phenomenon, that divides individuals along a defined line. An influence operation may exploit wedge issues by intentionally polarising the public along the wedge issue line and encouraging opposition between factions. * **Belongs to tactic stage**: TA13 diff --git a/generated_pages/techniques/T0081.007.md b/generated_pages/techniques/T0081.007.md index 9f6cb96..2f8f484 100644 --- a/generated_pages/techniques/T0081.007.md +++ b/generated_pages/techniques/T0081.007.md @@ -1,6 +1,6 @@ # Technique T0081.007: Identify Target Audience Adversaries -* **Summary**: An influence operation may identify or create a real or imaginary adversary to center operation narratives against. A real adversary may include certain politicians or political parties while imaginary adversaries may include falsified “deep state”62 actors that, according to conspiracies, run the state behind public view. +* **Summary**: An influence operation may identify or create a real or imaginary adversary to centre operation narratives against. A real adversary may include certain politicians or political parties while imaginary adversaries may include falsified “deep state” actors that, according to conspiracies, run the state behind public view. * **Belongs to tactic stage**: TA13 diff --git a/generated_pages/techniques/T0081.008.md b/generated_pages/techniques/T0081.008.md index 03a651c..b641bb8 100644 --- a/generated_pages/techniques/T0081.008.md +++ b/generated_pages/techniques/T0081.008.md @@ -1,6 +1,6 @@ # Technique T0081.008: Identify Media System Vulnerabilities -* **Summary**: An influence operation may exploit existing weaknesses in a target’s media system. These weaknesses may include existing biases among media agencies, vulnerability to false news agencies on social media, or existing distrust of traditional media sources. An existing distrust among the public in the media system’s credibility holds high potential for exploitation by an influence operation when establishing alternative news agencies to spread operation content. +* **Summary**: An influence operation may exploit existing weaknesses in a target’s media system. These weaknesses may include existing biases among media agencies, vulnerability to false news agencies on social media, or existing distrust of traditional media sources. An existing distrust among the public in the media system’s credibility holds high potential for exploitation by an influence operation when establishing alternative news agencies to spread operation content. * **Belongs to tactic stage**: TA13 diff --git a/generated_pages/techniques/T0081.md b/generated_pages/techniques/T0081.md index dc6b997..4a1680c 100644 --- a/generated_pages/techniques/T0081.md +++ b/generated_pages/techniques/T0081.md @@ -1,7 +1,6 @@ # Technique T0081: Identify Social and Technical Vulnerabilities -* **Summary**: Identifying social and technical vulnerabilities determines weaknesses within the target audience information environment for later exploitation. Vulnerabilities include decisive political issues, weak cybersecurity infrastructure, search engine data voids, and other technical and non technical weaknesses in the target information environment. -Identifying social and technical vulnerabilities facilitates the later exploitation of the identified weaknesses to advance operation objectives. 
+* **Summary**: Identifying social and technical vulnerabilities determines weaknesses within the target audience information environment for later exploitation. Vulnerabilities include decisive political issues, weak cybersecurity infrastructure, search engine data voids, and other technical and non technical weaknesses in the target information environment. Identifying social and technical vulnerabilities facilitates the later exploitation of the identified weaknesses to advance operation objectives. * **Belongs to tactic stage**: TA13 diff --git a/generated_pages/techniques/T0082.md b/generated_pages/techniques/T0082.md index 4ff7c6d..6f65637 100644 --- a/generated_pages/techniques/T0082.md +++ b/generated_pages/techniques/T0082.md @@ -1,6 +1,6 @@ # Technique T0082: Develop New Narratives -* **Summary**: Actors may develop new narratives to further strategic or tactical goals, especially when existing narratives adequately align with the campaign goals. New narratives provide more control in terms of crafting the message to achieve specific goals. However, new narratives may require more effort to disseminate than adapting or adopting existing narratives. +* **Summary**: Actors may develop new narratives to further strategic or tactical goals, especially when existing narratives adequately align with the campaign goals. New narratives provide more control in terms of crafting the message to achieve specific goals. However, new narratives may require more effort to disseminate than adapting or adopting existing narratives. * **Belongs to tactic stage**: TA14 diff --git a/generated_pages/techniques/T0083.md b/generated_pages/techniques/T0083.md index 0ff7fea..7bd17c8 100644 --- a/generated_pages/techniques/T0083.md +++ b/generated_pages/techniques/T0083.md @@ -1,6 +1,6 @@ # Technique T0083: Integrate Target Audience Vulnerabilities into Narrative -* **Summary**: An influence operation may seek to exploit the preexisting weaknesses, fears, and enemies of the target audience for integration into the operation’s narratives and overall strategy. Integrating existing vulnerabilities into the operational approach conserves resources by exploiting already weak areas of the target information environment instead of forcing the operation to create new vulnerabilities in the environment. +* **Summary**: An influence operation may seek to exploit the preexisting weaknesses, fears, and enemies of the target audience for integration into the operation’s narratives and overall strategy. Integrating existing vulnerabilities into the operational approach conserves resources by exploiting already weak areas of the target information environment instead of forcing the operation to create new vulnerabilities in the environment. * **Belongs to tactic stage**: TA14 diff --git a/generated_pages/techniques/T0084.001.md b/generated_pages/techniques/T0084.001.md index bbd7ce2..cf09645 100644 --- a/generated_pages/techniques/T0084.001.md +++ b/generated_pages/techniques/T0084.001.md @@ -1,6 +1,6 @@ # Technique T0084.001: Use Copypasta -* **Summary**: Copypasta refers to a piece of text that has been copied and pasted multiple times across various online platforms. A copypasta’s final form may differ from its original source text as users add, delete, or otherwise edit the content as they repost the text. +* **Summary**: Copypasta refers to a piece of text that has been copied and pasted multiple times across various online platforms. 
A copypasta’s final form may differ from its original source text as users add, delete, or otherwise edit the content as they repost the text. * **Belongs to tactic stage**: TA06 diff --git a/generated_pages/techniques/T0084.002.md b/generated_pages/techniques/T0084.002.md index 485b2e0..973b504 100644 --- a/generated_pages/techniques/T0084.002.md +++ b/generated_pages/techniques/T0084.002.md @@ -1,6 +1,6 @@ -# Technique T0084.002: Plagiarize Content +# Technique T0084.002: Plagiarise Content -* **Summary**: An influence operation may take content from other sources without proper attribution. This content may be either misinformation content shared by others without malicious intent but now leveraged by the campaign as disinformation or disinformation content from other sources. +* **Summary**: An influence operation may take content from other sources without proper attribution. This content may be either misinformation content shared by others without malicious intent but now leveraged by the campaign as disinformation or disinformation content from other sources. * **Belongs to tactic stage**: TA06 diff --git a/generated_pages/techniques/T0084.003.md b/generated_pages/techniques/T0084.003.md index c1dedbe..967e960 100644 --- a/generated_pages/techniques/T0084.003.md +++ b/generated_pages/techniques/T0084.003.md @@ -1,6 +1,6 @@ -# Technique T0084.003: Deceptively Labeled or Translated +# Technique T0084.003: Deceptively Labelled or Translated -* **Summary**: An influence operation may take authentic content from other sources and add deceptive labels or deceptively translate the content into other langauges. +* **Summary**: An influence operation may take authentic content from other sources and add deceptive labels or deceptively translate the content into other languages. * **Belongs to tactic stage**: TA06 diff --git a/generated_pages/techniques/T0084.004.md b/generated_pages/techniques/T0084.004.md index d0f4be7..d144b9c 100644 --- a/generated_pages/techniques/T0084.004.md +++ b/generated_pages/techniques/T0084.004.md @@ -1,6 +1,6 @@ # Technique T0084.004: Appropriate Content -* **Summary**: An influence operation may take content from other sources with proper attribution. This content may be either misinformation content shared by others without malicious intent but now leveraged by the campaign as disinformation or disinformation content from other sources. Examples include the appropriation of content from one inauthentic news site to another inauthentic news site or network in ways that align with the originators licensing or terms of service. +* **Summary**: An influence operation may take content from other sources with proper attribution. This content may be either misinformation content shared by others without malicious intent but now leveraged by the campaign as disinformation or disinformation content from other sources. Examples include the appropriation of content from one inauthentic news site to another inauthentic news site or network in ways that align with the originator’s licensing or terms of service. * **Belongs to tactic stage**: TA06 diff --git a/generated_pages/techniques/T0084.md b/generated_pages/techniques/T0084.md index b3ef09e..71248e5 100644 --- a/generated_pages/techniques/T0084.md +++ b/generated_pages/techniques/T0084.md @@ -1,6 +1,6 @@ # Technique T0084: Reuse Existing Content -* **Summary**: When an operation recycles content from its own previous operations or plagiarizes from external operations. 
An operation may launder information to conserve resources that would have otherwise been utilized to develop new content. +* **Summary**: When an operation recycles content from its own previous operations or plagiarises from external operations. An operation may launder information to conserve resources that would have otherwise been utilised to develop new content. * **Belongs to tactic stage**: TA06 diff --git a/generated_pages/techniques/T0085.001.md b/generated_pages/techniques/T0085.001.md index ffdd029..8bb0573 100644 --- a/generated_pages/techniques/T0085.001.md +++ b/generated_pages/techniques/T0085.001.md @@ -1,6 +1,6 @@ # Technique T0085.001: Develop AI-Generated Text -* **Summary**: AI-generated texts refers to synthetic text composed by computers using text-generating AI technology. Autonomous generation refers to content created by a bot without human input, also known as bot-created content generation. Autonomous generation represents the next step in automation after language generation and may lead to automated journalism. An influence operation may use read fakes or autonomous generation to quickly develop and distribute content to the target audience. +* **Summary**: AI-generated texts refers to synthetic text composed by computers using text-generating AI technology. Autonomous generation refers to content created by a bot without human input, also known as bot-created content generation. Autonomous generation represents the next step in automation after language generation and may lead to automated journalism. An influence operation may use read fakes or autonomous generation to quickly develop and distribute content to the target audience. * **Belongs to tactic stage**: TA06 diff --git a/generated_pages/techniques/T0085.003.md b/generated_pages/techniques/T0085.003.md index 77e5737..bf05712 100644 --- a/generated_pages/techniques/T0085.003.md +++ b/generated_pages/techniques/T0085.003.md @@ -1,6 +1,6 @@ # Technique T0085.003: Develop Inauthentic News Articles -* **Summary**: An influence operation may develop false or misleading news articles aligned to their campaign goals or narratives. +* **Summary**: An influence operation may develop false or misleading news articles aligned to their campaign goals or narratives. * **Belongs to tactic stage**: TA06 diff --git a/generated_pages/techniques/T0085.md b/generated_pages/techniques/T0085.md index e09acc3..422db58 100644 --- a/generated_pages/techniques/T0085.md +++ b/generated_pages/techniques/T0085.md @@ -1,6 +1,6 @@ # Technique T0085: Develop Text-Based Content -* **Summary**: Creating and editing false or misleading text-based artifacts, often aligned with one or more specific narratives, for use in a disinformation campaign. +* **Summary**: Creating and editing false or misleading text-based artefacts, often aligned with one or more specific narratives, for use in a disinformation campaign. * **Belongs to tactic stage**: TA06 diff --git a/generated_pages/techniques/T0086.003.md b/generated_pages/techniques/T0086.003.md index b04ac9d..96b088d 100644 --- a/generated_pages/techniques/T0086.003.md +++ b/generated_pages/techniques/T0086.003.md @@ -1,6 +1,6 @@ # Technique T0086.003: Deceptively Edit Images (Cheap Fakes) -* **Summary**: Cheap fakes utilize less sophisticated measures of altering an image, video, or audio for example, slowing, speeding, or cutting footage to create a false context surrounding an image or event. 
+* **Summary**: Cheap fakes utilise less sophisticated measures of altering an image, video, or audio for example, slowing, speeding, or cutting footage to create a false context surrounding an image or event. * **Belongs to tactic stage**: TA06 diff --git a/generated_pages/techniques/T0086.md b/generated_pages/techniques/T0086.md index 0664c9e..df5dd64 100644 --- a/generated_pages/techniques/T0086.md +++ b/generated_pages/techniques/T0086.md @@ -1,6 +1,6 @@ # Technique T0086: Develop Image-Based Content -* **Summary**: Creating and editing false or misleading visual artifacts, often aligned with one or more specific narratives, for use in a disinformation campaign. This may include photographing staged real-life situations, repurposing existing digital images, or using image creation and editing technologies. +* **Summary**: Creating and editing false or misleading visual artefacts, often aligned with one or more specific narratives, for use in a disinformation campaign. This may include photographing staged real-life situations, repurposing existing digital images, or using image creation and editing technologies. * **Belongs to tactic stage**: TA06 diff --git a/generated_pages/techniques/T0087.002.md b/generated_pages/techniques/T0087.002.md index ee4fa04..68e47a3 100644 --- a/generated_pages/techniques/T0087.002.md +++ b/generated_pages/techniques/T0087.002.md @@ -1,6 +1,6 @@ # Technique T0087.002: Deceptively Edit Video (Cheap Fakes) -* **Summary**: Cheap fakes utilize less sophisticated measures of altering an image, video, or audio for example, slowing, speeding, or cutting footage to create a false context surrounding an image or event. +* **Summary**: Cheap fakes utilise less sophisticated measures of altering an image, video, or audio for example, slowing, speeding, or cutting footage to create a false context surrounding an image or event. * **Belongs to tactic stage**: TA06 diff --git a/generated_pages/techniques/T0087.md b/generated_pages/techniques/T0087.md index 341b912..e9cac90 100644 --- a/generated_pages/techniques/T0087.md +++ b/generated_pages/techniques/T0087.md @@ -1,6 +1,6 @@ # Technique T0087: Develop Video-Based Content -* **Summary**: Creating and editing false or misleading video artifacts, often aligned with one or more specific narratives, for use in a disinformation campaign. This may include staging videos of purportedly real situations, repurposing existing video artifacts, or using AI-generated video creation and editing technologies (including deepfakes). +* **Summary**: Creating and editing false or misleading video artefacts, often aligned with one or more specific narratives, for use in a disinformation campaign. This may include staging videos of purportedly real situations, repurposing existing video artefacts, or using AI-generated video creation and editing technologies (including deepfakes). * **Belongs to tactic stage**: TA06 diff --git a/generated_pages/techniques/T0088.002.md b/generated_pages/techniques/T0088.002.md index c3f43e8..af88bf0 100644 --- a/generated_pages/techniques/T0088.002.md +++ b/generated_pages/techniques/T0088.002.md @@ -1,6 +1,6 @@ # Technique T0088.002: Deceptively Edit Audio (Cheap Fakes) -* **Summary**: Cheap fakes utilize less sophisticated measures of altering an image, video, or audio for example, slowing, speeding, or cutting footage to create a false context surrounding an image or event. 
+* **Summary**: Cheap fakes utilise less sophisticated measures of altering an image, video, or audio for example, slowing, speeding, or cutting footage to create a false context surrounding an image or event. * **Belongs to tactic stage**: TA06 diff --git a/generated_pages/techniques/T0088.md b/generated_pages/techniques/T0088.md index c28ad11..4f6e9c8 100644 --- a/generated_pages/techniques/T0088.md +++ b/generated_pages/techniques/T0088.md @@ -1,6 +1,6 @@ # Technique T0088: Develop Audio-Based Content -* **Summary**: Creating and editing false or misleading audio artifacts, often aligned with one or more specific narratives, for use in a disinformation campaign. This may include creating completely new audio content, repurposing existing audio artifacts (including cheap fakes), or using AI-generated audio creation and editing technologies (including deepfakes). +* **Summary**: Creating and editing false or misleading audio artefacts, often aligned with one or more specific narratives, for use in a disinformation campaign. This may include creating completely new audio content, repurposing existing audio artefacts (including cheap fakes), or using AI-generated audio creation and editing technologies (including deepfakes). * **Belongs to tactic stage**: TA06 diff --git a/generated_pages/techniques/T0090.001.md b/generated_pages/techniques/T0090.001.md index bfe8dcf..f51f764 100644 --- a/generated_pages/techniques/T0090.001.md +++ b/generated_pages/techniques/T0090.001.md @@ -1,6 +1,6 @@ # Technique T0090.001: Create Anonymous Accounts -* **Summary**: Anonymous accounts or anonymous users refer to users that access network resources without providing a username or password. An influence operation may use anonymous accounts to spread content without direct attribution to the operation. +* **Summary**: Anonymous accounts or anonymous users refer to users that access network resources without providing a username or password. An influence operation may use anonymous accounts to spread content without direct attribution to the operation. * **Belongs to tactic stage**: TA15 diff --git a/generated_pages/techniques/T0090.002.md b/generated_pages/techniques/T0090.002.md index 52698a5..f0990a1 100644 --- a/generated_pages/techniques/T0090.002.md +++ b/generated_pages/techniques/T0090.002.md @@ -1,6 +1,6 @@ # Technique T0090.002: Create Cyborg Accounts -* **Summary**: Cyborg accounts refer to partly manned, partly automated social media accounts. Cyborg accounts primarily act as bots, but a human operator periodically takes control of the account to engage with real social media users by responding to comments and posting original content. Influence operations may use cyborg accounts to reduce the amount of direct human input required to maintain a regular account but increase the apparent legitimacy of the cyborg account by occasionally breaking its bot-like behavior with human interaction. +* **Summary**: Cyborg accounts refer to partly manned, partly automated social media accounts. Cyborg accounts primarily act as bots, but a human operator periodically takes control of the account to engage with real social media users by responding to comments and posting original content. Influence operations may use cyborg accounts to reduce the amount of direct human input required to maintain a regular account but increase the apparent legitimacy of the cyborg account by occasionally breaking its bot-like behaviour with human interaction. 
* **Belongs to tactic stage**: TA15 diff --git a/generated_pages/techniques/T0090.003.md b/generated_pages/techniques/T0090.003.md index dacce4e..7581efb 100644 --- a/generated_pages/techniques/T0090.003.md +++ b/generated_pages/techniques/T0090.003.md @@ -1,7 +1,6 @@ # Technique T0090.003: Create Bot Accounts -* **Summary**: Bots refer to autonomous internet users that interact with systems or other users while imitating traditional human behavior. Bots use a variety of tools to stay active without direct human operation, including artificial intelligence and big data analytics. For example, an individual may program a Twitter bot to retweet a tweet every time it contains a certain keyword or hashtag. An influence operation may use bots to increase its exposure and artificially promote its content across the internet without dedicating additional time or human resources. -Amplifier bots promote operation content through reposts, shares, and likes to increase the content’s online popularity. Hacker bots are traditionally covert bots running on computer scripts that rarely engage with users and work primarily as agents of larger cyberattacks, such as a Distributed Denial of Service attacks. Spammer bots are programmed to post content on social media or in comment sections, usually as a supplementary tool. Impersonator bots102 pose as real people by mimicking human behavior, complicating their detection. +* **Summary**: Bots refer to autonomous internet users that interact with systems or other users while imitating traditional human behaviour. Bots use a variety of tools to stay active without direct human operation, including artificial intelligence and big data analytics. For example, an individual may program a Twitter bot to retweet a tweet every time it contains a certain keyword or hashtag. An influence operation may use bots to increase its exposure and artificially promote its content across the internet without dedicating additional time or human resources. Amplifier bots promote operation content through reposts, shares, and likes to increase the content’s online popularity. Hacker bots are traditionally covert bots running on computer scripts that rarely engage with users and work primarily as agents of larger cyberattacks, such as Distributed Denial of Service attacks. Spammer bots are programmed to post content on social media or in comment sections, usually as a supplementary tool. Impersonator bots102 pose as real people by mimicking human behaviour, complicating their detection. * **Belongs to tactic stage**: TA15 diff --git a/generated_pages/techniques/T0090.004.md b/generated_pages/techniques/T0090.004.md index 86ce4f2..3a2b097 100644 --- a/generated_pages/techniques/T0090.004.md +++ b/generated_pages/techniques/T0090.004.md @@ -1,6 +1,6 @@ # Technique T0090.004: Create Sockpuppet Accounts -* **Summary**: Sockpuppet accounts refer to falsified accounts that either promote the influence operation’s own material or attack critics of the material online. Individuals who control sockpuppet accounts also man at least one other user account.67 Sockpuppet accounts help legitimize operation narratives by providing an appearance of external support for the material and discrediting opponents of the operation. +* **Summary**: Sockpuppet accounts refer to falsified accounts that either promote the influence operation’s own material or attack critics of the material online. 
Individuals who control sockpuppet accounts also man at least one other user account.67 Sockpuppet accounts help legitimise operation narratives by providing an appearance of external support for the material and discrediting opponents of the operation. * **Belongs to tactic stage**: TA15 diff --git a/generated_pages/techniques/T0091.003.md b/generated_pages/techniques/T0091.003.md index 19fdffd..3933ed8 100644 --- a/generated_pages/techniques/T0091.003.md +++ b/generated_pages/techniques/T0091.003.md @@ -1,7 +1,6 @@ # Technique T0091.003: Enlist Troll Accounts -* **Summary**: An influence operation may hire trolls, or human operators of fake accounts that aim to provoke others by posting and amplifying content about controversial issues. Trolls can serve to discredit an influence operation’s opposition or bring attention to the operation’s cause through debate. -Classic trolls refer to regular people who troll for personal reasons, such as attention-seeking or boredom. Classic trolls may advance operation narratives by coincidence but are not directly affiliated with any larger operation. Conversely, hybrid trolls act on behalf of another institution, such as a state or financial organization, and post content with a specific ideological goal. Hybrid trolls may be highly advanced and institutionalized or less organized and work for a single individual. +* **Summary**: An influence operation may hire trolls, or human operators of fake accounts that aim to provoke others by posting and amplifying content about controversial issues. Trolls can serve to discredit an influence operation’s opposition or bring attention to the operation’s cause through debate. Classic trolls refer to regular people who troll for personal reasons, such as attention-seeking or boredom. Classic trolls may advance operation narratives by coincidence but are not directly affiliated with any larger operation. Conversely, hybrid trolls act on behalf of another institution, such as a state or financial organisation, and post content with a specific ideological goal. Hybrid trolls may be highly advanced and institutionalised or less organised and work for a single individual. * **Belongs to tactic stage**: TA15 diff --git a/generated_pages/techniques/T0092.001.md b/generated_pages/techniques/T0092.001.md index e1414ce..d96441d 100644 --- a/generated_pages/techniques/T0092.001.md +++ b/generated_pages/techniques/T0092.001.md @@ -1,6 +1,6 @@ -# Technique T0092.001: Create Organizations +# Technique T0092.001: Create Organisations -* **Summary**: Influence operations may establish organizations with legitimate or falsified hierarchies, staff, and content to structure operation assets, provide a sense of legitimacy to the operation, or provide institutional backing to operation activities. +* **Summary**: Influence operations may establish organisations with legitimate or falsified hierarchies, staff, and content to structure operation assets, provide a sense of legitimacy to the operation, or provide institutional backing to operation activities. 
* **Belongs to tactic stage**: TA15 diff --git a/generated_pages/techniques/T0092.002.md b/generated_pages/techniques/T0092.002.md index 7c68835..171aad5 100644 --- a/generated_pages/techniques/T0092.002.md +++ b/generated_pages/techniques/T0092.002.md @@ -1,6 +1,6 @@ # Technique T0092.002: Use Follow Trains -* **Summary**: A follow train is a group of people who follow each other on a social media platform, often as a way for an individual or campaign to grow its social media following. Follow trains may be a violation of platform Terms of Service. They are also known as follow-for-follow groups. +* **Summary**: A follow train is a group of people who follow each other on a social media platform, often as a way for an individual or campaign to grow its social media following. Follow trains may be a violation of platform Terms of Service. They are also known as follow-for-follow groups. * **Belongs to tactic stage**: TA15 diff --git a/generated_pages/techniques/T0092.003.md b/generated_pages/techniques/T0092.003.md index 2f1db8c..9ef4f2e 100644 --- a/generated_pages/techniques/T0092.003.md +++ b/generated_pages/techniques/T0092.003.md @@ -1,6 +1,6 @@ # Technique T0092.003: Create Community or Sub-Group -* **Summary**: When there is not an existing community or sub-group that meets a campaign's goals, an influence operation may seek to create a community or sub-group. +* **Summary**: When there is not an existing community or sub-group that meets a campaign's goals, an influence operation may seek to create a community or sub-group. * **Belongs to tactic stage**: TA15 diff --git a/generated_pages/techniques/T0092.md b/generated_pages/techniques/T0092.md index f21ade5..8023991 100644 --- a/generated_pages/techniques/T0092.md +++ b/generated_pages/techniques/T0092.md @@ -1,6 +1,6 @@ # Technique T0092: Build Network -* **Summary**: Operators build their own network, creating links between accounts -- whether authentic or inauthentic -- in order amplify and promote narratives and artifacts, and encourage further growth of ther network, as well as the ongoing sharing and engagement with operational content. +* **Summary**: Operators build their own network, creating links between accounts -- whether authentic or inauthentic -- in order to amplify and promote narratives and artefacts, and encourage further growth of their network, as well as the ongoing sharing and engagement with operational content. * **Belongs to tactic stage**: TA15 diff --git a/generated_pages/techniques/T0093.001.md b/generated_pages/techniques/T0093.001.md index 1888591..6d4edbe 100644 --- a/generated_pages/techniques/T0093.001.md +++ b/generated_pages/techniques/T0093.001.md @@ -1,8 +1,6 @@ # Technique T0093.001: Fund Proxies -* **Summary**: An influence operation may fund proxies, or external entities that work for the operation. An operation may recruit/train users with existing sympathies towards the operation’s narratives and/or goals as proxies. Funding proxies serves various purposes including: -- Diversifying operation locations to complicate attribution -- Reducing the workload for direct operation assets +* **Summary**: An influence operation may fund proxies, or external entities that work for the operation. An operation may recruit/train users with existing sympathies towards the operation’s narratives and/or goals as proxies. 
Funding proxies serves various purposes including: - Diversifying operation locations to complicate attribution - Reducing the workload for direct operation assets * **Belongs to tactic stage**: TA15 diff --git a/generated_pages/techniques/T0093.002.md b/generated_pages/techniques/T0093.002.md index 21dc025..f8062a7 100644 --- a/generated_pages/techniques/T0093.002.md +++ b/generated_pages/techniques/T0093.002.md @@ -1,6 +1,6 @@ # Technique T0093.002: Acquire Botnets -* **Summary**: A botnet is a group of bots that can function in coordination with each other. +* **Summary**: A botnet is a group of bots that can function in coordination with each other. * **Belongs to tactic stage**: TA15 diff --git a/generated_pages/techniques/T0093.md b/generated_pages/techniques/T0093.md index 44ef131..68cd97a 100644 --- a/generated_pages/techniques/T0093.md +++ b/generated_pages/techniques/T0093.md @@ -1,6 +1,6 @@ # Technique T0093: Acquire/Recruit Network -* **Summary**: Operators acquire an existing network by paying, recruiting, or exerting control over the leaders of the existing network. +* **Summary**: Operators acquire an existing network by paying, recruiting, or exerting control over the leaders of the existing network. * **Belongs to tactic stage**: TA15 diff --git a/generated_pages/techniques/T0094.002.md b/generated_pages/techniques/T0094.002.md index ab4bd45..7890671 100644 --- a/generated_pages/techniques/T0094.002.md +++ b/generated_pages/techniques/T0094.002.md @@ -1,6 +1,6 @@ -# Technique T0094.002: Utilize Butterfly Attacks +# Technique T0094.002: Utilise Butterfly Attacks -* **Summary**: Butterfly attacks occur when operators pretend to be members of a certain social group, usually a group that struggles for representation. An influence operation may mimic a group to insert controversial statements into the discourse, encourage the spread of operation content, or promote harassment among group members. Unlike astroturfing, butterfly attacks aim to infiltrate and discredit existing grassroots movements, organizations, and media campaigns. +* **Summary**: Butterfly attacks occur when operators pretend to be members of a certain social group, usually a group that struggles for representation. An influence operation may mimic a group to insert controversial statements into the discourse, encourage the spread of operation content, or promote harassment among group members. Unlike astroturfing, butterfly attacks aim to infiltrate and discredit existing grassroots movements, organisations, and media campaigns. * **Belongs to tactic stage**: TA15 diff --git a/generated_pages/techniques/T0095.md b/generated_pages/techniques/T0095.md index e5a3fd1..e49bd28 100644 --- a/generated_pages/techniques/T0095.md +++ b/generated_pages/techniques/T0095.md @@ -1,6 +1,6 @@ # Technique T0095: Develop Owned Media Assets -* **Summary**: An owned media asset refers to an agency or organization through which an influence operation may create, develop, and host content and narratives. Owned media assets include websites, blogs, social media pages, forums, and other platforms that facilitate the creation and organization of content. +* **Summary**: An owned media asset refers to an agency or organisation through which an influence operation may create, develop, and host content and narratives. Owned media assets include websites, blogs, social media pages, forums, and other platforms that facilitate the creation and organisation of content. 
* **Belongs to tactic stage**: TA15 diff --git a/generated_pages/techniques/T0096.001.md b/generated_pages/techniques/T0096.001.md index 1156cfa..64e75bd 100644 --- a/generated_pages/techniques/T0096.001.md +++ b/generated_pages/techniques/T0096.001.md @@ -1,6 +1,6 @@ # Technique T0096.001: Create Content Farms -* **Summary**: An influence operation may create an organization for creating and amplifying campaign artifacts at scale. +* **Summary**: An influence operation may create an organisation for creating and amplifying campaign artefacts at scale. * **Belongs to tactic stage**: TA15 diff --git a/generated_pages/techniques/T0096.002.md b/generated_pages/techniques/T0096.002.md index c6cafc8..49e2a7d 100644 --- a/generated_pages/techniques/T0096.002.md +++ b/generated_pages/techniques/T0096.002.md @@ -1,6 +1,6 @@ -# Technique T0096.002: Outsource Content Creation to External Organizations +# Technique T0096.002: Outsource Content Creation to External Organisations -* **Summary**: An influence operation may outsource content creation to external companies to avoid attribution, increase the rate of content creation, or improve content quality, i.e., by employing an organization that can create content in the target audience’s native language. Employed organizations may include marketing companies for tailored advertisements or external content farms for high volumes of targeted media. +* **Summary**: An influence operation may outsource content creation to external companies to avoid attribution, increase the rate of content creation, or improve content quality, i.e., by employing an organisation that can create content in the target audience’s native language. Employed organisations may include marketing companies for tailored advertisements or external content farms for high volumes of targeted media. * **Belongs to tactic stage**: TA15 diff --git a/generated_pages/techniques/T0096.md b/generated_pages/techniques/T0096.md index d644027..1ea5ef5 100644 --- a/generated_pages/techniques/T0096.md +++ b/generated_pages/techniques/T0096.md @@ -1,6 +1,6 @@ # Technique T0096: Leverage Content Farms -* **Summary**: Using the services of large-scale content providers for creating and amplifying campaign artifacts at scale. +* **Summary**: Using the services of large-scale content providers for creating and amplifying campaign artefacts at scale. * **Belongs to tactic stage**: TA15 diff --git a/generated_pages/techniques/T0097.md b/generated_pages/techniques/T0097.md index beb22bd..c13c0fb 100644 --- a/generated_pages/techniques/T0097.md +++ b/generated_pages/techniques/T0097.md @@ -1,6 +1,6 @@ # Technique T0097: Create Personas -* **Summary**: Creating fake people, often with accounts across multiple platforms. These personas can be as simple as a name, can contain slightly more background like location, profile pictures, backstory, or can be effectively backstopped with indicators like fake identity documents. +* **Summary**: Creating fake people, often with accounts across multiple platforms. These personas can be as simple as a name, can contain slightly more background like location, profile pictures, backstory, or can be effectively backstopped with indicators like fake identity documents. 
* **Belongs to tactic stage**: TA16 diff --git a/generated_pages/techniques/T0099.001.md b/generated_pages/techniques/T0099.001.md index 36ed72b..8d1e688 100644 --- a/generated_pages/techniques/T0099.001.md +++ b/generated_pages/techniques/T0099.001.md @@ -1,6 +1,6 @@ # Technique T0099.001: Astroturfing -* **Summary**: Astroturfing occurs when an influence operation disguises itself as grassroots movement or organization that supports operation narratives. Unlike butterfly attacks, astroturfing aims to increase the appearance of popular support for the operation cause and does not infiltrate existing groups to discredit their objectives. +* **Summary**: Astroturfing occurs when an influence operation disguises itself as a grassroots movement or organisation that supports operation narratives. Unlike butterfly attacks, astroturfing aims to increase the appearance of popular support for the operation cause and does not infiltrate existing groups to discredit their objectives. * **Belongs to tactic stage**: TA16 diff --git a/generated_pages/techniques/T0099.002.md b/generated_pages/techniques/T0099.002.md index a529eaf..2de758c 100644 --- a/generated_pages/techniques/T0099.002.md +++ b/generated_pages/techniques/T0099.002.md @@ -1,6 +1,6 @@ # Technique T0099.002: Spoof/Parody Account/Site -* **Summary**: An influence operation may prepare assets impersonating legitimate entities to further conceal its network identity and add a layer of legitimacy to its operation content. Users will more likely believe and less likely fact-check news from recognizable sources rather than unknown sites. Legitimate entities may include authentic news outlets, public figures, organizations, or state entities. +* **Summary**: An influence operation may prepare assets impersonating legitimate entities to further conceal its network identity and add a layer of legitimacy to its operation content. Users will more likely believe and less likely fact-check news from recognisable sources rather than unknown sites. Legitimate entities may include authentic news outlets, public figures, organisations, or state entities. * **Belongs to tactic stage**: TA16 diff --git a/generated_pages/techniques/T0099.md b/generated_pages/techniques/T0099.md index e57a621..ae8d9e0 100644 --- a/generated_pages/techniques/T0099.md +++ b/generated_pages/techniques/T0099.md @@ -1,7 +1,6 @@ # Technique T0099: Prepare Assets Impersonating Legitimate Entities -* **Summary**: An influence operation may prepare assets impersonating legitimate entities to further conceal its network identity and add a layer of legitimacy to its operation content. Users will more likely believe and less likely fact-check news from recognizable sources rather than unknown sites. Legitimate entities may include authentic news outlets, public figures, organizations, or state entities. -An influence operation may use a wide variety of cyber techniques to impersonate a legitimate entity’s website or social media account. Typosquatting87 is the international registration of a domain name with purposeful variations of the impersonated domain name through intentional typos, top-level domain (TLD) manipulation, or punycode. Typosquatting facilitates the creation of falsified websites by creating similar domain names in the URL box, leaving it to the user to confirm that the URL is correct. +* **Summary**: An influence operation may prepare assets impersonating legitimate entities to further conceal its network identity and add a layer of legitimacy to its operation content. 
Users will more likely believe and less likely fact-check news from recognisable sources rather than unknown sites. Legitimate entities may include authentic news outlets, public figures, organisations, or state entities. An influence operation may use a wide variety of cyber techniques to impersonate a legitimate entity’s website or social media account. Typosquatting87 is the intentional registration of a domain name with purposeful variations of the impersonated domain name through intentional typos, top-level domain (TLD) manipulation, or punycode. Typosquatting facilitates the creation of falsified websites by creating similar domain names in the URL box, leaving it to the user to confirm that the URL is correct. * **Belongs to tactic stage**: TA16 diff --git a/generated_pages/techniques/T0100.md b/generated_pages/techniques/T0100.md index b88849d..8f29d42 100644 --- a/generated_pages/techniques/T0100.md +++ b/generated_pages/techniques/T0100.md @@ -1,9 +1,6 @@ # Technique T0100: Co-Opt Trusted Sources -* **Summary**: An influence operation may co-opt trusted sources by infiltrating or repurposing a source to reach a target audience through existing, previously reliable networks. Co-opted trusted sources may include: -- National or local new outlets -- Research or academic publications -- Online blogs or websites +* **Summary**: An influence operation may co-opt trusted sources by infiltrating or repurposing a source to reach a target audience through existing, previously reliable networks. Co-opted trusted sources may include: - National or local news outlets - Research or academic publications - Online blogs or websites * **Belongs to tactic stage**: TA16 diff --git a/generated_pages/techniques/T0101.md b/generated_pages/techniques/T0101.md index dd8506d..676d595 100644 --- a/generated_pages/techniques/T0101.md +++ b/generated_pages/techniques/T0101.md @@ -1,6 +1,6 @@ -# Technique T0101: Create Localized Content -* **Summary**: Localized content refers to content that appeals to a specific community of individuals, often in defined geographic areas. An operation may create localized content using local language and dialects to resonate with its target audience and blend in with other local news and social media. Localized content may help an operation increase legitimacy, avoid detection, and complicate external attribution. +# Technique T0101: Create Localised Content +* **Summary**: Localised content refers to content that appeals to a specific community of individuals, often in defined geographic areas. An operation may create localised content using local language and dialects to resonate with its target audience and blend in with other local news and social media. Localised content may help an operation increase legitimacy, avoid detection, and complicate external attribution. * **Belongs to tactic stage**: TA05 diff --git a/generated_pages/techniques/T0102.003.md b/generated_pages/techniques/T0102.003.md index 0b059f7..7ab30d8 100644 --- a/generated_pages/techniques/T0102.003.md +++ b/generated_pages/techniques/T0102.003.md @@ -1,7 +1,6 @@ # Technique T0102.003: Exploit Data Voids -* **Summary**: A data void refers to a word or phrase that results in little, manipulative, or low-quality search engine data. Data voids are hard to detect and relatively harmless until exploited by an entity aiming to quickly proliferate false or misleading information during a phenomenon that causes a high number of individuals to query the term or phrase. 
In the Plan phase, an influence operation may identify data voids for later exploitation in the operation. -A 2019 report by Michael Golebiewski identifies five types of data voids. (1) “Breaking news” data voids occur when a keyword gains popularity during a short period of time, allowing an influence operation to publish false content before legitimate news outlets have an opportunity to publish relevant information. (2) An influence operation may create a “strategic new terms” data void by creating their own terms and publishing information online before promoting their keyword to the target audience. (3) An influence operation may publish content on “outdated terms” that have decreased in popularity, capitalizing on most search engines’ preferences for recency. (4) “Fragmented concepts” data voids separate connections between similar ideas, isolating segment queries to distinct search engine results. (5) An influence operation may use “problematic queries” that previously resulted in disturbing or inappropriate content to promote messaging until mainstream media recontextualizes the term. +* **Summary**: A data void refers to a word or phrase that results in little, manipulative, or low-quality search engine data. Data voids are hard to detect and relatively harmless until exploited by an entity aiming to quickly proliferate false or misleading information during a phenomenon that causes a high number of individuals to query the term or phrase. In the Plan phase, an influence operation may identify data voids for later exploitation in the operation. A 2019 report by Michael Golebiewski identifies five types of data voids. (1) “Breaking news” data voids occur when a keyword gains popularity during a short period of time, allowing an influence operation to publish false content before legitimate news outlets have an opportunity to publish relevant information. (2) An influence operation may create a “strategic new terms” data void by creating their own terms and publishing information online before promoting their keyword to the target audience. (3) An influence operation may publish content on “outdated terms” that have decreased in popularity, capitalising on most search engines’ preferences for recency. (4) “Fragmented concepts” data voids separate connections between similar ideas, isolating segment queries to distinct search engine results. (5) An influence operation may use “problematic queries” that previously resulted in disturbing or inappropriate content to promote messaging until mainstream media recontextualizes the term. * **Belongs to tactic stage**: TA05 diff --git a/generated_pages/techniques/T0102.md b/generated_pages/techniques/T0102.md index 9d94b89..e4cbf74 100644 --- a/generated_pages/techniques/T0102.md +++ b/generated_pages/techniques/T0102.md @@ -1,6 +1,6 @@ # Technique T0102: Leverage Echo Chambers/Filter Bubbles -* **Summary**: An echo chamber refers to an internet subgroup, often along ideological lines, where individuals only engage with “others with which they are already in agreement.” A filter bubble refers to an algorithm's placement of an individual in content that they agree with or regularly engage with, possibly entrapping the user into a bubble of their own making. An operation may create these isolated areas of the internet by match existing groups, or aggregating individuals into a single target audience based on shared interests, politics, values, demographics, and other characteristics. 
Echo chambers and filter bubbles help to reinforce similar biases and content to the same target audience members. +* **Summary**: An echo chamber refers to an internet subgroup, often along ideological lines, where individuals only engage with “others with which they are already in agreement.” A filter bubble refers to an algorithm's placement of an individual in content that they agree with or regularly engage with, possibly entrapping the user into a bubble of their own making. An operation may create these isolated areas of the internet by matching existing groups, or aggregating individuals into a single target audience based on shared interests, politics, values, demographics, and other characteristics. Echo chambers and filter bubbles help to reinforce similar biases and content to the same target audience members. * **Belongs to tactic stage**: TA05 diff --git a/generated_pages/techniques/T0104.003.md b/generated_pages/techniques/T0104.003.md index 2c305b1..b9a68db 100644 --- a/generated_pages/techniques/T0104.003.md +++ b/generated_pages/techniques/T0104.003.md @@ -1,6 +1,6 @@ # Technique T0104.003: Private/Closed Social Networks -* **Summary**: An audio livestream refers to an online audio broadcast capability that allows for real-time communication to closed or open networks. Examples include Twitter Spaces, +* **Summary**: An audio livestream refers to an online audio broadcast capability that allows for real-time communication to closed or open networks. Examples include Twitter Spaces, * **Belongs to tactic stage**: TA07 diff --git a/generated_pages/techniques/T0108.md b/generated_pages/techniques/T0108.md index 6d1fc28..679b708 100644 --- a/generated_pages/techniques/T0108.md +++ b/generated_pages/techniques/T0108.md @@ -1,6 +1,6 @@ # Technique T0108: Blogging and Publishing Networks -* **Summary**: Examples include WordPress, Blogger, Weebly, Tumblr, Medium, etc. +* **Summary**: Examples include WordPress, Blogger, Weebly, Tumblr, Medium, etc. * **Belongs to tactic stage**: TA07 diff --git a/generated_pages/techniques/T0109.md b/generated_pages/techniques/T0109.md index 8984ed7..745e0ac 100644 --- a/generated_pages/techniques/T0109.md +++ b/generated_pages/techniques/T0109.md @@ -1,6 +1,6 @@ # Technique T0109: Consumer Review Networks -* **Summary**: Platforms for finding, reviewing, and sharing information about brands, products, services, restaurants, travel destinations, etc. Examples include Yelp, TripAdvisor, etc. +* **Summary**: Platforms for finding, reviewing, and sharing information about brands, products, services, restaurants, travel destinations, etc. Examples include Yelp, TripAdvisor, etc. * **Belongs to tactic stage**: TA07 diff --git a/generated_pages/techniques/T0110.md b/generated_pages/techniques/T0110.md index c963cdb..5e1030a 100644 --- a/generated_pages/techniques/T0110.md +++ b/generated_pages/techniques/T0110.md @@ -1,6 +1,6 @@ # Technique T0110: Formal Diplomatic Channels -* **Summary**: Leveraging formal, traditional, diplomatic channels to communicate with foreign governments (written documents, meetings, summits, diplomatic visits, etc). This type of diplomacy is conducted by diplomats of one nation with diplomats and other officials of another nation or international organization. +* **Summary**: Leveraging formal, traditional, diplomatic channels to communicate with foreign governments (written documents, meetings, summits, diplomatic visits, etc). 
This type of diplomacy is conducted by diplomats of one nation with diplomats and other officials of another nation or international organisation. * **Belongs to tactic stage**: TA07 diff --git a/generated_pages/techniques/T0113.md b/generated_pages/techniques/T0113.md index ff0418c..2bc3d64 100644 --- a/generated_pages/techniques/T0113.md +++ b/generated_pages/techniques/T0113.md @@ -1,6 +1,6 @@ # Technique T0113: Employ Commercial Analytic Firms -* **Summary**: Commercial analytic firms collect data on target audience activities and evaluate the data to detect trends, such as content receiving high click-rates. An influence operation may employ commercial analytic firms to facilitate external collection on its target audience, complicating attribution efforts and better tailoring the content to audience preferences. +* **Summary**: Commercial analytic firms collect data on target audience activities and evaluate the data to detect trends, such as content receiving high click-rates. An influence operation may employ commercial analytic firms to facilitate external collection on its target audience, complicating attribution efforts and better tailoring the content to audience preferences. * **Belongs to tactic stage**: TA08 diff --git a/generated_pages/techniques/T0115.003.md b/generated_pages/techniques/T0115.003.md index 4c36c05..ecc9819 100644 --- a/generated_pages/techniques/T0115.003.md +++ b/generated_pages/techniques/T0115.003.md @@ -1,6 +1,6 @@ # Technique T0115.003: One-Way Direct Posting -* **Summary**: Direct posting refers to a method of posting content via a one-way messaging service, where the recipient cannot directly respond to the poster’s messaging. An influence operation may post directly to promote operation narratives to the target audience without allowing opportunities for fact-checking or disagreement, creating a false sense of support for the narrative. +* **Summary**: Direct posting refers to a method of posting content via a one-way messaging service, where the recipient cannot directly respond to the poster’s messaging. An influence operation may post directly to promote operation narratives to the target audience without allowing opportunities for fact-checking or disagreement, creating a false sense of support for the narrative. * **Belongs to tactic stage**: TA09 diff --git a/generated_pages/techniques/T0115.md b/generated_pages/techniques/T0115.md index df570c3..83f772c 100644 --- a/generated_pages/techniques/T0115.md +++ b/generated_pages/techniques/T0115.md @@ -1,6 +1,6 @@ # Technique T0115: Post Content -* **Summary**: Delivering content by posting via owned media (assets that the operator controls). +* **Summary**: Delivering content by posting via owned media (assets that the operator controls). * **Belongs to tactic stage**: TA09 diff --git a/generated_pages/techniques/T0116.md b/generated_pages/techniques/T0116.md index c17af30..a2166fb 100644 --- a/generated_pages/techniques/T0116.md +++ b/generated_pages/techniques/T0116.md @@ -1,6 +1,6 @@ # Technique T0116: Comment or Reply on Content -* **Summary**: Delivering content by replying or commenting via owned media (assets that the operator controls). +* **Summary**: Delivering content by replying or commenting via owned media (assets that the operator controls). 
* **Belongs to tactic stage**: TA09 diff --git a/generated_pages/techniques/T0118.md b/generated_pages/techniques/T0118.md index f93295a..4a50d29 100644 --- a/generated_pages/techniques/T0118.md +++ b/generated_pages/techniques/T0118.md @@ -1,6 +1,6 @@ # Technique T0118: Amplify Existing Narrative -* **Summary**: An influence operation may amplify existing narratives that align with its narratives to support operation objectives. +* **Summary**: An influence operation may amplify existing narratives that align with its narratives to support operation objectives. * **Belongs to tactic stage**: TA17 diff --git a/generated_pages/techniques/T0119.001.md b/generated_pages/techniques/T0119.001.md index 0911d8c..5ab34a2 100644 --- a/generated_pages/techniques/T0119.001.md +++ b/generated_pages/techniques/T0119.001.md @@ -1,6 +1,6 @@ # Technique T0119.001: Post across Groups -* **Summary**: An influence operation may post content across groups to spread narratives and content to new communities within the target audiences or to new target audiences. +* **Summary**: An influence operation may post content across groups to spread narratives and content to new communities within the target audiences or to new target audiences. * **Belongs to tactic stage**: TA17 diff --git a/generated_pages/techniques/T0119.002.md b/generated_pages/techniques/T0119.002.md index 5a0129e..d7849e8 100644 --- a/generated_pages/techniques/T0119.002.md +++ b/generated_pages/techniques/T0119.002.md @@ -1,6 +1,6 @@ # Technique T0119.002: Post across Platform -* **Summary**: An influence operation may post content across platforms to spread narratives and content to new communities within the target audiences or to new target audiences. Posting across platforms can also remove opposition and context, helping the narrative spread with less opposition on the cross-posted platform. +* **Summary**: An influence operation may post content across platforms to spread narratives and content to new communities within the target audiences or to new target audiences. Posting across platforms can also remove opposition and context, helping the narrative spread with less opposition on the cross-posted platform. * **Belongs to tactic stage**: TA17 diff --git a/generated_pages/techniques/T0119.md b/generated_pages/techniques/T0119.md index cf5df93..0c3a8ca 100644 --- a/generated_pages/techniques/T0119.md +++ b/generated_pages/techniques/T0119.md @@ -1,6 +1,6 @@ # Technique T0119: Cross-Posting -* **Summary**: Cross-posting refers to posting the same message to multiple internet discussions, social media platforms or accounts, or news groups at one time. An influence operation may post content online in multiple communities and platforms to increase the chances of content exposure to the target audience. +* **Summary**: Cross-posting refers to posting the same message to multiple internet discussions, social media platforms or accounts, or news groups at one time. An influence operation may post content online in multiple communities and platforms to increase the chances of content exposure to the target audience. 
* **Belongs to tactic stage**: TA17 diff --git a/generated_pages/techniques/T0120.001.md b/generated_pages/techniques/T0120.001.md index b78500d..b67424e 100644 --- a/generated_pages/techniques/T0120.001.md +++ b/generated_pages/techniques/T0120.001.md @@ -1,6 +1,6 @@ -# Technique T0120.001: Use Affiliate Marketing Programs +# Technique T0120.001: Use Affiliate Marketing Programmes -* **Summary**: Use Affiliate Marketing Programs +* **Summary**: Use Affiliate Marketing Programmes * **Belongs to tactic stage**: TA17 diff --git a/generated_pages/techniques/T0120.md b/generated_pages/techniques/T0120.md index 075b3af..1b22404 100644 --- a/generated_pages/techniques/T0120.md +++ b/generated_pages/techniques/T0120.md @@ -1,6 +1,6 @@ # Technique T0120: Incentivize Sharing -* **Summary**: Incentivizing content sharing refers to actions that encourage users to share content themselves, reducing the need for the operation itself to post and promote its own content. +* **Summary**: Incentivizing content sharing refers to actions that encourage users to share content themselves, reducing the need for the operation itself to post and promote its own content. * **Belongs to tactic stage**: TA17 diff --git a/generated_pages/techniques/T0121.001.md b/generated_pages/techniques/T0121.001.md index eedb1a5..2c47c1a 100644 --- a/generated_pages/techniques/T0121.001.md +++ b/generated_pages/techniques/T0121.001.md @@ -1,12 +1,6 @@ # Technique T0121.001: Bypass Content Blocking -* **Summary**: Bypassing content blocking refers to actions taken to circumvent network security measures that prevent users from accessing certain servers, resources, or other online spheres. An influence operation may bypass content blocking to proliferate its content on restricted areas of the internet. Common strategies for bypassing content blocking include: -- Altering IP addresses to avoid IP filtering -- Using a Virtual Private Network (VPN) to avoid IP filtering -- Using a Content Delivery Network (CDN) to avoid IP filtering -- Enabling encryption to bypass packet inspection blocking -- Manipulating text to avoid filtering by keywords -- Posting content on multiple platforms to avoid platform-specific removals - Using local facilities or modified DNS servers to avoid DNS filtering +* **Summary**: Bypassing content blocking refers to actions taken to circumvent network security measures that prevent users from accessing certain servers, resources, or other online spheres. An influence operation may bypass content blocking to proliferate its content on restricted areas of the internet. Common strategies for bypassing content blocking include: - Altering IP addresses to avoid IP filtering - Using a Virtual Private Network (VPN) to avoid IP filtering - Using a Content Delivery Network (CDN) to avoid IP filtering - Enabling encryption to bypass packet inspection blocking - Manipulating text to avoid filtering by keywords - Posting content on multiple platforms to avoid platform-specific removals - Using local facilities or modified DNS servers to avoid DNS filtering * **Belongs to tactic stage**: TA17 diff --git a/generated_pages/techniques/T0121.md b/generated_pages/techniques/T0121.md index a0196db..dad314f 100644 --- a/generated_pages/techniques/T0121.md +++ b/generated_pages/techniques/T0121.md @@ -1,6 +1,6 @@ # Technique T0121: Manipulate Platform Algorithm -* **Summary**: Manipulating a platform algorithm refers to conducting activity on a platform in a way that intentionally targets its underlying algorithm. 
After analyzing a platform’s algorithm (see: Select Platforms), an influence operation may use a platform in a way that increases its content exposure, avoids content removal, or otherwise benefits the operation’s strategy. For example, an influence operation may use bots to amplify its posts so that the platform’s algorithm recognizes engagement with operation content and further promotes the content on user timelines. +* **Summary**: Manipulating a platform algorithm refers to conducting activity on a platform in a way that intentionally targets its underlying algorithm. After analysing a platform’s algorithm (see: Select Platforms), an influence operation may use a platform in a way that increases its content exposure, avoids content removal, or otherwise benefits the operation’s strategy. For example, an influence operation may use bots to amplify its posts so that the platform’s algorithm recognises engagement with operation content and further promotes the content on user timelines. * **Belongs to tactic stage**: TA17 diff --git a/generated_pages/techniques/T0122.md b/generated_pages/techniques/T0122.md index d62f51f..936e8d1 100644 --- a/generated_pages/techniques/T0122.md +++ b/generated_pages/techniques/T0122.md @@ -1,6 +1,6 @@ # Technique T0122: Direct Users to Alternative Platforms -* **Summary**: Direct users to alternative platforms refers to encouraging users to move from the platform on which they initially viewed operation content and engage with content on alternate information channels, including separate social media channels and inauthentic websites. An operation may drive users to alternative platforms to diversify its information channels and ensure the target audience knows where to access operation content if the initial platform suspends, flags, or otherwise removes original operation assets and content. +* **Summary**: Direct users to alternative platforms refers to encouraging users to move from the platform on which they initially viewed operation content and engage with content on alternate information channels, including separate social media channels and inauthentic websites. An operation may drive users to alternative platforms to diversify its information channels and ensure the target audience knows where to access operation content if the initial platform suspends, flags, or otherwise removes original operation assets and content. * **Belongs to tactic stage**: TA17 diff --git a/generated_pages/techniques/T0123.001.md b/generated_pages/techniques/T0123.001.md index bedac35..6910d7f 100644 --- a/generated_pages/techniques/T0123.001.md +++ b/generated_pages/techniques/T0123.001.md @@ -1,6 +1,6 @@ # Technique T0123.001: Delete Opposing Content -* **Summary**: Deleting opposing content refers to the removal of content that conflicts with operational narratives from selected platforms. An influence operation may delete opposing content to censor contradictory information from the target audience, allowing operation narratives to take priority in the information space. +* **Summary**: Deleting opposing content refers to the removal of content that conflicts with operational narratives from selected platforms. An influence operation may delete opposing content to censor contradictory information from the target audience, allowing operation narratives to take priority in the information space. 
* **Belongs to tactic stage**: TA18 diff --git a/generated_pages/techniques/T0123.002.md b/generated_pages/techniques/T0123.002.md index 2900bbe..604a93d 100644 --- a/generated_pages/techniques/T0123.002.md +++ b/generated_pages/techniques/T0123.002.md @@ -1,6 +1,6 @@ # Technique T0123.002: Block Content -* **Summary**: Content blocking refers to actions taken to restrict internet access or render certain areas of the internet inaccessible. An influence operation may restrict content based on both network and content attributes. +* **Summary**: Content blocking refers to actions taken to restrict internet access or render certain areas of the internet inaccessible. An influence operation may restrict content based on both network and content attributes. * **Belongs to tactic stage**: TA18 diff --git a/generated_pages/techniques/T0123.003.md b/generated_pages/techniques/T0123.003.md index 1431f29..a8b0104 100644 --- a/generated_pages/techniques/T0123.003.md +++ b/generated_pages/techniques/T0123.003.md @@ -1,6 +1,6 @@ # Technique T0123.003: Destroy Information Generation Capabilities -* **Summary**: Destroying information generation capabilities refers to actions taken to limit, degrade, or otherwise incapacitate an actor’s ability to generate conflicting information. An influence operation may destroy an actor’s information generation capabilities by physically dismantling the information infrastructure, disconnecting resources needed for information generation, or redirecting information generation personnel. An operation may destroy an adversary’s information generation capabilities to limit conflicting content exposure to the target audience and crowd the information space with its own narratives. +* **Summary**: Destroying information generation capabilities refers to actions taken to limit, degrade, or otherwise incapacitate an actor’s ability to generate conflicting information. An influence operation may destroy an actor’s information generation capabilities by physically dismantling the information infrastructure, disconnecting resources needed for information generation, or redirecting information generation personnel. An operation may destroy an adversary’s information generation capabilities to limit conflicting content exposure to the target audience and crowd the information space with its own narratives. * **Belongs to tactic stage**: TA18 diff --git a/generated_pages/techniques/T0123.004.md b/generated_pages/techniques/T0123.004.md index e535930..5c6177d 100644 --- a/generated_pages/techniques/T0123.004.md +++ b/generated_pages/techniques/T0123.004.md @@ -1,6 +1,6 @@ # Technique T0123.004: Conduct Server Redirect -* **Summary**: A server redirect, also known as a URL redirect, occurs when a server automatically forwards a user from one URL to another using server-side scripting languages. An influence operation may conduct a server redirect to divert target audience members from one website to another without their knowledge. The redirected website may pose as a legitimate source, host malware, or otherwise aid operation objectives. +* **Summary**: A server redirect, also known as a URL redirect, occurs when a server automatically forwards a user from one URL to another using server-side scripting languages. An influence operation may conduct a server redirect to divert target audience members from one website to another without their knowledge. The redirected website may pose as a legitimate source, host malware, or otherwise aid operation objectives. 
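The server redirect described in the T0123.004 summary above needs nothing more exotic than a standard HTTP 3xx response. A minimal sketch using only Python's standard library; the bind address, port, and destination URL are placeholders, not anything drawn from the framework:

from http.server import BaseHTTPRequestHandler, HTTPServer

class RedirectHandler(BaseHTTPRequestHandler):
    def do_GET(self):
        # A 302 response tells the browser to fetch the new location
        # automatically; the visitor never has to click anything.
        self.send_response(302)
        self.send_header("Location", "https://example.org/landing-page")
        self.end_headers()

if __name__ == "__main__":
    # Placeholder bind address; a real server would listen on the public
    # interface of the domain the user originally requested.
    HTTPServer(("127.0.0.1", 8080), RedirectHandler).serve_forever()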
* **Belongs to tactic stage**: TA18 diff --git a/generated_pages/techniques/T0123.md b/generated_pages/techniques/T0123.md index 4f9c1bd..9b64334 100644 --- a/generated_pages/techniques/T0123.md +++ b/generated_pages/techniques/T0123.md @@ -1,6 +1,6 @@ # Technique T0123: Control Information Environment through Offensive Cyberspace Operations -* **Summary**: Controlling the information environment through offensive cyberspace operations uses cyber tools and techniques to alter the trajectory of content in the information space to either prioritize operation messaging or block opposition messaging. +* **Summary**: Controlling the information environment through offensive cyberspace operations uses cyber tools and techniques to alter the trajectory of content in the information space to either prioritise operation messaging or block opposition messaging. * **Belongs to tactic stage**: TA18 diff --git a/generated_pages/techniques/T0124.001.md b/generated_pages/techniques/T0124.001.md index 5ad8e72..a50a8ed 100644 --- a/generated_pages/techniques/T0124.001.md +++ b/generated_pages/techniques/T0124.001.md @@ -1,6 +1,6 @@ # Technique T0124.001: Report Non-Violative Opposing Content -* **Summary**: Reporting opposing content refers to notifying and providing an instance of a violation of a platform’s guidelines and policies for conduct on the platform. In addition to simply reporting the content, an operation may leverage copyright regulations to trick social media and web platforms into removing opposing content by manipulating the content to appear in violation of copyright laws. Reporting opposing content facilitates the suppression of contradictory information and allows operation narratives to take priority in the information space. +* **Summary**: Reporting opposing content refers to notifying and providing an instance of a violation of a platform’s guidelines and policies for conduct on the platform. In addition to simply reporting the content, an operation may leverage copyright regulations to trick social media and web platforms into removing opposing content by manipulating the content to appear in violation of copyright laws. Reporting opposing content facilitates the suppression of contradictory information and allows operation narratives to take priority in the information space. * **Belongs to tactic stage**: TA18 diff --git a/generated_pages/techniques/T0124.002.md b/generated_pages/techniques/T0124.002.md index f815929..f4119e8 100644 --- a/generated_pages/techniques/T0124.002.md +++ b/generated_pages/techniques/T0124.002.md @@ -1,6 +1,6 @@ # Technique T0124.002: Goad People into Harmful Action (Stop Hitting Yourself) -* **Summary**: Goad people into actions that violate terms of service or will lead to having their content or accounts taken down. +* **Summary**: Goad people into actions that violate terms of service or will lead to having their content or accounts taken down. * **Belongs to tactic stage**: TA18 diff --git a/generated_pages/techniques/T0124.md b/generated_pages/techniques/T0124.md index 559708e..9af1137 100644 --- a/generated_pages/techniques/T0124.md +++ b/generated_pages/techniques/T0124.md @@ -1,6 +1,6 @@ # Technique T0124: Suppress Opposition -* **Summary**: Operators can suppress the opposition by exploiting platform content moderation tools and processes like reporting non-violative content to platforms for takedown and goading opposition actors into taking actions that result in platform action or target audience disapproval. 
+* **Summary**: Operators can suppress the opposition by exploiting platform content moderation tools and processes like reporting non-violative content to platforms for takedown and goading opposition actors into taking actions that result in platform action or target audience disapproval. * **Belongs to tactic stage**: TA18 diff --git a/generated_pages/techniques/T0127.001.md b/generated_pages/techniques/T0127.001.md index a4f2177..b8a6edf 100644 --- a/generated_pages/techniques/T0127.001.md +++ b/generated_pages/techniques/T0127.001.md @@ -1,6 +1,6 @@ # Technique T0127.001: Conduct Physical Violence -* **Summary**: An influence operation may directly Conduct Physical Violence to achieve campaign goals. +* **Summary**: An influence operation may directly Conduct Physical Violence to achieve campaign goals. * **Belongs to tactic stage**: TA10 diff --git a/generated_pages/techniques/T0127.002.md b/generated_pages/techniques/T0127.002.md index a143db6..c6dc8fa 100644 --- a/generated_pages/techniques/T0127.002.md +++ b/generated_pages/techniques/T0127.002.md @@ -1,6 +1,6 @@ # Technique T0127.002: Encourage Physical Violence -* **Summary**: An influence operation may Encourage others to engage in Physical Violence to achieve campaign goals. +* **Summary**: An influence operation may Encourage others to engage in Physical Violence to achieve campaign goals. * **Belongs to tactic stage**: TA10 diff --git a/generated_pages/techniques/T0127.md b/generated_pages/techniques/T0127.md index 035d055..79008f2 100644 --- a/generated_pages/techniques/T0127.md +++ b/generated_pages/techniques/T0127.md @@ -1,6 +1,6 @@ # Technique T0127: Physical Violence -* **Summary**: Physical violence refers to the use of force to injure, abuse, damage, or destroy. An influence operation may conduct or encourage physical violence to discourage opponents from promoting conflicting content or draw attention to operation narratives using shock value. +* **Summary**: Physical violence refers to the use of force to injure, abuse, damage, or destroy. An influence operation may conduct or encourage physical violence to discourage opponents from promoting conflicting content or draw attention to operation narratives using shock value. * **Belongs to tactic stage**: TA10 diff --git a/generated_pages/techniques/T0128.001.md b/generated_pages/techniques/T0128.001.md index a728815..0fd2d34 100644 --- a/generated_pages/techniques/T0128.001.md +++ b/generated_pages/techniques/T0128.001.md @@ -1,6 +1,6 @@ # Technique T0128.001: Use Pseudonyms -* **Summary**: An operation may use pseudonyms, or fake names, to mask the identity of operation accounts, publish anonymous content, or otherwise use falsified personas to conceal identity of the operation. An operation may coordinate pseudonyms across multiple platforms, for example, by writing an article under a pseudonym and then posting a link to the article on social media on an account with the same falsified name. +* **Summary**: An operation may use pseudonyms, or fake names, to mask the identity of operation accounts, publish anonymous content, or otherwise use falsified personas to conceal identity of the operation. An operation may coordinate pseudonyms across multiple platforms, for example, by writing an article under a pseudonym and then posting a link to the article on social media on an account with the same falsified name. 
* **Belongs to tactic stage**: TA11 diff --git a/generated_pages/techniques/T0128.002.md b/generated_pages/techniques/T0128.002.md index 10a456c..11602f0 100644 --- a/generated_pages/techniques/T0128.002.md +++ b/generated_pages/techniques/T0128.002.md @@ -1,6 +1,6 @@ # Technique T0128.002: Conceal Network Identity -* **Summary**: Concealing network identity aims to hide the existence an influence operation’s network completely. Unlike concealing sponsorship, concealing network identity denies the existence of any sort of organization. +* **Summary**: Concealing network identity aims to hide the existence an influence operation’s network completely. Unlike concealing sponsorship, concealing network identity denies the existence of any sort of organisation. * **Belongs to tactic stage**: TA11 diff --git a/generated_pages/techniques/T0128.003.md b/generated_pages/techniques/T0128.003.md index 5d387c7..a075c64 100644 --- a/generated_pages/techniques/T0128.003.md +++ b/generated_pages/techniques/T0128.003.md @@ -1,6 +1,6 @@ # Technique T0128.003: Distance Reputable Individuals from Operation -* **Summary**: Distancing reputable individuals from the operation occurs when enlisted individuals, such as celebrities or subject matter experts, actively disengage themselves from operation activities and messaging. Individuals may distance themselves from the operation by deleting old posts or statements, unfollowing operation information assets, or otherwise detaching themselves from the operation’s timeline. An influence operation may want reputable individuals to distance themselves from the operation to reduce operation exposure, particularly if the operation aims to remove all evidence. +* **Summary**: Distancing reputable individuals from the operation occurs when enlisted individuals, such as celebrities or subject matter experts, actively disengage themselves from operation activities and messaging. Individuals may distance themselves from the operation by deleting old posts or statements, unfollowing operation information assets, or otherwise detaching themselves from the operation’s timeline. An influence operation may want reputable individuals to distance themselves from the operation to reduce operation exposure, particularly if the operation aims to remove all evidence. * **Belongs to tactic stage**: TA11 diff --git a/generated_pages/techniques/T0128.004.md b/generated_pages/techniques/T0128.004.md index 47e97db..68cac93 100644 --- a/generated_pages/techniques/T0128.004.md +++ b/generated_pages/techniques/T0128.004.md @@ -1,6 +1,6 @@ # Technique T0128.004: Launder Accounts -* **Summary**: Account laundering occurs when an influence operation acquires control of previously legitimate online accounts from third parties through sale or exchange and often in contravention of terms of use. Influence operations use laundered accounts to reach target audience members from an existing information channel and complicate attribution. +* **Summary**: Account laundering occurs when an influence operation acquires control of previously legitimate online accounts from third parties through sale or exchange and often in contravention of terms of use. Influence operations use laundered accounts to reach target audience members from an existing information channel and complicate attribution. 
* **Belongs to tactic stage**: TA11 diff --git a/generated_pages/techniques/T0128.005.md b/generated_pages/techniques/T0128.005.md index 7e30b10..442b712 100644 --- a/generated_pages/techniques/T0128.005.md +++ b/generated_pages/techniques/T0128.005.md @@ -1,6 +1,6 @@ # Technique T0128.005: Change Names of Accounts -* **Summary**: Changing names of accounts occurs when an operation changes the name of an existing social media account. An operation may change the names of its accounts throughout an operation to avoid detection or alter the names of newly acquired or repurposed accounts to fit operational narratives. +* **Summary**: Changing names of accounts occurs when an operation changes the name of an existing social media account. An operation may change the names of its accounts throughout an operation to avoid detection or alter the names of newly acquired or repurposed accounts to fit operational narratives. * **Belongs to tactic stage**: TA11 diff --git a/generated_pages/techniques/T0129.001.md b/generated_pages/techniques/T0129.001.md index 83c268f..1075049 100644 --- a/generated_pages/techniques/T0129.001.md +++ b/generated_pages/techniques/T0129.001.md @@ -1,6 +1,6 @@ # Technique T0129.001: Conceal Network Identity -* **Summary**: Concealing network identity aims to hide the existence an influence operation’s network completely. Unlike concealing sponsorship, concealing network identity denies the existence of any sort of organization. +* **Summary**: Concealing network identity aims to hide the existence an influence operation’s network completely. Unlike concealing sponsorship, concealing network identity denies the existence of any sort of organisation. * **Belongs to tactic stage**: TA11 diff --git a/generated_pages/techniques/T0129.002.md b/generated_pages/techniques/T0129.002.md index ad01b55..937b7c0 100644 --- a/generated_pages/techniques/T0129.002.md +++ b/generated_pages/techniques/T0129.002.md @@ -1,6 +1,6 @@ # Technique T0129.002: Generate Content Unrelated to Narrative -* **Summary**: An influence operation may mix its own operation content with legitimate news or external unrelated content to disguise operational objectives, narratives, or existence. For example, an operation may generate "lifestyle" or "cuisine" content alongside regular operation content. +* **Summary**: An influence operation may mix its own operation content with legitimate news or external unrelated content to disguise operational objectives, narratives, or existence. For example, an operation may generate "lifestyle" or "cuisine" content alongside regular operation content. * **Belongs to tactic stage**: TA11 diff --git a/generated_pages/techniques/T0129.003.md b/generated_pages/techniques/T0129.003.md index 285fea5..1eee125 100644 --- a/generated_pages/techniques/T0129.003.md +++ b/generated_pages/techniques/T0129.003.md @@ -1,6 +1,6 @@ # Technique T0129.003: Break Association with Content -* **Summary**: Breaking association with content occurs when an influence operation actively separates itself from its own content. An influence operation may break association with content by unfollowing, unliking, or unsharing its content, removing attribution from its content, or otherwise taking actions that distance the operation from its messaging. An influence operation may break association with its content to complicate attribution or regain credibility for a new operation. 
+* **Summary**: Breaking association with content occurs when an influence operation actively separates itself from its own content. An influence operation may break association with content by unfollowing, unliking, or unsharing its content, removing attribution from its content, or otherwise taking actions that distance the operation from its messaging. An influence operation may break association with its content to complicate attribution or regain credibility for a new operation. * **Belongs to tactic stage**: TA11 diff --git a/generated_pages/techniques/T0129.004.md b/generated_pages/techniques/T0129.004.md index 674518d..31f49c7 100644 --- a/generated_pages/techniques/T0129.004.md +++ b/generated_pages/techniques/T0129.004.md @@ -1,6 +1,6 @@ # Technique T0129.004: Delete URLs -* **Summary**: URL deletion occurs when an influence operation completely removes its website registration, rendering the URL inaccessible. An influence operation may delete its URLs to complicate attribution or remove online documentation that the operation ever occurred. +* **Summary**: URL deletion occurs when an influence operation completely removes its website registration, rendering the URL inaccessible. An influence operation may delete its URLs to complicate attribution or remove online documentation that the operation ever occurred. * **Belongs to tactic stage**: TA11 diff --git a/generated_pages/techniques/T0129.007.md b/generated_pages/techniques/T0129.007.md index 82e6c6d..03b9708 100644 --- a/generated_pages/techniques/T0129.007.md +++ b/generated_pages/techniques/T0129.007.md @@ -1,6 +1,6 @@ # Technique T0129.007: Delete Accounts/Account Activity -* **Summary**: Deleting accounts and account activity occurs when an influence operation removes its online social media assets, including social media accounts, posts, likes, comments, and other online artifacts. An influence operation may delete its accounts and account activity to complicate attribution or remove online documentation that the operation ever occurred. +* **Summary**: Deleting accounts and account activity occurs when an influence operation removes its online social media assets, including social media accounts, posts, likes, comments, and other online artefacts. An influence operation may delete its accounts and account activity to complicate attribution or remove online documentation that the operation ever occurred. * **Belongs to tactic stage**: TA11 diff --git a/generated_pages/techniques/T0129.008.md b/generated_pages/techniques/T0129.008.md index b76c268..db6f75c 100644 --- a/generated_pages/techniques/T0129.008.md +++ b/generated_pages/techniques/T0129.008.md @@ -1,6 +1,6 @@ # Technique T0129.008: Redirect URLs -* **Summary**: An influence operation may redirect its falsified or typosquatted URLs to legitimate websites to increase the operation's appearance of legitimacy, complicate attribution, and avoid detection. +* **Summary**: An influence operation may redirect its falsified or typosquatted URLs to legitimate websites to increase the operation's appearance of legitimacy, complicate attribution, and avoid detection. 
* **Belongs to tactic stage**: TA11 diff --git a/generated_pages/techniques/T0129.009.md b/generated_pages/techniques/T0129.009.md index 6e663f2..8ba019f 100644 --- a/generated_pages/techniques/T0129.009.md +++ b/generated_pages/techniques/T0129.009.md @@ -1,6 +1,6 @@ # Technique T0129.009: Remove Post Origins -* **Summary**: Removing post origins refers to the elimination of evidence that indicates the initial source of operation content, often to complicate attribution. An influence operation may remove post origins by deleting watermarks, renaming files, or removing embedded links in its content. +* **Summary**: Removing post origins refers to the elimination of evidence that indicates the initial source of operation content, often to complicate attribution. An influence operation may remove post origins by deleting watermarks, renaming files, or removing embedded links in its content. * **Belongs to tactic stage**: TA11 diff --git a/generated_pages/techniques/T0129.010.md b/generated_pages/techniques/T0129.010.md index b87b46e..0f2c7a5 100644 --- a/generated_pages/techniques/T0129.010.md +++ b/generated_pages/techniques/T0129.010.md @@ -1,6 +1,6 @@ # Technique T0129.010: Misattribute Activity -* **Summary**: Misattributed activity refers to incorrectly attributed operation activity. For example, a state sponsored influence operation may conduct operation activity in a way that mimics another state so that external entities misattribute activity to the incorrect state. An operation may misattribute their activities to complicate attribution, avoid detection, or frame an adversary for negative behavior. +* **Summary**: Misattributed activity refers to incorrectly attributed operation activity. For example, a state sponsored influence operation may conduct operation activity in a way that mimics another state so that external entities misattribute activity to the incorrect state. An operation may misattribute their activities to complicate attribution, avoid detection, or frame an adversary for negative behaviour. * **Belongs to tactic stage**: TA11 diff --git a/generated_pages/techniques/T0130.001.md b/generated_pages/techniques/T0130.001.md index 1143d01..196b4d9 100644 --- a/generated_pages/techniques/T0130.001.md +++ b/generated_pages/techniques/T0130.001.md @@ -1,7 +1,6 @@ # Technique T0130.001: Conceal Sponsorship -* **Summary**: Concealing sponsorship aims to mislead or obscure the identity of the hidden sponsor behind an operation rather than entity publicly running the operation. Operations that conceal sponsorship may maintain visible falsified groups, news outlets, non-profits, or other organizations, but seek to mislead or obscure the identity sponsoring, funding, or otherwise supporting these entities. -Influence operations may use a variety of techniques to mask the location of their social media accounts to complicate attribution and conceal evidence of foreign interference. Operation accounts may set their location to a false place, often the location of the operation’s target audience, and post in the region’s language +* **Summary**: Concealing sponsorship aims to mislead or obscure the identity of the hidden sponsor behind an operation rather than entity publicly running the operation. Operations that conceal sponsorship may maintain visible falsified groups, news outlets, non-profits, or other organisations, but seek to mislead or obscure the identity sponsoring, funding, or otherwise supporting these entities. 
Influence operations may use a variety of techniques to mask the location of their social media accounts to complicate attribution and conceal evidence of foreign interference. Operation accounts may set their location to a false place, often the location of the operation’s target audience, and post in the region’s language.
* **Belongs to tactic stage**: TA11
diff --git a/generated_pages/techniques/T0130.002.md b/generated_pages/techniques/T0130.002.md
index ce8afba..7604d65 100644
--- a/generated_pages/techniques/T0130.002.md
+++ b/generated_pages/techniques/T0130.002.md
@@ -1,6 +1,6 @@
-# Technique T0130.002: Utilize Bulletproof Hosting
+# Technique T0130.002: Utilise Bulletproof Hosting
-* **Summary**: Hosting refers to services through which storage and computing resources are provided to an individual or organization for the accommodation and maintenance of one or more websites and related services. Services may include web hosting, file sharing, and email distribution. Bulletproof hosting refers to services provided by an entity, such as a domain hosting or web hosting firm, that allows its customer considerable leniency in use of the service. An influence operation may utilize bulletproof hosting to maintain continuity of service for suspicious, illegal, or disruptive operation activities that stricter hosting services would limit, report, or suspend.
+* **Summary**: Hosting refers to services through which storage and computing resources are provided to an individual or organisation for the accommodation and maintenance of one or more websites and related services. Services may include web hosting, file sharing, and email distribution. Bulletproof hosting refers to services provided by an entity, such as a domain hosting or web hosting firm, that allows its customer considerable leniency in use of the service. An influence operation may utilise bulletproof hosting to maintain continuity of service for suspicious, illegal, or disruptive operation activities that stricter hosting services would limit, report, or suspend.
* **Belongs to tactic stage**: TA11
diff --git a/generated_pages/techniques/T0130.003.md b/generated_pages/techniques/T0130.003.md
index b923382..5a73608 100644
--- a/generated_pages/techniques/T0130.003.md
+++ b/generated_pages/techniques/T0130.003.md
@@ -1,6 +1,6 @@
-# Technique T0130.003: Use Shell Organizations
+# Technique T0130.003: Use Shell Organisations
-* **Summary**: Use Shell Organizations to conceal sponsorship.
+* **Summary**: Use Shell Organisations to conceal sponsorship.
* **Belongs to tactic stage**: TA11
diff --git a/generated_pages/techniques/T0130.004.md b/generated_pages/techniques/T0130.004.md
index 560d999..47fb566 100644
--- a/generated_pages/techniques/T0130.004.md
+++ b/generated_pages/techniques/T0130.004.md
@@ -1,6 +1,6 @@
# Technique T0130.004: Use Cryptocurrency
-* **Summary**: Use Cryptocurrency to conceal sponsorship. Examples include Bitcoin, Monero, and Etherium.
+* **Summary**: Use Cryptocurrency to conceal sponsorship. Examples include Bitcoin, Monero, and Ethereum.
* **Belongs to tactic stage**: TA11
diff --git a/generated_pages/techniques/T0133.001.md b/generated_pages/techniques/T0133.001.md
index 72018d2..45b9818 100644
--- a/generated_pages/techniques/T0133.001.md
+++ b/generated_pages/techniques/T0133.001.md
@@ -1,6 +1,6 @@
-# Technique T0133.001: Behavior Changes
+# Technique T0133.001: Behaviour Changes
-* **Summary**: Monitor and evaluate behaviour changes from misinformation incidents.
+* **Summary**: Monitor and evaluate behaviour changes from misinformation incidents. * **Belongs to tactic stage**: TA12 diff --git a/generated_pages/techniques/T0133.002.md b/generated_pages/techniques/T0133.002.md index c080cd2..867d9aa 100644 --- a/generated_pages/techniques/T0133.002.md +++ b/generated_pages/techniques/T0133.002.md @@ -1,6 +1,6 @@ # Technique T0133.002: Content -* **Summary**: Measure current system state with respect to the effectiveness of campaign content. +* **Summary**: Measure current system state with respect to the effectiveness of campaign content. * **Belongs to tactic stage**: TA12 diff --git a/generated_pages/techniques/T0133.003.md b/generated_pages/techniques/T0133.003.md index 0276888..2bc01f3 100644 --- a/generated_pages/techniques/T0133.003.md +++ b/generated_pages/techniques/T0133.003.md @@ -1,6 +1,6 @@ # Technique T0133.003: Awareness -* **Summary**: Measure current system state with respect to the effectiveness of influencing awareness. +* **Summary**: Measure current system state with respect to the effectiveness of influencing awareness. * **Belongs to tactic stage**: TA12 diff --git a/generated_pages/techniques/T0133.004.md b/generated_pages/techniques/T0133.004.md index a01e431..e045986 100644 --- a/generated_pages/techniques/T0133.004.md +++ b/generated_pages/techniques/T0133.004.md @@ -1,6 +1,6 @@ # Technique T0133.004: Knowledge -* **Summary**: Measure current system state with respect to the effectiveness of influencing knowledge. +* **Summary**: Measure current system state with respect to the effectiveness of influencing knowledge. * **Belongs to tactic stage**: TA12 diff --git a/generated_pages/techniques/T0133.005.md b/generated_pages/techniques/T0133.005.md index 12ed204..87bd8c6 100644 --- a/generated_pages/techniques/T0133.005.md +++ b/generated_pages/techniques/T0133.005.md @@ -1,6 +1,6 @@ # Technique T0133.005: Action/Attitude -* **Summary**: Measure current system state with respect to the effectiveness of influencing action/attitude. +* **Summary**: Measure current system state with respect to the effectiveness of influencing action/attitude. * **Belongs to tactic stage**: TA12 diff --git a/generated_pages/techniques/T0134.001.md b/generated_pages/techniques/T0134.001.md index 6d116af..189ced3 100644 --- a/generated_pages/techniques/T0134.001.md +++ b/generated_pages/techniques/T0134.001.md @@ -1,6 +1,6 @@ # Technique T0134.001: Message Reach -* **Summary**: Monitor and evaluate message reach in misinformation incidents. +* **Summary**: Monitor and evaluate message reach in misinformation incidents. * **Belongs to tactic stage**: TA12 diff --git a/generated_pages/techniques_index.md b/generated_pages/techniques_index.md index b06a859..331bf13 100644 --- a/generated_pages/techniques_index.md +++ b/generated_pages/techniques_index.md @@ -10,19 +10,19 @@ T0002 Facilitate State Propaganda -Organize citizens around pro-state messaging. Coordinate paid or volunteer groups to push state propaganda. +Organise citizens around pro-state messaging. Coordinate paid or volunteer groups to push state propaganda. TA02 T0003 Leverage Existing Narratives -Use or adapt existing narrative themes, where narratives are the baseline stories of a target audience. Narratives form the bedrock of our worldviews. New information is understood through a process firmly grounded in this bedrock. If new information is not consitent with the prevailing narratives of an audience, it will be ignored. 
Effective campaigns will frame their misinformation in the context of these narratives. Highly effective campaigns will make extensive use of audience-appropriate archetypes and meta-narratives throughout their content creation and amplifiction practices.
+Use or adapt existing narrative themes, where narratives are the baseline stories of a target audience. Narratives form the bedrock of our worldviews. New information is understood through a process firmly grounded in this bedrock. If new information is not consistent with the prevailing narratives of an audience, it will be ignored. Effective campaigns will frame their misinformation in the context of these narratives. Highly effective campaigns will make extensive use of audience-appropriate archetypes and meta-narratives throughout their content creation and amplification practices.
TA14
T0004
Develop Competing Narratives
-Advance competing narratives connected to same issue ie: on one hand deny incident while at same time expresses dismiss. Suppressing or discouraging narratives already spreading requires an alternative. The most simple set of narrative techniques in response would be the construction and promotion of contradictory alternatives centered on denial, deflection, dismissal, counter-charges, excessive standards of proof, bias in prohibition or enforcement, and so on. These competing narratives allow loyalists cover, but are less compelling to opponents and fence-sitters than campaigns built around existing narratives or highly explanatory master narratives. Competing narratives, as such, are especially useful in the "firehose of misinformation" approach.
+Advance competing narratives connected to the same issue, i.e. on the one hand deny the incident while at the same time expressing dismissal. Suppressing or discouraging narratives already spreading requires an alternative. The simplest set of narrative techniques in response would be the construction and promotion of contradictory alternatives centred on denial, deflection, dismissal, counter-charges, excessive standards of proof, bias in prohibition or enforcement, and so on. These competing narratives allow loyalists cover, but are less compelling to opponents and fence-sitters than campaigns built around existing narratives or highly explanatory master narratives. Competing narratives, as such, are especially useful in the "firehose of misinformation" approach.
TA14
@@ -34,13 +34,13 @@
T0009
Create Fake Experts
-Stories planted or promoted in computational propaganda operations often make use of experts fabricated from whole cloth, sometimes specifically for the story itself.
+Stories planted or promoted in computational propaganda operations often make use of experts fabricated from whole cloth, sometimes specifically for the story itself.
TA16
T0009.001
-Utilize Academic/Pseudoscientific Justifications
-Utilize Academic/Pseudoscientific Justifications
+Utilise Academic/Pseudoscientific Justifications
+Utilise Academic/Pseudoscientific Justifications
TA16
@@ -58,43 +58,43 @@
T0013
Create Inauthentic Websites
-Create media assets to support inauthentic organizations (e.g. think tank), people (e.g. experts) and/or serve as sites to distribute malware/launch phishing operations.
+Create media assets to support inauthentic organisations (e.g. think tank), people (e.g. experts) and/or serve as sites to distribute malware/launch phishing operations.
TA15 T0014 Prepare Fundraising Campaigns -Fundraising campaigns refer to an influence operation’s systematic effort to seek financial support for a charity, cause, or other enterprise using online activities that further promote operation information pathways while raising a profit. Many influence operations have engaged in crowdfunding services on platforms including Tipee, Patreon, and GoFundMe. An operation may use its previously prepared fundraising campaigns (see: Develop Information Pathways) to promote operation messaging while raising money to support its activities. +Fundraising campaigns refer to an influence operation’s systematic effort to seek financial support for a charity, cause, or other enterprise using online activities that further promote operation information pathways while raising a profit. Many influence operations have engaged in crowdfunding services on platforms including Tipee, Patreon, and GoFundMe. An operation may use its previously prepared fundraising campaigns (see: Develop Information Pathways) to promote operation messaging while raising money to support its activities. TA15 T0014.001 Raise Funds from Malign Actors -Raising funds from malign actors may include contributions from foreign agents, cutouts or proxies, shell companies, dark money groups, etc. +Raising funds from malign actors may include contributions from foreign agents, cutouts or proxies, shell companies, dark money groups, etc. TA15 T0014.002 Raise Funds from Ignorant Agents -Raising funds from ignorant agents may include scams, donations intended for one stated purpose but then used for another, etc. +Raising funds from ignorant agents may include scams, donations intended for one stated purpose but then used for another, etc. TA15 T0015 -Create Hashtags and Search Artifacts -Create one or more hashtags and/or hashtag groups. Many incident-based campaigns will create hashtags to promote their fabricated event. Creating a hashtag for an incident can have two important effects: 1. Create a perception of reality around an event. Certainly only "real" events would be discussed in a hashtag. After all, the event has a name!, and 2. Publicize the story more widely through trending lists and search behavior. Asset needed to direct/control/manage "conversation" connected to launching new incident/campaign with new hashtag for applicable social media sites). +Create Hashtags and Search Artefacts +Create one or more hashtags and/or hashtag groups. Many incident-based campaigns will create hashtags to promote their fabricated event. Creating a hashtag for an incident can have two important effects: 1. Create a perception of reality around an event. Certainly only "real" events would be discussed in a hashtag. After all, the event has a name!, and 2. Publicise the story more widely through trending lists and search behaviour. Asset needed to direct/control/manage "conversation" connected to launching new incident/campaign with new hashtag for applicable social media sites). TA06 T0016 Create Clickbait -Create attention grabbing headlines (outrage, doubt, humor) required to drive traffic & engagement. This is a key asset. +Create attention grabbing headlines (outrage, doubt, humour) required to drive traffic & engagement. This is a key asset. 
TA05
T0017
Conduct Fundraising
-Fundraising campaigns refer to an influence operation’s systematic effort to seek financial support for a charity, cause, or other enterprise using online activities that further promote operation information pathways while raising a profit. Many influence operations have engaged in crowdfunding services166 on platforms including Tipee, Patreon, and GoFundMe. An operation may use its previously prepared fundraising campaigns to promote operation messaging while raising money to support its activities.
+Fundraising campaigns refer to an influence operation’s systematic effort to seek financial support for a charity, cause, or other enterprise using online activities that further promote operation information pathways while raising a profit. Many influence operations have engaged in crowdfunding services on platforms including Tipee, Patreon, and GoFundMe. An operation may use its previously prepared fundraising campaigns to promote operation messaging while raising money to support its activities.
TA10
@@ -136,19 +136,19 @@
T0022
Leverage Conspiracy Theory Narratives
-"Conspiracy narratives" appeal to the human desire for explanatory order, by invoking the participation of poweful (often sinister) actors in pursuit of their own political goals. These narratives are especially appealing when an audience is low-information, marginalized or otherwise inclined to reject the prevailing explanation. Conspiracy narratives are an important component of the "firehose of falsehoods" model.
+"Conspiracy narratives" appeal to the human desire for explanatory order, by invoking the participation of powerful (often sinister) actors in pursuit of their own political goals. These narratives are especially appealing when an audience is low-information, marginalised or otherwise inclined to reject the prevailing explanation. Conspiracy narratives are an important component of the "firehose of falsehoods" model.
TA14
T0022.001
Amplify Existing Conspiracy Theory Narratives
-An influence operation may amplify an existing conspiracy theory narrative that aligns with its incident or campaign goals. By amplifying existing conspiracy theory narratives, operators can leverage the power of the existing communities that support and propagate those theories without needing to expend resources creating new narratives or building momentum and buy in around new narratives.
+An influence operation may amplify an existing conspiracy theory narrative that aligns with its incident or campaign goals. By amplifying existing conspiracy theory narratives, operators can leverage the power of the existing communities that support and propagate those theories without needing to expend resources creating new narratives or building momentum and buy-in around new narratives.
TA14
T0022.002
Develop Original Conspiracy Theory Narratives
-While this requires more resources than amplifying existing conspiracy theory narratives, an influence operation may develop original conspiracy theory narratives in order to achieve greater control and alignment over the narrative and their campaign goals. Prominent examples include the USSR's Operation INFEKTION disinformation campaign run by the KGB in the 1980s to plant the idea that the United States had invented HIV/AIDS as part of a biological weapons research project at Fort Detrick, Maryland. More recently, Fort Detrick featured prominently in a new conspiracy theory narratives around the origins of the COVID-19 outbreak and pandemic.
+While this requires more resources than amplifying existing conspiracy theory narratives, an influence operation may develop original conspiracy theory narratives in order to achieve greater control and alignment over the narrative and their campaign goals. Prominent examples include the USSR's Operation INFEKTION disinformation campaign run by the KGB in the 1980s to plant the idea that the United States had invented HIV/AIDS as part of a biological weapons research project at Fort Detrick, Maryland. More recently, Fort Detrick featured prominently in a new conspiracy theory narratives around the origins of the COVID-19 outbreak and pandemic. TA14 @@ -160,13 +160,13 @@ T0023.001 Reframe Context -Reframing context refers to removing an event from its surrounding context to distort its intended meaning. Rather than deny that an event occurred, reframing context frames an event in a manner that may lead the target audience to draw a different conclusion about its intentions. +Reframing context refers to removing an event from its surrounding context to distort its intended meaning. Rather than deny that an event occurred, reframing context frames an event in a manner that may lead the target audience to draw a different conclusion about its intentions. TA06 T0023.002 Edit Open-Source Content -An influence operation may edit open-source content, such as collaborative blogs or encyclopedias, to promote its narratives on outlets with existing credibility and audiences. Editing open-source content may allow an operation to post content on platforms without dedicating resources to the creation and maintenance of its own assets. +An influence operation may edit open-source content, such as collaborative blogs or encyclopaedias, to promote its narratives on outlets with existing credibility and audiences. Editing open-source content may allow an operation to post content on platforms without dedicating resources to the creation and maintenance of its own assets. TA06 @@ -176,9 +176,9 @@ TA07 -T0039 +T0039 Bait Legitimate Influencers -Credibility in a social media environment is often a function of the size of a user's network. "Influencers" are so-called because of their reach, typically understood as: 1) the size of their network (i.e. the number of followers, perhaps weighted by their own influence); and 2) The rate at which their comments are re-circulated (these two metrics are related). Add traditional media players at all levels of credibility and professionalism to this, and the number of potential influencial carriers available for unwitting amplification becomes substantial. By targeting high-influence people and organizations in all types of media with narratives and content engineered to appeal their emotional or ideological drivers, influence campaigns are able to add perceived credibility to their messaging via saturation and adoption by trusted agents such as celebrities, journalists and local leaders. +Credibility in a social media environment is often a function of the size of a user's network. "Influencers" are so-called because of their reach, typically understood as: 1) the size of their network (i.e. the number of followers, perhaps weighted by their own influence); and 2) The rate at which their comments are re-circulated (these two metrics are related). Add traditional media players at all levels of credibility and professionalism to this, and the number of potential influencial carriers available for unwitting amplification becomes substantial. 
By targeting high-influence people and organisations in all types of media with narratives and content engineered to appeal their emotional or ideological drivers, influence campaigns are able to add perceived credibility to their messaging via saturation and adoption by trusted agents such as celebrities, journalists and local leaders. TA08 @@ -214,7 +214,7 @@ T0044 Seed Distortions -Try a wide variety of messages in the early hours surrounding an incident or event, to give a misleading account or impression. +Try a wide variety of messages in the early hours surrounding an incident or event, to give a misleading account or impression. TA08 @@ -225,8 +225,8 @@ T0046 -Use Search Engine Optimization -Manipulate content engagement metrics (ie: Reddit & Twitter) to influence/impact news search results (e.g. Google), also elevates RT & Sputnik headline into Google news alert emails. aka "Black-hat SEO" +Use Search Engine Optimisation +Manipulate content engagement metrics (ie: Reddit & Twitter) to influence/impact news search results (e.g. Google), also elevates RT & Sputnik headline into Google news alert emails. aka "Black-hat SEO" TA08 @@ -238,13 +238,13 @@ T0048 Harass -Threatening or harassing believers of opposing narratives refers to the use of intimidation techniques, including cyberbullying and doxing, to discourage opponents from voicing their dissent. An influence operation may threaten or harass believers of the opposing narratives to deter individuals from posting or proliferating conflicting content. +Threatening or harassing believers of opposing narratives refers to the use of intimidation techniques, including cyberbullying and doxing, to discourage opponents from voicing their dissent. An influence operation may threaten or harass believers of the opposing narratives to deter individuals from posting or proliferating conflicting content. TA18 T0048.001 Boycott/"Cancel" Opponents -Cancel culture refers to the phenomenon in which individuals collectively refrain from supporting an individual, organization, business, or other entity, usually following a real or falsified controversy. An influence operation may exploit cancel culture by emphasizing an adversary’s problematic or disputed behavior and presenting its own content as an alternative. +Cancel culture refers to the phenomenon in which individuals collectively refrain from supporting an individual, organisation, business, or other entity, usually following a real or falsified controversy. An influence operation may exploit cancel culture by emphasising an adversary’s problematic or disputed behaviour and presenting its own content as an alternative. TA18 @@ -256,13 +256,13 @@ T0048.003 Threaten to Dox -Doxing refers to online harassment in which individuals publicly release private information about another individual, including names, addresses, employment information, pictures, family members, and other sensitive information. An influence operation may dox its opposition to encourage individuals aligned with operation narratives to harass the doxed individuals themselves or otherwise discourage the doxed individuals from posting or proliferating conflicting content. +Doxing refers to online harassment in which individuals publicly release private information about another individual, including names, addresses, employment information, pictures, family members, and other sensitive information. 
An influence operation may dox its opposition to encourage individuals aligned with operation narratives to harass the doxed individuals themselves or otherwise discourage the doxed individuals from posting or proliferating conflicting content. TA18 T0048.004 Dox -Doxing refers to online harassment in which individuals publicly release private information about another individual, including names, addresses, employment information, pictures, family members, and other sensitive information. An influence operation may dox its opposition to encourage individuals aligned with operation narratives to harass the doxed individuals themselves or otherwise discourage the doxed individuals from posting or proliferating conflicting content. +Doxing refers to online harassment in which individuals publicly release private information about another individual, including names, addresses, employment information, pictures, family members, and other sensitive information. An influence operation may dox its opposition to encourage individuals aligned with operation narratives to harass the doxed individuals themselves or otherwise discourage the doxed individuals from posting or proliferating conflicting content. TA18 @@ -286,26 +286,25 @@ T0049.003 Bots Amplify via Automated Forwarding and Reposting -Automated forwarding and reposting refer to the proliferation of operation content using automated means, such as artificial intelligence or social media bots. An influence operation may use automated activity to increase content exposure without dedicating the resources, including personnel and time, traditionally required to forward and repost content. -Use bots to amplify narratives above algorithm thresholds. Bots are automated/programmed profiles designed to amplify content (ie: automatically retweet or like) and give appearance it's more "popular" than it is. They can operate as a network, to function in a coordinated/orchestrated manner. In some cases (more so now) they are an inexpensive/disposable assets used for minimal deployment as bot detection tools improve and platforms are more responsive. +Automated forwarding and reposting refer to the proliferation of operation content using automated means, such as artificial intelligence or social media bots. An influence operation may use automated activity to increase content exposure without dedicating the resources, including personnel and time, traditionally required to forward and repost content. Use bots to amplify narratives above algorithm thresholds. Bots are automated/programmed profiles designed to amplify content (ie: automatically retweet or like) and give appearance it's more "popular" than it is. They can operate as a network, to function in a coordinated/orchestrated manner. In some cases (more so now) they are an inexpensive/disposable assets used for minimal deployment as bot detection tools improve and platforms are more responsive. TA17 T0049.004 -Utilize Spamoflauge -Spamoflauge refers to the practice of disguising spam messages as legitimate. Spam refers to the use of electronic messaging systems to send out unrequested or unwanted messages in bulk. Simple methods of spamoflauge include replacing letters with numbers to fool keyword-based email spam filters, for example, "you've w0n our jackp0t!". 
Spamoflauge may extend to more complex techniques such as modifying the grammar or word choice of the language, casting messages as images which spam detectors cannot automatically read, or encapsulating messages in password protected attachments, such as .pdf or .zip files. Influence operations may use spamoflauge to avoid spam filtering systems and increase the likelihood of the target audience receiving operation messaging. +Utilise Spamoflauge +Spamoflauge refers to the practice of disguising spam messages as legitimate. Spam refers to the use of electronic messaging systems to send out unrequested or unwanted messages in bulk. Simple methods of spamoflauge include replacing letters with numbers to fool keyword-based email spam filters, for example, "you've w0n our jackp0t!". Spamoflauge may extend to more complex techniques such as modifying the grammar or word choice of the language, casting messages as images which spam detectors cannot automatically read, or encapsulating messages in password protected attachments, such as .pdf or .zip files. Influence operations may use spamoflauge to avoid spam filtering systems and increase the likelihood of the target audience receiving operation messaging. TA17 T0049.005 Conduct Swarming -Swarming refers to the coordinated use of accounts to overwhelm the information space with operation content. Unlike information flooding, swarming centers exclusively around a specific event or actor rather than a general narrative. Swarming relies on “horizontal communication” between information assets rather than a top-down, vertical command-and-control approach. +Swarming refers to the coordinated use of accounts to overwhelm the information space with operation content. Unlike information flooding, swarming centres exclusively around a specific event or actor rather than a general narrative. Swarming relies on “horizontal communication” between information assets rather than a top-down, vertical command-and-control approach. TA17 T0049.006 Conduct Keyword Squatting -Keyword squatting refers to the creation of online content, such as websites, articles, or social media accounts, around a specific search engine-optimized term to overwhelm the search results of that term. An influence may keyword squat to increase content exposure to target audience members who query the exploited term in a search engine and manipulate the narrative around the term. +Keyword squatting refers to the creation of online content, such as websites, articles, or social media accounts, around a specific search engine-optimized term to overwhelm the search results of that term. An influence may keyword squat to increase content exposure to target audience members who query the exploited term in a search engine and manipulate the narrative around the term. TA17 @@ -316,20 +315,20 @@ Use bots to amplify narratives above algorithm thresholds. Bots are automated/pr T0057 -Organize Events +Organise Events Coordinate and promote real-world events across media platforms, e.g. rallies, protests, gatherings in support of incident narratives. TA10 T0057.001 Pay for Physical Action -Paying for physical action occurs when an influence operation pays individuals to act in the physical realm. An influence operation may pay for physical action to create specific situations and frame them in a way that supports operation narratives, for example, paying a group of people to burn a car to later post an image of the burning car and frame it as an act of protest. 
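The letter-for-number substitution in the T0049.004 summary above ("you've w0n our jackp0t!") is mechanical enough to show in a few lines. A minimal sketch with an invented substitution table (not one observed in any documented operation), including the reverse mapping a keyword filter could apply to normalise such text:

# Toy substitution table, for illustration only.
LEET = {"o": "0", "i": "1", "e": "3", "a": "4"}
REVERSE = {v: k for k, v in LEET.items()}

def disguise(text: str) -> str:
    # Swap selected letters for look-alike digits, defeating naive keyword matching.
    return "".join(LEET.get(c, c) for c in text.lower())

def normalise(text: str) -> str:
    # Undo the substitution so a filter sees the original words again.
    return "".join(REVERSE.get(c, c) for c in text.lower())

print(disguise("you've won our jackpot"))   # y0u'v3 w0n 0ur j4ckp0t
print(normalise("you've w0n our jackp0t"))  # you've won our jackpot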
+Paying for physical action occurs when an influence operation pays individuals to act in the physical realm. An influence operation may pay for physical action to create specific situations and frame them in a way that supports operation narratives, for example, paying a group of people to burn a car to later post an image of the burning car and frame it as an act of protest. TA10 T0057.002 Conduct Symbolic Action -Symbolic action refers to activities specifically intended to advance an operation’s narrative by signaling something to the audience, for example, a military parade supporting a state’s narrative of military superiority. An influence operation may use symbolic action to create falsified evidence supporting operation narratives in the physical information space. +Symbolic action refers to activities specifically intended to advance an operation’s narrative by signalling something to the audience, for example, a military parade supporting a state’s narrative of military superiority. An influence operation may use symbolic action to create falsified evidence supporting operation narratives in the physical information space. TA10 @@ -365,61 +364,61 @@ Use bots to amplify narratives above algorithm thresholds. Bots are automated/pr T0068 Respond to Breaking News Event or Active Crisis -Media attention on a story or event is heightened during a breaking news event, where unclear facts and incomplete information increase speculation, rumors, and conspiracy theories, which are all vulnerable to manipulation. +Media attention on a story or event is heightened during a breaking news event, where unclear facts and incomplete information increase speculation, rumours, and conspiracy theories, which are all vulnerable to manipulation. TA14 T0072 Segment Audiences -Create audience segmentations by features of interest to the influence campaign, including political affiliation, geographic location, income, demographics, and psychographics. +Create audience segmentations by features of interest to the influence campaign, including political affiliation, geographic location, income, demographics, and psychographics. TA13 T0072.001 Geographic Segmentation -An influence operation may target populations in a specific geographic location, such as a region, state, or city. An influence operation may use geographic segmentation to Create Localized Content (see: Establish Legitimacy). +An influence operation may target populations in a specific geographic location, such as a region, state, or city. An influence operation may use geographic segmentation to Create Localised Content (see: Establish Legitimacy). TA13 T0072.002 Demographic Segmentation -An influence operation may target populations based on demographic segmentation, including age, gender, and income. Demographic segmentation may be useful for influence operations aiming to change state policies that affect a specific population sector. For example, an influence operation attempting to influence Medicare funding in the United States would likely target U.S. voters over 65 years of age. +An influence operation may target populations based on demographic segmentation, including age, gender, and income. Demographic segmentation may be useful for influence operations aiming to change state policies that affect a specific population sector. For example, an influence operation attempting to influence Medicare funding in the United States would likely target U.S. voters over 65 years of age. 
TA13 T0072.003 Economic Segmentation -An influence operation may target populations based on their income bracket, wealth, or other financial or economic division. +An influence operation may target populations based on their income bracket, wealth, or other financial or economic division. TA13 T0072.004 Psychographic Segmentation -An influence operation may target populations based on psychographic segmentation, which uses audience values and decision-making processes. An operation may individually gather psychographic data with its own surveys or collection tools or externally purchase data from social media companies or online surveys, such as personality quizzes. +An influence operation may target populations based on psychographic segmentation, which uses audience values and decision-making processes. An operation may individually gather psychographic data with its own surveys or collection tools or externally purchase data from social media companies or online surveys, such as personality quizzes. TA13 T0072.005 Political Segmentation -An influence operation may target populations based on their political affiliations, especially when aiming to manipulate voting or change policy. +An influence operation may target populations based on their political affiliations, especially when aiming to manipulate voting or change policy. TA13 T0073 Determine Target Audiences -Determining the target audiences (segments of the population) who will receive campaign narratives and artifacts intended to achieve the strategic ends. +Determining the target audiences (segments of the population) who will receive campaign narratives and artefacts intended to achieve the strategic ends. TA01 T0074 Determine Strategic Ends -Determining the campaigns goals or objectives. Examples include achieving achieving geopolitical advantage like undermining trust in an adversary, gaining domestic political advantage, achieving financial gain, or attaining a policy change, +Determining the campaign's goals or objectives. Examples include achieving geopolitical advantage like undermining trust in an adversary, gaining domestic political advantage, achieving financial gain, or attaining a policy change. TA01 T0075 Dismiss -Push back against criticism by dismissing your critics. This might be arguing that the critics use a different standard for you than with other actors or themselves; or arguing that their criticism is biased. +Push back against criticism by dismissing your critics. This might be arguing that the critics use a different standard for you than with other actors or themselves; or arguing that their criticism is biased. TA02 @@ -431,7 +430,7 @@ Use bots to amplify narratives above algorithm thresholds. Bots are automated/pr T0076 Distort -Twist the narrative. Take information, or artifacts like images, and change the framing around them. +Twist the narrative. Take information, or artefacts like images, and change the framing around them. TA02 @@ -455,148 +454,145 @@ Use bots to amplify narratives above algorithm thresholds. Bots are automated/pr T0080 Map Target Audience Information Environment -Mapping the target audience information environment analyzes the information space itself, including social media analytics, web traffic, and media surveys. Mapping the information environment may help the influence operation determine the most realistic and popular information channels to reach its target audience.
-Mapping the target audience information environment aids influence operations in determining the most vulnerable areas of the information space to target with messaging. +Mapping the target audience information environment analyses the information space itself, including social media analytics, web traffic, and media surveys. Mapping the information environment may help the influence operation determine the most realistic and popular information channels to reach its target audience. Mapping the target audience information environment aids influence operations in determining the most vulnerable areas of the information space to target with messaging. TA13 T0080.001 Monitor Social Media Analytics -An influence operation may use social media analytics to determine which factors will increase the operation content’s exposure to its target audience on social media platforms, including views, interactions, and sentiment relating to topics and content types. The social media platform itself or a third-party tool may collect the metrics. +An influence operation may use social media analytics to determine which factors will increase the operation content’s exposure to its target audience on social media platforms, including views, interactions, and sentiment relating to topics and content types. The social media platform itself or a third-party tool may collect the metrics. TA13 T0080.002 Evaluate Media Surveys -An influence operation may evaluate its own or third-party media surveys to determine what type of content appeals to its target audience. Media surveys may provide insight into an audience’s political views, social class, general interests, or other indicators used to tailor operation messaging to its target audience. +An influence operation may evaluate its own or third-party media surveys to determine what type of content appeals to its target audience. Media surveys may provide insight into an audience’s political views, social class, general interests, or other indicators used to tailor operation messaging to its target audience. TA13 T0080.003 Identify Trending Topics/Hashtags -An influence operation may identify trending hashtags on social media platforms for later use in boosting operation content. A hashtag40 refers to a word or phrase preceded by the hash symbol (#) on social media used to identify messages and posts relating to a specific topic. All public posts that use the same hashtag are aggregated onto a centralized page dedicated to the word or phrase and sorted either chronologically or by popularity. +An influence operation may identify trending hashtags on social media platforms for later use in boosting operation content. A hashtag40 refers to a word or phrase preceded by the hash symbol (#) on social media used to identify messages and posts relating to a specific topic. All public posts that use the same hashtag are aggregated onto a centralised page dedicated to the word or phrase and sorted either chronologically or by popularity. TA13 T0080.004 Conduct Web Traffic Analysis -An influence operation may conduct web traffic analysis to determine which search engines, keywords, websites, and advertisements gain the most traction with its target audience. +An influence operation may conduct web traffic analysis to determine which search engines, keywords, websites, and advertisements gain the most traction with its target audience. 
TA13 T0080.005 Assess Degree/Type of Media Access -An influence operation may survey a target audience’s Internet availability and degree of media freedom to determine which target audience members will have access to operation content and on which platforms. An operation may face more difficulty targeting an information environment with heavy restrictions and media control than an environment with independent media, freedom of speech and of the press, and individual liberties. +An influence operation may survey a target audience’s Internet availability and degree of media freedom to determine which target audience members will have access to operation content and on which platforms. An operation may face more difficulty targeting an information environment with heavy restrictions and media control than an environment with independent media, freedom of speech and of the press, and individual liberties. TA13 T0081 Identify Social and Technical Vulnerabilities -Identifying social and technical vulnerabilities determines weaknesses within the target audience information environment for later exploitation. Vulnerabilities include decisive political issues, weak cybersecurity infrastructure, search engine data voids, and other technical and non technical weaknesses in the target information environment. -Identifying social and technical vulnerabilities facilitates the later exploitation of the identified weaknesses to advance operation objectives. +Identifying social and technical vulnerabilities determines weaknesses within the target audience information environment for later exploitation. Vulnerabilities include decisive political issues, weak cybersecurity infrastructure, search engine data voids, and other technical and non technical weaknesses in the target information environment. Identifying social and technical vulnerabilities facilitates the later exploitation of the identified weaknesses to advance operation objectives. TA13 T0081.001 Find Echo Chambers -Find or plan to create areas (social media groups, search term groups, hashtag groups etc) where individuals only engage with people they agree with. +Find or plan to create areas (social media groups, search term groups, hashtag groups etc) where individuals only engage with people they agree with. TA13 T0081.002 Identify Data Voids -A data void refers to a word or phrase that results in little, manipulative, or low-quality search engine data. Data voids are hard to detect and relatively harmless until exploited by an entity aiming to quickly proliferate false or misleading information during a phenomenon that causes a high number of individuals to query the term or phrase. In the Plan phase, an influence operation may identify data voids for later exploitation in the operation. -A 2019 report by Michael Golebiewski identifies five types of data voids. (1) “Breaking news” data voids occur when a keyword gains popularity during a short period of time, allowing an influence operation to publish false content before legitimate news outlets have an opportunity to publish relevant information. (2) An influence operation may create a “strategic new terms” data void by creating their own terms and publishing information online before promoting their keyword to the target audience. (3) An influence operation may publish content on “outdated terms” that have decreased in popularity, capitalizing on most search engines’ preferences for recency. 
(4) “Fragmented concepts” data voids separate connections between similar ideas, isolating segment queries to distinct search engine results. (5) An influence operation may use “problematic queries” that previously resulted in disturbing or inappropriate content to promote messaging until mainstream media recontextualizes the term. +A data void refers to a word or phrase that results in little, manipulative, or low-quality search engine data. Data voids are hard to detect and relatively harmless until exploited by an entity aiming to quickly proliferate false or misleading information during a phenomenon that causes a high number of individuals to query the term or phrase. In the Plan phase, an influence operation may identify data voids for later exploitation in the operation. A 2019 report by Michael Golebiewski identifies five types of data voids. (1) “Breaking news” data voids occur when a keyword gains popularity during a short period of time, allowing an influence operation to publish false content before legitimate news outlets have an opportunity to publish relevant information. (2) An influence operation may create a “strategic new terms” data void by creating their own terms and publishing information online before promoting their keyword to the target audience. (3) An influence operation may publish content on “outdated terms” that have decreased in popularity, capitalising on most search engines’ preferences for recency. (4) “Fragmented concepts” data voids separate connections between similar ideas, isolating segment queries to distinct search engine results. (5) An influence operation may use “problematic queries” that previously resulted in disturbing or inappropriate content to promote messaging until mainstream media recontextualizes the term. TA13 T0081.003 Identify Existing Prejudices -An influence operation may exploit existing racial, religious, demographic, or social prejudices to further polarize its target audience from the rest of the public. +An influence operation may exploit existing racial, religious, demographic, or social prejudices to further polarise its target audience from the rest of the public. TA13 T0081.004 Identify Existing Fissures -An influence operation may identify existing fissures to pit target populations against one another or facilitate a “divide-and-conquer" approach to tailor operation narratives along the divides. +An influence operation may identify existing fissures to pit target populations against one another or facilitate a “divide-and-conquer" approach to tailor operation narratives along the divides. TA13 T0081.005 Identify Existing Conspiracy Narratives/Suspicions -An influence operation may assess preexisting conspiracy theories or suspicions in a population to identify existing narratives that support operational objectives. +An influence operation may assess preexisting conspiracy theories or suspicions in a population to identify existing narratives that support operational objectives. TA13 T0081.006 Identify Wedge Issues -A wedge issue is a divisive political issue, usually concerning a social phenomenon, that divides individuals along a defined line. An influence operation may exploit wedge issues by intentionally polarizing the public along the wedge issue line and encouraging opposition between factions. +A wedge issue is a divisive political issue, usually concerning a social phenomenon, that divides individuals along a defined line. 
An influence operation may exploit wedge issues by intentionally polarising the public along the wedge issue line and encouraging opposition between factions. TA13 T0081.007 Identify Target Audience Adversaries -An influence operation may identify or create a real or imaginary adversary to center operation narratives against. A real adversary may include certain politicians or political parties while imaginary adversaries may include falsified “deep state”62 actors that, according to conspiracies, run the state behind public view. +An influence operation may identify or create a real or imaginary adversary to centre operation narratives against. A real adversary may include certain politicians or political parties while imaginary adversaries may include falsified “deep state”62 actors that, according to conspiracies, run the state behind public view. TA13 T0081.008 Identify Media System Vulnerabilities -An influence operation may exploit existing weaknesses in a target’s media system. These weaknesses may include existing biases among media agencies, vulnerability to false news agencies on social media, or existing distrust of traditional media sources. An existing distrust among the public in the media system’s credibility holds high potential for exploitation by an influence operation when establishing alternative news agencies to spread operation content. +An influence operation may exploit existing weaknesses in a target’s media system. These weaknesses may include existing biases among media agencies, vulnerability to false news agencies on social media, or existing distrust of traditional media sources. An existing distrust among the public in the media system’s credibility holds high potential for exploitation by an influence operation when establishing alternative news agencies to spread operation content. TA13 T0082 Develop New Narratives -Actors may develop new narratives to further strategic or tactical goals, especially when existing narratives adequately align with the campaign goals. New narratives provide more control in terms of crafting the message to achieve specific goals. However, new narratives may require more effort to disseminate than adapting or adopting existing narratives. +Actors may develop new narratives to further strategic or tactical goals, especially when existing narratives adequately align with the campaign goals. New narratives provide more control in terms of crafting the message to achieve specific goals. However, new narratives may require more effort to disseminate than adapting or adopting existing narratives. TA14 T0083 Integrate Target Audience Vulnerabilities into Narrative -An influence operation may seek to exploit the preexisting weaknesses, fears, and enemies of the target audience for integration into the operation’s narratives and overall strategy. Integrating existing vulnerabilities into the operational approach conserves resources by exploiting already weak areas of the target information environment instead of forcing the operation to create new vulnerabilities in the environment. +An influence operation may seek to exploit the preexisting weaknesses, fears, and enemies of the target audience for integration into the operation’s narratives and overall strategy. Integrating existing vulnerabilities into the operational approach conserves resources by exploiting already weak areas of the target information environment instead of forcing the operation to create new vulnerabilities in the environment. 
TA14 T0084 Reuse Existing Content -When an operation recycles content from its own previous operations or plagiarizes from external operations. An operation may launder information to conserve resources that would have otherwise been utilized to develop new content. +When an operation recycles content from its own previous operations or plagiarises from external operations. An operation may launder information to conserve resources that would have otherwise been utilised to develop new content. TA06 T0084.001 Use Copypasta -Copypasta refers to a piece of text that has been copied and pasted multiple times across various online platforms. A copypasta’s final form may differ from its original source text as users add, delete, or otherwise edit the content as they repost the text. +Copypasta refers to a piece of text that has been copied and pasted multiple times across various online platforms. A copypasta’s final form may differ from its original source text as users add, delete, or otherwise edit the content as they repost the text. TA06 T0084.002 -Plagiarize Content -An influence operation may take content from other sources without proper attribution. This content may be either misinformation content shared by others without malicious intent but now leveraged by the campaign as disinformation or disinformation content from other sources. +Plagiarise Content +An influence operation may take content from other sources without proper attribution. This content may be either misinformation content shared by others without malicious intent but now leveraged by the campaign as disinformation or disinformation content from other sources. TA06 T0084.003 -Deceptively Labeled or Translated -An influence operation may take authentic content from other sources and add deceptive labels or deceptively translate the content into other langauges. +Deceptively Labelled or Translated +An influence operation may take authentic content from other sources and add deceptive labels or deceptively translate the content into other languages. TA06 T0084.004 Appropriate Content -An influence operation may take content from other sources with proper attribution. This content may be either misinformation content shared by others without malicious intent but now leveraged by the campaign as disinformation or disinformation content from other sources. Examples include the appropriation of content from one inauthentic news site to another inauthentic news site or network in ways that align with the originators licensing or terms of service. +An influence operation may take content from other sources with proper attribution. This content may be either misinformation content shared by others without malicious intent but now leveraged by the campaign as disinformation or disinformation content from other sources. Examples include the appropriation of content from one inauthentic news site to another inauthentic news site or network in ways that align with the originator's licensing or terms of service. TA06 T0085 Develop Text-Based Content -Creating and editing false or misleading text-based artifacts, often aligned with one or more specific narratives, for use in a disinformation campaign. +Creating and editing false or misleading text-based artefacts, often aligned with one or more specific narratives, for use in a disinformation campaign. TA06 T0085.001 Develop AI-Generated Text -AI-generated texts refers to synthetic text composed by computers using text-generating AI technology.
Autonomous generation refers to content created by a bot without human input, also known as bot-created content generation. Autonomous generation represents the next step in automation after language generation and may lead to automated journalism. An influence operation may use read fakes or autonomous generation to quickly develop and distribute content to the target audience. +AI-generated texts refers to synthetic text composed by computers using text-generating AI technology. Autonomous generation refers to content created by a bot without human input, also known as bot-created content generation. Autonomous generation represents the next step in automation after language generation and may lead to automated journalism. An influence operation may use read fakes or autonomous generation to quickly develop and distribute content to the target audience. TA06 @@ -608,13 +604,13 @@ A 2019 report by Michael Golebiewski identifies five types of data voids. (1) T0085.003 Develop Inauthentic News Articles -An influence operation may develop false or misleading news articles aligned to their campaign goals or narratives. +An influence operation may develop false or misleading news articles aligned to their campaign goals or narratives. TA06 T0086 Develop Image-Based Content -Creating and editing false or misleading visual artifacts, often aligned with one or more specific narratives, for use in a disinformation campaign. This may include photographing staged real-life situations, repurposing existing digital images, or using image creation and editing technologies. +Creating and editing false or misleading visual artefacts, often aligned with one or more specific narratives, for use in a disinformation campaign. This may include photographing staged real-life situations, repurposing existing digital images, or using image creation and editing technologies. TA06 @@ -632,7 +628,7 @@ A 2019 report by Michael Golebiewski identifies five types of data voids. (1) T0086.003 Deceptively Edit Images (Cheap Fakes) -Cheap fakes utilize less sophisticated measures of altering an image, video, or audio for example, slowing, speeding, or cutting footage to create a false context surrounding an image or event. +Cheap fakes utilise less sophisticated measures of altering an image, video, or audio for example, slowing, speeding, or cutting footage to create a false context surrounding an image or event. TA06 @@ -644,7 +640,7 @@ A 2019 report by Michael Golebiewski identifies five types of data voids. (1) T0087 Develop Video-Based Content -Creating and editing false or misleading video artifacts, often aligned with one or more specific narratives, for use in a disinformation campaign. This may include staging videos of purportedly real situations, repurposing existing video artifacts, or using AI-generated video creation and editing technologies (including deepfakes). +Creating and editing false or misleading video artefacts, often aligned with one or more specific narratives, for use in a disinformation campaign. This may include staging videos of purportedly real situations, repurposing existing video artefacts, or using AI-generated video creation and editing technologies (including deepfakes). TA06 @@ -656,13 +652,13 @@ A 2019 report by Michael Golebiewski identifies five types of data voids. 
(1) T0087.002 Deceptively Edit Video (Cheap Fakes) -Cheap fakes utilize less sophisticated measures of altering an image, video, or audio for example, slowing, speeding, or cutting footage to create a false context surrounding an image or event. +Cheap fakes utilise less sophisticated measures of altering an image, video, or audio for example, slowing, speeding, or cutting footage to create a false context surrounding an image or event. TA06 T0088 Develop Audio-Based Content -Creating and editing false or misleading audio artifacts, often aligned with one or more specific narratives, for use in a disinformation campaign. This may include creating completely new audio content, repurposing existing audio artifacts (including cheap fakes), or using AI-generated audio creation and editing technologies (including deepfakes). +Creating and editing false or misleading audio artefacts, often aligned with one or more specific narratives, for use in a disinformation campaign. This may include creating completely new audio content, repurposing existing audio artefacts (including cheap fakes), or using AI-generated audio creation and editing technologies (including deepfakes). TA06 @@ -674,7 +670,7 @@ A 2019 report by Michael Golebiewski identifies five types of data voids. (1) T0088.002 Deceptively Edit Audio (Cheap Fakes) -Cheap fakes utilize less sophisticated measures of altering an image, video, or audio for example, slowing, speeding, or cutting footage to create a false context surrounding an image or event. +Cheap fakes utilise less sophisticated measures of altering an image, video, or audio for example, slowing, speeding, or cutting footage to create a false context surrounding an image or event. TA06 @@ -710,26 +706,25 @@ A 2019 report by Michael Golebiewski identifies five types of data voids. (1) T0090.001 Create Anonymous Accounts -Anonymous accounts or anonymous users refer to users that access network resources without providing a username or password. An influence operation may use anonymous accounts to spread content without direct attribution to the operation. +Anonymous accounts or anonymous users refer to users that access network resources without providing a username or password. An influence operation may use anonymous accounts to spread content without direct attribution to the operation. TA15 T0090.002 Create Cyborg Accounts -Cyborg accounts refer to partly manned, partly automated social media accounts. Cyborg accounts primarily act as bots, but a human operator periodically takes control of the account to engage with real social media users by responding to comments and posting original content. Influence operations may use cyborg accounts to reduce the amount of direct human input required to maintain a regular account but increase the apparent legitimacy of the cyborg account by occasionally breaking its bot-like behavior with human interaction. +Cyborg accounts refer to partly manned, partly automated social media accounts. Cyborg accounts primarily act as bots, but a human operator periodically takes control of the account to engage with real social media users by responding to comments and posting original content. Influence operations may use cyborg accounts to reduce the amount of direct human input required to maintain a regular account but increase the apparent legitimacy of the cyborg account by occasionally breaking its bot-like behaviour with human interaction. 
TA15 T0090.003 Create Bot Accounts -Bots refer to autonomous internet users that interact with systems or other users while imitating traditional human behavior. Bots use a variety of tools to stay active without direct human operation, including artificial intelligence and big data analytics. For example, an individual may program a Twitter bot to retweet a tweet every time it contains a certain keyword or hashtag. An influence operation may use bots to increase its exposure and artificially promote its content across the internet without dedicating additional time or human resources. -Amplifier bots promote operation content through reposts, shares, and likes to increase the content’s online popularity. Hacker bots are traditionally covert bots running on computer scripts that rarely engage with users and work primarily as agents of larger cyberattacks, such as a Distributed Denial of Service attacks. Spammer bots are programmed to post content on social media or in comment sections, usually as a supplementary tool. Impersonator bots102 pose as real people by mimicking human behavior, complicating their detection. +Bots refer to autonomous internet users that interact with systems or other users while imitating traditional human behaviour. Bots use a variety of tools to stay active without direct human operation, including artificial intelligence and big data analytics. For example, an individual may program a Twitter bot to retweet a tweet every time it contains a certain keyword or hashtag. An influence operation may use bots to increase its exposure and artificially promote its content across the internet without dedicating additional time or human resources. Amplifier bots promote operation content through reposts, shares, and likes to increase the content’s online popularity. Hacker bots are traditionally covert bots running on computer scripts that rarely engage with users and work primarily as agents of larger cyberattacks, such as Distributed Denial of Service attacks. Spammer bots are programmed to post content on social media or in comment sections, usually as a supplementary tool. Impersonator bots pose as real people by mimicking human behaviour, complicating their detection. TA15 T0090.004 Create Sockpuppet Accounts -Sockpuppet accounts refer to falsified accounts that either promote the influence operation’s own material or attack critics of the material online. Individuals who control sockpuppet accounts also man at least one other user account.67 Sockpuppet accounts help legitimize operation narratives by providing an appearance of external support for the material and discrediting opponents of the operation. +Sockpuppet accounts refer to falsified accounts that either promote the influence operation’s own material or attack critics of the material online. Individuals who control sockpuppet accounts also man at least one other user account. Sockpuppet accounts help legitimise operation narratives by providing an appearance of external support for the material and discrediting opponents of the operation. TA15 @@ -753,52 +748,49 @@ Amplifier bots promote operation content through reposts, shares, and likes to i T0091.003 Enlist Troll Accounts -An influence operation may hire trolls, or human operators of fake accounts that aim to provoke others by posting and amplifying content about controversial issues. Trolls can serve to discredit an influence operation’s opposition or bring attention to the operation’s cause through debate.
-Classic trolls refer to regular people who troll for personal reasons, such as attention-seeking or boredom. Classic trolls may advance operation narratives by coincidence but are not directly affiliated with any larger operation. Conversely, hybrid trolls act on behalf of another institution, such as a state or financial organization, and post content with a specific ideological goal. Hybrid trolls may be highly advanced and institutionalized or less organized and work for a single individual. +An influence operation may hire trolls, or human operators of fake accounts that aim to provoke others by posting and amplifying content about controversial issues. Trolls can serve to discredit an influence operation’s opposition or bring attention to the operation’s cause through debate. Classic trolls refer to regular people who troll for personal reasons, such as attention-seeking or boredom. Classic trolls may advance operation narratives by coincidence but are not directly affiliated with any larger operation. Conversely, hybrid trolls act on behalf of another institution, such as a state or financial organisation, and post content with a specific ideological goal. Hybrid trolls may be highly advanced and institutionalised or less organised and work for a single individual. TA15 T0092 Build Network -Operators build their own network, creating links between accounts -- whether authentic or inauthentic -- in order amplify and promote narratives and artifacts, and encourage further growth of ther network, as well as the ongoing sharing and engagement with operational content. +Operators build their own network, creating links between accounts -- whether authentic or inauthentic -- in order to amplify and promote narratives and artefacts, and encourage further growth of their network, as well as the ongoing sharing and engagement with operational content. TA15 T0092.001 -Create Organizations -Influence operations may establish organizations with legitimate or falsified hierarchies, staff, and content to structure operation assets, provide a sense of legitimacy to the operation, or provide institutional backing to operation activities. +Create Organisations +Influence operations may establish organisations with legitimate or falsified hierarchies, staff, and content to structure operation assets, provide a sense of legitimacy to the operation, or provide institutional backing to operation activities. TA15 T0092.002 Use Follow Trains -A follow train is a group of people who follow each other on a social media platform, often as a way for an individual or campaign to grow its social media following. Follow trains may be a violation of platform Terms of Service. They are also known as follow-for-follow groups. +A follow train is a group of people who follow each other on a social media platform, often as a way for an individual or campaign to grow its social media following. Follow trains may be a violation of platform Terms of Service. They are also known as follow-for-follow groups. TA15 T0092.003 Create Community or Sub-Group -When there is not an existing community or sub-group that meets a campaign's goals, an influence operation may seek to create a community or sub-group. +When there is not an existing community or sub-group that meets a campaign's goals, an influence operation may seek to create a community or sub-group. TA15 T0093 Acquire/Recruit Network -Operators acquire an existing network by paying, recruiting, or exerting control over the leaders of the existing network.
+Operators acquire an existing network by paying, recruiting, or exerting control over the leaders of the existing network. TA15 T0093.001 Fund Proxies -An influence operation may fund proxies, or external entities that work for the operation. An operation may recruit/train users with existing sympathies towards the operation’s narratives and/or goals as proxies. Funding proxies serves various purposes including: -- Diversifying operation locations to complicate attribution -- Reducing the workload for direct operation assets +An influence operation may fund proxies, or external entities that work for the operation. An operation may recruit/train users with existing sympathies towards the operation’s narratives and/or goals as proxies. Funding proxies serves various purposes including: - Diversifying operation locations to complicate attribution - Reducing the workload for direct operation assets TA15 T0093.002 Acquire Botnets -A botnet is a group of bots that can function in coordination with each other. +A botnet is a group of bots that can function in coordination with each other. TA15 @@ -815,38 +807,38 @@ Classic trolls refer to regular people who troll for personal reasons, such as a T0094.002 -Utilize Butterfly Attacks -Butterfly attacks occur when operators pretend to be members of a certain social group, usually a group that struggles for representation. An influence operation may mimic a group to insert controversial statements into the discourse, encourage the spread of operation content, or promote harassment among group members. Unlike astroturfing, butterfly attacks aim to infiltrate and discredit existing grassroots movements, organizations, and media campaigns. +Utilise Butterfly Attacks +Butterfly attacks occur when operators pretend to be members of a certain social group, usually a group that struggles for representation. An influence operation may mimic a group to insert controversial statements into the discourse, encourage the spread of operation content, or promote harassment among group members. Unlike astroturfing, butterfly attacks aim to infiltrate and discredit existing grassroots movements, organisations, and media campaigns. TA15 T0095 Develop Owned Media Assets -An owned media asset refers to an agency or organization through which an influence operation may create, develop, and host content and narratives. Owned media assets include websites, blogs, social media pages, forums, and other platforms that facilitate the creation and organization of content. +An owned media asset refers to an agency or organisation through which an influence operation may create, develop, and host content and narratives. Owned media assets include websites, blogs, social media pages, forums, and other platforms that facilitate the creation and organisation of content. TA15 T0096 Leverage Content Farms -Using the services of large-scale content providers for creating and amplifying campaign artifacts at scale. +Using the services of large-scale content providers for creating and amplifying campaign artefacts at scale. TA15 T0096.001 Create Content Farms -An influence operation may create an organization for creating and amplifying campaign artifacts at scale. +An influence operation may create an organisation for creating and amplifying campaign artefacts at scale. 
TA15 T0096.002 -Outsource Content Creation to External Organizations -An influence operation may outsource content creation to external companies to avoid attribution, increase the rate of content creation, or improve content quality, i.e., by employing an organization that can create content in the target audience’s native language. Employed organizations may include marketing companies for tailored advertisements or external content farms for high volumes of targeted media. +Outsource Content Creation to External Organisations +An influence operation may outsource content creation to external companies to avoid attribution, increase the rate of content creation, or improve content quality, i.e., by employing an organisation that can create content in the target audience’s native language. Employed organisations may include marketing companies for tailored advertisements or external content farms for high volumes of targeted media. TA15 T0097 Create Personas -Creating fake people, often with accounts across multiple platforms. These personas can be as simple as a name, can contain slightly more background like location, profile pictures, backstory, or can be effectively backstopped with indicators like fake identity documents. +Creating fake people, often with accounts across multiple platforms. These personas can be as simple as a name, can contain slightly more background like location, profile pictures, backstory, or can be effectively backstopped with indicators like fake identity documents. TA16 @@ -876,29 +868,25 @@ Classic trolls refer to regular people who troll for personal reasons, such as a T0099 Prepare Assets Impersonating Legitimate Entities -An influence operation may prepare assets impersonating legitimate entities to further conceal its network identity and add a layer of legitimacy to its operation content. Users will more likely believe and less likely fact-check news from recognizable sources rather than unknown sites. Legitimate entities may include authentic news outlets, public figures, organizations, or state entities. -An influence operation may use a wide variety of cyber techniques to impersonate a legitimate entity’s website or social media account. Typosquatting87 is the international registration of a domain name with purposeful variations of the impersonated domain name through intentional typos, top-level domain (TLD) manipulation, or punycode. Typosquatting facilitates the creation of falsified websites by creating similar domain names in the URL box, leaving it to the user to confirm that the URL is correct. +An influence operation may prepare assets impersonating legitimate entities to further conceal its network identity and add a layer of legitimacy to its operation content. Users will more likely believe and less likely fact-check news from recognisable sources rather than unknown sites. Legitimate entities may include authentic news outlets, public figures, organisations, or state entities. An influence operation may use a wide variety of cyber techniques to impersonate a legitimate entity’s website or social media account. Typosquatting is the intentional registration of a domain name with purposeful variations of the impersonated domain name through deliberate typos, top-level domain (TLD) manipulation, or punycode. Typosquatting facilitates the creation of falsified websites by creating similar domain names in the URL box, leaving it to the user to confirm that the URL is correct.
TA16 T0099.001 Astroturfing -Astroturfing occurs when an influence operation disguises itself as grassroots movement or organization that supports operation narratives. Unlike butterfly attacks, astroturfing aims to increase the appearance of popular support for the operation cause and does not infiltrate existing groups to discredit their objectives. +Astroturfing occurs when an influence operation disguises itself as a grassroots movement or organisation that supports operation narratives. Unlike butterfly attacks, astroturfing aims to increase the appearance of popular support for the operation cause and does not infiltrate existing groups to discredit their objectives. TA16 T0099.002 Spoof/Parody Account/Site -An influence operation may prepare assets impersonating legitimate entities to further conceal its network identity and add a layer of legitimacy to its operation content. Users will more likely believe and less likely fact-check news from recognizable sources rather than unknown sites. Legitimate entities may include authentic news outlets, public figures, organizations, or state entities. +An influence operation may prepare assets impersonating legitimate entities to further conceal its network identity and add a layer of legitimacy to its operation content. Users will more likely believe and less likely fact-check news from recognisable sources rather than unknown sites. Legitimate entities may include authentic news outlets, public figures, organisations, or state entities. TA16 T0100 Co-Opt Trusted Sources -An influence operation may co-opt trusted sources by infiltrating or repurposing a source to reach a target audience through existing, previously reliable networks. Co-opted trusted sources may include: -- National or local new outlets -- Research or academic publications -- Online blogs or websites +An influence operation may co-opt trusted sources by infiltrating or repurposing a source to reach a target audience through existing, previously reliable networks. Co-opted trusted sources may include: - National or local news outlets - Research or academic publications - Online blogs or websites TA16 @@ -921,14 +909,14 @@ An influence operation may use a wide variety of cyber techniques to impersonate T0101 -Create Localized Content -Localized content refers to content that appeals to a specific community of individuals, often in defined geographic areas. An operation may create localized content using local language and dialects to resonate with its target audience and blend in with other local news and social media. Localized content may help an operation increase legitimacy, avoid detection, and complicate external attribution. +Create Localised Content +Localised content refers to content that appeals to a specific community of individuals, often in defined geographic areas. An operation may create localised content using local language and dialects to resonate with its target audience and blend in with other local news and social media. Localised content may help an operation increase legitimacy, avoid detection, and complicate external attribution. TA05 T0102 Leverage Echo Chambers/Filter Bubbles -An echo chamber refers to an internet subgroup, often along ideological lines, where individuals only engage with “others with which they are already in agreement.” A filter bubble refers to an algorithm's placement of an individual in content that they agree with or regularly engage with, possibly entrapping the user into a bubble of their own making.
An operation may create these isolated areas of the internet by match existing groups, or aggregating individuals into a single target audience based on shared interests, politics, values, demographics, and other characteristics. Echo chambers and filter bubbles help to reinforce similar biases and content to the same target audience members. +An echo chamber refers to an internet subgroup, often along ideological lines, where individuals only engage with “others with which they are already in agreement.” A filter bubble refers to an algorithm's placement of an individual in content that they agree with or regularly engage with, possibly entrapping the user into a bubble of their own making. An operation may create these isolated areas of the internet by matching existing groups, or aggregating individuals into a single target audience based on shared interests, politics, values, demographics, and other characteristics. Echo chambers and filter bubbles help to reinforce similar biases and content to the same target audience members. TA05 @@ -946,8 +934,7 @@ T0102.003 Exploit Data Voids -A data void refers to a word or phrase that results in little, manipulative, or low-quality search engine data. Data voids are hard to detect and relatively harmless until exploited by an entity aiming to quickly proliferate false or misleading information during a phenomenon that causes a high number of individuals to query the term or phrase. In the Plan phase, an influence operation may identify data voids for later exploitation in the operation. -A 2019 report by Michael Golebiewski identifies five types of data voids. (1) “Breaking news” data voids occur when a keyword gains popularity during a short period of time, allowing an influence operation to publish false content before legitimate news outlets have an opportunity to publish relevant information. (2) An influence operation may create a “strategic new terms” data void by creating their own terms and publishing information online before promoting their keyword to the target audience. (3) An influence operation may publish content on “outdated terms” that have decreased in popularity, capitalizing on most search engines’ preferences for recency. (4) “Fragmented concepts” data voids separate connections between similar ideas, isolating segment queries to distinct search engine results. (5) An influence operation may use “problematic queries” that previously resulted in disturbing or inappropriate content to promote messaging until mainstream media recontextualizes the term. +A data void refers to a word or phrase that results in little, manipulative, or low-quality search engine data. Data voids are hard to detect and relatively harmless until exploited by an entity aiming to quickly proliferate false or misleading information during a phenomenon that causes a high number of individuals to query the term or phrase. In the Plan phase, an influence operation may identify data voids for later exploitation in the operation. A 2019 report by Michael Golebiewski identifies five types of data voids. (1) “Breaking news” data voids occur when a keyword gains popularity during a short period of time, allowing an influence operation to publish false content before legitimate news outlets have an opportunity to publish relevant information.
(2) An influence operation may create a “strategic new terms” data void by creating their own terms and publishing information online before promoting their keyword to the target audience. (3) An influence operation may publish content on “outdated terms” that have decreased in popularity, capitalising on most search engines’ preferences for recency. (4) “Fragmented concepts” data voids separate connections between similar ideas, isolating segment queries to distinct search engine results. (5) An influence operation may use “problematic queries” that previously resulted in disturbing or inappropriate content to promote messaging until mainstream media recontextualizes the term. TA05 @@ -989,7 +976,7 @@ A 2019 report by Michael Golebiewski identifies five types of data voids. (1) T0104.003 Private/Closed Social Networks -An audio livestream refers to an online audio broadcast capability that allows for real-time communication to closed or open networks. Examples include Twitter Spaces, +An audio livestream refers to an online audio broadcast capability that allows for real-time communication to closed or open networks. Examples include Twitter Spaces, TA07 @@ -1055,19 +1042,19 @@ A 2019 report by Michael Golebiewski identifies five types of data voids. (1) T0108 Blogging and Publishing Networks -Examples include WordPress, Blogger, Weebly, Tumblr, Medium, etc. +Examples include WordPress, Blogger, Weebly, Tumblr, Medium, etc. TA07 T0109 Consumer Review Networks -Platforms for finding, reviewing, and sharing information about brands, products, services, restaurants, travel destinations, etc. Examples include Yelp, TripAdvisor, etc. +Platforms for finding, reviewing, and sharing information about brands, products, services, restaurants, travel destinations, etc. Examples include Yelp, TripAdvisor, etc. TA07 T0110 Formal Diplomatic Channels -Leveraging formal, traditional, diplomatic channels to communicate with foreign governments (written documents, meetings, summits, diplomatic visits, etc). This type of diplomacy is conducted by diplomats of one nation with diplomats and other officials of another nation or international organization. +Leveraging formal, traditional, diplomatic channels to communicate with foreign governments (written documents, meetings, summits, diplomatic visits, etc). This type of diplomacy is conducted by diplomats of one nation with diplomats and other officials of another nation or international organisation. TA07 @@ -1103,7 +1090,7 @@ A 2019 report by Michael Golebiewski identifies five types of data voids. (1) T0113 Employ Commercial Analytic Firms -Commercial analytic firms collect data on target audience activities and evaluate the data to detect trends, such as content receiving high click-rates. An influence operation may employ commercial analytic firms to facilitate external collection on its target audience, complicating attribution efforts and better tailoring the content to audience preferences. +Commercial analytic firms collect data on target audience activities and evaluate the data to detect trends, such as content receiving high click-rates. An influence operation may employ commercial analytic firms to facilitate external collection on its target audience, complicating attribution efforts and better tailoring the content to audience preferences. TA08 @@ -1127,7 +1114,7 @@ A 2019 report by Michael Golebiewski identifies five types of data voids. (1) T0115 Post Content -Delivering content by posting via owned media (assets that the operator controls). 
+Delivering content by posting via owned media (assets that the operator controls). TA09 @@ -1145,13 +1132,13 @@ A 2019 report by Michael Golebiewski identifies five types of data voids. (1) T0115.003 One-Way Direct Posting -Direct posting refers to a method of posting content via a one-way messaging service, where the recipient cannot directly respond to the poster’s messaging. An influence operation may post directly to promote operation narratives to the target audience without allowing opportunities for fact-checking or disagreement, creating a false sense of support for the narrative. +Direct posting refers to a method of posting content via a one-way messaging service, where the recipient cannot directly respond to the poster’s messaging. An influence operation may post directly to promote operation narratives to the target audience without allowing opportunities for fact-checking or disagreement, creating a false sense of support for the narrative. TA09 T0116 Comment or Reply on Content -Delivering content by replying or commenting via owned media (assets that the operator controls). +Delivering content by replying or commenting via owned media (assets that the operator controls). TA09 @@ -1169,25 +1156,25 @@ A 2019 report by Michael Golebiewski identifies five types of data voids. (1) T0118 Amplify Existing Narrative -An influence operation may amplify existing narratives that align with its narratives to support operation objectives. +An influence operation may amplify existing narratives that align with its narratives to support operation objectives. TA17 T0119 Cross-Posting -Cross-posting refers to posting the same message to multiple internet discussions, social media platforms or accounts, or news groups at one time. An influence operation may post content online in multiple communities and platforms to increase the chances of content exposure to the target audience. +Cross-posting refers to posting the same message to multiple internet discussions, social media platforms or accounts, or news groups at one time. An influence operation may post content online in multiple communities and platforms to increase the chances of content exposure to the target audience. TA17 T0119.001 Post across Groups -An influence operation may post content across groups to spread narratives and content to new communities within the target audiences or to new target audiences. +An influence operation may post content across groups to spread narratives and content to new communities within the target audiences or to new target audiences. TA17 T0119.002 Post across Platform -An influence operation may post content across platforms to spread narratives and content to new communities within the target audiences or to new target audiences. Posting across platforms can also remove opposition and context, helping the narrative spread with less opposition on the cross-posted platform. +An influence operation may post content across platforms to spread narratives and content to new communities within the target audiences or to new target audiences. Posting across platforms can also remove opposition and context, helping the narrative spread with less opposition on the cross-posted platform. TA17 @@ -1199,13 +1186,13 @@ A 2019 report by Michael Golebiewski identifies five types of data voids. (1) T0120 Incentivize Sharing -Incentivizing content sharing refers to actions that encourage users to share content themselves, reducing the need for the operation itself to post and promote its own content. 
+Incentivizing content sharing refers to actions that encourage users to share content themselves, reducing the need for the operation itself to post and promote its own content. TA17 T0120.001 -Use Affiliate Marketing Programs -Use Affiliate Marketing Programs +Use Affiliate Marketing Programmes +Use Affiliate Marketing Programmes TA17 @@ -1217,73 +1204,67 @@ A 2019 report by Michael Golebiewski identifies five types of data voids. (1) T0121 Manipulate Platform Algorithm -Manipulating a platform algorithm refers to conducting activity on a platform in a way that intentionally targets its underlying algorithm. After analyzing a platform’s algorithm (see: Select Platforms), an influence operation may use a platform in a way that increases its content exposure, avoids content removal, or otherwise benefits the operation’s strategy. For example, an influence operation may use bots to amplify its posts so that the platform’s algorithm recognizes engagement with operation content and further promotes the content on user timelines. +Manipulating a platform algorithm refers to conducting activity on a platform in a way that intentionally targets its underlying algorithm. After analysing a platform’s algorithm (see: Select Platforms), an influence operation may use a platform in a way that increases its content exposure, avoids content removal, or otherwise benefits the operation’s strategy. For example, an influence operation may use bots to amplify its posts so that the platform’s algorithm recognises engagement with operation content and further promotes the content on user timelines. TA17 T0121.001 Bypass Content Blocking -Bypassing content blocking refers to actions taken to circumvent network security measures that prevent users from accessing certain servers, resources, or other online spheres. An influence operation may bypass content blocking to proliferate its content on restricted areas of the internet. Common strategies for bypassing content blocking include: -- Altering IP addresses to avoid IP filtering -- Using a Virtual Private Network (VPN) to avoid IP filtering -- Using a Content Delivery Network (CDN) to avoid IP filtering -- Enabling encryption to bypass packet inspection blocking -- Manipulating text to avoid filtering by keywords -- Posting content on multiple platforms to avoid platform-specific removals - Using local facilities or modified DNS servers to avoid DNS filtering +Bypassing content blocking refers to actions taken to circumvent network security measures that prevent users from accessing certain servers, resources, or other online spheres. An influence operation may bypass content blocking to proliferate its content on restricted areas of the internet. Common strategies for bypassing content blocking include: - Altering IP addresses to avoid IP filtering - Using a Virtual Private Network (VPN) to avoid IP filtering - Using a Content Delivery Network (CDN) to avoid IP filtering - Enabling encryption to bypass packet inspection blocking - Manipulating text to avoid filtering by keywords - Posting content on multiple platforms to avoid platform-specific removals - Using local facilities or modified DNS servers to avoid DNS filtering TA17 T0122 Direct Users to Alternative Platforms -Direct users to alternative platforms refers to encouraging users to move from the platform on which they initially viewed operation content and engage with content on alternate information channels, including separate social media channels and inauthentic websites. 
An operation may drive users to alternative platforms to diversify its information channels and ensure the target audience knows where to access operation content if the initial platform suspends, flags, or otherwise removes original operation assets and content. +Direct users to alternative platforms refers to encouraging users to move from the platform on which they initially viewed operation content and engage with content on alternate information channels, including separate social media channels and inauthentic websites. An operation may drive users to alternative platforms to diversify its information channels and ensure the target audience knows where to access operation content if the initial platform suspends, flags, or otherwise removes original operation assets and content. TA17 T0123 Control Information Environment through Offensive Cyberspace Operations -Controlling the information environment through offensive cyberspace operations uses cyber tools and techniques to alter the trajectory of content in the information space to either prioritize operation messaging or block opposition messaging. +Controlling the information environment through offensive cyberspace operations uses cyber tools and techniques to alter the trajectory of content in the information space to either prioritise operation messaging or block opposition messaging. TA18 T0123.001 Delete Opposing Content -Deleting opposing content refers to the removal of content that conflicts with operational narratives from selected platforms. An influence operation may delete opposing content to censor contradictory information from the target audience, allowing operation narratives to take priority in the information space. +Deleting opposing content refers to the removal of content that conflicts with operational narratives from selected platforms. An influence operation may delete opposing content to censor contradictory information from the target audience, allowing operation narratives to take priority in the information space. TA18 T0123.002 Block Content -Content blocking refers to actions taken to restrict internet access or render certain areas of the internet inaccessible. An influence operation may restrict content based on both network and content attributes. +Content blocking refers to actions taken to restrict internet access or render certain areas of the internet inaccessible. An influence operation may restrict content based on both network and content attributes. TA18 T0123.003 Destroy Information Generation Capabilities -Destroying information generation capabilities refers to actions taken to limit, degrade, or otherwise incapacitate an actor’s ability to generate conflicting information. An influence operation may destroy an actor’s information generation capabilities by physically dismantling the information infrastructure, disconnecting resources needed for information generation, or redirecting information generation personnel. An operation may destroy an adversary’s information generation capabilities to limit conflicting content exposure to the target audience and crowd the information space with its own narratives. +Destroying information generation capabilities refers to actions taken to limit, degrade, or otherwise incapacitate an actor’s ability to generate conflicting information. 
An influence operation may destroy an actor’s information generation capabilities by physically dismantling the information infrastructure, disconnecting resources needed for information generation, or redirecting information generation personnel. An operation may destroy an adversary’s information generation capabilities to limit conflicting content exposure to the target audience and crowd the information space with its own narratives. TA18 T0123.004 Conduct Server Redirect -A server redirect, also known as a URL redirect, occurs when a server automatically forwards a user from one URL to another using server-side scripting languages. An influence operation may conduct a server redirect to divert target audience members from one website to another without their knowledge. The redirected website may pose as a legitimate source, host malware, or otherwise aid operation objectives. +A server redirect, also known as a URL redirect, occurs when a server automatically forwards a user from one URL to another using server-side scripting languages. An influence operation may conduct a server redirect to divert target audience members from one website to another without their knowledge. The redirected website may pose as a legitimate source, host malware, or otherwise aid operation objectives. TA18 T0124 Suppress Opposition -Operators can suppress the opposition by exploiting platform content moderation tools and processes like reporting non-violative content to platforms for takedown and goading opposition actors into taking actions that result in platform action or target audience disapproval. +Operators can suppress the opposition by exploiting platform content moderation tools and processes like reporting non-violative content to platforms for takedown and goading opposition actors into taking actions that result in platform action or target audience disapproval. TA18 T0124.001 Report Non-Violative Opposing Content -Reporting opposing content refers to notifying and providing an instance of a violation of a platform’s guidelines and policies for conduct on the platform. In addition to simply reporting the content, an operation may leverage copyright regulations to trick social media and web platforms into removing opposing content by manipulating the content to appear in violation of copyright laws. Reporting opposing content facilitates the suppression of contradictory information and allows operation narratives to take priority in the information space. +Reporting opposing content refers to notifying and providing an instance of a violation of a platform’s guidelines and policies for conduct on the platform. In addition to simply reporting the content, an operation may leverage copyright regulations to trick social media and web platforms into removing opposing content by manipulating the content to appear in violation of copyright laws. Reporting opposing content facilitates the suppression of contradictory information and allows operation narratives to take priority in the information space. TA18 T0124.002 Goad People into Harmful Action (Stop Hitting Yourself) -Goad people into actions that violate terms of service or will lead to having their content or accounts taken down. +Goad people into actions that violate terms of service or will lead to having their content or accounts taken down. TA18 @@ -1319,19 +1300,19 @@ A 2019 report by Michael Golebiewski identifies five types of data voids. 
(1) T0127 Physical Violence -Physical violence refers to the use of force to injure, abuse, damage, or destroy. An influence operation may conduct or encourage physical violence to discourage opponents from promoting conflicting content or draw attention to operation narratives using shock value. +Physical violence refers to the use of force to injure, abuse, damage, or destroy. An influence operation may conduct or encourage physical violence to discourage opponents from promoting conflicting content or draw attention to operation narratives using shock value. TA10 T0127.001 Conduct Physical Violence -An influence operation may directly Conduct Physical Violence to achieve campaign goals. +An influence operation may directly Conduct Physical Violence to achieve campaign goals. TA10 T0127.002 Encourage Physical Violence -An influence operation may Encourage others to engage in Physical Violence to achieve campaign goals. +An influence operation may Encourage others to engage in Physical Violence to achieve campaign goals. TA10 @@ -1343,31 +1324,31 @@ A 2019 report by Michael Golebiewski identifies five types of data voids. (1) T0128.001 Use Pseudonyms -An operation may use pseudonyms, or fake names, to mask the identity of operation accounts, publish anonymous content, or otherwise use falsified personas to conceal identity of the operation. An operation may coordinate pseudonyms across multiple platforms, for example, by writing an article under a pseudonym and then posting a link to the article on social media on an account with the same falsified name. +An operation may use pseudonyms, or fake names, to mask the identity of operation accounts, publish anonymous content, or otherwise use falsified personas to conceal identity of the operation. An operation may coordinate pseudonyms across multiple platforms, for example, by writing an article under a pseudonym and then posting a link to the article on social media on an account with the same falsified name. TA11 T0128.002 Conceal Network Identity -Concealing network identity aims to hide the existence an influence operation’s network completely. Unlike concealing sponsorship, concealing network identity denies the existence of any sort of organization. +Concealing network identity aims to hide the existence an influence operation’s network completely. Unlike concealing sponsorship, concealing network identity denies the existence of any sort of organisation. TA11 T0128.003 Distance Reputable Individuals from Operation -Distancing reputable individuals from the operation occurs when enlisted individuals, such as celebrities or subject matter experts, actively disengage themselves from operation activities and messaging. Individuals may distance themselves from the operation by deleting old posts or statements, unfollowing operation information assets, or otherwise detaching themselves from the operation’s timeline. An influence operation may want reputable individuals to distance themselves from the operation to reduce operation exposure, particularly if the operation aims to remove all evidence. +Distancing reputable individuals from the operation occurs when enlisted individuals, such as celebrities or subject matter experts, actively disengage themselves from operation activities and messaging. Individuals may distance themselves from the operation by deleting old posts or statements, unfollowing operation information assets, or otherwise detaching themselves from the operation’s timeline. 
An influence operation may want reputable individuals to distance themselves from the operation to reduce operation exposure, particularly if the operation aims to remove all evidence. TA11 T0128.004 Launder Accounts -Account laundering occurs when an influence operation acquires control of previously legitimate online accounts from third parties through sale or exchange and often in contravention of terms of use. Influence operations use laundered accounts to reach target audience members from an existing information channel and complicate attribution. +Account laundering occurs when an influence operation acquires control of previously legitimate online accounts from third parties through sale or exchange and often in contravention of terms of use. Influence operations use laundered accounts to reach target audience members from an existing information channel and complicate attribution. TA11 T0128.005 Change Names of Accounts -Changing names of accounts occurs when an operation changes the name of an existing social media account. An operation may change the names of its accounts throughout an operation to avoid detection or alter the names of newly acquired or repurposed accounts to fit operational narratives. +Changing names of accounts occurs when an operation changes the name of an existing social media account. An operation may change the names of its accounts throughout an operation to avoid detection or alter the names of newly acquired or repurposed accounts to fit operational narratives. TA11 @@ -1379,25 +1360,25 @@ A 2019 report by Michael Golebiewski identifies five types of data voids. (1) T0129.001 Conceal Network Identity -Concealing network identity aims to hide the existence an influence operation’s network completely. Unlike concealing sponsorship, concealing network identity denies the existence of any sort of organization. +Concealing network identity aims to hide the existence an influence operation’s network completely. Unlike concealing sponsorship, concealing network identity denies the existence of any sort of organisation. TA11 T0129.002 Generate Content Unrelated to Narrative -An influence operation may mix its own operation content with legitimate news or external unrelated content to disguise operational objectives, narratives, or existence. For example, an operation may generate "lifestyle" or "cuisine" content alongside regular operation content. +An influence operation may mix its own operation content with legitimate news or external unrelated content to disguise operational objectives, narratives, or existence. For example, an operation may generate "lifestyle" or "cuisine" content alongside regular operation content. TA11 T0129.003 Break Association with Content -Breaking association with content occurs when an influence operation actively separates itself from its own content. An influence operation may break association with content by unfollowing, unliking, or unsharing its content, removing attribution from its content, or otherwise taking actions that distance the operation from its messaging. An influence operation may break association with its content to complicate attribution or regain credibility for a new operation. +Breaking association with content occurs when an influence operation actively separates itself from its own content. 
An influence operation may break association with content by unfollowing, unliking, or unsharing its content, removing attribution from its content, or otherwise taking actions that distance the operation from its messaging. An influence operation may break association with its content to complicate attribution or regain credibility for a new operation. TA11 T0129.004 Delete URLs -URL deletion occurs when an influence operation completely removes its website registration, rendering the URL inaccessible. An influence operation may delete its URLs to complicate attribution or remove online documentation that the operation ever occurred. +URL deletion occurs when an influence operation completely removes its website registration, rendering the URL inaccessible. An influence operation may delete its URLs to complicate attribution or remove online documentation that the operation ever occurred. TA11 @@ -1415,25 +1396,25 @@ A 2019 report by Michael Golebiewski identifies five types of data voids. (1) T0129.007 Delete Accounts/Account Activity -Deleting accounts and account activity occurs when an influence operation removes its online social media assets, including social media accounts, posts, likes, comments, and other online artifacts. An influence operation may delete its accounts and account activity to complicate attribution or remove online documentation that the operation ever occurred. +Deleting accounts and account activity occurs when an influence operation removes its online social media assets, including social media accounts, posts, likes, comments, and other online artefacts. An influence operation may delete its accounts and account activity to complicate attribution or remove online documentation that the operation ever occurred. TA11 T0129.008 Redirect URLs -An influence operation may redirect its falsified or typosquatted URLs to legitimate websites to increase the operation's appearance of legitimacy, complicate attribution, and avoid detection. +An influence operation may redirect its falsified or typosquatted URLs to legitimate websites to increase the operation's appearance of legitimacy, complicate attribution, and avoid detection. TA11 T0129.009 Remove Post Origins -Removing post origins refers to the elimination of evidence that indicates the initial source of operation content, often to complicate attribution. An influence operation may remove post origins by deleting watermarks, renaming files, or removing embedded links in its content. +Removing post origins refers to the elimination of evidence that indicates the initial source of operation content, often to complicate attribution. An influence operation may remove post origins by deleting watermarks, renaming files, or removing embedded links in its content. TA11 T0129.010 Misattribute Activity -Misattributed activity refers to incorrectly attributed operation activity. For example, a state sponsored influence operation may conduct operation activity in a way that mimics another state so that external entities misattribute activity to the incorrect state. An operation may misattribute their activities to complicate attribution, avoid detection, or frame an adversary for negative behavior. +Misattributed activity refers to incorrectly attributed operation activity. For example, a state sponsored influence operation may conduct operation activity in a way that mimics another state so that external entities misattribute activity to the incorrect state. 
An operation may misattribute their activities to complicate attribution, avoid detection, or frame an adversary for negative behaviour. TA11 @@ -1445,26 +1426,25 @@ A 2019 report by Michael Golebiewski identifies five types of data voids. (1) T0130.001 Conceal Sponsorship -Concealing sponsorship aims to mislead or obscure the identity of the hidden sponsor behind an operation rather than entity publicly running the operation. Operations that conceal sponsorship may maintain visible falsified groups, news outlets, non-profits, or other organizations, but seek to mislead or obscure the identity sponsoring, funding, or otherwise supporting these entities. -Influence operations may use a variety of techniques to mask the location of their social media accounts to complicate attribution and conceal evidence of foreign interference. Operation accounts may set their location to a false place, often the location of the operation’s target audience, and post in the region’s language +Concealing sponsorship aims to mislead or obscure the identity of the hidden sponsor behind an operation rather than entity publicly running the operation. Operations that conceal sponsorship may maintain visible falsified groups, news outlets, non-profits, or other organisations, but seek to mislead or obscure the identity sponsoring, funding, or otherwise supporting these entities. Influence operations may use a variety of techniques to mask the location of their social media accounts to complicate attribution and conceal evidence of foreign interference. Operation accounts may set their location to a false place, often the location of the operation’s target audience, and post in the region’s language TA11 T0130.002 -Utilize Bulletproof Hosting -Hosting refers to services through which storage and computing resources are provided to an individual or organization for the accommodation and maintenance of one or more websites and related services. Services may include web hosting, file sharing, and email distribution. Bulletproof hosting refers to services provided by an entity, such as a domain hosting or web hosting firm, that allows its customer considerable leniency in use of the service. An influence operation may utilize bulletproof hosting to maintain continuity of service for suspicious, illegal, or disruptive operation activities that stricter hosting services would limit, report, or suspend. +Utilise Bulletproof Hosting +Hosting refers to services through which storage and computing resources are provided to an individual or organisation for the accommodation and maintenance of one or more websites and related services. Services may include web hosting, file sharing, and email distribution. Bulletproof hosting refers to services provided by an entity, such as a domain hosting or web hosting firm, that allows its customer considerable leniency in use of the service. An influence operation may utilise bulletproof hosting to maintain continuity of service for suspicious, illegal, or disruptive operation activities that stricter hosting services would limit, report, or suspend. TA11 T0130.003 -Use Shell Organizations -Use Shell Organizations to conceal sponsorship. +Use Shell Organisations +Use Shell Organisations to conceal sponsorship. TA11 T0130.004 Use Cryptocurrency -Use Cryptocurrency to conceal sponsorship. Examples include Bitcoin, Monero, and Etherium. +Use Cryptocurrency to conceal sponsorship. Examples include Bitcoin, Monero, and Etherium. 
TA11 @@ -1523,32 +1503,32 @@ Influence operations may use a variety of techniques to mask the location of the T0133.001 -Behavior Changes -Monitor and evaluate behaviour changes from misinformation incidents. +Behaviour Changes +Monitor and evaluate behaviour changes from misinformation incidents. TA12 T0133.002 Content -Measure current system state with respect to the effectiveness of campaign content. +Measure current system state with respect to the effectiveness of campaign content. TA12 T0133.003 Awareness -Measure current system state with respect to the effectiveness of influencing awareness. +Measure current system state with respect to the effectiveness of influencing awareness. TA12 T0133.004 Knowledge -Measure current system state with respect to the effectiveness of influencing knowledge. +Measure current system state with respect to the effectiveness of influencing knowledge. TA12 T0133.005 Action/Attitude -Measure current system state with respect to the effectiveness of influencing action/attitude. +Measure current system state with respect to the effectiveness of influencing action/attitude. TA12 @@ -1560,7 +1540,7 @@ Influence operations may use a variety of techniques to mask the location of the T0134.001 Message Reach -Monitor and evaluate message reach in misinformation incidents. +Monitor and evaluate message reach in misinformation incidents. TA12
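
For readers unfamiliar with the mechanism behind T0123.004 (Conduct Server Redirect) and T0129.008 (Redirect URLs) above, the short sketch below shows the ordinary server-side forwarding both entries rely on. It is a minimal illustration only, not part of the framework files being patched; the target hostname and port are hypothetical placeholders.

// Minimal sketch of a server-side (301) redirect, the mechanism named in
// T0123.004. Requires Node.js; "example.org" and port 8080 are placeholders.
import * as http from "http";

const target = "https://example.org";

const server = http.createServer((req, res) => {
  // A 301 response with a Location header makes the browser fetch the target
  // URL instead, so the visitor is forwarded without any action on their part.
  res.writeHead(301, { Location: target + (req.url ?? "/") });
  res.end();
});

server.listen(8080);

Pointed at a falsified or typosquatted domain, this same handful of lines is the behaviour T0129.008 describes: visitors are forwarded to a legitimate site without their knowledge.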
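
T0134.001 (Message Reach) asks for monitoring how far a message travelled. As a rough illustration, reach is commonly approximated as the number of unique accounts exposed to the message at least once; the sketch below computes that figure. The Exposure record and its field names are assumptions made for the example, not part of DISARM.

// Toy sketch of a "message reach" count: distinct (platform, account) pairs
// that were shown the message. Field names are illustrative assumptions.
interface Exposure {
  platform: string;   // network the impression was observed on
  accountId: string;  // account that was shown the message
}

function messageReach(exposures: Exposure[]): number {
  const seen = new Set<string>();
  for (const e of exposures) {
    seen.add(`${e.platform}:${e.accountId}`); // de-duplicate repeat views
  }
  return seen.size;
}

// Three impressions, one of them a repeat view, give a reach of 2.
console.log(messageReach([
  { platform: "twitter", accountId: "a1" },
  { platform: "twitter", accountId: "a1" },
  { platform: "facebook", accountId: "b7" },
]));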