Event JSON
{
  "id": "dfed2ebf5033f497a8397cc101fe0e554ef438106c41f160b14ee1ba9dd12ac0",
  "pubkey": "e7e59d140cd89529cbd6f8941bfa28fbd9147d84e0e575755c209784781464cc",
  "created_at": 1724020446,
  "kind": 1,
  "tags": [
    ["proxy", "https://arvr.social/@mpesce/112985403996789105", "web"],
    ["proxy", "https://arvr.social/users/mpesce/statuses/112985403996789105", "activitypub"],
    ["L", "pink.momostr"],
    ["l", "pink.momostr.activitypub:https://arvr.social/users/mpesce/statuses/112985403996789105", "pink.momostr"],
    ["-"]
  ],
  "content": "Former Microsoft security architect Michael Bargury showcased multiple exploits that bad actors can leverage to breach Copilot's security guardrails and misuse its capabilities to cause harm. \n\nhttp://windowscopilot.news/2024/08/19/a-former-security-architect-demonstrates-15-different-ways-to-break-copilot-microsoft-is-trying-but-if-we-are-honest-here-we-dont-know-how-to-build-secure-ai-applications/",
  "sig": "408df029a7ac581cc0147555bb9f371f983899c8757d74b558f6ef3014cd15a065a6423760e864334ade364eb629440020283d1c49b35047a2e738277c3e1147"
}
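
As a quick integrity check on the structure above, the "id" field can be recomputed from the other fields: per NIP-01 it is the SHA-256 hash of the compact JSON serialization [0, pubkey, created_at, kind, tags, content]. The sketch below is a minimal illustration of that computation (the file path and the helper name event_id are illustrative, not part of the event above); verifying "sig" would additionally require a BIP-340 Schnorr library, which is omitted here.

```python
import hashlib
import json

def event_id(ev: dict) -> str:
    """Recompute a Nostr event id per NIP-01: SHA-256 over the compact
    JSON serialization [0, pubkey, created_at, kind, tags, content]."""
    payload = [0, ev["pubkey"], ev["created_at"], ev["kind"], ev["tags"], ev["content"]]
    # Compact separators and ensure_ascii=False match the NIP-01 rule of
    # no extra whitespace and raw UTF-8 encoding.
    serialized = json.dumps(payload, separators=(",", ":"), ensure_ascii=False)
    return hashlib.sha256(serialized.encode("utf-8")).hexdigest()

# Load the event above (e.g. saved to a local file; the path is illustrative)
# and check that the recomputed id matches the "id" field it carries.
with open("event.json", "r", encoding="utf-8") as f:
    event = json.load(f)

assert event_id(event) == event["id"]
```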