{
"id": "0b4b0ac4751eca3015ed35240b90921018ecfa5aabe5f37d88751d9a60a4d857",
"pubkey": "00a3e3e90093e46e7a730659a3a92b13ab498d1e47b0eefbb284b92ba6194b43",
"created_at": 1735914210,
"kind": 1,
"tags": [
[
"p",
"f113ec1846156ce4aaa2da7ec2000b193f093b74017aff579446474f0f6213fa",
"wss://relay.mostr.pub"
],
[
"p",
"3ba412ac4b14c4b37cd6ed16b9d262ad4ffefb05c5b6c6b3e15e381471b1221a",
"wss://relay.mostr.pub"
],
[
"e",
"24721044396e3d3d380ab5be8b6998a91c40c3711795601426c127b230fa0024",
"wss://relay.mostr.pub",
"reply"
],
[
"proxy",
"https://infosec.exchange/users/AAKL/statuses/113764873714602553",
"activitypub"
]
],
"content": "nostr:nprofile1qy2hwumn8ghj7un9d3shjtnddaehgu3wwp6kyqpq7yf7cxzxz4kwf24zmflvyqqtrylsjwm5q9a074u5ger57rmzz0aq0ufle4 That occurred to me when I posted the story yesterday. Even if it was actually \"accidental,\" it would still be Apple's liability. Siri is turned on by default across all of the iPhone apps to learn from users - something I'm sure a lot of people aren't aware of. So the idea of secretly recording private chats seems entirely plausible as a result, especially for companies desperate for conversational fodder for their AI models.",
"sig": "445162db0a0df2eb17a4652fc4dd67dc052913ca173685b963aa273209873a55ed98c7707d415f962be3055e84b85b8a136b8524fc1de49ba5d47c10b7aa4ce3"
}