Event JSON
{
  "id": "288b3be44f18ce0d3c73df3362ab824167e99edb973ba539b8e6b659a830535f",
  "pubkey": "cd1e48c145a46c437637a19149049827a77286aeeef568566ef0399870bde650",
  "created_at": 1703087927,
  "kind": 1,
  "tags": [
    [
      "p",
      "1a1add9955a05ca54d7963a0babcc1d11923d64ecaffb57a5bee1035288b1368",
      "wss://relay.mostr.pub"
    ],
    [
      "p",
      "b410b83e37b375bc8f3a6aca3dfd309d25da45909c2832cfd0a902540ac42864",
      "wss://relay.mostr.pub"
    ],
    [
      "proxy",
      "https://cybervillains.com/users/alex/statuses/111613570398628299",
      "activitypub"
    ]
  ],
  "content": "How does Stable Diffusion 1.5 know how to create CSAM? It turns out it was trained on thousands of illegal images contained in the extremely popular LAION-5B image set.\nI’m so incredibly proud of my friend and colleague nostr:npub1rgddmx245pw22ntevwst40xp6yvj84jwetlm27jmacgr22ytzd5qzxfyzy \n\nStory: \nhttps://www.404media.co/laion-datasets-removed-stanford-csam-child-abuse/ \n\nPaper: \nhttps://stacks.stanford.edu/file/druid:kh752sm9123/ml_training_data_csam_report-2023-12-20.pdf",
  "sig": "025de83d088c9c55610099397d41deafcf7665336ca2e1e597798011d3503e9126882ecd474cdd0cc5ec61c599f63c85b2faabc3cf2383e22efa4ed0cceddabb"
}