A place to cache linked articles (think of it as a custom, personal Wayback Machine). A minimal sketch of the idea follows below.
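For illustration only, here is a minimal sketch of what "caching a linked article" could look like; this is not the actual tooling behind this repository, just a hypothetical example assuming Node.js 18+ (built-in fetch). The cacheArticle function, the cache/ folder and the slug are made up for the sketch.

// Hypothetical sketch: fetch a linked article and keep a local HTML snapshot.
// Assumes Node.js 18+ for the global fetch; names below are illustrative only.
import { mkdir, writeFile } from 'node:fs/promises'

async function cacheArticle(url, slug) {
  const response = await fetch(url)                 // download the original page
  const html = await response.text()
  const dir = `cache/${slug}`
  await mkdir(dir, { recursive: true })             // one folder per cached article
  await writeFile(`${dir}/index.html`, html)        // stored as index.html, like the file below
  return `${dir}/index.html`
}

cacheArticle(
  'https://www.wired.com/story/free-speech-is-not-the-same-as-free-reach/',
  'free-speech-is-not-the-same-as-free-reach'
).then((path) => console.log(`cached to ${path}`))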

index.html 19KB

<!doctype html><!-- This is a valid HTML5 document. -->
<!-- Screen readers, SEO, extensions and so on. -->
<html lang="fr">
<!-- Has to be within the first 1024 bytes, hence before the <title>
See: https://www.w3.org/TR/2012/CR-html5-20121217/document-metadata.html#charset -->
<meta charset="utf-8">
<!-- Why no `X-UA-Compatible` meta: https://stackoverflow.com/a/6771584 -->
<!-- The viewport meta is quite crowded and we are responsible for that.
See: https://codepen.io/tigt/post/meta-viewport-for-2015 -->
<meta name="viewport" content="width=device-width,initial-scale=1">
<!-- Required to make a valid HTML5 document. -->
<title>Free Speech Is Not the Same As Free Reach (archive) — David Larlet</title>
<meta name="description" content="Publication mise en cache pour en conserver une trace.">
<!-- That good ol' feed, subscribe :). -->
<link rel="alternate" type="application/atom+xml" title="Feed" href="/david/log/">
<!-- Generated from https://realfavicongenerator.net/ such a mess. -->
<link rel="apple-touch-icon" sizes="180x180" href="/static/david/icons2/apple-touch-icon.png">
<link rel="icon" type="image/png" sizes="32x32" href="/static/david/icons2/favicon-32x32.png">
<link rel="icon" type="image/png" sizes="16x16" href="/static/david/icons2/favicon-16x16.png">
<link rel="manifest" href="/static/david/icons2/site.webmanifest">
<link rel="mask-icon" href="/static/david/icons2/safari-pinned-tab.svg" color="#07486c">
<link rel="shortcut icon" href="/static/david/icons2/favicon.ico">
<meta name="msapplication-TileColor" content="#f0f0ea">
<meta name="msapplication-config" content="/static/david/icons2/browserconfig.xml">
<meta name="theme-color" content="#f0f0ea">
<!-- Documented, feel free to shoot an email. -->
<link rel="stylesheet" href="/static/david/css/style_2020-06-19.css">
<!-- See https://www.zachleat.com/web/comprehensive-webfonts/ for the trade-off. -->
<link rel="preload" href="/static/david/css/fonts/triplicate_t4_poly_regular.woff2" as="font" type="font/woff2" media="(prefers-color-scheme: light), (prefers-color-scheme: no-preference)" crossorigin>
<link rel="preload" href="/static/david/css/fonts/triplicate_t4_poly_bold.woff2" as="font" type="font/woff2" media="(prefers-color-scheme: light), (prefers-color-scheme: no-preference)" crossorigin>
<link rel="preload" href="/static/david/css/fonts/triplicate_t4_poly_italic.woff2" as="font" type="font/woff2" media="(prefers-color-scheme: light), (prefers-color-scheme: no-preference)" crossorigin>
<link rel="preload" href="/static/david/css/fonts/triplicate_t3_regular.woff2" as="font" type="font/woff2" media="(prefers-color-scheme: dark)" crossorigin>
<link rel="preload" href="/static/david/css/fonts/triplicate_t3_bold.woff2" as="font" type="font/woff2" media="(prefers-color-scheme: dark)" crossorigin>
<link rel="preload" href="/static/david/css/fonts/triplicate_t3_italic.woff2" as="font" type="font/woff2" media="(prefers-color-scheme: dark)" crossorigin>
<script type="text/javascript">
function toggleTheme(themeName) {
  document.documentElement.classList.toggle(
    'forced-dark',
    themeName === 'dark'
  )
  document.documentElement.classList.toggle(
    'forced-light',
    themeName === 'light'
  )
}
const selectedTheme = localStorage.getItem('theme')
if (selectedTheme && selectedTheme !== 'undefined') {
  toggleTheme(selectedTheme)
}
</script>
<meta name="robots" content="noindex, nofollow">
<meta content="origin-when-cross-origin" name="referrer">
<!-- Canonical URL for SEO purposes -->
<link rel="canonical" href="https://www.wired.com/story/free-speech-is-not-the-same-as-free-reach/">
<body class="remarkdown h1-underline h2-underline h3-underline hr-center ul-star pre-tick">
<article>
<header>
<h1>Free Speech Is Not the Same As Free Reach</h1>
</header>
<nav>
<p class="center">
<a href="/david/" title="Aller à l’accueil">🏠</a> •
<a href="https://www.wired.com/story/free-speech-is-not-the-same-as-free-reach/" title="Lien vers le contenu original">Source originale</a>
</p>
</nav>
<hr>
<main>
<p><span class="lead-in-text-callout">The algorithms that</span> govern how we find information online are once again in the news—but you have to squint to find them. </p>
<p>“Trump Accuses Google of Burying Conservative News in Search Results,” <a class="external-link" data-event-click="{&quot;element&quot;:&quot;ExternalLink&quot;,&quot;outgoingURL&quot;:&quot;https://www.nytimes.com/2018/08/28/business/media/google-trump-news-results.html?action=click&amp;module=Top%20Stories&amp;pgtype=Homepage&quot;}" href="https://www.nytimes.com/2018/08/28/business/media/google-trump-news-results.html?action=click&amp;module=Top%20Stories&amp;pgtype=Homepage" rel="nofollow noopener" target="_blank">reads</a> an August 28 <em>New York Times</em> headline. The piece features a bombastic president, a <a class="external-link" data-event-click="{&quot;element&quot;:&quot;ExternalLink&quot;,&quot;outgoingURL&quot;:&quot;https://twitter.com/realDonaldTrump/status/1034456273306243076&quot;}" href="https://twitter.com/realDonaldTrump/status/1034456273306243076" rel="nofollow noopener" target="_blank">string</a> of bitter tweets, and accusations of censorship. “Algorithms” are mentioned, but not until the twelfth paragraph.</p>
<p>Trump—like so many other politicians and pundits—has found search and social media companies to be convenient targets in the debate over free speech and censorship online. “They have it RIGGED, for me &amp; others, so that almost all stories &amp; news is BAD,” the president <a class="external-link" data-event-click="{&quot;element&quot;:&quot;ExternalLink&quot;,&quot;outgoingURL&quot;:&quot;https://twitter.com/realDonaldTrump/status/1034456273306243076&quot;}" href="https://twitter.com/realDonaldTrump/status/1034456273306243076" rel="nofollow noopener" target="_blank">recently tweeted</a>. He <a class="external-link" data-event-click="{&quot;element&quot;:&quot;ExternalLink&quot;,&quot;outgoingURL&quot;:&quot;https://twitter.com/realDonaldTrump/status/1034456281120206848&quot;}" href="https://twitter.com/realDonaldTrump/status/1034456281120206848" rel="nofollow noopener" target="_blank">added</a>: “They are controlling what we can &amp; cannot see. This is a very serious situation---will be addressed!”</p>
<p>Trump is partly right: They are controlling what we can and cannot see. But “they” aren’t the executives leading Google, Facebook, and other technology companies. “They” are the opaque, influential algorithms that determine what content billions of internet users read, watch, and share next.</p>
<p>These algorithms are invisible, but they have an outsized impact on shaping individuals’ experience online and society at large. Indeed, YouTube’s video-recommendation algorithm inspires <a class="external-link" data-event-click="{&quot;element&quot;:&quot;ExternalLink&quot;,&quot;outgoingURL&quot;:&quot;https://www.youtube.com/yt/about/press/&quot;}" href="https://www.youtube.com/yt/about/press/" rel="nofollow noopener" target="_blank">700,000,000 hours of watch time per day</a>—and can spread misinformation, disrupt elections, and incite violence. Algorithms like this need fixing.</p>
<p data-attr-viewport-monitor="inline-recirc" class="inline-recirc-wrapper inline-recirc-observer-target-1 viewport-monitor-anchor"/>
<p>But in this moment, the conversation we should be having—how can we fix the algorithms?—is instead being co-opted and twisted by politicians and pundits howling about censorship and miscasting content moderation as the demise of free speech online. It would be good to remind them that free <em>speech</em> does not mean free <em>reach</em>. There is no right to algorithmic amplification. In fact, that’s the very problem that needs fixing.</p>
<p><span class="lead-in-text-callout">To see how</span> this algorithmic amplification works, simply look to RT, or <em>Russia Today</em>, a Russian state-owned propaganda outlet that’s also among the most popular YouTube presences. RT has amassed more than 6 billion views across 22 channels, more than MSNBC and Fox News combined. According to YouTube chief product officer Neal Mohan, <a class="external-link" data-event-click="{&quot;element&quot;:&quot;ExternalLink&quot;,&quot;outgoingURL&quot;:&quot;https://qz.com/1178125/youtubes-recommendations-drive-70-of-what-we-watch/&quot;}" href="https://qz.com/1178125/youtubes-recommendations-drive-70-of-what-we-watch/" rel="nofollow noopener" target="_blank">70 percent of views on YouTube are from recommendations</a>—so the site’s algorithms are largely responsible for amplifying RT’s propaganda hundreds of millions of times.</p>
<p>How? Most RT viewers don’t set out in search of Russian propaganda. The videos that rack up the views are <a class="external-link" data-event-click="{&quot;element&quot;:&quot;ExternalLink&quot;,&quot;outgoingURL&quot;:&quot;https://www.google.com/url?q=https://www.washingtonpost.com/news/monkey-cage/wp/2015/03/23/how-russia-today-is-using-youtube/?noredirect=on&amp;utm_term=.aec69b556318&amp;sa=D&amp;ust=1535660886144000&amp;usg=AFQjCNGUmnP8CZVChUCHXhHgbN0YKQsL6A&quot;}" href="https://www.google.com/url?q=https://www.washingtonpost.com/news/monkey-cage/wp/2015/03/23/how-russia-today-is-using-youtube/?noredirect=on&amp;utm_term=.aec69b556318&amp;sa=D&amp;ust=1535660886144000&amp;usg=AFQjCNGUmnP8CZVChUCHXhHgbN0YKQsL6A" rel="nofollow noopener" target="_blank">RT’s clickbait-y, gateway content</a>: videos of towering tsunamis, meteors striking buildings, shark attacks, amusement park accidents, some that are years old but have comments from within an hour ago. This disaster porn is highly engaging; the videos have been viewed tens of millions of times and are likely watched until the end. As a result, YouTube’s algorithm likely believes other RT content is worth suggesting to the viewers of that content—and so, quickly, an American YouTube user looking for news finds themselves watching Russia’s take on Hillary Clinton, immigration, and current events. These videos are served up in autoplay playlists alongside content from legitimate news organizations, giving RT itself increased legitimacy by association.</p>
<p>The social internet is mediated by algorithms: recommendation engines, search, trending, autocomplete, and other mechanisms that predict what we want to see next. The algorithms don’t understand what is propaganda and what isn’t, or what is “fake news” and what is fact-checked. Their job is to surface relevant content (relevant to the user, of course), and they do it exceedingly well. So well, in fact, that the engineers who built these algorithms are sometimes baffled: “Even the creators don’t always understand why it recommends one video instead of another,” says Guillaume Chaslot, an ex-YouTube engineer who worked on the site’s algorithm.</p>
<p>These opaque algorithms with their singular purpose—“keep watching”—coupled with billions of users is a dangerous recipe. In recent years, we’ve seen how dire the consequences can be. Propaganda like RT content is circulated far and wide to disinform and worsen polarization, especially during democratic elections. YouTube’s algorithms can also radicalize by suggesting “white supremacist rants, Holocaust denials, and other disturbing content,” Zeynep Tufekci <a class="external-link" data-event-click="{&quot;element&quot;:&quot;ExternalLink&quot;,&quot;outgoingURL&quot;:&quot;https://www.nytimes.com/2018/03/10/opinion/sunday/youtube-politics-radical.html&quot;}" href="https://www.nytimes.com/2018/03/10/opinion/sunday/youtube-politics-radical.html" rel="nofollow noopener" target="_blank">recently wrote</a> in the <em>Times.</em> “YouTube may be one of the most powerful radicalizing instruments of the 21st century.”</p>
<p>The problem extends beyond YouTube, though. On Google search, dangerous anti-vaccine misinformation can <a href="https://www.wired.com/story/the-complexity-of-simply-searching-for-medical-advice/">commandeer</a> the top results. And on Facebook, hate speech can thrive and <a class="external-link" data-event-click="{&quot;element&quot;:&quot;ExternalLink&quot;,&quot;outgoingURL&quot;:&quot;https://www.theverge.com/2018/8/28/17789202/facebook-myanmar-ban-genocide-military-leadership&quot;}" href="https://www.theverge.com/2018/8/28/17789202/facebook-myanmar-ban-genocide-military-leadership" rel="nofollow noopener" target="_blank">fuel genocide</a>. A United Nations report about the genocide in Myanmar <a class="external-link" data-event-click="{&quot;element&quot;:&quot;ExternalLink&quot;,&quot;outgoingURL&quot;:&quot;https://www.ohchr.org/EN/HRBodies/HRC/MyanmarFFM/Pages/ReportoftheMyanmarFFM.aspx&quot;}" href="https://www.ohchr.org/EN/HRBodies/HRC/MyanmarFFM/Pages/ReportoftheMyanmarFFM.aspx" rel="nofollow noopener" target="_blank">reads</a>: “The role of social media is significant. Facebook has been a useful instrument for those seeking to spread hate, in a context where for most users Facebook is the Internet … The extent to which Facebook posts and messages have led to real-world discrimination and violence must be independently and thoroughly examined.”</p>
<p>So what can we do about it? The solution isn’t to outlaw algorithmic ranking or make noise about legislating what results Google can return. Algorithms are an invaluable tool for making sense of the immense universe of information online. There’s an overwhelming amount of content available to fill any given person’s feed or search query; sorting and ranking is a necessity, and there has never been evidence indicating that the results display systemic partisan bias. That said, unconscious bias is a concern in any algorithm; this is why tech companies have investigated conservative claims of bias since the <a class="external-link" data-event-click="{&quot;element&quot;:&quot;ExternalLink&quot;,&quot;outgoingURL&quot;:&quot;https://www.theguardian.com/technology/2016/may/24/facebook-changes-trending-topics-anti-conservative-bias&quot;}" href="https://www.theguardian.com/technology/2016/may/24/facebook-changes-trending-topics-anti-conservative-bias" rel="nofollow noopener" target="_blank">Facebook Trending News debacle</a> of 2016. There hasn’t been any credible evidence. But there is a trust problem, and a lack of understanding of how rankings and feeds work, and that allows bad-faith politicking to gain traction. The best solution to that is to increase transparency and internet literacy, enabling users to have a better understanding of why they see what they see—and to build these powerful curatorial systems <a href="https://www.wired.com/story/creating-ethical-recommendation-engines/">with a sense of responsibility</a> for what they return.</p>
<p>There have been positive steps in this direction. The examples of harms mentioned above have sparked congressional investigations aimed at understanding how tech platforms shape our conversations and our media consumption. In an upcoming Senate hearing next week, the Senate Intelligence Committee will ask Jack Dorsey of Twitter and Sheryl Sandberg of Facebook to provide an accounting of how, specifically, they are taking steps to address computational propaganda.</p>
<p>It’s imperative that we focus on solutions, not politics. We need to build on those initial investigations. We need more nuanced conversations and education about algorithmic curation, its strange incentives, and its occasionally unfortunate outcomes. We need to hold tech companies accountable—for irresponsible tech, not evidence-free allegations of censorship—and demand transparency into how their algorithms and moderation policies work. By focusing on the real problem here, we can begin addressing the real issues that are disrupting the internet—and democracy.</p>
</main>
</article>
<hr>
<footer>
<p>
<a href="/david/" title="Aller à l’accueil">🏠</a> •
<a href="/david/log/" title="Accès au flux RSS">🤖</a> •
<a href="http://larlet.com" title="Go to my English profile" data-instant>🇨🇦</a> •
<a href="mailto:david%40larlet.fr" title="Envoyer un courriel">📮</a> •
<abbr title="Hébergeur : Alwaysdata, 62 rue Tiquetonne 75002 Paris, +33184162340">🧚</abbr>
</p>
<template id="theme-selector">
<form>
<fieldset>
<legend>Thème</legend>
<label>
<input type="radio" value="auto" name="chosen-color-scheme" checked> Auto
</label>
<label>
<input type="radio" value="dark" name="chosen-color-scheme"> Foncé
</label>
<label>
<input type="radio" value="light" name="chosen-color-scheme"> Clair
</label>
</fieldset>
</form>
</template>
</footer>
<script type="text/javascript">
function loadThemeForm(templateName) {
  const themeSelectorTemplate = document.querySelector(templateName)
  const form = themeSelectorTemplate.content.firstElementChild
  themeSelectorTemplate.replaceWith(form)
  form.addEventListener('change', (e) => {
    const chosenColorScheme = e.target.value
    localStorage.setItem('theme', chosenColorScheme)
    toggleTheme(chosenColorScheme)
  })
  const selectedTheme = localStorage.getItem('theme')
  if (selectedTheme && selectedTheme !== 'undefined') {
    form.querySelector(`[value="${selectedTheme}"]`).checked = true
  }
}
const prefersColorSchemeDark = '(prefers-color-scheme: dark)'
window.addEventListener('load', () => {
  let hasDarkRules = false
  for (const styleSheet of Array.from(document.styleSheets)) {
    let mediaRules = []
    for (const cssRule of styleSheet.cssRules) {
      if (cssRule.type !== CSSRule.MEDIA_RULE) {
        continue
      }
      // WARNING: Safari does not support `conditionText`.
      if (cssRule.conditionText) {
        if (cssRule.conditionText !== prefersColorSchemeDark) {
          continue
        }
      } else {
        if (cssRule.cssText.startsWith(prefersColorSchemeDark)) {
          continue
        }
      }
      mediaRules = mediaRules.concat(Array.from(cssRule.cssRules))
    }
    // WARNING: do not try to insert a Rule into a styleSheet you are
    // currently iterating on, otherwise the browser will be stuck
    // in an infinite loop…
    for (const mediaRule of mediaRules) {
      styleSheet.insertRule(mediaRule.cssText)
      hasDarkRules = true
    }
  }
  if (hasDarkRules) {
    loadThemeForm('#theme-selector')
  }
})
</script>
</body>
</html>