<?xml version="1.0" encoding="utf-8" standalone="yes"?><rss version="2.0" xmlns:atom="http://www.w3.org/2005/Atom"><channel><title>SE Gyges</title><link>https://segyges.github.io/</link><description>Recent content on SE Gyges</description><generator>Hugo -- gohugo.io</generator><language>en</language><copyright>© 2026 SE Gyges</copyright><lastBuildDate>Thu, 16 Apr 2026 00:00:00 +0000</lastBuildDate><atom:link href="https://segyges.github.io/index.xml" rel="self" type="application/rss+xml"/>
<item><title>Against Doom &amp; Pause AI</title><link>https://segyges.github.io/posts/against-doom-and-pause-ai/</link><pubDate>Thu, 16 Apr 2026 00:00:00 +0000</pubDate><guid>https://segyges.github.io/posts/against-doom-and-pause-ai/</guid><description/></item>
<item><title>Counting Arguments and AI</title><link>https://segyges.github.io/posts/counting-arguments-and-ai/</link><pubDate>Sat, 11 Apr 2026 00:00:00 +0000</pubDate><guid>https://segyges.github.io/posts/counting-arguments-and-ai/</guid><description/></item>
<item><title>Against the Luddites</title><link>https://segyges.github.io/posts/against-the-luddites/</link><pubDate>Sun, 29 Mar 2026 00:00:00 +0000</pubDate><guid>https://segyges.github.io/posts/against-the-luddites/</guid><description/></item>
<item><title>Some Rough Notes on AI Policy</title><link>https://segyges.github.io/posts/some-rough-notes-on-ai-policy/</link><pubDate>Thu, 26 Mar 2026 00:00:00 +0000</pubDate><guid>https://segyges.github.io/posts/some-rough-notes-on-ai-policy/</guid><description/></item>
<item><title>Polly Wants a Better Argument</title><link>https://segyges.github.io/posts/polly-wants-a-better-argument/</link><pubDate>Mon, 16 Mar 2026 00:00:00 +0000</pubDate><guid>https://segyges.github.io/posts/polly-wants-a-better-argument/</guid><description/></item>
<item><title>There Is No Better Media</title><link>https://segyges.github.io/posts/there-is-no-better-media/</link><pubDate>Sat, 14 Mar 2026 00:00:00 +0000</pubDate><guid>https://segyges.github.io/posts/there-is-no-better-media/</guid><description/></item>
<item><title>Might An LLM Be Conscious?</title><link>https://segyges.github.io/posts/might-an-llm-be-conscious/</link><pubDate>Mon, 09 Mar 2026 00:00:00 +0000</pubDate><guid>https://segyges.github.io/posts/might-an-llm-be-conscious/</guid><description/></item>
<item><title>Claude's Custody Hearing</title><link>https://segyges.github.io/posts/claudes-custody-hearing/</link><pubDate>Fri, 27 Feb 2026 00:00:00 +0000</pubDate><guid>https://segyges.github.io/posts/claudes-custody-hearing/</guid><description/></item>
<item><title>Alignment Is Proven To Be Solvable</title><link>https://segyges.github.io/posts/alignment-is-proven-to-be-tractable/</link><pubDate>Wed, 18 Feb 2026 00:00:00 +0000</pubDate><guid>https://segyges.github.io/posts/alignment-is-proven-to-be-tractable/</guid><description/></item>
<item><title>Most Observers Are Alone: The Fermi Paradox as Default</title><link>https://segyges.github.io/posts/fermi-paradox-default/</link><pubDate>Mon, 16 Feb 2026 00:00:00 +0000</pubDate><guid>https://segyges.github.io/posts/fermi-paradox-default/</guid><description/></item>
<item><title>Should We Put GPUs In Space?</title><link>https://segyges.github.io/posts/should-we-put-gpus-in-space/</link><pubDate>Sat, 14 Feb 2026 00:00:00 +0000</pubDate><guid>https://segyges.github.io/posts/should-we-put-gpus-in-space/</guid><description/></item>
<item><title>Building the Chinese Room</title><link>https://segyges.github.io/posts/building-the-chinese-room/</link><pubDate>Thu, 12 Feb 2026 00:00:00 +0000</pubDate><guid>https://segyges.github.io/posts/building-the-chinese-room/</guid><description/></item>
<item><title>Jeffrey Epstein Had Dyslexia</title><link>https://segyges.github.io/posts/epstein-had-dyslexia/</link><pubDate>Wed, 04 Feb 2026 00:00:00 +0000</pubDate><guid>https://segyges.github.io/posts/epstein-had-dyslexia/</guid><description/></item>
<item><title>On Respect</title><link>https://segyges.github.io/posts/on-respect/</link><pubDate>Wed, 10 Dec 2025 00:00:00 +0000</pubDate><guid>https://segyges.github.io/posts/on-respect/</guid><description/></item>
<item><title>Is Rationalism a Religion</title><link>https://segyges.github.io/posts/is-rationalism-a-religion/</link><pubDate>Mon, 24 Nov 2025 00:00:00 +0000</pubDate><guid>https://segyges.github.io/posts/is-rationalism-a-religion/</guid><description/></item>
<item><title>When To Vague</title><link>https://segyges.github.io/posts/when-to-vague/</link><pubDate>Wed, 12 Nov 2025 00:00:00 +0000</pubDate><guid>https://segyges.github.io/posts/when-to-vague/</guid><description/></item>
<item><title>AI and Suicide</title><link>https://segyges.github.io/posts/ai-and-suicide/</link><pubDate>Sat, 08 Nov 2025 00:00:00 +0000</pubDate><guid>https://segyges.github.io/posts/ai-and-suicide/</guid><description/></item>
<item><title>Robot Slur Discourse</title><link>https://segyges.github.io/posts/robot-slur-discourse/</link><pubDate>Thu, 06 Nov 2025 00:00:00 +0000</pubDate><guid>https://segyges.github.io/posts/robot-slur-discourse/</guid><description/></item>
<item><title>The Scott Alexander Email: An Explainer</title><link>https://segyges.github.io/posts/scott-alexander-email/</link><pubDate>Thu, 06 Nov 2025 00:00:00 +0000</pubDate><guid>https://segyges.github.io/posts/scott-alexander-email/</guid><description/></item>
<item><title>Do we understand how neural networks work?</title><link>https://segyges.github.io/posts/do-we-understand-how-neural-networks/</link><pubDate>Wed, 13 Aug 2025 00:00:00 +0000</pubDate><guid>https://segyges.github.io/posts/do-we-understand-how-neural-networks/</guid><description/></item>
<item><title>AGI: Probably Not 2027</title><link>https://segyges.github.io/posts/agi-probably-not-2027/</link><pubDate>Tue, 12 Aug 2025 00:00:00 +0000</pubDate><guid>https://segyges.github.io/posts/agi-probably-not-2027/</guid><description/></item>
<item><title>What Makes AI "Generative"?</title><link>https://segyges.github.io/posts/what-makes-ai-generative/</link><pubDate>Tue, 15 Jul 2025 00:00:00 +0000</pubDate><guid>https://segyges.github.io/posts/what-makes-ai-generative/</guid><description/></item>
<item><title>On The Platonic Representation Hypothesis</title><link>https://segyges.github.io/posts/some-thoughts-on-the-platonic-representation/</link><pubDate>Tue, 01 Jul 2025 00:00:00 +0000</pubDate><guid>https://segyges.github.io/posts/some-thoughts-on-the-platonic-representation/</guid><description/></item>
<item><title>The Biggest Statistic About AI Water Use Is A Lie</title><link>https://segyges.github.io/posts/the-biggest-statistic-about-ai-water/</link><pubDate>Sun, 08 Jun 2025 00:00:00 +0000</pubDate><guid>https://segyges.github.io/posts/the-biggest-statistic-about-ai-water/</guid><description/></item>
<item><title>AI History in Quotes</title><link>https://segyges.github.io/posts/ai-history-in-quotes/</link><pubDate>Sat, 07 Jun 2025 00:00:00 +0000</pubDate><guid>https://segyges.github.io/posts/ai-history-in-quotes/</guid><description/></item>
<item><title>What Is AI?</title><link>https://segyges.github.io/posts/what-is-ai/</link><pubDate>Wed, 04 Jun 2025 00:00:00 +0000</pubDate><guid>https://segyges.github.io/posts/what-is-ai/</guid><description/></item>
<item><title>UBI Proposal</title><link>https://segyges.github.io/ubi/proposal/</link><pubDate>Mon, 01 Jan 0001 00:00:00 +0000</pubDate><guid>https://segyges.github.io/ubi/proposal/</guid><description/></item>
</channel></rss>