txt.lurk.org

misc texts and essays from active lurkers
git clone https://git.lurk.org/repos/txt.lurk.org.git

commit 5087e6e4874683cbf45fa137a8aee10b6a0fde7b
parent fc3db2cd6befbadce29c41d5c093bcde3004c111
Author: rra <rscmbbng@riseup.net>
Date:   Tue Mar 26 12:17:32 2024 +0100

on not scaling lurk text
Diffstat:
on-not-scaling-lurk/3DCjoMkpReeZYDijQbP_4w.jpg | 0
on-not-scaling-lurk/54HB2Yb7RCuRFFqy1YzFoQ.jpg | 0
on-not-scaling-lurk/FToAO_ZXEAkqlCg.jpeg | 0
on-not-scaling-lurk/RDvNaHHbT7ObN7iCZb69VQ.jpg | 0
on-not-scaling-lurk/README.txt | 1+
on-not-scaling-lurk/RFbXd8pXRCKW8qkxrIOeXg.png | 0
on-not-scaling-lurk/SE3W9YkGTreFRaMVgkF2vg.jpg | 0
on-not-scaling-lurk/eU8gcyIPReOL-nGS2uTJlQ.jpg | 0
on-not-scaling-lurk/i2l56pBPRxKNGJ6FJabDkw.jpg | 0
on-not-scaling-lurk/iFfov8BmSq21IIPSNJtDFg.jpg | 0
on-not-scaling-lurk/index.html | 556+++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++
on-not-scaling-lurk/mC-4HGEvTjCMi-lvo4u07g.jpg | 0
on-not-scaling-lurk/olUPjfz5SHi-HXdN0x7kNw.jpg | 0
on-not-scaling-lurk/on-not-scaling-lurk.md | 87+++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++
on-not-scaling-lurk/sharecropping.png | 0
on-not-scaling-lurk/tSglnd7XT8qmj4DqU4pKEA.jpg | 0
on-not-scaling-lurk/taSj1BprSmeaHqBqoKOS6Q.jpg | 0
on-not-scaling-lurk/template.html | 94+++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++
on-not-scaling-lurk/thecycle-small.png | 0
19 files changed, 738 insertions(+), 0 deletions(-)
diff --git a/on-not-scaling-lurk/3DCjoMkpReeZYDijQbP_4w.jpg b/on-not-scaling-lurk/3DCjoMkpReeZYDijQbP_4w.jpg
Binary files differ.
diff --git a/on-not-scaling-lurk/54HB2Yb7RCuRFFqy1YzFoQ.jpg b/on-not-scaling-lurk/54HB2Yb7RCuRFFqy1YzFoQ.jpg
Binary files differ.
diff --git a/on-not-scaling-lurk/FToAO_ZXEAkqlCg.jpeg b/on-not-scaling-lurk/FToAO_ZXEAkqlCg.jpeg
Binary files differ.
diff --git a/on-not-scaling-lurk/RDvNaHHbT7ObN7iCZb69VQ.jpg b/on-not-scaling-lurk/RDvNaHHbT7ObN7iCZb69VQ.jpg
Binary files differ.
diff --git a/on-not-scaling-lurk/README.txt b/on-not-scaling-lurk/README.txt
@@ -0,0 +1 @@
+pandoc on-not-scaling-lurk.md -o index.html --template template.html
diff --git a/on-not-scaling-lurk/RFbXd8pXRCKW8qkxrIOeXg.png b/on-not-scaling-lurk/RFbXd8pXRCKW8qkxrIOeXg.png
Binary files differ.
diff --git a/on-not-scaling-lurk/SE3W9YkGTreFRaMVgkF2vg.jpg b/on-not-scaling-lurk/SE3W9YkGTreFRaMVgkF2vg.jpg
Binary files differ.
diff --git a/on-not-scaling-lurk/eU8gcyIPReOL-nGS2uTJlQ.jpg b/on-not-scaling-lurk/eU8gcyIPReOL-nGS2uTJlQ.jpg
Binary files differ.
diff --git a/on-not-scaling-lurk/i2l56pBPRxKNGJ6FJabDkw.jpg b/on-not-scaling-lurk/i2l56pBPRxKNGJ6FJabDkw.jpg
Binary files differ.
diff --git a/on-not-scaling-lurk/iFfov8BmSq21IIPSNJtDFg.jpg b/on-not-scaling-lurk/iFfov8BmSq21IIPSNJtDFg.jpg
Binary files differ.
diff --git a/on-not-scaling-lurk/index.html b/on-not-scaling-lurk/index.html
@@ -0,0 +1,556 @@
+<!DOCTYPE html>
+<html lang="en">
+  <head>
+    <meta charset="UTF-8">
+    <meta name="viewport" content="width=device-width, initial-scale=1">
+    <meta property="og:title" content="On not scaling LURK: a tale of
+maintenance, federation, and governance" />
+    <meta property="og:description" content="Ruminations on scale, technology and sustainability and their consequences for how lurk.org is run" />
+    <meta property="og:image" itemprop="image" content="https://txt.lurk.org/on-not-scaling-lurk/taSj1BprSmeaHqBqoKOS6Q.jpg" />
+    <meta property="og:url" content="https://txt.lurk.org/on-not-scaling-lurk/" />
+    <meta property="og:type" content="website" />
+    <meta name="twitter:card" content="summary_large_image" />
+    <meta name="twitter:title" content="On not scaling LURK: a tale of
+maintenance, federation, and governance" />
+    <meta name="twitter:description" content="Ruminations on scale, technology and sustainability and their consequences for how lurk.org is run" />
+    <meta name="twitter:image" content="https://txt.lurk.org/on-not-scaling-lurk/taSj1BprSmeaHqBqoKOS6Q.jpg" />
+    <meta name="twitter:image:alt" content="Capybara with a hat says 'too rad to be sad'" />
+    <title>On not scaling LURK: a tale of maintenance, federation, and
+governance</title>
+    <style type="text/css">
+      @font-face {
+        font-family: 'Route159-SemiBold';
+        src: url('../Route159-SemiBold.woff') format('woff');
+        }
+      body{
+        margin:40px auto;
+        max-width:650px;
+        line-height:1.6;
+        font-size:20px;
+        color:#444;
+        padding:0 10px;
+        font-family: 'Route159-SemiBold';
+
+      }
+      a:hover, a:visited {
+      color: #7800FF;
+      overflow-wrap: anywhere;
+      }
+      h1,h2,h3{
+        line-height:1.2;
+      }
+      figcaption {
+        text-align: center;
+      }
+      #ecf {
+        padding-top: 6em;
+        display: block;
+        margin-left: auto;
+        width: 25%;
+      }
+
+      img {
+        width: 100%;
+        margin: auto;
+      }
+
+    @media only screen
+       and (max-device-width: 400px)
+       and (orientation: portrait) {
+      body {
+        margin: 1em 2em;
+        font-size: 14px;
+        line-height: 1.4;
+        font-family: sans-serif;
+
+      }
+      figure {
+        width: 100%;
+        margin: auto;
+      }
+    }
+
+    @media only screen
+       and (max-device-width: 650px)
+       and (orientation: portrait) {
+      body {
+        margin: 1em 2em;
+        font-size: 16px;
+        line-height: 1.4;
+
+      }
+      blockquote {
+        margin: 0 auto;
+        border-left: 2px solid #444;
+        padding-left: 1em;
+      }
+      figure {
+        width: 100%;
+        margin: auto;
+      }
+    }
+    </style>
+  </head>
+  <body>
+    <h1
+    id="on-not-scaling-lurk-a-tale-of-maintenance-federation-and-governance">On
+    not scaling LURK: a tale of maintenance, federation, and
+    governance</h1>
+    <p>This text is important. It’s also quite long. There is no tl;dr.
+    But there are some good memes.</p>
+    <p>It is both long and a long time in the making as it reflects some
+    of our internal discussions that started in November 2022, when Elon
+    Musk completed his purchase of Twitter, and suddenly many people
+    found their way to the Fediverse. This moment changed things for the
+    Fediverse and also changed things for LURK. In the interest of
+    accountability and transparency, we want to share our reflections on
+    the matter with you all.</p>
+    <p>Let’s start with the numbers. In November 2022, we welcomed 50
+    new people to the instance. Moreover, many who had created an
+    account in prior years also came back to hang out more, now that the
+    incentive to flee Twitter became more pressing. To put
+    these numbers in perspective, over the years, post.lurk.org had
+    around 150 people active at any one time. That week the number
+    jumped to 350 and has hovered around 300 since then. Hi everyone. Of
+    course, LURK runs more than a Mastodon instance, and if we take this
+    into account, the total number of LURKers is more in the thousands.
+    However, the LURK instance is where most of the interaction takes
+    place, and it has in many cases become an entry point to the other
+    things we host.</p>
+    <p><img src="olUPjfz5SHi-HXdN0x7kNw.jpg" /></p>
+    <p>We’re happy that so many of you trust us and consider this
+    instance to be your new home base. However, like many other
+    Mastodon instances that suddenly grew, the increase in active users
+    both on our instance and across the fediverse meant that the
+    workload of our server increased significantly. Simultaneously, this
+    change in active accounts, both old and new, changed the vibe of our
+    local instance. Several instance admins have written blog posts on
+    how they scaled their technical infrastructure in response<a
+    href="#fn1" class="footnote-ref" id="fnref1"
+    role="doc-noteref"><sup>1</sup></a> and this is our post explaining
+    why we didn’t. To be sure, for post.lurk.org, we also put in many
+    hours of technical work to better accommodate new folks and the
+    growth of the wider fediverse. During this period, some probably
+    remember how we were testing all sorts of things and how this was
+    impacting the instance for better or worse. We even had to ask one
+    of our hosts, Eclips.is, to kindly expand the capacity of the
+    server where we run Mastodon a bit. This was not to accommodate
+    even more users, as many other admins were doing at the time; not
+    at all, in our case it was to be able to <em>just</em> keep the
+    instance afloat with its new 350 active users! Finally, things got
+    stable, and somehow post.lurk.org is considered a nice instance
+    from the outside. This translates into receiving many requests to
+    join the instance, even though it’s explicitly closed and we are
+    actively declining these. Why? The temptation to scale things up
+    further might seem obvious. The more LURKers the better? Well, not
+    quite. For
+    us, not scaling up was always the obvious choice for the
+    sustainability of the project.</p>
+    <p><img src="RFbXd8pXRCKW8qkxrIOeXg.png" /></p>
+    <p>Specifically, there are three interrelated ways in which the
+    issue of sustainability comes into play for post.lurk.org. First,
+    long-term sustainability of the project itself. Second, financial
+    sustainability of the project. Finally, ecological sustainability of
+    the project. All three concerns have been
+    actively guiding us until now and will hopefully keep on guiding us
+    going forward. These in turn touch on how to provide access to the
+    instance in the future, how we will maintain the server, and what we
+    do with the threat of Threads.</p>
+    <p>In terms of long-term sustainability, the growth of the space is
+    a consideration, and in particular the changes in social dynamics
+    that occur during moments when many new folks join a new
+    environment, such as when many people (re)join post.lurk.org when
+    something happens on Twitter (or whatever it’s called these days).
+    That change is rooted in the tension between providing friends (and
+    friends of friends) a space to network in a rich and focused
+    environment, <em>and</em> maintaining that environment. On the one
+    hand, we want to give many the possibility to join post.lurk.org and
+    the wider Fediverse; on the other hand, there is only so much that
+    we can do as a small collective to make a wider transition happen.
+    Culturally speaking, we also want to sustain the vibe of the space
+    we have been creating. Throughout the years, our slow growth through
+    invites and the word of mouth of friends-of-friends has helped with
+    maintaining that focus and a pleasant environment. But in times of
+    crisis, like in November 2022, many people needed a new home and, of
+    course, this has an impact on the experience of the instance. So
+    what to do? Well, the bottom line is that there is only so much we
+    can, and, honestly, want to do.</p>
+    <p><img src="iFfov8BmSq21IIPSNJtDFg.jpg" /></p>
+    <p>Since the start, we tried to focus on quality over quantity on
+    post.lurk.org. This has meant that we try to maintain a healthy
+    diversity—across genders, creative practices, cultural
+    backgrounds—rather than aiming at opening the door to a large
+    number of people vaguely connected to or interested in digital
+    media and cultural
+    practices. This is to a large extent because we do this for the sake
+    of it and in our spare time, so we want this to remain an
+    interesting place and hub for communities of practice that inspire
+    us, rather than a chore. At times, and particularly since November
+    2022, that has meant that we need to engage the brake on new
+    sign-ups to be able to make sure that this sentiment keeps on being
+    shared by everyone. At the same time, this means we have had to
+    exclude some folks, who, as a consequence, felt left out. We’re not
+    saying that the way we’re doing things is perfect, and it’s
+    difficult to communicate about this choice, hence this long text. It
+    can be a bit discouraging when from the outside, and from peers we
+    had to decline, we hear that this reticence about letting in more,
+    and more-of-the-same, people is perceived as an attempt to create some
+    exclusive cool kids club (true story).</p>
+    <p>Nevertheless, we feel that this strategy has paid off. For us,
+    one of the great things about having been involved in post.lurk.org
+    is the quality of the space and how generative it has been for our
+    practices versus how little time we, as an admin team, have to put
+    into it to keep it running. That is something we want to maintain
+    and something that is at risk when growing the instance more and
+    more. Still, we want a mechanism where people can join
+    post.lurk.org. After all, even if we want to not grow, there is also
+    the fact that some people join and eventually leave, some just join
+    and never use their account. Some simply disappear into the ether
+    after a while. That’s cool. But this means that we could potentially
+    welcome new people occasionally, without compromising on our way of
+    running the instance.</p>
+    <p><img src="3DCjoMkpReeZYDijQbP_4w.jpg" /></p>
+    <p>Until now, we did this in a relatively unstructured way,
+    opening applications every now and then, and suddenly receiving a
+    huge wave of messages from people explaining to us why our instance
+    is meaningful for them. Filtering these applications is one of the
+    most unrewarding and stressful things about this approach, all the
+    while having to make important but also, at times, arbitrary
+    selections. Part of the issue is the crappy interface for
+    selection—there is no possibility to respond to an application
+    outside accept or reject, for instance—but a larger part is based on
+    the arbitrariness of it. The secret LURK truth is that, more often
+    than not, people we found exciting based on their application turned out
+    to not be super engaged (if at all), and likewise, people we had no
+    idea about have become some of the nicest LURKers! Of course, we’re
+    not naive, and this is a social process that is not that surprising
+    in community building. The point is that we feel that the
+    application method is not only stressful, but also doesn’t add
+    anything to existing social processes emerging in online
+    communities. Let’s try something else!</p>
+    <p>One of the decisions we made in November 2022 is to cap the
+    number of accounts on post.lurk.org at 666 (keeping with our
+    tradition of using Meaningful Numbers™). Over the past years we
+    have stuck with that, and it has felt pleasant. And here is the
+    plot twist: starting now, we will automatically remove unused
+    accounts. We will
+    warn (of course!) accounts that have not logged in for 12 months and
+    delete them after 13 months of inactivity. This automatically and
+    slowly opens up new spots on post.lurk.org for others to join, as
+    people lose interest or move on, which is fine really—please send
+    postcards, though. We will hand out invites to
+    you if you request them, but we <em>really</em> still want to
+    privilege both diversity <em>and</em> people that are not yet on the
+    fedi.</p>
+    <p><img src="thecycle-small.png" /></p>
+    <p>It’s also important to say that, next to running the LURK
+    instances and other services, we are also actively developing and
+    offering workshops for communities to onboard the fediverse, not
+    just as users of an existing instance, but as collective
+    administrators of their own instance<a href="#fn2"
+    class="footnote-ref" id="fnref2"
+    role="doc-noteref"><sup>2</sup></a>. And this is really the key
+    thing that cultural workers need to understand about decentralised
+    and federated social media, namely the promise of having a balance
+    between online communities of practice that are humanly scaled, and
+    still being able to connect and reach out to many many many others. For
+    instance, it was recently very exciting to see the new <a
+    href="https://social.toplap.org">social.toplap.org</a> instance
+    emerge to give a proper hub for live coders, who until now tended to
+    flock to LURK or similar places where algorithmic and software art
+    is welcome (like the <a
+    href="https://merveilles.town">merveilles.town</a> and <a
+    href="https://sonomu.club">sonomu.club</a> instances). Running your
+    own instance is not trivial, but it’s not impossible for a small group
+    of motivated people, as we’ve seen in our workshops. And this
+    instance mitosis is the kind of scaling we’d like to see happen more
+    on the Fediverse instead of the emergence of heavily centralised and
+    large instances.</p>
+    <p><img src="eU8gcyIPReOL-nGS2uTJlQ.jpg" /></p>
+    <p>As mentioned above, we do this for the sake of it, and, outside
+    some flurries of work on technical things or moderation issues, it
+    has been fairly easy going. We want to keep it this way and are
+    really keen on none of this becoming a <em>Work</em> or a
+    <em>Chore</em>. Last year Brendan, both a long-time friend and an
+    experienced hater of computers, joined the team to help out. He has
+    been a great help with gnarly technical stuff. Others have
+    approached us offering help in various ways, for instance with
+    moderation, which has been useful with the current state of the
+    world. Others, however, have also approached us to help with means
+    of becoming larger, more professional, and we kindly rejected those
+    offers because at the end of the day, that means more meetings and
+    whatnot… and <em>Work</em>. What <em>works</em> for us is to stay haphazard
+    and spontaneous, the way we’ve been operating hitherto. We have an
+    idiosyncratic way of working, a weird governance model so to speak,
+    and we like it despite its highly artistic take on administration.
+    In the context of the ATNOFS project in 2021 we did some
+    introspection and came up with an honest description of such a take:
+    an “impulsive and time-constrained benevolent eurocentric
+    oligarcho-do-ocracy”<a href="#fn3" class="footnote-ref" id="fnref3"
+    role="doc-noteref"><sup>3</sup></a>.</p>
+    <blockquote>
+    <p>“Specifically in terms of governance, while it might be seductive
+    to go for a democratic consensus-governance model, this can also be
+    a risk when it comes to starting out and establishing the space if
+    the group doesn’t have enough capacity. In order to highlight this,
+    we introduced an honest description of LURK’s governance model as an
+    “impulsive and time-constrained benevolent eurocentric
+    oligarcho-do-ocracy”. Deconstructing what this means: our governance
+    model is impulsive because scratching itches / personal enjoyment
+    are the main motivators for work on LURK. Time-constrained because
+    everything is done whenever the administrators / moderators find
+    free time to work on the server; TODOs tend to span months, unless
+    they happen to be scratching someone’s itch. Benevolent, as we like
+    to consider ourselves well-intended, and are willing to listen,
+    learn and do best efforts given our constraints. Eurocentric, as the
+    entire team is in one timezone, concentrated on four to five
+    languages, and culturally homogeneous. Oligarchy, as the governance
+    structure consists of a small cabal (a conspiratorial group) which
+    makes executive decisions. A do-ocracy, because decisions are made
+    primarily by people acting on something. Moderation decisions such
+    as accepting new people to the server, banning other servers etc.,
+    tweaking the technical configuration are often just “done” by those
+    within the oligarchy without prior discussion. Only very difficult
+    situations, non-trivial technical issues, or really large decisions
+    are actively discussed in the oligarchy. All of that does not imply
+    that we haven’t, for example, solicited input and feedback on things
+    such as the Terms of Service to the larger LURK.org userbase.”</p>
+    </blockquote>
+    <p>Surely, there is an alternative timeline where LURK is run as a
+    super structured COOP using Loomio and whatnot to implement various
+    models of liquid democracy and participation, but, honestly, in our
+    present timeline, our model is not likely to change soon, and we
+    have the feeling that if we stick to this approach, we can keep it
+    up for the long run (by the way, could there be a LURK 10 year
+    anniversary around the corner?<a href="#fn4" class="footnote-ref"
+    id="fnref4" role="doc-noteref"><sup>4</sup></a>). Surely, we can
+    improve and tweak things, but it’s nice to appreciate when
+    something works well enough and brings good feels. <em>SLAPS ROOF OF
+    LURK</em>. To be sure, participatory modes of governance are the way
+    forward and our position is by no means a critique of these. If
+    anything, we are strong believers in direct democracy models, such
+    as participatory democracy, deliberative democracy, and agonism.
+    It’s just that LURK is more of an artistically driven approach to
+    long-term community building and server infrastructure, and we would
+    rather not pretend to be otherwise<a href="#fn5"
+    class="footnote-ref" id="fnref5"
+    role="doc-noteref"><sup>5</sup></a>. With that said, as exemplified
+    with this wall of text, we are ruminating <em>a lot</em> on these
+    issues and our slow cooking is so slow that it’s probably more
+    accurate to describe it as fermentation. It took us 5 years to
+    figure out how to have a 3-in-1 Code of Conduct, Terms of Service
+    and Privacy Statement that, we felt, was strong enough. To reach
+    this point, we spoke both formally and informally with many other
+    LURKers and friends, but also learned from practice and from what
+    other instances are doing.</p>
+    <p><img src="mC-4HGEvTjCMi-lvo4u07g.jpg" /></p>
+    <p>Concerning financial sustainability, one of the ways we have been
+    receiving (and gladly accepting) a tremendous amount of support is
+    in terms of donations. We started an <a
+    href="opencollective.com/lurk">Open Collective</a> in 2021 and have
+    been amazed at how people have chipped in. Because we are small,
+    frugal, anti-cloud and get some of our infrastructure sponsored<a
+    href="#fn6" class="footnote-ref" id="fnref6"
+    role="doc-noteref"><sup>6</sup></a>, we have historically spent very
+    little on infrastructure. The reason we started
+    collecting donations was to see if we could compensate
+    maintenance labour instead, and hopefully demonstrate the value of
+    such a tactic at a time when Big Tech and a misunderstanding of open
+    forms of software production have led us to believe that the digital
+    commons are a thing falling from the sky. This is especially
+    crucial for us because, as discussed earlier, we are often helping
+    other cultural workers run things themselves, and pretending that
+    the economic dimension does not exist is incredibly dishonest. (Post-)Free
+    culture evangelism has to stop sounding like an obscure hypocritical
+    pyramid scheme with only the most privileged able to play the game.
+    To our surprise, soliciting donations has worked so far, and we have
+    been using the majority of donations to compensate the sysadmin and
+    moderation labour of the team. We believe we are one of the few
+    instances where donated funds are used primarily to pay people,
+    rather than cloud companies.</p>
+    <p>However, we also realize that this can raise expectations on what
+    LURK as a project will become, and we want to be explicit that we
+    are not planning to change the nature and scale of our operation. We
+    will use the funds to continue to pay for labour and keep a buffer
+    for those moments when we suddenly need to fix something urgently. If
+    there is any surplus, we aim to donate upstream. This can be to
+    either Servus (who has hosted one of our servers for free until
+    now), or to Hometown, the modified version of Mastodon we use
+    (which is difficult as Hometown, probably for the same reason as
+    LURK, has no formal structure), or to useful Mastodon clients, or to other FLOSS and
+    related projects we rely on. We are still trying to figure out how
+    we will make it work, and to be honest, it’s difficult to get a
+    clear idea of our operational expenses in terms of labour, and as a
+    result, how to best use the buffer. For instance, we’ve now noticed
+    that it only takes a few days of technical or moderation clusterfuck
+    for our buffer to empty very fast. What is sure is that your ongoing
+    support in the form of donations will allow us to continue this
+    fermentation of community server maintenance for the long term.</p>
+    <p><img src="sharecropping.png" /></p>
+    <p>Last but not least, at the intersection of financial and
+    ecological sustainability is the question of technology use.
+    Sticking to the magic number of 666 accounts and operating with a
+    small team not only allows post.lurk.org to function well socially,
+    it also means that on a technical level, we don’t all of a sudden
+    have to become DevOps cloud engineers. Growing more would mean that
+    we would have to fundamentally reconsider how post.lurk.org is set up
+    and installed, and then start investing in cloud technologies and
+    platforms to keep things running. This is really something none of
+    us are looking forward to, or are even remotely interested in, let
+    alone supportive of, in terms of the type of maintenance we would
+    have to do, how much it would cost, and finally also how it sits
+    ecologically. We think that, morally, there should be a clear upper-bound
+    to how much the environment should suffer to facilitate shitposting.
+    From Low-Tech<a href="#fn7" class="footnote-ref" id="fnref7"
+    role="doc-noteref"><sup>7</sup></a> to permacomputing<a href="#fn8"
+    class="footnote-ref" id="fnref8" role="doc-noteref"><sup>8</sup></a>
+    to degrowth<a href="#fn9" class="footnote-ref" id="fnref9"
+    role="doc-noteref"><sup>9</sup></a>, several of us on the admin side
+    of LURK are interested in different frameworks to reconceptualize
+    computing’s relation to the environment and that practice is also
+    expressed in how we run post.lurk.org. It’s also great to see how
+    this interest has drawn to the instance many who share the same
+    views, and who are themselves active in these fields<a href="#fn10"
+    class="footnote-ref" id="fnref10"
+    role="doc-noteref"><sup>10</sup></a>. Currently, post.lurk.org runs
+    on a fairly limited setup on a more-than-a-decade-old machine. The
+    backup system likewise is made up of second-hand and spare equipment
+    (hosted as encrypted blobs in apartments and under work desks). So
+    far, this has been workable, but unfortunately Mastodon has until
+    now been designed with an unlimited growth mindset. For instance,
+    Mastodon servers by default accumulate an ever-growing cache of
+    remote media. On the one hand, this is necessary to be able to
+    properly moderate; on the other hand, it relies on ever-growing disk
+    space, which is wrongly considered a “cheap” and easy-to-access
+    commodity, and therefore this is not treated as a fundamental
+    issue.</p>
+    <p><img src="i2l56pBPRxKNGJ6FJabDkw.jpg" /></p>
+    <p>One of the things we do on post.lurk.org to counteract this is to
+    frequently prune this cache on the server. That, however, has some
+    implications: only the most recent remote posts are visible, and
+    remote profiles that haven’t been interacted with in a while will
+    not have avatars or profile headers. When we remove remote users
+    from the database that have not been active in a long time, this can
+    also mean that you lose followers. Or, to be more precise, the
+    “followers” counter will suddenly be lower, since you likely already
+    lost those followers as the remote accounts will have stopped using
+    the fediverse a long time before we remove them. Having said that,
+    things like favourites and bookmarks are not deleted, and we also
+    won’t delete your personal data (unless your profile becomes
+    inactive for longer than a year, and we send you a warning before
+    that).</p>
+    <p>The reason to discuss this is that, at the end of the day, it
+    also impacts the user experience, especially when the cloud mindset
+    of “everything at my fingertips forever” is the default. Some of you
+    use a feature of Mastodon to automatically delete old posts based on
+    some conditions. At the time of writing we haven’t really decided or
+    discussed seriously whether it’s something we should encourage
+    everyone to do and, if yes, what the default strategy would be, as it can be
+    configured in many ways (<a
+    href="https://post.lurk.org/statuses_cleanup">have a look</a> to get
+    an idea of all the options!). Keeping things constantly online that
+    are essentially ephemeral, or low value, feels wrong since it uses
+    actual resources. If you need to keep an archive, you can export it
+    from the configuration panel, and with all the clever LURKers
+    around, perhaps someone can make a masto2static script to serve your
+    glorious toots elsewhere (and perhaps this is something we should
+    put some lurk funds towards or crowdfund?).</p>
+    <p>We want to mention this because one of the big unknowns at this
+    point is whether we can continue running the server as we have done
+    before as the entire network grows in size. For instance, one way
+    the network will drastically grow is if/when Facebook’s Instagram’s
+    Meta’s Threads becomes fully interoperable.</p>
+    <p><img src="SE3W9YkGTreFRaMVgkF2vg.jpg" /></p>
+    <p>In conclusion, this is also where these three strands converge
+    into our position on federating with Threads: it is weird that
+    volunteer mods and admins will have to put in effort to maintain a
+    connection to what essentially is a giant and badly moderated
+    server. Likewise, it is weird that small alternative projects will
+    have to drastically upscale their infrastructure, labour and capital
+    investment to facilitate a billion dollar corporation’s regulation
+    dodging/<a
+    href="https://en.wikipedia.org/wiki/Embrace,_extend,_and_extinguish">EEE</a>.
+    It is weird that we will have to decentrally store all kinds of
+    random crap from a social media empire that follows a cornucopian
+    perspective on computing and actively incentivizes the production of
+    bullshit at the expense of people and the planet. We appreciate that
+    others might feel doing just that is sound techno-political
+    strategy; more attention for the alternatives, etc. The reason we
+    got into post.lurk.org is to get away from all that and try
+    something else. So no, we will not federate with Threads. What is
+    the point, really?</p>
+    <p><img src="FToAO_ZXEAkqlCg.jpeg" /></p>
+    <p>Happy LURKing :^) Alex, Aymeric, Brendan, Lídia, Roel</p>
+    <p><img src="taSj1BprSmeaHqBqoKOS6Q.jpg" /></p>
+    <section class="footnotes footnotes-end-of-document"
+    role="doc-endnotes">
+    <hr />
+    <ol>
+    <li id="fn1" role="doc-endnote"><p>See for instance <a
+    href="https://leah.is/posts/scaling-the-mastodon/"
+    class="uri">https://leah.is/posts/scaling-the-mastodon/</a>, <a
+    href="https://mijndertstuij.nl/posts/scaling-mastodon-community/"
+    class="uri">https://mijndertstuij.nl/posts/scaling-mastodon-community/</a>,
+    <a
+    href="https://blog.freeradical.zone/post/surviving-thriving-through-2022-11-05-meltdown/"
+    class="uri">https://blog.freeradical.zone/post/surviving-thriving-through-2022-11-05-meltdown/</a>,
+    <a
+    href="https://nora.codes/post/scaling-mastodon-in-the-face-of-an-exodus/"
+    class="uri">https://nora.codes/post/scaling-mastodon-in-the-face-of-an-exodus/</a><a
+    href="#fnref1" class="footnote-back"
+    role="doc-backlink">↩︎</a></p></li>
+    <li id="fn2" role="doc-endnote"><p>See <a
+    href="https://txt.lurk.org/how-to-run-a-small-social-networking-site/"
+    class="uri">https://txt.lurk.org/how-to-run-a-small-social-networking-site/</a>
+    and <a href="https://txt.lurk.org/ATNOFS/"
+    class="uri">https://txt.lurk.org/ATNOFS/</a><a href="#fnref2"
+    class="footnote-back" role="doc-backlink">↩︎</a></p></li>
+    <li id="fn3" role="doc-endnote"><p>From LURK in A Transversal
+    Network of Feminist Servers, 2022, <a
+    href="https://txt.lurk.org/ATNOFS/"
+    class="uri">https://txt.lurk.org/ATNOFS/</a><a href="#fnref3"
+    class="footnote-back" role="doc-backlink">↩︎</a></p></li>
+    <li id="fn4" role="doc-endnote"><p>What is LURK <a
+    href="https://web.archive.org/web/20150206001212/http://lurk.org/groups/meta-lurk/messages/topic/1Bqk3euF2ou2v8KsttTwd7/"
+    class="uri">https://web.archive.org/web/20150206001212/http://lurk.org/groups/meta-lurk/messages/topic/1Bqk3euF2ou2v8KsttTwd7/</a><a
+    href="#fnref4" class="footnote-back"
+    role="doc-backlink">↩︎</a></p></li>
+    <li id="fn5" role="doc-endnote"><p>On top of that, several of us are
+    involved in such models in other parts of practice and personal
+    lives, whether art collectives, collectively run kindergartens, food
+    coops or open source projects. There is a limit to how many of these
+    things you can meaningfully take part in.<a href="#fnref5"
+    class="footnote-back" role="doc-backlink">↩︎</a></p></li>
+    <li id="fn6" role="doc-endnote"><p>This text and our mailing lists
+    are at <a href="https://servus.at">servus.at</a>, <a
+    href="https://post.lurk.org">post.lurk.org</a> is sponsored through
+    <a href="https://eclips.is">Eclips.is/Greenhost</a><a href="#fnref6"
+    class="footnote-back" role="doc-backlink">↩︎</a></p></li>
+    <li id="fn7" role="doc-endnote"><p>De Decker, K., Roscam Abbing, R.,
+    &amp; Otsuka, M. (2018). <em>How to build a low-tech website</em>.
+    <a
+    href="https://solar.lowtechmagazine.com/2018/09/how-to-build-a-low-tech-website/">https://solar.lowtechmagazine.com/2018/09/how-to-build-a-low-tech-website/</a><a
+    href="#fnref7" class="footnote-back"
+    role="doc-backlink">↩︎</a></p></li>
+    <li id="fn8" role="doc-endnote"><p>Mansoux, A., Howell, B., Barok,
+    D., &amp; Heikkilä, V. M. (2023). <em>Permacomputing aesthetics:
+    potential and limits of constraints in computational art, design and
+    culture</em>. Ninth Computing within Limits. <a
+    href="https://limits.pubpub.org/pub/6loh1eqi">https://limits.pubpub.org/pub/6loh1eqi</a><a
+    href="#fnref8" class="footnote-back"
+    role="doc-backlink">↩︎</a></p></li>
+    <li id="fn9" role="doc-endnote"><p>Roscam Abbing, R. (2021).
+    <em>‘This is a solar-powered website, which means it sometimes goes
+    offline’: a design inquiry into degrowth and ICT</em>. Seventh
+    Computing within Limits. <a
+    href="https://limits.pubpub.org/pub/lecuxefc">https://limits.pubpub.org/pub/lecuxefc</a><a
+    href="#fnref9" class="footnote-back"
+    role="doc-backlink">↩︎</a></p></li>
+    <li id="fn10" role="doc-endnote"><p>De Valk, M. (2021, June). <em>A
+    pluriverse of local worlds: A review of Computing within Limits
+    related terminology and practices.</em> Seventh Computing within
+    Limits. <a
+    href="https://limits.pubpub.org/pub/jkrofglk">https://limits.pubpub.org/pub/jkrofglk</a><a
+    href="#fnref10" class="footnote-back"
+    role="doc-backlink">↩︎</a></p></li>
+    </ol>
+    </section>
+  </body>
+</html>
diff --git a/on-not-scaling-lurk/mC-4HGEvTjCMi-lvo4u07g.jpg b/on-not-scaling-lurk/mC-4HGEvTjCMi-lvo4u07g.jpg
Binary files differ.
diff --git a/on-not-scaling-lurk/olUPjfz5SHi-HXdN0x7kNw.jpg b/on-not-scaling-lurk/olUPjfz5SHi-HXdN0x7kNw.jpg
Binary files differ.
diff --git a/on-not-scaling-lurk/on-not-scaling-lurk.md b/on-not-scaling-lurk/on-not-scaling-lurk.md
@@ -0,0 +1,86 @@
+---
+title: "On not scaling LURK: a tale of maintenance, federation, and governance"
+date: 26 March, 2024
+---
+
+# On not scaling LURK: a tale of maintenance, federation, and governance
+
+This text is important. It's also quite long. There is no tl;dr. But there are some good memes.
+
+It is both long and a long time in the making as it reflects some of our internal discussions that started in November 2022, when Elon Musk completed his purchase of Twitter, and suddenly many people found their way to the Fediverse. This moment changed things for the Fediverse and also changed things for LURK. In the interest of accountability and transparency, we want to share our reflections on the matter with you all.
+
+Let's start with the numbers. In November 2022, we welcomed 50 new people to the instance. Moreover, many who had created an account in prior years also came back to hang out more, now that the incentive to flee Twitter became more pressing. To put these numbers in perspective, over the years, post.lurk.org had around 150 people active at any one time. That week the number jumped to 350 and has hovered around 300 since then. Hi everyone. Of course, LURK runs more than a Mastodon instance, and if we take this into account, the total number of LURKers is more in the thousands. However, the LURK instance is where most of the interaction takes place, and it has in many cases become an entry point to the other things we host.
+
+![](olUPjfz5SHi-HXdN0x7kNw.jpg)
+
+We're happy that so many of you trust us and consider this instance to be your new home base. However, like many other Mastodon instances that suddenly grew, the increase in active users both on our instance and across the fediverse meant that the workload of our server increased significantly. Simultaneously, this change in active accounts, both old and new, changed the vibe of our local instance. Several instance admins have written blog posts on how they scaled their technical infrastructure in response[^scaling] and this is our post explaining why we didn't. To be sure, for post.lurk.org, we also put in many hours of technical work to better accommodate new folks and the growth of the wider fediverse. During this period, some probably remember how we were testing all sorts of things and how this was impacting the instance for better or worse. We even had to ask one of our hosts, Eclips.is, to kindly expand the capacity of the server where we run Mastodon a bit. This was not to accommodate even more users, as many other admins were doing at the time; not at all, in our case it was to be able to *just* keep the instance afloat with its new 350 active users! Finally, things got stable, and somehow post.lurk.org is considered a nice instance from the outside. This translates into receiving many requests to join the instance, even though it's explicitly closed and we are actively declining these. Why? The temptation to scale things up further might seem obvious. The more LURKers the better? Well, not quite. For us, not scaling up was always the obvious choice for the sustainability of the project.
+
+![](RFbXd8pXRCKW8qkxrIOeXg.png)
+
+Specifically, there are three interrelated ways in which the issue of sustainability comes into play for post.lurk.org. First, long-term sustainability of the project itself. Second, financial sustainability of the project. Finally, ecological sustainability of the project. All three concerns have been actively guiding us until now and will hopefully keep on guiding us going forward. These in turn touch on how to provide access to the instance in the future, how we will maintain the server, and what we do with the threat of Threads.
+
+In terms of long-term sustainability, the growth of the space is a consideration, and in particular the changes in social dynamics that occur during moments when many new folks join a new environment, such as when many people (re)join post.lurk.org when something happens on Twitter (or whatever it's called these days). That change is rooted in the tension between providing friends (and friends of friends) a space to network in a rich and focused environment, *and* maintaining that environment. On the one hand, we want to give many the possibility to join post.lurk.org and the wider Fediverse; on the other hand, there is only so much that we can do as a small collective to make a wider transition happen. Culturally speaking, we also want to sustain the vibe of the space we have been creating. Throughout the years, our slow growth through invites and the word of mouth of friends-of-friends has helped with maintaining that focus and a pleasant environment. But in times of crisis, like in November 2022, many people needed a new home and, of course, this has an impact on the experience of the instance. So what to do? Well, the bottom line is that there is only so much we can, and, honestly, want to do.
+
+![](iFfov8BmSq21IIPSNJtDFg.jpg)
+
+Since the start, we tried to focus on quality over quantity on post.lurk.org. This has meant that we try to maintain a healthy diversity—across genders, creative practices, cultural backgrounds—rather than aiming at opening the door to a large number of people vaguely connected to or interested in digital media and cultural practices. This is to a large extent because we do this for the sake of it and in our spare time, so we want this to remain an interesting place and hub for communities of practice that inspire us, rather than a chore. At times, and particularly since November 2022, that has meant that we need to engage the brake on new sign-ups to be able to make sure that this sentiment keeps on being shared by everyone. At the same time, this means we have had to exclude some folks, who, as a consequence, felt left out. We're not saying that the way we're doing things is perfect, and it's difficult to communicate about this choice, hence this long text. It can be a bit discouraging when from the outside, and from peers we had to decline, we hear that this reticence about letting in more, and more-of-the-same, people is perceived as an attempt to create some exclusive cool kids club (true story).
+
+Nevertheless, we feel that this strategy has paid off. For us, one of the great things about having been involved in post.lurk.org is the quality of the space and how generative it has been for our practices versus how little time we, as an admin team, have to put into it to keep it running. That is something we want to maintain and something that is at risk when growing the instance more and more. Still, we want a mechanism where people can join post.lurk.org. After all, even if we want to not grow, there is also the fact that some people join and eventually leave, some just join and never use their account. Some simply disappear into the ether after a while. That's cool. But this means that we could potentially welcome new people occasionally, without compromising on our way of running the instance.
+
+![](3DCjoMkpReeZYDijQbP_4w.jpg)
+
+Until now, we did this in a relatively unstructured way, opening applications every now and then, and suddenly receiving a huge wave of messages from people explaining to us why our instance is meaningful for them. Filtering these applications is one of the most unrewarding and stressful things about this approach, all the while having to make important but also, at times, arbitrary selections. Part of the issue is the crappy interface for selection—there is no possibility to respond to an application outside accept or reject, for instance—but a larger part is based on the arbitrariness of it. The secret LURK truth is that, more often than not, people we found exciting based on their application turned out to not be super engaged (if at all), and likewise, people we had no idea about have become some of the nicest LURKers! Of course, we're not naive, and this is a social process that is not that surprising in community building. The point is that we feel that the application method is not only stressful, but also doesn't add anything to existing social processes emerging in online communities. Let's try something else!
+
+One of the decisions we made in November 2022 is to cap the number of accounts on post.lurk.org at 666 (keeping with our tradition of using Meaningful Numbers™). Over the past years we have stuck with that, and it has felt pleasant. And here is the plot twist: starting now, we will automatically remove unused accounts. We will warn (of course!) accounts that have not logged in for 12 months and delete them after 13 months of inactivity. This automatically and slowly opens up new spots on post.lurk.org for others to join, as people lose interest or move on, which is fine really—please send postcards, though. We will hand out invites to you if you request them, but we *really* still want to privilege both diversity *and* people that are not yet on the fedi.
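+
+As a sketch of how such pruning can be automated (illustrative only, not a description of our actual tooling): Mastodon is a Rails application using Devise, so every local user record carries a `current_sign_in_at` timestamp, and removal itself is covered by the stock `tootctl` CLI.
+
+```sh
+# Hypothetical sketch: list local accounts whose owners have not
+# signed in for 13 months. /home/mastodon/live is the conventional
+# install path; adjust to your setup.
+cd /home/mastodon/live
+RAILS_ENV=production bin/rails runner '
+  User.where("current_sign_in_at < ?", 13.months.ago).find_each do |user|
+    puts user.account.username
+  end
+'
+# After the warning period, each candidate could then be removed
+# with the stock CLI:
+#   RAILS_ENV=production bin/tootctl accounts delete <username>
+```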
+
+![](thecycle-small.png)
+
+It's also important to say that, next to running the LURK instances and other services, we are also actively developing and offering workshops for communities to onboard the fediverse, not just as users of an existing instance, but as collective administrators of their own instance[^runyourown]. And this is really the key thing that cultural workers need to understand about decentralised and federated social media, namely the promise of having a balance between online communities of practice that are humanly scaled, and still being able to connect and reach out to many many many others. For instance, it was recently very exciting to see the new [social.toplap.org](https://social.toplap.org) instance emerge to give a proper hub for live coders, who until now tended to flock to LURK or similar places where algorithmic and software art is welcome (like the [merveilles.town](https://merveilles.town) and [sonomu.club](https://sonomu.club) instances). Running your own instance is not trivial, but it's not impossible for a small group of motivated people, as we've seen in our workshops. And this instance mitosis is the kind of scaling we'd like to see happen more on the Fediverse instead of the emergence of heavily centralised and large instances.
+
+![](eU8gcyIPReOL-nGS2uTJlQ.jpg)
+
+As mentioned above, we do this for the sake of it, and, outside some flurries of work on technical things or moderation issues, it has been fairly easy going. We want to keep it this way and are really keen on none of this becoming a *Work* or a *Chore*. Last year Brendan, both a long-time friend and an experienced hater of computers, joined the team to help out. He has been a great help with gnarly technical stuff. Others have approached us offering help in various ways, for instance with moderation, which has been useful with the current state of the world. Others, however, have also approached us offering to help us become larger and more professional, and we kindly rejected those offers because at the end of the day, that means more meetings and whatnot... and *Work*. What *works* for us is to stay haphazard and spontaneous, the way we've been operating hitherto. We have an idiosyncratic way of working, a weird governance model so to speak, and we like it despite its highly artistic take on administration. In the context of the ATNOFS project in 2021 we did some introspection and came up with an honest description of such a take: an “impulsive and time-constrained benevolent eurocentric oligarcho-do-ocracy”[^1].
+
+>"Specifically in terms of governance, while it might be seductive to go for a democratic consensus-governance model, this can also be a risk when it comes to starting out and establishing the space if the group doesn’t have enough capacity. In order to highlight this, we introduced an honest description of LURK’s governance model as an “impulsive and time-constrained benevolent eurocentric oligarcho-do-ocracy”. Deconstructing what this means: our governance model is impulsive because scratching itches / personal enjoyment are the main motivators for work on LURK. Time-constrained because everything is done whenever the administrators / moderators find free time to work on the server; TODOs tend to span months, unless they happen to be scratching someone’s itch. Benevolent, as we like to consider ourselves well-intended, and are willing to listen, learn and do best efforts given our constraints. Eurocentric, as the entire team is in one timezone, concentrated on four to five languages, and culturally homogeneous. Oligarchy, as the governance structure consists of a small cabal (a conspiratorial group) which makes executive decisions. A do-ocracy, because decisions are made primarily by people acting on something. Moderation decisions such as accepting new people to the server, banning other servers etc., tweaking the technical configuration are often just “done” by those within the oligarchy without prior discussion. Only very difficult situations, non-trivial technical issues, or really large decisions are actively discussed in the oligarchy. All of that does not imply that we haven’t, for example, solicited input and feedback on things such as the Terms of Service to the larger LURK.org userbase."
+
+Surely, there is an alternative timeline where LURK is run as a super structured COOP using Loomio and whatnot to implement various models of liquid democracy and participation, but, honestly, in our present timeline, our model is not likely to change soon, and we have the feeling that if we stick to this approach, we can keep it up for the long run (by the way, could there be a LURK 10 year anniversary around the corner?[^2]). Surely, we can improve and tweak things, but it's nice to appreciate when something works well enough and brings good feels. *SLAPS ROOF OF LURK*. To be sure, participatory modes of governance are the way forward and our position is by no means a critique of these. If anything, we are strong believers in direct democracy models, such as participatory democracy, deliberative democracy, and agonism. It's just that LURK is more of an artistically driven approach to long-term community building and server infrastructure, and we would rather not pretend to be otherwise[^ontopopthat]. With that said, as exemplified with this wall of text, we are ruminating *a lot* on these issues and our slow cooking is so slow that it's probably more accurate to describe it as fermentation. It took us 5 years to figure out how to have a 3-in-1 Code of Conduct, Terms of Service and Privacy Statement that, we felt, was strong enough. To reach this point, we spoke both formally and informally with many other LURKers and friends, but also learned from practice and from what other instances are doing.
+
+![](mC-4HGEvTjCMi-lvo4u07g.jpg)
+
+Concerning financial sustainability, one of the ways we have been receiving (and gladly accepting) a tremendous amount of support is in terms of donations. We started an [Open Collective](https://opencollective.com/lurk) in 2021 and have been amazed at how people have chipped in. Because we are small, frugal, anti-cloud and get some of our infrastructure sponsored[^sponsored], we have historically spent very little on infrastructure. The reason we started collecting donations was to see if we could compensate maintenance labour instead, and hopefully demonstrate the value of such a tactic at a time when Big Tech and a misunderstanding of open forms of software production have led us to believe that the digital commons are a thing falling from the sky. This is especially crucial for us because, as discussed earlier, we are often helping other cultural workers run things themselves, and pretending that the economic dimension does not exist is incredibly dishonest. (Post-)Free culture evangelism has to stop sounding like an obscure hypocritical pyramid scheme with only the most privileged able to play the game. To our surprise, soliciting donations has worked so far, and we have been using the majority of donations to compensate the sysadmin and moderation labour of the team. We believe we are one of the few instances where donated funds are used primarily to pay people, rather than cloud companies.
+
+However, we also realize that this can raise expectations on what LURK as a project will become, and we want to be explicit that we are not planning to change the nature and scale of our operation. We will use the funds to continue to pay for labour and keep a buffer for those moments when we suddenly need to fix something urgently. If there is any surplus, we aim to donate upstream. This can be to either Servus (who has hosted one of our servers for free until now), or to Hometown, the modified version of Mastodon we use (which is difficult as Hometown, probably for the same reason as LURK, has no formal structure), or to useful Mastodon clients, or to other FLOSS and related projects we rely on. We are still trying to figure out how we will make it work, and to be honest, it's difficult to get a clear idea of our operational expenses in terms of labour, and as a result, how to best use the buffer. For instance, we've now noticed that it only takes a few days of technical or moderation clusterfuck for our buffer to empty very fast. What is sure is that your ongoing support in the form of donations will allow us to continue this fermentation of community server maintenance for the long term.
+
+![](sharecropping.png)
+
+Last but not least, at the intersection of financial and ecological sustainability is the question of technology use. Sticking to the magic number of 666 accounts and operating with a small team not only allows post.lurk.org to function well socially, it also means that on a technical level, we don't all of a sudden have to become DevOps cloud engineers. Growing more would mean that we would have to fundamentally reconsider how post.lurk.org is set up and installed, and then start investing in cloud technologies and platforms to keep things running. This is really something none of us are looking forward to, or are even remotely interested in, let alone supportive of, in terms of the type of maintenance we would have to do, how much it would cost, and finally also how it sits ecologically. We think that, morally, there should be a clear upper-bound to how much the environment should suffer to facilitate shitposting. From Low-Tech[^3] to permacomputing[^4] to degrowth[^5], several of us on the admin side of LURK are interested in different frameworks to reconceptualize computing's relation to the environment and that practice is also expressed in how we run post.lurk.org. It's also great to see how this interest has drawn to the instance many who share the same views, and who are themselves active in these fields[^6]. Currently, post.lurk.org runs on a fairly limited setup on a more-than-a-decade-old machine. The backup system likewise is made up of second-hand and spare equipment (hosted as encrypted blobs in apartments and under work desks). So far, this has been workable, but unfortunately Mastodon has until now been designed with an unlimited growth mindset. For instance, Mastodon servers by default accumulate an ever-growing cache of remote media. On the one hand, this is necessary to be able to properly moderate; on the other hand, it relies on ever-growing disk space, which is wrongly considered a “cheap” and easy-to-access commodity, and therefore this is not treated as a fundamental issue.
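+
+As an aside, "encrypted blobs in apartments" is less exotic than it sounds. A minimal sketch with a tool like BorgBackup (named purely for illustration, this is not a description of our actual setup; the host and paths are made up) fits in a few cron-able lines:
+
+```sh
+# Illustrative only: push encrypted, deduplicated backups to a
+# second-hand box under someone's desk. The remote host only ever
+# stores ciphertext ("encrypted blobs").
+export BORG_REPO='ssh://backup@apartment-box.example.org/backups/lurk'
+borg init --encryption=repokey-blake2    # one-time repository setup
+borg create --stats ::'{hostname}-{now}' /var/lib/mastodon /etc
+borg prune --keep-daily=7 --keep-weekly=4 --keep-monthly=6
+```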
+
+![](i2l56pBPRxKNGJ6FJabDkw.jpg)
+
+One of the things we do on post.lurk.org to counteract this is to frequently prune this cache on the server. That, however, has some implications: only the most recent remote posts are visible, and remote profiles that haven't been interacted with in a while will not have avatars or profile headers. When we remove remote users from the database that have not been active in a long time, this can also mean that you lose followers. Or, to be more precise, the “followers” counter will suddenly be lower, since you likely already lost those followers as the remote accounts will have stopped using the fediverse a long time before we remove them. Having said that, things like favourites and bookmarks are not deleted, and we also won't delete your personal data (unless your profile becomes inactive for longer than a year, and we send you a warning before that).
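+
+Concretely, this kind of pruning maps onto maintenance commands Mastodon already ships in its `tootctl` CLI; run periodically from cron, something along these lines does what the paragraph above describes (the retention windows are illustrative, not our production settings):
+
+```sh
+# Illustrative retention windows, not our actual values.
+cd /home/mastodon/live
+# drop cached copies of remote media older than two weeks
+RAILS_ENV=production bin/tootctl media remove --days=14
+# drop cached link preview cards older than a month
+RAILS_ENV=production bin/tootctl preview_cards remove --days=30
+# remove remote accounts that no longer exist on their home servers
+RAILS_ENV=production bin/tootctl accounts cull
+# remove old remote statuses that nobody here ever interacted with
+RAILS_ENV=production bin/tootctl statuses remove --days=90
+```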
+
+The reason to discuss this is that, at the end of the day, it also impacts the user experience, especially when the cloud mindset of “everything at my fingertips forever” is the default. Some of you use a feature of Mastodon to automatically delete old posts based on certain conditions. At the time of writing we haven't seriously decided or discussed whether it's something we should encourage everyone to do and, if so, what the default strategy would be, as it can be configured in many ways ([have a look](https://post.lurk.org/statuses_cleanup) to get an idea of all the options!). Keeping things permanently online that are essentially ephemeral or low value feels wrong, since it uses actual resources. If you need to keep an archive, you can export it from the configuration panel, and with all the clever LURKers around, perhaps someone can make a masto2static script to serve your glorious toots elsewhere (and perhaps this is something we should put some LURK funds towards or crowdfund?).
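+
+On that note, such a script need not be complicated: the archive you export contains an `outbox.json` file in ActivityStreams format. As a purely hypothetical sketch (untested, assuming `jq` is installed, that the export keeps its current layout, and with the output filename made up here), something like this would already produce a single static page of your posts:
+
+```sh
+#!/bin/sh
+# Hypothetical masto2static sketch: turn the outbox.json from a
+# Mastodon archive export into one static HTML page. Assumes jq is
+# installed; the output filename is arbitrary.
+OUT=toots.html
+
+printf '<!DOCTYPE html>\n<html lang="en"><head><meta charset="UTF-8">\n' > "$OUT"
+printf '<title>glorious toots</title></head><body>\n' >> "$OUT"
+
+# Keep only posts you wrote ("Create" activities; "Announce" items
+# are boosts and carry only a URL), then emit them as HTML articles.
+jq -r '.orderedItems[]
+       | select(.type == "Create")
+       | "<article><time>\(.object.published)</time>\(.object.content)</article>"' \
+   outbox.json >> "$OUT"
+
+printf '</body></html>\n' >> "$OUT"
+```
+
+The post bodies in the export are already HTML, so they can be passed through as-is; a fuller script would also copy over the attached media from the archive.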
+
+We want to mention this because one of the big unknowns at this point is whether we can continue running the server as we have so far while the entire network grows in size. For instance, one way the network could drastically grow is if/when Facebook's Instagram's Meta's Threads becomes fully interoperable.
+
+![](SE3W9YkGTreFRaMVgkF2vg.jpg)
+
+In conclusion, this is also where these three strands coincide in our position on federating with Threads: it is weird that volunteer mods and admins will have to put in effort to maintain a connection to what is essentially a giant and badly moderated server. Likewise, it is weird that small alternative projects will have to drastically upscale their infrastructure, labour and capital investment to facilitate a billion-dollar corporation's regulation dodging/[EEE](https://en.wikipedia.org/wiki/Embrace,_extend,_and_extinguish). It is weird that we will have to decentrally store all kinds of random crap from a social media empire that follows a cornucopian perspective on computing and actively incentivizes the production of bullshit at the expense of people and the planet. We appreciate that others might feel doing just that is sound techno-political strategy, more attention for the alternatives, etc. The reason we got into post.lurk.org is to get away from all that and try something else. So no, we will not federate with Threads. What is the point, really?
+
+![](FToAO_ZXEAkqlCg.jpeg)
+
+Happy LURKing :^)
+Alex, Aymeric, Brendan, Lídia, Roel
+
+![](taSj1BprSmeaHqBqoKOS6Q.jpg)
+
+[^1]: From LURK in A Transversal Network of Feminist Servers, 2022, <https://txt.lurk.org/ATNOFS/>
+[^2]: What is LURK <https://web.archive.org/web/20150206001212/http://lurk.org/groups/meta-lurk/messages/topic/1Bqk3euF2ou2v8KsttTwd7/>
+[^3]: De Decker, K., Roscam Abbing, R., & Otsuka, M. (2018). *How to build a low-tech website*. [https://solar.lowtechmagazine.com/2018/09/how-to-build-a-low-tech-website/](https://solar.lowtechmagazine.com/2018/09/how-to-build-a-low-tech-website/)
+[^4]: Mansoux, A., Howell, B., Barok, D., & Heikkilä, V. M. (2023). *Permacomputing aesthetics: potential and limits of constraints in computational art, design and culture*. Ninth Computing within Limits. [https://limits.pubpub.org/pub/6loh1eqi](https://limits.pubpub.org/pub/6loh1eqi)
+[^5]: Roscam Abbing, R. (2021). *‘This is a solar-powered website, which means it sometimes goes offline’: a design inquiry into degrowth and ICT*. Seventh Computing within Limits. [https://limits.pubpub.org/pub/lecuxefc](https://limits.pubpub.org/pub/lecuxefc)
+[^6]: De Valk, M. (2021, June). *A pluriverse of local worlds: A review of Computing within Limits related terminology and practices.* Seventh Computing within Limits. [https://limits.pubpub.org/pub/jkrofglk](https://limits.pubpub.org/pub/jkrofglk)
+[^scaling]: See for instance <https://leah.is/posts/scaling-the-mastodon/>, <https://mijndertstuij.nl/posts/scaling-mastodon-community/>, <https://blog.freeradical.zone/post/surviving-thriving-through-2022-11-05-meltdown/>, <https://nora.codes/post/scaling-mastodon-in-the-face-of-an-exodus/>
+[^sponsored]: This text and our mailing lists are hosted at [servus.at](https://servus.at); [post.lurk.org](https://post.lurk.org) is sponsored through [Eclips.is/Greenhost](https://eclips.is).
+[^runyourown]: See <https://txt.lurk.org/how-to-run-a-small-social-networking-site/> and <https://txt.lurk.org/ATNOFS/>
+[^ontopopthat]: On top of that, several of us are involved in such models in other parts of our practice and personal lives, whether art collectives, collectively run kindergartens, food coops or open source projects. There is a limit to how many of these things you can meaningfully take part in.
\ No newline at end of file
diff --git a/on-not-scaling-lurk/sharecropping.png b/on-not-scaling-lurk/sharecropping.png
Binary files differ.
diff --git a/on-not-scaling-lurk/tSglnd7XT8qmj4DqU4pKEA.jpg b/on-not-scaling-lurk/tSglnd7XT8qmj4DqU4pKEA.jpg
Binary files differ.
diff --git a/on-not-scaling-lurk/taSj1BprSmeaHqBqoKOS6Q.jpg b/on-not-scaling-lurk/taSj1BprSmeaHqBqoKOS6Q.jpg
Binary files differ.
diff --git a/on-not-scaling-lurk/template.html b/on-not-scaling-lurk/template.html
@@ -0,0 +1,94 @@
+<!DOCTYPE html>
+<html lang="en">
+  <head>
+    <meta charset="UTF-8">
+    <meta name="viewport" content="width=device-width, initial-scale=1">
+    <meta property="og:title" content="$title$" />
+    <meta property="og:description" content="Ruminations on scale, technology and sustainability and their consequences for how lurk.org is run" />
+    <meta property="og:image" itemprop="image" content="https://txt.lurk.org/on-not-scaling-lurk/taSj1BprSmeaHqBqoKOS6Q.jpg" />
+    <meta property="og:url" content="https://txt.lurk.org/on-not-scaling-lurk/" />
+    <meta property="og:type" content="website" />
+    <meta name="twitter:card" content="summary_large_image" />
+    <meta name="twitter:title" content="$title$" />
+    <meta name="twitter:description" content="Ruminations on scale, technology and sustainability and their consequences for how lurk.org is run" />
+    <meta name="twitter:image" content="https://txt.lurk.org/on-not-scaling-lurk/taSj1BprSmeaHqBqoKOS6Q.jpg" />
+    <meta name="twitter:image:alt" content="Capybara with a hat says 'too rad to be sad'" />
+    <title>$title$</title>
+    <style type="text/css">
+      @font-face {
+        font-family: 'Route159-SemiBold';
+        src: url('../Route159-SemiBold.woff') format('woff');
+        }
+      body{
+        margin:40px auto;
+        max-width:650px;
+        line-height:1.6;
+        font-size:20px;
+        color:#444;
+        padding:0 10px;
+        font-family: 'Route159-SemiBold';
+
+      }
+      a {
+        word-wrap: anywhere;
+      }
+      a:hover, a:visited {
+        color: #7800FF;
+      }
+      h1,h2,h3{
+        line-height:1.2;
+      }
+      figcaption {
+        text-align: center;
+      }
+      #ecf {
+        padding-top: 6em;
+        display: block;
+        margin-left: auto;
+        width: 25%;
+      }
+
+      img {
+        width: 100%;
+        margin: auto;
+      }
+
+    /* Order matters: the broader 650px rules come first so that the
+       narrower 400px rules below can override them on small screens. */
+    @media only screen
+       and (max-device-width: 650px)
+       and (orientation: portrait) {
+      body {
+        margin: 1em 2em;
+        font-size: 16px;
+        line-height: 1.4;
+      }
+      blockquote {
+        margin: 0 auto;
+        border-left: 2px solid #444;
+        padding-left: 1em;
+      }
+      figure {
+        width: 100%;
+        margin: auto;
+      }
+    }
+
+    @media only screen
+       and (max-device-width: 400px)
+       and (orientation: portrait) {
+      body {
+        margin: 1em 2em;
+        font-size: 14px;
+        line-height: 1.4;
+        font-family: sans-serif;
+      }
+      figure {
+        width: 100%;
+        margin: auto;
+      }
+    }
+    </style>
+  </head>
+  <body>
+    $body$
+  </body>
+</html>
diff --git a/on-not-scaling-lurk/thecycle-small.png b/on-not-scaling-lurk/thecycle-small.png
Binary files differ.