-rw-r--r--  .gitignore           |  1
-rw-r--r--  Makefile             |  5
-rw-r--r--  TODO                 |  1
-rw-r--r--  articles/unburn.txt  | 76
-rwxr-xr-x  dirindex.sh          | 22
5 files changed, 105 insertions, 0 deletions
diff --git a/.gitignore b/.gitignore
index 0081a87..7749a74 100644
--- a/.gitignore
+++ b/.gitignore
@@ -5,5 +5,6 @@
index.ttl
pubs/index.txt
pubs/*.pdf
+articles/index.txt
!includes/*.xml
diff --git a/Makefile b/Makefile
index 96e3d8e..2b19a33 100644
--- a/Makefile
+++ b/Makefile
@@ -10,6 +10,7 @@ BIB=pubs/index.ttl
BIBTXT=pubs/index.txt
PAGES=$(shell find . -name '*.txt' ! -name 'robots.txt' ! -name $(RSS)) $(BIBTXT)
PUBS=$(shell find ./pubs/ -name '*.txt' ! -name 'index.*')
+ARTS=$(shell find ./articles/ -name '*.txt' ! -name 'index.*')
EXTRAS=$(shell find . -name '*.css') card.ttl
TARGETS=$(PAGES:.txt=.html) $(RSS) $(BIBTXT)
GZIP=$(PAGES:=.gz) $(TARGETS:=.gz) $(EXTRAS:=.gz) $(BIB:=.gz)
@@ -50,6 +51,10 @@ pubs/index.txt: $(BIB)
echo making $@
sh bibtotxt.sh $< > $@
+articles/index.txt: $(ARTS)
+ echo making $@
+ sh dirindex.sh articles > $@
+
clean:
rm -f -- $(TARGETS) $(GZIP)
diff --git a/TODO b/TODO
new file mode 100644
index 0000000..a1a542c
--- /dev/null
+++ b/TODO
@@ -0,0 +1 @@
+automatically generate the tex from markdown
diff --git a/articles/unburn.txt b/articles/unburn.txt
new file mode 100644
index 0000000..9fdd9ab
--- /dev/null
+++ b/articles/unburn.txt
@@ -0,0 +1,76 @@
+Unburn
+=======================================================================
+
+### How and why to eschew feedburner
+
+Feedburner is fucked up. It proxies feeds through a centralised
+service for the purposes of surveillance and advertising. It is owned
+and operated by Google, so the data collected about your reading
+habits is very likely linked with the profile Google already holds
+on you.
+
+## Background
+
+Feeds are not supposed to be like this. The web is not supposed to be
+like this. The web is decentralised. Feeds make it easy to quickly
+gather information from many different places into one spot.
+
+The decentralisation of the web makes it difficult for anyone to
+find out all of the different websites you visit (the exceptions being
+internet service providers and those with influence over them, and
+even they can be defeated using [Tor](https://www.torproject.org)).
+This makes practical surveillance of your reading habits difficult.
+Decentralisation also creates a resilient infrastructure which is very
+difficult to censor.
+
+## The problem
+
+Feedburner seeks to disrupt this in the name of profit. It offers
+website owners a way of sending all requests for their feeds through
+a central website. In exchange it offers website owners the ability
+to inject advertising into feeds and to view more information about
+the readers of their website. Feedburner collects and analyses
+information about each reader's habits, which it uses to better divert
+readers' attention towards the commercial, either directly through its
+own adverts, or by selling detailed reader profiles to anybody
+willing to pay.
+
+To summarise: feedburner surveils your reading habits and shares them
+with anyone willing to pay, undermines the censorship-resistant
+infrastructure of the web, and interrupts your reading with adverts.
+
+## The solution
+
+Fortunately, it is quite easy to defeat the redirects to feedburner.
+
+First, though, we need to find out which feeds are redirected through
+feedburner. This script prints any URL from the file 'urls' which
+currently goes through feedburner:
+
+    #!/bin/sh
+    # check each feed's headers for a feedburner redirect
+    urlfile=urls
+    sed -e '/^#/d' -e '/^$/d' < "$urlfile" | awk '{print $1}' \
+    | while read -r u; do
+        fb=`curl -s -I "$u" | grep -ci feedburner`
+        test "$fb" -gt 0 && echo "$u needs unburning"
+    done
+
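+For reference, the 'urls' file here is a Snownews-style subscription
+list: one feed address per line, with the URL as the first field. The
+entries below are made up for illustration:
+
+    # lines starting with '#' and blank lines are skipped
+    http://example.com/feed/
+    http://example.org/atom.xml news
+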
+Now that you know which feeds to deal with, you can go ahead and
+unburn them.
+
+The key is to claim to be feedburner when talking to the website; it
+will then dutifully let you through to the real feed. Do this by
+setting the User-Agent string to 'feedburner' in your HTTP request.
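+
+You can check this by hand with curl; here example.com stands in for
+one of your burned feeds:
+
+    # with the default User-Agent the site redirects to feedburner
+    curl -s -I http://example.com/feed/ | grep -i '^location'
+    # claiming to be feedburner there is no redirect; grep prints nothing
+    curl -s -I -A 'feedburner' http://example.com/feed/ | grep -i '^location'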
+
+If you use a newsreader which supports the Snownews extensions, this
+is easy to integrate. Create a simple shell script called unburn.sh:
+
+    #!/bin/sh
+    # fetch a feed, claiming to be feedburner via the User-Agent header
+    curl -s -A 'feedburner' "$1"
+
+Then replace the URL entry in your newsreader with
+`"exec:sh unburn.sh http://feedurl/"`
+
+Other newsreaders will have different procedures, but the same
+principle applies: just set the User-Agent header to 'feedburner'.
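+
+If your newsreader can neither run commands nor change its User-Agent,
+you can fetch the feeds yourself and point the reader at local copies.
+A minimal sketch, assuming a file 'burned-urls' listing one feed per
+line (file names and paths here are illustrative):
+
+    #!/bin/sh
+    # fetch each feed listed in 'burned-urls' (example name) with the
+    # feedburner User-Agent, saving it under a sanitised file name
+    mkdir -p "$HOME/.feeds"
+    while read -r u; do
+        name=`echo "$u" | tr -cs 'A-Za-z0-9' '_'`
+        curl -s -A 'feedburner' "$u" > "$HOME/.feeds/$name.xml"
+    done < burned-urls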
diff --git a/dirindex.sh b/dirindex.sh
new file mode 100755
index 0000000..127a4c0
--- /dev/null
+++ b/dirindex.sh
@@ -0,0 +1,22 @@
+#!/bin/sh
+
+# take the basename of the directory and capitalise its first letter
+# (GNU sed \U) to form the page title
+t=`echo "$1" | sed -e 's/.*\/\([^\/]*\)/\1/' -e 's/^./\U&/'`
+echo "$t"
+echo "======================================================================="
+echo ""
+
+# list of files sorted by modtime
+find "$1" -type f -name '*.txt' ! -name 'index.*' -printf '%T@ %p\n' \
+| sort -r -n | cut -d' ' -f2- | while read -r f; do
+    reluri=`echo "$f" | sed 's/.*\/\([^\/]*\)\.txt/\1/'`
+    title=`sed -e 's/# //' -e 1q < "$f"`
+
+    echo -n "- [$title]($reluri)"
+    # append a subtitle if the file has a '### ' line
+    if test `grep -c '^### ' < "$f"` -ne 0; then
+        subtitle=`grep '^### ' < "$f" | sed -e 's/^### //' -e 1q`
+        echo -n ": $subtitle"
+    fi
+    echo ""
+done
+
+exit 0
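
For reference, run over the articles directory above, dirindex.sh
prints a markdown index built from each file's first line (the title)
and its '### ' line (the subtitle), so here it would emit:

    Articles
    =======================================================================

    - [Unburn](unburn): How and why to eschew feedburner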