aly / postcodes.json

0 likes
0 forks
1 files
Last active 2022-11-19
1 {"200": [["200", "Australian National University", "ACT", -35.28, 149.12]],
2 "221": [["221", "Barton", "ACT", -35.2, 149.1]],
3 "800": [["800", "Darwin", "NT", -12.8, 130.96]],
4 "801": [["801", "Darwin", "NT", -12.8, 130.96]],
5 "804": [["804", "Parap", "NT", -12.43, 130.84]],
6 "810": [["810", "Alawa", "NT", -12.38, 130.88], ["810", "Brinkin", "NT", -12.38, 130.88], ["810", "Casuarina", "NT", -12.38, 130.88], ["810", "Coconut Grove", "NT", -12.38, 130.88], ["810", "Jingili", "NT", -12.38, 130.88], ["810", "Lee Point", "NT", -12.38, 130.88], ["810", "Lyons", "NT", -12.38, 130.88], ["810", "Millner", "NT", -12.38, 130.88], ["810", "Moil", "NT", -12.38, 130.88], ["810", "Muirhead", "NT", -12.38, 130.88], ["810", "Nakara", "NT", -12.38, 130.88], ["810", "Nightcliff", "NT", -12.38, 130.88], ["810", "Rapid Creek", "NT", -12.38, 130.88], ["810", "Tiwi", "NT", -12.38, 130.88], ["810", "Wagaman", "NT", -12.38, 130.88], ["810", "Wanguri", "NT", -12.38, 130.88]],
7 "811": [["811", "Casuarina", "NT", -12.38, 130.85]],
8 "812": [["812", "Anula", "NT", -12.4, 130.91], ["812", "Buffalo Creek", "NT", -12.4, 130.91], ["812", "Holmes", "NT", -12.4, 130.91], ["812", "Karama", "NT", -12.4, 130.91], ["812", "Leanyer", "NT", -12.4, 130.91], ["812", "Malak", "NT", -12.4, 130.91], ["812", "Marrara", "NT", -12.4, 130.91], ["812", "Northlakes", "NT", -12.4, 130.91], ["812", "Sanderson", "NT", -12.4, 130.91], ["812", "Woodleigh Gardens", "NT", -12.4, 130.91], ["812", "Wulagi", "NT", -12.4, 130.91]],
9 "813": [["813", "Sanderson", "NT", 0.0, 0.0]],
10 "814": [["814", "Nightcliff", "NT", -12.38, 130.85]],

aly / lawtechie-story-links.py

0 likes
0 forks
1 files
Last active 2020-06-08
A script to parse lawtechie's post history and show, for each story that is linked to from another story, which stories link to it. Used for collecting multi-part stories together. Does not show standalone (singleton) stories. More parsing work remains to be done in the future.
1 #!/usr/bin/env python3
2 from requests import get
3 from json import load,dump,dumps
4 from collections import defaultdict
5 from markdown import markdown
6 from lxml import etree
7 from time import sleep
8
9 html = False
10 url = "https://www.reddit.com/search.json"