Steven Smith revised this gist.
1 file changed, 7 insertions, 1 deletion
torrenttags.py
```diff
@@ -16,6 +16,7 @@ from random import choice
 from bs4 import BeautifulSoup as Soup
 from argparse import ArgumentParser
 from sys import exit, stderr
+from base64 import b16encode, b32decode
 
 urls = ["http://api1.torrenttags.com/v1/", "http://api2.torrenttags.com/v1/", "http://api3.torrenttags.com/v1/"]
 
@@ -30,12 +31,17 @@ def get_result(hash, ticket=None, rethtml=False):
     if not ticket:
         ticket = get_ticket()
     if len(hash) != 40:
-        return False
+        hash = convert_hash(hash)
     data = Soup(get(get_api_url("get-tags-html"), params={'ticket': ticket, 'torrent': hash}).json()["html"], "html.parser")
     if "no_claims" in data.find('img')["src"]:
         return True, data
     return False, data
 
+def convert_hash(torrent_hash):
+    """ Turns out hashes in magnet links can be base32 encoded, which shortens them to 32 characters """
+    b16 = b16encode(b32decode(torrent_hash))
+    return b16.decode('utf-8').lower()
+
 def main():
     parser = ArgumentParser()
     parser.add_argument("hash", help="Torrent hash to look up")
```
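For context, the new `convert_hash()` helper turns the 32-character base32 form of an info-hash (as found in some magnet links) into the 40-character hex form the API expects. A minimal sketch of the same round trip; the sample value below is just the base32 encoding of 20 arbitrary bytes, not a real torrent hash:

```python
# Sketch of the conversion convert_hash() performs; the input is a
# placeholder base32 string (20 arbitrary bytes), not a real info-hash.
from base64 import b16encode, b32decode

b32_hash = "MFRGGZDFMZTWQ2LKNNWG23TPOBYXE43U"           # 32 chars, base32
hex_hash = b16encode(b32decode(b32_hash)).decode("utf-8").lower()
print(len(hex_hash), hex_hash)                           # 40-character lowercase hex
```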
Steven Smith revised this gist.
1 file changed, 10 insertions, 6 deletions
torrenttags.py
```diff
@@ -26,24 +26,28 @@ def get_ticket():
     data = get(get_api_url("get-ticket")).json()
     return data["status"], data["ticket"]
 
-def get_result(hash, ticket=None):
+def get_result(hash, ticket=None, rethtml=False):
     if not ticket:
         ticket = get_ticket()
     if len(hash) != 40:
         return False
     data = Soup(get(get_api_url("get-tags-html"), params={'ticket': ticket, 'torrent': hash}).json()["html"], "html.parser")
     if "no_claims" in data.find('img')["src"]:
-        return True
-    return False
+        return True, data
+    return False, data
 
 def main():
     parser = ArgumentParser()
     parser.add_argument("hash", help="Torrent hash to look up")
+    parser.add_argument("--html", help="Prints html response to stdout, includes Chilling Effects reports if available", action="store_true")
     args = parser.parse_args()
-    if get_result(args.hash):
-        return "No claims (yet!)", 0
+    result, data = get_result(args.hash, rethtml=args.html)
+    if args.html:
+        print(data)
+    if result:
+        return "{}: No claims (yet!)".format(args.hash), 0
     else:
-        return "Claims found", 2
+        return "{}: Claims found".format(args.hash), 2
 
 if __name__ == "__main__":
     outp, ret = main()
```
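With the `--html` flag and per-hash output added in this revision, the script can also be driven from other tooling via its exit status (0 for no claims, 2 for claims found). A hypothetical usage sketch, assuming the gist is saved as `torrenttags.py`; the hash is a placeholder:

```python
# Hypothetical wrapper: run the gist as a subprocess and branch on its
# exit code (0 = no claims, 2 = claims found). The hash is a placeholder.
from subprocess import run

proc = run(["python3", "torrenttags.py", "--html",
            "0123456789abcdef0123456789abcdef01234567"])
if proc.returncode == 0:
    print("no claims on this hash (yet)")
elif proc.returncode == 2:
    print("claims found for this hash")
```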
Steven Smith revised this gist.
1 file changed, 51 insertions
torrenttags.py (file created)
```python
#!/usr/bin/env python3
__usage__ = """
usage: torrenttags [-h] [--html] hash

positional arguments:
  hash        Torrent hash to look up

optional arguments:
  -h, --help  show this help message and exit
  --html      Prints html response to stdout, includes Chilling Effects
              reports if available
"""

from requests import get
from random import choice
from bs4 import BeautifulSoup as Soup
from argparse import ArgumentParser
from sys import exit, stderr

urls = ["http://api1.torrenttags.com/v1/", "http://api2.torrenttags.com/v1/", "http://api3.torrenttags.com/v1/"]

def get_api_url(method=None):
    return choice(urls) + method

def get_ticket():
    data = get(get_api_url("get-ticket")).json()
    return data["status"], data["ticket"]

def get_result(hash, ticket=None):
    if not ticket:
        ticket = get_ticket()
    if len(hash) != 40:
        return False
    data = Soup(get(get_api_url("get-tags-html"), params={'ticket': ticket, 'torrent': hash}).json()["html"], "html.parser")
    if "no_claims" in data.find('img')["src"]:
        return True
    return False

def main():
    parser = ArgumentParser()
    parser.add_argument("hash", help="Torrent hash to look up")
    args = parser.parse_args()
    if get_result(args.hash):
        return "No claims (yet!)", 0
    else:
        return "Claims found", 2

if __name__ == "__main__":
    outp, ret = main()
    print(outp, file=stderr)
    exit(ret)
```
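For reference, the whole script boils down to two GET requests against a randomly chosen API mirror. A minimal sketch of that exchange with requests, assuming the endpoints respond as the code above expects (a JSON ticket, then a JSON body with an "html" field); the hash is a placeholder:

```python
# Sketch of the two-request flow the script wraps. Endpoint behaviour
# (JSON shapes, the "no_claims" image in the returned HTML) is assumed
# from the code above; the hash is a placeholder.
from requests import get
from bs4 import BeautifulSoup as Soup

base = "http://api1.torrenttags.com/v1/"
torrent_hash = "0123456789abcdef0123456789abcdef01234567"

ticket = get(base + "get-ticket").json()["ticket"]
html = get(base + "get-tags-html",
           params={"ticket": ticket, "torrent": torrent_hash}).json()["html"]
soup = Soup(html, "html.parser")
print("no_claims" in soup.find("img")["src"])   # True when no claims are on file
```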