This includes:
- A simple HTTP server for bulk and single lookups of hashes
- A simple DNS server to do lookups via DNS
- Various import scripts for NSRL

This works on a test instance.

TODO:
- Automatic script for NSRL download and import
- Bloom filter export
- Improved documentation
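For context, a lookup against the HTTP server might look like the minimal client sketch below. The bind address, port, and the /lookup/md5/<hash> endpoint path are assumptions modelled on the public hashlookup service rather than taken from this repository, and may need adjusting for a local deployment.

import requests

# Base URL of a local hashlookup HTTP server; host, port and endpoint path
# are assumptions and may differ in an actual deployment.
HASHLOOKUP_URL = "http://127.0.0.1:5000"

def lookup_md5(md5_hash):
    """Query the hashlookup HTTP server for a single MD5 hash and return the JSON record."""
    resp = requests.get("{}/lookup/md5/{}".format(HASHLOOKUP_URL, md5_hash), timeout=10)
    resp.raise_for_status()
    return resp.json()

if __name__ == "__main__":
    # Placeholder hash purely for illustration.
    print(lookup_md5("8ED4B4ED952526D89899E723F3488DE4"))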
import sys

import redis

# Redis instance backing hashlookup-server; decode_responses keeps values as str.
rdb = redis.Redis(host='127.0.0.1', port=6666, decode_responses=True)

# CSV export of the strontic xcyclopedia dataset.
lines = open('../data/xcyclopedia/strontic-xcyclopedia.csv', 'r')

ln = 0
rdb.delete("stat:xcyclopedia-import")
maxvalue = 5000000000

for l in lines:
    if ln == 0:
        # First row holds the column names. Note: the naive quote stripping and
        # comma split does not cope with quoted fields that contain commas.
        headers = l.rstrip().replace("\"", "").split(",")
        print(headers)
    else:
        records = l.rstrip().replace("\"", "").split(",")
        drecords = {}
        for index, value in enumerate(records):
            try:
                drecords[headers[index]] = value
            except IndexError:
                # Row has more fields than headers; ignore the overflow.
                continue

        print(drecords)
        print(drecords['hash_md5'])
        #rdb.set("l:{}".format(drecords['hash_md5']), drecords['hash_sha1'])
        # hmset is deprecated in recent redis-py; hset(..., mapping=drecords) replaces it.
        #rdb.hmset("h:{}".format(drecords['hash_sha1']), drecords)
        #rdb.incrby("stat:xcyclopedia-import")
    if ln == maxvalue:
        sys.exit(1)
    ln = ln + 1
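Once the commented-out writes above are enabled, the import can be spot-checked directly in Redis. The sketch below follows the key patterns used by the script (l:<md5>, h:<sha1>, stat:xcyclopedia-import); the MD5 value is a placeholder for illustration only.

import redis

rdb = redis.Redis(host='127.0.0.1', port=6666, decode_responses=True)

md5 = "0B4340EF812FD44B1E5E0BDF7D2C07C9"      # placeholder MD5
sha1 = rdb.get("l:{}".format(md5))            # MD5 -> SHA-1 pointer set by the import
if sha1:
    print(rdb.hgetall("h:{}".format(sha1)))   # full xcyclopedia record keyed by SHA-1
print(rdb.get("stat:xcyclopedia-import"))     # counter of imported rows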