This repository has been archived by the owner on Apr 26, 2024. It is now read-only.

Make scripts/ and scripts-dev/ pass pyflakes (and the rest of the codebase on py3) #4068

Merged: 9 commits, Oct 20, 2018
2 changes: 1 addition & 1 deletion .travis.yml
@@ -14,7 +14,7 @@ matrix:
- python: 2.7
env: TOX_ENV=packaging

- python: 2.7
- python: 3.6
env: TOX_ENV=pep8

- python: 2.7
1 change: 1 addition & 0 deletions changelog.d/4068.misc
@@ -0,0 +1 @@
Make the Python scripts in the top-level scripts folders meet pep8 and pass flake8.
36 changes: 15 additions & 21 deletions scripts-dev/check_auth.py
@@ -1,21 +1,20 @@
from synapse.events import FrozenEvent
from synapse.api.auth import Auth

from mock import Mock
from __future__ import print_function

import argparse
import itertools
import json
import sys

from mock import Mock

from synapse.api.auth import Auth
from synapse.events import FrozenEvent


def check_auth(auth, auth_chain, events):
auth_chain.sort(key=lambda e: e.depth)

auth_map = {
e.event_id: e
for e in auth_chain
}
auth_map = {e.event_id: e for e in auth_chain}

create_events = {}
for e in auth_chain:
@@ -25,31 +24,26 @@ def check_auth(auth, auth_chain, events):
for e in itertools.chain(auth_chain, events):
auth_events_list = [auth_map[i] for i, _ in e.auth_events]

auth_events = {
(e.type, e.state_key): e
for e in auth_events_list
}
auth_events = {(e.type, e.state_key): e for e in auth_events_list}

auth_events[("m.room.create", "")] = create_events[e.room_id]

try:
auth.check(e, auth_events=auth_events)
except Exception as ex:
print "Failed:", e.event_id, e.type, e.state_key
print "Auth_events:", auth_events
print ex
print json.dumps(e.get_dict(), sort_keys=True, indent=4)
print("Failed:", e.event_id, e.type, e.state_key)
print("Auth_events:", auth_events)
print(ex)
print(json.dumps(e.get_dict(), sort_keys=True, indent=4))
# raise
print "Success:", e.event_id, e.type, e.state_key
print("Success:", e.event_id, e.type, e.state_key)


if __name__ == '__main__':
parser = argparse.ArgumentParser()

parser.add_argument(
'json',
nargs='?',
type=argparse.FileType('r'),
default=sys.stdin,
'json', nargs='?', type=argparse.FileType('r'), default=sys.stdin
)

args = parser.parse_args()
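A note on the print changes above: check_auth.py still has to run under Python 2, so the Python 3 style print() calls only behave as intended together with the new "from __future__ import print_function" import. A minimal illustration of the difference (not part of this change):

from __future__ import print_function

# Without the __future__ import, Python 2 would parse the call below as the
# print *statement* applied to a tuple and output ('Failed:', 'some_event_id').
# With it, print is the builtin function on both Python 2 and 3, and the
# arguments are joined with spaces: Failed: some_event_id
print("Failed:", "some_event_id")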
32 changes: 18 additions & 14 deletions scripts-dev/check_event_hash.py
@@ -1,10 +1,15 @@
from synapse.crypto.event_signing import *
from unpaddedbase64 import encode_base64

import argparse
import hashlib
import sys
import json
import logging
import sys

from unpaddedbase64 import encode_base64

from synapse.crypto.event_signing import (
check_event_content_hash,
compute_event_reference_hash,
)


class dictobj(dict):
@@ -24,27 +29,26 @@ def get_pdu_json(self):

def main():
parser = argparse.ArgumentParser()
parser.add_argument("input_json", nargs="?", type=argparse.FileType('r'),
default=sys.stdin)
parser.add_argument(
"input_json", nargs="?", type=argparse.FileType('r'), default=sys.stdin
)
args = parser.parse_args()
logging.basicConfig()

event_json = dictobj(json.load(args.input_json))

algorithms = {
"sha256": hashlib.sha256,
}
algorithms = {"sha256": hashlib.sha256}

for alg_name in event_json.hashes:
if check_event_content_hash(event_json, algorithms[alg_name]):
print "PASS content hash %s" % (alg_name,)
print("PASS content hash %s" % (alg_name,))
else:
print "FAIL content hash %s" % (alg_name,)
print("FAIL content hash %s" % (alg_name,))

for algorithm in algorithms.values():
name, h_bytes = compute_event_reference_hash(event_json, algorithm)
print "Reference hash %s: %s" % (name, encode_base64(h_bytes))
print("Reference hash %s: %s" % (name, encode_base64(h_bytes)))

if __name__=="__main__":
main()

if __name__ == "__main__":
main()
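The import changes in check_event_hash.py follow the pattern used across this PR: the star import from synapse.crypto.event_signing is replaced with the two names the script actually uses (flake8 flags star imports as F403 and the names they hide as F405), and imports are grouped standard library first, then third-party, then the project's own modules. Roughly:

# Standard library imports first.
import json
import sys

# Then third-party packages.
from unpaddedbase64 import encode_base64

# Then project modules, each name imported explicitly so flake8 can tell
# where every symbol comes from (no F403/F405 warnings).
from synapse.crypto.event_signing import check_event_content_hash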
36 changes: 19 additions & 17 deletions scripts-dev/check_signature.py
@@ -1,15 +1,15 @@

from signedjson.sign import verify_signed_json
from signedjson.key import decode_verify_key_bytes, write_signing_keys
from unpaddedbase64 import decode_base64

import urllib2
import argparse
import json
import logging
import sys
import urllib2

import dns.resolver
import pprint
import argparse
import logging
from signedjson.key import decode_verify_key_bytes, write_signing_keys
from signedjson.sign import verify_signed_json
from unpaddedbase64 import decode_base64


def get_targets(server_name):
if ":" in server_name:
@@ -23,6 +23,7 @@ def get_targets(server_name):
except dns.resolver.NXDOMAIN:
yield (server_name, 8448)


def get_server_keys(server_name, target, port):
url = "https://%s:%i/_matrix/key/v1" % (target, port)
keys = json.load(urllib2.urlopen(url))
@@ -33,12 +34,14 @@ def get_server_keys(server_name, target, port):
verify_keys[key_id] = verify_key
return verify_keys


def main():

parser = argparse.ArgumentParser()
parser.add_argument("signature_name")
parser.add_argument("input_json", nargs="?", type=argparse.FileType('r'),
default=sys.stdin)
parser.add_argument(
"input_json", nargs="?", type=argparse.FileType('r'), default=sys.stdin
)

args = parser.parse_args()
logging.basicConfig()
@@ -48,24 +51,23 @@ def main():
for target, port in get_targets(server_name):
try:
keys = get_server_keys(server_name, target, port)
print "Using keys from https://%s:%s/_matrix/key/v1" % (target, port)
print("Using keys from https://%s:%s/_matrix/key/v1" % (target, port))
write_signing_keys(sys.stdout, keys.values())
break
except:
except Exception:
logging.exception("Error talking to %s:%s", target, port)

json_to_check = json.load(args.input_json)
print "Checking JSON:"
print("Checking JSON:")
for key_id in json_to_check["signatures"][args.signature_name]:
try:
key = keys[key_id]
verify_signed_json(json_to_check, args.signature_name, key)
print "PASS %s" % (key_id,)
except:
print("PASS %s" % (key_id,))
except Exception:
logging.exception("Check for key %s failed" % (key_id,))
print "FAIL %s" % (key_id,)
print("FAIL %s" % (key_id,))


if __name__ == '__main__':
main()
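One behavioural detail in check_signature.py: the bare except clauses become except Exception, which is what flake8 reports as E722. A bare except also catches KeyboardInterrupt and SystemExit, which is rarely what a diagnostic script wants. A minimal sketch of the difference (illustrative only; risky_operation is a hypothetical stand-in):

import logging

logging.basicConfig()


def risky_operation():
    # Hypothetical stand-in for a network call that can fail.
    raise IOError("connection refused")


try:
    risky_operation()
except Exception:
    # Catches ordinary errors such as the IOError above, but unlike a bare
    # "except:" it lets KeyboardInterrupt and SystemExit propagate, because
    # those derive from BaseException rather than Exception.
    logging.exception("Error talking to server")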

40 changes: 22 additions & 18 deletions scripts-dev/convert_server_keys.py
@@ -1,13 +1,21 @@
import psycopg2
import yaml
import sys
import hashlib
import json
import sys
import time
import hashlib
from unpaddedbase64 import encode_base64

import six

import psycopg2
import yaml
from canonicaljson import encode_canonical_json
from signedjson.key import read_signing_keys
from signedjson.sign import sign_json
from canonicaljson import encode_canonical_json
from unpaddedbase64 import encode_base64

if six.PY2:
db_type = six.moves.builtins.buffer
else:
db_type = memoryview


def select_v1_keys(connection):
@@ -39,18 +47,17 @@ def select_v2_json(connection):
cursor.close()
results = {}
for server_name, key_id, key_json in rows:
results.setdefault(server_name, {})[key_id] = json.loads(str(key_json).decode("utf-8"))
results.setdefault(server_name, {})[key_id] = json.loads(
str(key_json).decode("utf-8")
)
return results


def convert_v1_to_v2(server_name, valid_until, keys, certificate):
return {
"old_verify_keys": {},
"server_name": server_name,
"verify_keys": {
key_id: {"key": key}
for key_id, key in keys.items()
},
"verify_keys": {key_id: {"key": key} for key_id, key in keys.items()},
"valid_until_ts": valid_until,
"tls_fingerprints": [fingerprint(certificate)],
}
@@ -65,7 +72,7 @@ def rows_v2(server, json):
valid_until = json["valid_until_ts"]
key_json = encode_canonical_json(json)
for key_id in json["verify_keys"]:
yield (server, key_id, "-", valid_until, valid_until, buffer(key_json))
yield (server, key_id, "-", valid_until, valid_until, db_type(key_json))


def main():
@@ -87,7 +94,7 @@ def main():

result = {}
for server in keys:
if not server in json:
if server not in json:
v2_json = convert_v1_to_v2(
server, valid_until, keys[server], certificates[server]
)
@@ -96,18 +103,15 @@ def main():

yaml.safe_dump(result, sys.stdout, default_flow_style=False)

rows = list(
row for server, json in result.items()
for row in rows_v2(server, json)
)
rows = list(row for server, json in result.items() for row in rows_v2(server, json))

cursor = connection.cursor()
cursor.executemany(
"INSERT INTO server_keys_json ("
" server_name, key_id, from_server,"
" ts_added_ms, ts_valid_until_ms, key_json"
") VALUES (%s, %s, %s, %s, %s, %s)",
rows
rows,
)
connection.commit()

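The one change in convert_server_keys.py that goes beyond formatting is the db_type shim: Python 2's buffer() builtin does not exist on Python 3, so the script now selects buffer on Python 2 and memoryview on Python 3 before handing the canonical key JSON to psycopg2. A minimal sketch of the same idea, outside the script:

import six

if six.PY2:
    # Python 2: psycopg2 takes buffer() objects for binary parameters.
    db_type = six.moves.builtins.buffer
else:
    # Python 3: memoryview plays the same role.
    db_type = memoryview

key_json = b'{"verify_keys": {}}'  # stand-in for encode_canonical_json(...)
wrapped = db_type(key_json)        # what rows_v2() now yields for key_json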