fixup! Add serve_header.py for rapid testing on Compiler Explorer
falbrechtskirchinger committed Apr 26, 2022
1 parent 80d3a5e commit c6f40b7
Showing 2 changed files with 53 additions and 37 deletions.
5 changes: 2 additions & 3 deletions scripts/serve_header/README.md
@@ -31,7 +31,7 @@ https:
 This configuration serves the `json.hpp` header on `https://localhost:8443/single_include/nlohmann/json.hpp`.
 
-Make sure you have the following python dependencies installed:
+Make sure you have the following python dependencies installed:
 ```
 PyYAML
 watchdog
@@ -50,8 +50,7 @@ using namespace nlohmann;
 #include <iostream>
 int main() {
-    auto m = json::meta();
-    std::cout << m["build_time"] << " (" << m["build_count"] << ")\n";
+    std::cout << JSON_BUILD_TIME << " (" << JSON_BUILD_COUNT << ")\n";
     return 0;
 }
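As an aside (not part of this commit), one way to confirm that the served header really carries the injected macros is to fetch it and grep for them. A minimal sketch, assuming the HTTPS configuration above with its self-signed certificate:

```python
# Hypothetical smoke test: fetch the served header and print the injected defines.
import re
import ssl
import urllib.request

URL = 'https://localhost:8443/single_include/nlohmann/json.hpp'
ctx = ssl._create_unverified_context()  # the local certificate is self-signed

with urllib.request.urlopen(URL, context=ctx) as resp:
    header = resp.read().decode('utf-8')

for macro in ('JSON_BUILD_COUNT', 'JSON_BUILD_TIME'):
    match = re.search(rf'#define {macro} .*', header)
    print(match.group(0) if match else f'{macro} not found')
```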
85 changes: 51 additions & 34 deletions scripts/serve_header/serve_header.py
@@ -1,6 +1,6 @@
 #!/usr/bin/env python3
 
-import os.path, sys, time
+import os, sys
 import shutil
 import logging
 import re
@@ -25,8 +25,7 @@
 
 DATETIME_FORMAT = '%Y-%m-%d %H:%M:%S'
 
-JSON_META_PROTOTYPE_RE = re.compile(r'\s*static\s+basic_json\s+meta\s*\(\s*\)')
-JSON_META_RETURN_RE = re.compile(r'\s*return\s+result\s*;')
+JSON_VERSION_RE = re.compile(r'\s*#\s*define\s+NLOHMANN_JSON_VERSION_MAJOR\s+')
 
 class ExitHandler(logging.StreamHandler):
     def __init__(self, level):
@@ -37,10 +36,10 @@ def emit(self, record):
         if record.levelno >= self.level:
             sys.exit(1)
 
-def is_project_root(dir='.'):
-    makefile = os.path.join(dir, MAKEFILE)
-    include = os.path.join(dir, INCLUDE)
-    single_include = os.path.join(dir, SINGLE_INCLUDE)
+def is_project_root(test_dir='.'):
+    makefile = os.path.join(test_dir, MAKEFILE)
+    include = os.path.join(test_dir, INCLUDE)
+    single_include = os.path.join(test_dir, SINGLE_INCLUDE)
 
     return (os.path.exists(makefile)
             and os.path.isfile(makefile)
@@ -57,11 +56,15 @@ def __init__(self, root_dir, tree_dir):
         self.rel_header = os.path.relpath(self.header, root_dir)
         self.dirty = True
         self.build_count = 0
-        self.build_time = None
+        t = os.path.getmtime(self.header)
+        t = datetime.fromtimestamp(t)
+        self.build_time = t.strftime(DATETIME_FORMAT)
 
+    """Work tree hash is derived from tree_dir"""
     def __hash__(self):
         return hash((self.tree_dir))
 
+    """Work tree identity is based on tree_dir"""
     def __eq__(self, other):
         if not isinstance(other, type(self)):
             return NotImplemented
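For orientation (outside the diff), the new `build_time` is simply the header's modification time rendered with `DATETIME_FORMAT`. A standalone sketch, with the header path assumed relative to a project root:

```python
# Sketch of the build_time computation; the path is an assumption.
import os
from datetime import datetime

DATETIME_FORMAT = '%Y-%m-%d %H:%M:%S'

header = 'single_include/nlohmann/json.hpp'
mtime = os.path.getmtime(header)
print(datetime.fromtimestamp(mtime).strftime(DATETIME_FORMAT))  # e.g. 2022-04-26 14:03:11
```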
@@ -105,21 +108,21 @@ def __init__(self, root_dir):
         self.observer.start()
 
     def scan(self, base_dir):
-        dirs = [base_dir]
-        while len(dirs) > 0:
-            dir = dirs.pop()
-            if is_project_root(dir):
-                dir = os.path.abspath(dir)
+        scan_dirs = [base_dir]
+        while len(scan_dirs) > 0:
+            scan_dir = scan_dirs.pop()
+            if is_project_root(scan_dir):
+                scan_dir = os.path.abspath(scan_dir)
                 # skip work trees in build directories
-                if not dir.endswith('/_deps/json-src'):
-                    tree = WorkTree(self.root_dir, dir)
+                if not scan_dir.endswith('/_deps/json-src'):
+                    tree = WorkTree(self.root_dir, scan_dir)
                     if not tree in self.trees:
                         logging.info('adding work tree: %s', tree.rel_dir)
                         self.trees.add(tree)
-            with os.scandir(dir) as it:
+            with os.scandir(scan_dir) as it:
                 for entry in it:
                     if entry.is_dir():
-                        dirs.append(entry.path)
+                        scan_dirs.append(entry.path)
 
     def find(self, path):
         path = os.path.abspath(path)
@@ -148,12 +151,26 @@ def stop(self):
         self.observer.join()
 
 class HeaderRequestHandler(SimpleHTTPRequestHandler):
+    """Request handler for serving json.hpp header"""
+
     def __init__(self, request, client_address, server):
         self.worktrees = server.worktrees
         self.worktree = None
         super().__init__(request, client_address, server,
                          directory=server.worktrees.root_dir)
 
+    def translate_path(self, path):
+        path = os.path.abspath(super().translate_path(path))
+
+        # add single_include/nlohmann to path, if needed
+        header = os.path.join('/', HEADER)
+        header_path = os.path.join('/', SINGLE_INCLUDE, HEADER)
+        if (path.endswith(header)
+            and not path.endswith(header_path)):
+            path = os.path.join(os.path.dirname(path), SINGLE_INCLUDE, HEADER)
+
+        return path
+
     def send_head(self):
         # check if the translated path matches a work tree
         # and fullfill the request; otherwise, send 404
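To make the new `translate_path` behavior concrete, here is a standalone sketch of the same suffix rewrite. The values of `HEADER` and `SINGLE_INCLUDE` are assumptions inferred from the served URL, not taken from the diff:

```python
# Standalone illustration of the path rewrite performed by translate_path.
import os

HEADER = 'json.hpp'                         # assumed constant value
SINGLE_INCLUDE = 'single_include/nlohmann'  # assumed constant value

def expand_header_path(path):
    header = os.path.join('/', HEADER)
    header_path = os.path.join('/', SINGLE_INCLUDE, HEADER)
    if path.endswith(header) and not path.endswith(header_path):
        path = os.path.join(os.path.dirname(path), SINGLE_INCLUDE, HEADER)
    return path

print(expand_header_path('/srv/json/json.hpp'))
# -> /srv/json/single_include/nlohmann/json.hpp
print(expand_header_path('/srv/json/single_include/nlohmann/json.hpp'))
# -> unchanged
```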
@@ -163,7 +180,8 @@ def send_head(self):
             self.worktree.amalgamate_header()
             logging.info(f'serving header ({self.worktree.build_count}): {self.worktree.rel_header}')
             return super().send_head()
-        super().send_error(HTTPStatus.NOT_FOUND, "File not found")
+        logging.info(f'invalid request path: {self.path}')
+        super().send_error(HTTPStatus.NOT_FOUND, 'Not Found')
         return None
 
     def send_header(self, keyword, value):
@@ -177,19 +195,17 @@ def end_headers (self):
         pass
 
     def copyfile(self, source, outputfile):
-        inside_meta = False
+        injected = False
         content = BytesIO()
         length = 0
         for line in source:
             line = line.decode('utf-8')
-            if not inside_meta and JSON_META_PROTOTYPE_RE.match(line):
-                inside_meta = True
-            elif inside_meta and JSON_META_RETURN_RE.match(line):
-                length += content.write(bytes('result["build_count"] = '\
-                    f'{self.worktree.build_count};\n', 'utf-8'))
-                length += content.write(bytes('result["build_time"] = '\
-                    f'"{self.worktree.build_time}";\n', 'utf-8'))
-                inside_meta = False
+            if not injected and JSON_VERSION_RE.match(line):
+                length += content.write(bytes('#define JSON_BUILD_COUNT '\
+                    f'{self.worktree.build_count}\n', 'utf-8'))
+                length += content.write(bytes('#define JSON_BUILD_TIME '\
+                    f'"{self.worktree.build_time}"\n', 'utf-8'))
+                injected = True
             length += content.write(bytes(line, 'utf-8'))
 
         super().send_header('Content-Length', length)
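The effect of the new injection loop, shown on a two-line stand-in for `json.hpp` (illustration only; the real handler streams the full header and feeds `length` into the `Content-Length` header):

```python
# Toy version of the injection: write the two build defines immediately
# before the first NLOHMANN_JSON_VERSION_MAJOR define.
import re
from io import BytesIO

JSON_VERSION_RE = re.compile(r'\s*#\s*define\s+NLOHMANN_JSON_VERSION_MAJOR\s+')

source = [b'// excerpt of json.hpp\n',
          b'#define NLOHMANN_JSON_VERSION_MAJOR 3\n']
build_count, build_time = 7, '2022-04-26 14:03:11'  # sample values

content, length, injected = BytesIO(), 0, False
for raw in source:
    line = raw.decode('utf-8')
    if not injected and JSON_VERSION_RE.match(line):
        length += content.write(f'#define JSON_BUILD_COUNT {build_count}\n'.encode('utf-8'))
        length += content.write(f'#define JSON_BUILD_TIME "{build_time}"\n'.encode('utf-8'))
        injected = True
    length += content.write(line.encode('utf-8'))

print(length, 'bytes written')  # this is what Content-Length must reflect
print(content.getvalue().decode('utf-8'))
```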
@@ -204,10 +220,12 @@ def copyfile(self, source, outputfile):
         content.seek(0)
         shutil.copyfileobj(content, outputfile)
 
-    def log_message(self, format, *args):
+    def log_message(self, fmt, *args):
         pass
 
 class DualStackServer(ThreadingHTTPServer):
+    """ThreadingHTTPServer which ensures dual-stack is not disabled"""
+
     def __init__(self, addr, worktrees):
         self.worktrees = worktrees
         super().__init__(addr, HeaderRequestHandler)
@@ -225,7 +243,7 @@ def get_best_family(*address):
         type=socket.SOCK_STREAM,
         flags=socket.AI_PASSIVE,
     )
-    family, type, proto, canonname, sockaddr = next(iter(infos))
+    family, _, _, _, sockaddr = next(iter(infos))
     return family, sockaddr
 
 if __name__ == '__main__':
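For reference (not part of the change), the `getaddrinfo` lookup above typically yields an IPv6 wildcard entry first on a dual-stack host, which is what lets a single listening socket serve both address families:

```python
# Quick look at what getaddrinfo returns for a passive (listening) socket.
import socket

infos = socket.getaddrinfo(None, 8443, type=socket.SOCK_STREAM,
                           flags=socket.AI_PASSIVE)
family, _, _, _, sockaddr = next(iter(infos))
print(family, sockaddr)  # e.g. AddressFamily.AF_INET6 ('::', 8443, 0, 0)
```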
@@ -261,9 +279,11 @@ def get_best_family(*address):
        scheme = 'HTTP'
        https = config.get('https', {})
        if https.get('enabled', False):
+            ssl.minimum_version = ssl.TLSVersion.TLSv1_3
+            ssl.maximum_version = ssl.TLSVersion.MAXIMUM_SUPPORTED
            httpd.socket = ssl.wrap_socket(httpd.socket,
                certfile=https['cert_file'], keyfile=https['key_file'],
-                server_side=True)
+                server_side=True, ssl_version=ssl.PROTOCOL_TLS)
            scheme = 'HTTPS'
        host, port = httpd.socket.getsockname()[:2]
        log.info(f'serving {scheme} on {host} port {port}')
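As a side note (not what this commit does), `minimum_version` and `maximum_version` are normally attributes of an `ssl.SSLContext` rather than of the `ssl` module. A context-based equivalent might look like this, with hypothetical certificate file names:

```python
# Alternative sketch using an explicit SSLContext; not the commit's approach.
import ssl

ctx = ssl.SSLContext(ssl.PROTOCOL_TLS_SERVER)
ctx.minimum_version = ssl.TLSVersion.TLSv1_3
ctx.maximum_version = ssl.TLSVersion.MAXIMUM_SUPPORTED
ctx.load_cert_chain(certfile='cert.pem', keyfile='key.pem')  # hypothetical paths
# httpd.socket = ctx.wrap_socket(httpd.socket, server_side=True)
```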
@@ -276,8 +296,5 @@ def get_best_family(*address):
        ec = 1
    finally:
        if worktrees is not None:
-            try:
-                worktrees.stop()
-            except:
-                pass
+            worktrees.stop()
    sys.exit(ec)
