-
Notifications
You must be signed in to change notification settings - Fork 0
/
utils.py
143 lines (120 loc) · 4.02 KB
/
utils.py
1
2
3
4
5
6
7
8
9
10
11
12
13
14
15
16
17
18
19
20
21
22
23
24
25
26
27
28
29
30
31
32
33
34
35
36
37
38
39
40
41
42
43
44
45
46
47
48
49
50
51
52
53
54
55
56
57
58
59
60
61
62
63
64
65
66
67
68
69
70
71
72
73
74
75
76
77
78
79
80
81
82
83
84
85
86
87
88
89
90
91
92
93
94
95
96
97
98
99
100
101
102
103
104
105
106
107
108
109
110
111
112
113
114
115
116
117
118
119
120
121
122
123
124
125
126
127
128
129
130
131
132
133
134
135
136
137
138
139
140
141
142
143
import hashlib
import logging
import shutil
from functools import lru_cache, wraps
from logging.config import dictConfig
from pathlib import Path

import jieba
from fastapi import UploadFile
from iroha import IrohaCrypto
from pydantic import BaseModel, BaseSettings
from simhash import Simhash
class Settings(BaseSettings):
    """Application configuration loaded from environment variables.

    Field names map to environment variables of the same name
    (case-insensitive, per pydantic's BaseSettings behavior), with a
    local ".env" file as fallback.
    """

    iroha_address: str  # address of the Iroha node this service talks to
    account_id: str  # Iroha account used by this service
    nextcloud_url: str  # base URL of the Nextcloud instance
    nextcloud_username: str
    nextcloud_password: str
    private_key: str  # Iroha signing key -- presumably hex-encoded; verify against IrohaCrypto usage
    public_key: str

    class Config:
        # Values not present in the environment are read from this file.
        env_file = ".env"
class LogConfig(BaseModel):
    """Logging configuration to be set for the server.

    Intended to be fed to logging.config.dictConfig(); it routes the
    "HyperDB" logger through uvicorn's DefaultFormatter to stderr.
    """

    LOGGER_NAME: str = "HyperDB"
    LOG_FORMAT: str = "%(levelprefix)s | %(asctime)s | %(message)s"
    LOG_LEVEL: str = "DEBUG"
    # (the original declared LOG_FORMAT/LOG_LEVEL twice; the duplicates
    # were redundant re-assignments of the same values and are removed)

    # dictConfig schema (see the logging.config documentation)
    version = 1
    disable_existing_loggers = False
    formatters = {
        "default": {
            "()": "uvicorn.logging.DefaultFormatter",
            "fmt": LOG_FORMAT,
            "datefmt": "%Y-%m-%d %H:%M:%S",
        },
    }
    handlers = {
        "default": {
            "formatter": "default",
            "class": "logging.StreamHandler",
            "stream": "ext://sys.stderr",
        },
    }
    loggers = {
        "HyperDB": {"handlers": ["default"], "level": LOG_LEVEL},
    }
# Install the logging configuration at import time and obtain the
# application-wide logger that the rest of the module uses.
dictConfig(LogConfig().dict())
logger = logging.getLogger("HyperDB")
@lru_cache
def get_settings():
    """Return the application Settings (read from environment variables).

    Built once on first call; subsequent calls return the cached instance.
    """
    settings = Settings()
    return settings
def save_upload_file(upload_file: UploadFile, destination: str) -> None:
    """Persist the contents of an uploaded file to *destination* on disk.

    The upload's underlying file object is always closed, even if the
    copy fails.
    """
    target = Path(destination)
    try:
        with target.open("wb") as out_file:
            shutil.copyfileobj(upload_file.file, out_file)
    finally:
        upload_file.file.close()
def trace(func):
    """
    A decorator for tracing methods' begin/end execution points.

    Prints an Entering/Leaving line around every call and returns the
    wrapped function's result unchanged.
    """
    @wraps(func)  # preserve func.__name__/__doc__ on the wrapper (was missing)
    def tracer(*args, **kwargs):
        name = func.__name__
        print('\tEntering "{}"'.format(name))
        result = func(*args, **kwargs)
        print('\tLeaving "{}"'.format(name))
        return result
    return tracer
class Keypair:
    """Container pairing an account's private key with its public key."""

    def __init__(self, private_key, public_key):
        # Keys are stored exactly as given; no validation or conversion.
        self.private_key = private_key
        self.public_key = public_key

    def __str__(self):
        # Render as space-separated "private public".
        parts = (self.private_key, self.public_key)
        return '{} {}'.format(*parts)
class Entry:
    """
    Store the database entry in a class.

    Mirrors one experiment record: descriptive fields plus bookkeeping
    (content hash, ledger offset, timestamp, lazily computed simhash).
    """

    def __init__(self, id, name, experiment_time, author, email, institution,
                 environment, parameters, details, attachment, timestamp,
                 hash=None, offset=-1):
        # NOTE: `id` and `hash` shadow builtins; the names are kept for
        # caller compatibility.
        self.id = id
        self.name = name
        self.experiment_time = experiment_time
        self.author = author
        self.email = email
        self.institution = institution
        self.environment = environment
        self.parameters = parameters
        self.details = details
        self.attachment = attachment
        self.hash = hash      # externally supplied content hash, if any
        self.offset = offset  # -1 presumably means "not yet placed" -- verify against callers
        self.timestamp = timestamp
        self.simhash = None   # populated by callers via cal_simhash(), if at all

    def cal_hash(self):
        """
        Return hash of data.

        Note: returns the hashlib.sha256 object itself, not a digest string;
        callers are expected to call .hexdigest()/.digest() as needed.
        """
        data = '{}{}{}{}{}{}{}{}{}{}'.format(
            self.name, self.experiment_time, self.author, self.email,
            self.institution, self.environment, self.parameters,
            self.details, self.attachment, self.timestamp)
        return hashlib.sha256(data.encode('utf-8'))

    def cal_simhash(self):
        """Return a Simhash fingerprint of the entry (jieba-tokenized fields)."""
        data = '{};{};{};{};{};{};{};{};{};{}'.format(
            self.name, self.experiment_time, self.author, self.email,
            self.institution, self.environment, self.parameters,
            self.details, self.attachment, self.timestamp)
        words = jieba.lcut(data)
        return Simhash(words)

    @classmethod
    def from_tuple(cls, tuple):
        """
        Create an entry from query results.

        Expects a 13-element row laid out as
        (id, name, experiment_time, author, email, institution, environment,
         parameters, details, attachment, hash, offset, timestamp).
        """
        row = tuple  # alias so the shadowed builtin name isn't used below
        # Lazy %s formatting: the string is only built if DEBUG is enabled.
        logging.debug('from_tuple: %s', row)
        return cls(*row[:10], row[12], row[10], row[11])