Rerun claims when the claim body or those of dependent claims change (#2099)

* Rerun claims based on digest, computed from claim body + dependent claims

* Set Version: 1.0.309

* Make KClaimJob a frozen class and cache digest

* Set Version: 1.0.310

* Change to using frozenset

* Raise exception when label is not found

* Set Version: 1.0.310

* Set Version: 1.0.312

* Set Version: 1.0.314

* Set Version: 1.0.329

* Update kevm-pyk/src/kevm_pyk/__main__.py

Co-authored-by: Andrei Văcaru <[email protected]>

* Fix formatting

* Set Version: 1.0.330

* Set Version: 1.0.331

* Set Version: 1.0.332

---------

Co-authored-by: devops <[email protected]>
Co-authored-by: Andrei Văcaru <[email protected]>
Co-authored-by: rv-jenkins <[email protected]>
4 people authored and spencerhaoxiao committed Nov 2, 2023
1 parent 46f6c8f commit cdc1915
Showing 4 changed files with 88 additions and 11 deletions.
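
The heart of the change: each claim now carries a digest computed from its own body and, recursively, from the digests of the claims it depends on, so editing any claim in a dependency chain invalidates every claim downstream of it. Because KClaimJob is a frozen dataclass, the digest can be cached with functools.cached_property and jobs can be collected into a frozenset. A minimal sketch of the digest idea follows (illustrative names only; the real implementation uses pyk.utils.hash_str, which may hash differently, and appears in the __main__.py diff below):

import hashlib
import json


def claim_digest(claim_body: dict, dependency_digests: list[str]) -> str:
    # Hash the claim body together with its dependencies' digests, so a change
    # anywhere in the dependency chain yields a new digest and forces a rerun.
    body_hash = hashlib.sha3_256(json.dumps(claim_body, sort_keys=True).encode()).hexdigest()
    return hashlib.sha3_256((body_hash + ''.join(dependency_digests)).encode()).hexdigest()
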
2 changes: 1 addition & 1 deletion kevm-pyk/pyproject.toml
@@ -4,7 +4,7 @@ build-backend = "poetry.core.masonry.api"
 
 [tool.poetry]
 name = "kevm-pyk"
-version = "1.0.331"
+version = "1.0.332"
 description = ""
 authors = [
     "Runtime Verification, Inc. <[email protected]>",
2 changes: 1 addition & 1 deletion kevm-pyk/src/kevm_pyk/__init__.py
@@ -6,4 +6,4 @@
 from typing import Final
 
 
-VERSION: Final = '1.0.331'
+VERSION: Final = '1.0.332'
93 changes: 85 additions & 8 deletions kevm-pyk/src/kevm_pyk/__main__.py
@@ -8,6 +8,8 @@
 import sys
 import time
 from argparse import ArgumentParser
+from dataclasses import dataclass
+from functools import cached_property
 from pathlib import Path
 from typing import TYPE_CHECKING
 
@@ -24,7 +26,7 @@
 from pyk.proof.equality import EqualityProof
 from pyk.proof.show import APRProofShow
 from pyk.proof.tui import APRProofViewer
-from pyk.utils import single
+from pyk.utils import FrozenDict, hash_str, single
 
 from . import VERSION, config, kdist
 from .cli import KEVMCLIArgs, node_id_like
@@ -200,6 +202,69 @@ def wrap_process_pool(workers: int) -> Iterator[ZeroProcessPool | ProcessPool]:
         yield pp
 
 
+class JSONEncoder(json.JSONEncoder):
+    def default(self, obj: Any) -> Any:
+        if isinstance(obj, FrozenDict):
+            return json.JSONEncoder.encode(self, dict(obj))
+        return json.JSONEncoder.default(self, obj)
+
+
+@dataclass(frozen=True)
+class KClaimJob:
+    claim: KClaim
+    dependencies: frozenset[KClaimJob]
+
+    @cached_property
+    def digest(self) -> str:
+        deps_digest = ''.join([dep.digest for dep in self.dependencies])
+        claim_hash = hash_str(json.dumps(self.claim.to_dict(), sort_keys=True, cls=JSONEncoder))
+        return hash_str(f'{claim_hash}{deps_digest}')
+
+    def up_to_date(self, digest_file: Path | None) -> bool:
+        if not isinstance(digest_file, Path) or not digest_file.exists():
+            return False
+        digest_dict = json.loads(digest_file.read_text())
+        if 'claims' not in digest_dict:
+            digest_dict['claims'] = {}
+            digest_file.write_text(json.dumps(digest_dict, indent=4))
+        if self.claim.label not in digest_dict['claims']:
+            return False
+        return digest_dict['claims'][self.claim.label] == self.digest
+
+    def update_digest(self, digest_file: Path | None) -> None:
+        if digest_file is None:
+            return
+        digest_dict = {}
+        if digest_file.exists():
+            digest_dict = json.loads(digest_file.read_text())
+        if 'claims' not in digest_dict:
+            digest_dict['claims'] = {}
+        digest_dict['claims'][self.claim.label] = self.digest
+        digest_file.write_text(json.dumps(digest_dict, indent=4))
+
+        _LOGGER.info(f'Updated claim {self.claim.label} in digest file: {digest_file}')
+
+
+def init_claim_jobs(spec_module_name: str, claims: list[KClaim]) -> frozenset[KClaimJob]:
+    labels_to_claims = {claim.label: claim for claim in claims}
+    labels_to_claim_jobs: dict[str, KClaimJob] = {}
+
+    def get_or_load_claim_job(claim_label: str) -> KClaimJob:
+        if claim_label not in labels_to_claim_jobs:
+            if claim_label in labels_to_claims:
+                claim = labels_to_claims[claim_label]
+            elif f'{spec_module_name}.{claim_label}' in labels_to_claims:
+                claim = labels_to_claims[f'{spec_module_name}.{claim_label}']
+            else:
+                raise ValueError(f'Claim with label {claim_label} not found.')
+            deps = frozenset({get_or_load_claim_job(dep_label) for dep_label in claim.dependencies})
+            claim_job = KClaimJob(claim, deps)
+            labels_to_claim_jobs[claim_label] = claim_job
+        return labels_to_claim_jobs[claim_label]
+
+    return frozenset({get_or_load_claim_job(claim.label) for claim in claims})
+
+
 def exec_prove(
     spec_file: Path,
     includes: Iterable[str],
@@ -230,6 +295,8 @@ def exec_prove(
     _ignore_arg(kwargs, 'md_selector', f'--md-selector: {kwargs["md_selector"]}')
     md_selector = 'k'
 
+    digest_file = save_directory / 'digest' if save_directory is not None else None
+
     if definition_dir is None:
         definition_dir = kdist.get('haskell')
 
@@ -262,16 +329,24 @@ def is_functional(claim: KClaim) -> bool:
         spec_module_name=spec_module,
         include_dirs=include_dirs,
         md_selector=md_selector,
-        claim_labels=None,
+        claim_labels=claim_labels,
         exclude_claim_labels=exclude_claim_labels,
     )
     if all_claims is None:
         raise ValueError(f'No claims found in file: {spec_file}')
-    all_claims_by_label: dict[str, KClaim] = {c.label: c for c in all_claims}
     spec_module_name = spec_module if spec_module is not None else os.path.splitext(spec_file.name)[0].upper()
+    all_claim_jobs = init_claim_jobs(spec_module_name, all_claims)
+    all_claim_jobs_by_label = {c.claim.label: c for c in all_claim_jobs}
     claims_graph = claim_dependency_dict(all_claims, spec_module_name=spec_module_name)
 
-    def _init_and_run_proof(claim: KClaim) -> tuple[bool, list[str] | None]:
+    def _init_and_run_proof(claim_job: KClaimJob) -> tuple[bool, list[str] | None]:
+        claim = claim_job.claim
+        up_to_date = claim_job.up_to_date(digest_file)
+        if up_to_date:
+            _LOGGER.info(f'Claim {claim.label} is up to date.')
+        else:
+            _LOGGER.info(f'Claim {claim.label} reinitialized because it is out of date.')
+        claim_job.update_digest(digest_file)
         with legacy_explore(
             kevm,
             kcfg_semantics=KEVMSemantics(auto_abstract_gas=auto_abstract_gas),
@@ -288,6 +363,7 @@ def _init_and_run_proof(claim: KClaim) -> tuple[bool, list[str] | None]:
             if (
                 save_directory is not None
                 and not reinit
+                and up_to_date
                 and EqualityProof.proof_exists(claim.label, save_directory)
             ):
                 proof_problem = EqualityProof.read_proof_data(save_directory, claim.label)
@@ -297,6 +373,7 @@ def _init_and_run_proof(claim: KClaim) -> tuple[bool, list[str] | None]:
             if (
                 save_directory is not None
                 and not reinit
+                and up_to_date
                 and APRProof.proof_data_exists(claim.label, save_directory)
             ):
                 proof_problem = APRProof.read_proof_data(save_directory, claim.label)
@@ -366,21 +443,21 @@ def _init_and_run_proof(claim: KClaim) -> tuple[bool, list[str] | None]:
         while topological_sorter.is_active():
             ready = topological_sorter.get_ready()
             _LOGGER.info(f'Discharging proof obligations: {ready}')
-            curr_claim_list = [all_claims_by_label[label] for label in ready]
+            curr_claim_list = [all_claim_jobs_by_label[label] for label in ready]
             results: list[tuple[bool, list[str] | None]] = process_pool.map(_init_and_run_proof, curr_claim_list)
             for label in ready:
                 topological_sorter.done(label)
             selected_results.extend(results)
             selected_claims.extend(curr_claim_list)
 
     failed = 0
-    for claim, r in zip(selected_claims, selected_results, strict=True):
+    for job, r in zip(selected_claims, selected_results, strict=True):
         passed, failure_log = r
         if passed:
-            print(f'PROOF PASSED: {claim.label}')
+            print(f'PROOF PASSED: {job.claim.label}')
         else:
             failed += 1
-            print(f'PROOF FAILED: {claim.label}')
+            print(f'PROOF FAILED: {job.claim.label}')
         if failure_info and failure_log is not None:
             for line in failure_log:
                 print(line)
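
For reference, the digest file written by update_digest lives at save_directory / 'digest' and is a JSON object of the form {"claims": {"<claim label>": "<digest>"}}; when no save directory is given, digest_file is None and up_to_date simply returns False. A hypothetical sketch of how the new helpers fit together (the function name and its parameters are illustrative; init_claim_jobs is assumed to be in scope from the module above):

from pathlib import Path


def stale_claim_labels(spec_module_name: str, all_claims: list, save_directory: Path) -> list[str]:
    # Collect the labels of claims whose digest no longer matches the one
    # recorded in the digest file; only these need to be re-proven.
    digest_file = save_directory / 'digest'  # same path exec_prove uses above
    claim_jobs = init_claim_jobs(spec_module_name, all_claims)
    return [job.claim.label for job in claim_jobs if not job.up_to_date(digest_file)]
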
2 changes: 1 addition & 1 deletion package/version
@@ -1 +1 @@
-1.0.331
+1.0.332
