Mirror of https://github.com/ellmau/adf-obdd.git, synced 2025-12-20 09:39:38 +01:00

Compare commits

No commits in common. "main" and "0.3.1" have entirely different histories.

.github/workflows/devskim.yml — 6 changed lines (vendored)
@@ -16,19 +16,19 @@ on:
jobs:
lint:
name: DevSkim
runs-on: ubuntu-latest
runs-on: ubuntu-20.04
permissions:
actions: read
contents: read
security-events: write
steps:
- name: Checkout code
uses: actions/checkout@v4
uses: actions/checkout@v3

- name: Run DevSkim scanner
uses: microsoft/DevSkim-Action@v1

- name: Upload DevSkim scan results to GitHub Security tab
uses: github/codeql-action/upload-sarif@v3
uses: github/codeql-action/upload-sarif@v2
with:
sarif_file: devskim-results.sarif
Cargo.lock — 2251 changed lines (generated); file diff suppressed because it is too large.
@@ -1,6 +1,6 @@
[package]
name = "adf-bdd-bin"
version = "0.3.0-dev"
version = "0.3.0"
authors = ["Stefan Ellmauthaler <stefan.ellmauthaler@tu-dresden.de>"]
edition = "2021"
homepage = "https://ellmau.github.io/adf-obdd"
@@ -17,7 +17,7 @@ path = "src/main.rs"

[dependencies]
adf_bdd = { version="0.3.1", path="../lib", default-features = false }
clap = {version = "4.3.0", features = [ "derive", "cargo", "env" ]}
clap = {version = "4.1.4", features = [ "derive", "cargo", "env" ]}
log = { version = "0.4", features = [ "max_level_trace", "release_max_level_info" ] }
serde = { version = "1.0", features = ["derive","rc"] }
serde_json = "1.0"
@@ -27,7 +27,7 @@ crossbeam-channel = "0.5"

[dev-dependencies]
assert_cmd = "2.0"
predicates = "3.0"
predicates = "2.1"
assert_fs = "1.0"

[features]
@ -63,6 +63,7 @@ OPTIONS:
|
||||
#![deny(
|
||||
missing_debug_implementations,
|
||||
missing_copy_implementations,
|
||||
missing_copy_implementations,
|
||||
trivial_casts,
|
||||
trivial_numeric_casts,
|
||||
unsafe_code
|
||||
@ -180,7 +181,7 @@ impl App {
|
||||
let input = std::fs::read_to_string(self.input.clone()).expect("Error Reading File");
|
||||
match self.implementation.as_str() {
|
||||
"hybrid" => {
|
||||
let parser = AdfParser::default();
|
||||
let parser = adf_bdd::parser::AdfParser::default();
|
||||
match parser.parse()(&input) {
|
||||
Ok(_) => log::info!("[Done] parsing"),
|
||||
Err(e) => {
|
||||
@ -204,14 +205,14 @@ impl App {
|
||||
Some("nai") => {
|
||||
let naive_adf = adf.hybrid_step_opt(false);
|
||||
for ac_counts in naive_adf.formulacounts(false) {
|
||||
print!("{ac_counts:?} ");
|
||||
print!("{:?} ", ac_counts);
|
||||
}
|
||||
println!();
|
||||
}
|
||||
Some("mem") => {
|
||||
let naive_adf = adf.hybrid_step_opt(false);
|
||||
for ac_counts in naive_adf.formulacounts(true) {
|
||||
print!("{ac_counts:?}");
|
||||
print!("{:?}", ac_counts);
|
||||
}
|
||||
println!();
|
||||
}
|
||||
@ -282,7 +283,7 @@ impl App {
|
||||
if self.counter.is_some() {
|
||||
log::error!("Modelcounting not supported in biodivine mode");
|
||||
}
|
||||
let parser = AdfParser::default();
|
||||
let parser = adf_bdd::parser::AdfParser::default();
|
||||
match parser.parse()(&input) {
|
||||
Ok(_) => log::info!("[Done] parsing"),
|
||||
Err(e) => {
|
||||
@ -383,13 +384,13 @@ impl App {
|
||||
match self.counter.as_deref() {
|
||||
Some("nai") => {
|
||||
for ac_counts in adf.formulacounts(false) {
|
||||
print!("{ac_counts:?} ");
|
||||
print!("{:?} ", ac_counts);
|
||||
}
|
||||
println!();
|
||||
}
|
||||
Some("mem") => {
|
||||
for ac_counts in adf.formulacounts(true) {
|
||||
print!("{ac_counts:?}");
|
||||
print!("{:?}", ac_counts);
|
||||
}
|
||||
println!();
|
||||
}
|
||||
|
||||
flake.lock — 87 changed lines (generated)
@ -1,53 +1,64 @@
|
||||
{
|
||||
"nodes": {
|
||||
"flake-utils": {
|
||||
"inputs": {
|
||||
"flake-utils": "flake-utils_2"
|
||||
},
|
||||
"locked": {
|
||||
"lastModified": 1738591040,
|
||||
"narHash": "sha256-4WNeriUToshQ/L5J+dTSWC5OJIwT39SEP7V7oylndi8=",
|
||||
"owner": "gytis-ivaskevicius",
|
||||
"repo": "flake-utils-plus",
|
||||
"rev": "afcb15b845e74ac5e998358709b2b5fe42a948d1",
|
||||
"lastModified": 1678901627,
|
||||
"narHash": "sha256-U02riOqrKKzwjsxc/400XnElV+UtPUQWpANPlyazjH0=",
|
||||
"owner": "numtide",
|
||||
"repo": "flake-utils",
|
||||
"rev": "93a2b84fc4b70d9e089d029deacc3583435c2ed6",
|
||||
"type": "github"
|
||||
},
|
||||
"original": {
|
||||
"owner": "gytis-ivaskevicius",
|
||||
"repo": "flake-utils-plus",
|
||||
"owner": "numtide",
|
||||
"repo": "flake-utils",
|
||||
"type": "github"
|
||||
}
|
||||
},
|
||||
"flake-utils_2": {
|
||||
"inputs": {
|
||||
"systems": "systems"
|
||||
},
|
||||
"gitignoresrc": {
|
||||
"flake": false,
|
||||
"locked": {
|
||||
"lastModified": 1694529238,
|
||||
"narHash": "sha256-zsNZZGTGnMOf9YpHKJqMSsa0dXbfmxeoJ7xHlrt+xmY=",
|
||||
"owner": "numtide",
|
||||
"repo": "flake-utils",
|
||||
"rev": "ff7b65b44d01cf9ba6a71320833626af21126384",
|
||||
"lastModified": 1660459072,
|
||||
"narHash": "sha256-8DFJjXG8zqoONA1vXtgeKXy68KdJL5UaXR8NtVMUbx8=",
|
||||
"owner": "hercules-ci",
|
||||
"repo": "gitignore.nix",
|
||||
"rev": "a20de23b925fd8264fd7fad6454652e142fd7f73",
|
||||
"type": "github"
|
||||
},
|
||||
"original": {
|
||||
"owner": "numtide",
|
||||
"repo": "flake-utils",
|
||||
"owner": "hercules-ci",
|
||||
"repo": "gitignore.nix",
|
||||
"type": "github"
|
||||
}
|
||||
},
|
||||
"nixpkgs": {
|
||||
"locked": {
|
||||
"lastModified": 1750969886,
|
||||
"narHash": "sha256-zW/OFnotiz/ndPFdebpo3X0CrbVNf22n4DjN2vxlb58=",
|
||||
"lastModified": 1679966490,
|
||||
"narHash": "sha256-k0jV+y1jawE6w4ZvKgXDNg4+O9NNtcaWwzw8gufv0b4=",
|
||||
"owner": "NixOS",
|
||||
"repo": "nixpkgs",
|
||||
"rev": "a676066377a2fe7457369dd37c31fd2263b662f4",
|
||||
"rev": "5b7cd5c39befee629be284970415b6eb3b0ff000",
|
||||
"type": "github"
|
||||
},
|
||||
"original": {
|
||||
"owner": "NixOS",
|
||||
"ref": "nixos-25.05",
|
||||
"ref": "nixos-22.11",
|
||||
"repo": "nixpkgs",
|
||||
"type": "github"
|
||||
}
|
||||
},
|
||||
"nixpkgs-unstable": {
|
||||
"locked": {
|
||||
"lastModified": 1679944645,
|
||||
"narHash": "sha256-e5Qyoe11UZjVfgRfwNoSU57ZeKuEmjYb77B9IVW7L/M=",
|
||||
"owner": "NixOS",
|
||||
"repo": "nixpkgs",
|
||||
"rev": "4bb072f0a8b267613c127684e099a70e1f6ff106",
|
||||
"type": "github"
|
||||
},
|
||||
"original": {
|
||||
"owner": "NixOS",
|
||||
"ref": "nixos-unstable",
|
||||
"repo": "nixpkgs",
|
||||
"type": "github"
|
||||
}
|
||||
@ -55,22 +66,27 @@
|
||||
"root": {
|
||||
"inputs": {
|
||||
"flake-utils": "flake-utils",
|
||||
"gitignoresrc": "gitignoresrc",
|
||||
"nixpkgs": "nixpkgs",
|
||||
"nixpkgs-unstable": "nixpkgs-unstable",
|
||||
"rust-overlay": "rust-overlay"
|
||||
}
|
||||
},
|
||||
"rust-overlay": {
|
||||
"inputs": {
|
||||
"flake-utils": [
|
||||
"flake-utils"
|
||||
],
|
||||
"nixpkgs": [
|
||||
"nixpkgs"
|
||||
]
|
||||
},
|
||||
"locked": {
|
||||
"lastModified": 1751251399,
|
||||
"narHash": "sha256-y+viCuy/eKKpkX1K2gDvXIJI/yzvy6zA3HObapz9XZ0=",
|
||||
"lastModified": 1680056830,
|
||||
"narHash": "sha256-WB4KD8oLSxAAtmXYSzwVwQusC2Gy5vTEln1uTt0GI2k=",
|
||||
"owner": "oxalica",
|
||||
"repo": "rust-overlay",
|
||||
"rev": "b22d5ee8c60ed1291521f2dde48784edd6bf695b",
|
||||
"rev": "c8d8d05b8100d451243b614d950fa3f966c1fcc2",
|
||||
"type": "github"
|
||||
},
|
||||
"original": {
|
||||
@ -78,21 +94,6 @@
|
||||
"repo": "rust-overlay",
|
||||
"type": "github"
|
||||
}
|
||||
},
|
||||
"systems": {
|
||||
"locked": {
|
||||
"lastModified": 1681028828,
|
||||
"narHash": "sha256-Vy1rq5AaRuLzOxct8nz4T6wlgyUR7zLU309k9mBC768=",
|
||||
"owner": "nix-systems",
|
||||
"repo": "default",
|
||||
"rev": "da67096a3b9bf56a91d16901293e51ba5b49a27e",
|
||||
"type": "github"
|
||||
},
|
||||
"original": {
|
||||
"owner": "nix-systems",
|
||||
"repo": "default",
|
||||
"type": "github"
|
||||
}
|
||||
}
|
||||
},
|
||||
"root": "root",
|
||||
|
||||
flake.nix — 107 changed lines
@ -1,89 +1,50 @@
|
||||
rec {
|
||||
description = "adf-bdd, Abstract Dialectical Frameworks solved by Binary Decision Diagrams; developed in Dresden";
|
||||
{
|
||||
description = "basic rust flake";
|
||||
|
||||
inputs = {
|
||||
nixpkgs.url = "github:NixOS/nixpkgs/nixos-25.05";
|
||||
nixpkgs.url = "github:NixOS/nixpkgs/nixos-22.11";
|
||||
nixpkgs-unstable.url = "github:NixOS/nixpkgs/nixos-unstable";
|
||||
rust-overlay = {
|
||||
url = "github:oxalica/rust-overlay";
|
||||
inputs = {
|
||||
nixpkgs.follows = "nixpkgs";
|
||||
flake-utils.follows = "flake-utils/flake-utils";
|
||||
flake-utils.follows = "flake-utils";
|
||||
};
|
||||
};
|
||||
flake-utils.url = "github:gytis-ivaskevicius/flake-utils-plus";
|
||||
flake-utils.url = "github:numtide/flake-utils";
|
||||
gitignoresrc = {
|
||||
url = "github:hercules-ci/gitignore.nix";
|
||||
flake = false;
|
||||
};
|
||||
};
|
||||
|
||||
outputs = inputs @ {
|
||||
self,
|
||||
flake-utils,
|
||||
rust-overlay,
|
||||
...
|
||||
}:
|
||||
flake-utils.lib.mkFlake {
|
||||
inherit self inputs;
|
||||
channels.nixpkgs.overlaysBuilder = channels: [rust-overlay.overlays.default];
|
||||
outputsBuilder = channels: let
|
||||
pkgs = channels.nixpkgs;
|
||||
toolchain = pkgs.rust-bin.stable.latest.default;
|
||||
platform = pkgs.makeRustPlatform {
|
||||
cargo = toolchain;
|
||||
rustc = toolchain;
|
||||
outputs = { self, nixpkgs, nixpkgs-unstable, flake-utils, gitignoresrc
|
||||
, rust-overlay, ... }@inputs:
|
||||
{
|
||||
#overlay = import ./nix { inherit gitignoresrc; };
|
||||
} // (flake-utils.lib.eachDefaultSystem (system:
|
||||
let
|
||||
unstable = import nixpkgs-unstable { inherit system; };
|
||||
pkgs = import nixpkgs {
|
||||
inherit system;
|
||||
overlays = [ (import rust-overlay) ];
|
||||
};
|
||||
in rec {
|
||||
packages = let
|
||||
cargoMetaBin = (builtins.fromTOML (builtins.readFile ./bin/Cargo.toml)).package;
|
||||
cargoMetaLib = (builtins.fromTOML (builtins.readFile ./lib/Cargo.toml)).package;
|
||||
meta = {
|
||||
inherit description;
|
||||
homepage = "https://github.com/ellmau/adf-obdd";
|
||||
license = [pkgs.lib.licenses.mit];
|
||||
|
||||
nativeBuildInputs = with platform; [
|
||||
cargoBuildHook
|
||||
cargoCheckHook
|
||||
];
|
||||
};
|
||||
in rec {
|
||||
adf-bdd = platform.buildRustPackage {
|
||||
pname = "adf-bdd";
|
||||
inherit (cargoMetaBin) version;
|
||||
inherit meta;
|
||||
|
||||
src = ./.;
|
||||
cargoLock.lockFile = ./Cargo.lock;
|
||||
|
||||
buildAndTestSubdir = "bin";
|
||||
};
|
||||
adf_bdd = platform.buildRustPackage {
|
||||
pname = "adf_bdd";
|
||||
inherit (cargoMetaLib) version;
|
||||
inherit meta;
|
||||
|
||||
src = ./.;
|
||||
cargoLock.lockFile = ./Cargo.lock;
|
||||
|
||||
buildAndTestSubdir = "lib";
|
||||
};
|
||||
};
|
||||
devShells.default = pkgs.mkShell {
|
||||
devShell = pkgs.mkShell {
|
||||
RUST_LOG = "debug";
|
||||
RUST_BACKTRACE = 1;
|
||||
shellHook = ''
|
||||
export PATH=''${HOME}/.cargo/bin''${PATH+:''${PATH}}
|
||||
'';
|
||||
buildInputs = let
|
||||
notOn = systems:
|
||||
pkgs.lib.optionals (!builtins.elem pkgs.system systems);
|
||||
in
|
||||
[
|
||||
toolchain
|
||||
pkgs.rust-analyzer
|
||||
pkgs.cargo-audit
|
||||
pkgs.cargo-license
|
||||
]
|
||||
++ (notOn ["aarch64-darwin" "x86_64-darwin"] [pkgs.kcov pkgs.gnuplot pkgs.valgrind])
|
||||
++ (notOn ["aarch64-linux" "aarch64-darwin" "i686-linux"] [pkgs.cargo-tarpaulin]);
|
||||
buildInputs = [
|
||||
pkgs.rust-bin.stable.latest.rustfmt
|
||||
pkgs.rust-bin.stable.latest.default
|
||||
pkgs.rust-analyzer
|
||||
pkgs.cargo-audit
|
||||
pkgs.cargo-license
|
||||
pkgs.cargo-tarpaulin
|
||||
pkgs.cargo-kcov
|
||||
pkgs.valgrind
|
||||
pkgs.gnuplot
|
||||
pkgs.kcov
|
||||
];
|
||||
};
|
||||
};
|
||||
};
|
||||
}));
|
||||
}
|
||||
|
||||
@ -9,32 +9,32 @@
|
||||
"build": "parcel build"
|
||||
},
|
||||
"devDependencies": {
|
||||
"@parcel/transformer-inline-string": "2.9.3",
|
||||
"@types/node": "^20.4.6",
|
||||
"@types/react": "^18.2.18",
|
||||
"@types/react-dom": "^18.2.7",
|
||||
"@typescript-eslint/eslint-plugin": "^6.2.1",
|
||||
"@typescript-eslint/parser": "^6.2.1",
|
||||
"eslint": "^8.46.0",
|
||||
"@parcel/transformer-inline-string": "2.8.2",
|
||||
"@types/node": "^18.15.11",
|
||||
"@types/react": "^18.0.26",
|
||||
"@types/react-dom": "^18.0.10",
|
||||
"@typescript-eslint/eslint-plugin": "^5.48.1",
|
||||
"@typescript-eslint/parser": "^5.48.1",
|
||||
"eslint": "^8.31.0",
|
||||
"eslint-config-airbnb": "^19.0.4",
|
||||
"eslint-config-airbnb-typescript": "^17.1.0",
|
||||
"eslint-plugin-import": "^2.28.0",
|
||||
"eslint-config-airbnb-typescript": "^17.0.0",
|
||||
"eslint-plugin-import": "^2.27.4",
|
||||
"eslint-plugin-jsx-a11y": "^6.7.1",
|
||||
"eslint-plugin-react": "^7.33.1",
|
||||
"parcel": "^2.9.3",
|
||||
"eslint-plugin-react": "^7.32.0",
|
||||
"parcel": "^2.8.2",
|
||||
"process": "^0.11.10",
|
||||
"typescript": "^5.1.6"
|
||||
"typescript": "^4.9.4"
|
||||
},
|
||||
"dependencies": {
|
||||
"@antv/g6": "^4.8.20",
|
||||
"@emotion/react": "^11.11.1",
|
||||
"@emotion/styled": "^11.11.0",
|
||||
"@fontsource/roboto": "^5.0.6",
|
||||
"@mui/icons-material": "^5.14.3",
|
||||
"@mui/material": "^5.14.3",
|
||||
"markdown-to-jsx": "^7.2.1",
|
||||
"@antv/g6": "^4.8.3",
|
||||
"@emotion/react": "^11.10.5",
|
||||
"@emotion/styled": "^11.10.5",
|
||||
"@fontsource/roboto": "^4.5.8",
|
||||
"@mui/icons-material": "^5.11.16",
|
||||
"@mui/material": "^5.11.4",
|
||||
"markdown-to-jsx": "^7.2.0",
|
||||
"react": "^18.2.0",
|
||||
"react-dom": "^18.2.0",
|
||||
"react-router-dom": "^6.14.2"
|
||||
"react-router-dom": "^6.10.0"
|
||||
}
|
||||
}
|
||||
|
||||
@ -40,7 +40,6 @@ function AdfNewForm({ fetchProblems }: { fetchProblems: () => void; }) {
|
||||
const [code, setCode] = useState(PLACEHOLDER);
|
||||
const [filename, setFilename] = useState('');
|
||||
const [parsing, setParsing] = useState<Parsing>('Naive');
|
||||
const [isAf, setIsAf] = useState(false);
|
||||
const [name, setName] = useState('');
|
||||
const fileRef = useRef<HTMLInputElement>(null);
|
||||
|
||||
@ -60,7 +59,6 @@ function AdfNewForm({ fetchProblems }: { fetchProblems: () => void; }) {
|
||||
}
|
||||
|
||||
formData.append('parsing', parsing);
|
||||
formData.append('is_af', isAf);
|
||||
formData.append('name', name);
|
||||
|
||||
fetch(`${process.env.NODE_ENV === 'development' ? '//localhost:8080' : ''}/adf/add`, {
|
||||
@ -121,13 +119,10 @@ function AdfNewForm({ fetchProblems }: { fetchProblems: () => void; }) {
|
||||
label="Put your code here:"
|
||||
helperText={(
|
||||
<>
|
||||
For more info on the ADF syntax, have a
|
||||
For more info on the syntax, have a
|
||||
look
|
||||
{' '}
|
||||
<Link href="https://github.com/ellmau/adf-obdd" target="_blank" rel="noopener noreferrer">here</Link>
|
||||
. For the AF syntax, we currently only allow the ICCMA competition format, see for example
|
||||
{' '}
|
||||
<Link href="https://argumentationcompetition.org/2025/rules.html" target="_blank" rel="noopener noreferrer">here</Link>
|
||||
.
|
||||
</>
|
||||
)}
|
||||
@ -142,20 +137,6 @@ function AdfNewForm({ fetchProblems }: { fetchProblems: () => void; }) {
|
||||
|
||||
<Container sx={{ marginTop: 2 }}>
|
||||
<Stack direction="row" justifyContent="center" spacing={2}>
|
||||
<FormControl>
|
||||
<FormLabel id="isAf-radio-group">ADF or AF?</FormLabel>
|
||||
<RadioGroup
|
||||
row
|
||||
aria-labelledby="isAf-radio-group"
|
||||
name="isAf"
|
||||
value={isAf}
|
||||
onChange={(e) => setIsAf(((e.target as HTMLInputElement).value))}
|
||||
>
|
||||
<FormControlLabel value={false} control={<Radio />} label="ADF" />
|
||||
<FormControlLabel value={true} control={<Radio />} label="AF" />
|
||||
</RadioGroup>
|
||||
<span style={{ fontSize: "0.7em" }}>AFs are converted to ADFs internally.</span>
|
||||
</FormControl>
|
||||
<FormControl>
|
||||
<FormLabel id="parsing-radio-group">Parsing Strategy</FormLabel>
|
||||
<RadioGroup
|
||||
|
||||
@@ -3,7 +3,7 @@ The Web UI mimics many options of the CLI version of the [underlying adf-bdd too

In the below form, you can either type/paste your `code` or upload a file in the same format.
To put it briefly, an ADF consists of statements and acceptance conditions for these statements.
For instance, the following code indicates that `a,b,c,d` are statements, that `a` is assumed to be true (verum), `b` is true if `b` is true (which is self-supporting), `c` is true if `a` and `b` are true, and `d` is true if `b` is false.
For instance, the following code indicates that `a,b,c,d` are statements, that `a` is assumed to be true (verum), `b` is true if is true (which is tautological), `c` is true if `a` and `b` are true, and `d` is true if `b` is false.

```
s(a).
@@ -21,17 +21,3 @@ The `Parsing Strategy` determines the internal implementation used for these. `N
You will get a view on the BDD in the detail view after you added the problem.

You can optionally set a name for your ADF problem. Otherwise a random name will be chosen. At the moment the name cannot be changed later (but you could remove and re-add the problem).

We also support adding AFs in the ICCMA competition format. They are converted to ADFs internally in the obvious way.
For example you can try the following code and change the option below from ADF to AF.

```
p af 5
# this is a comment
1 2
2 4
4 5
5 4
5 5
```
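The "obvious" conversion mentioned above mirrors the `From<AF> for AdfParser` implementation further down in this diff: every argument becomes a statement whose acceptance condition is the conjunction of the negations of its attackers (verum if it is unattacked). A minimal sketch of that fold, assuming the owned-`String` `Formula` that the `main` side of this diff exposes via `adf_bdd::parser`; the 1-based argument names are used directly here for readability:

```rust
use adf_bdd::parser::Formula;

// Acceptance condition for argument 5 of the AF example above (attacked by 4 and 5):
// conceptually and(and(verum, neg(4)), neg(5)), built like the fold in `From<AF> for AdfParser`.
fn acceptance_condition(attackers: &[usize]) -> Formula {
    attackers.iter().fold(Formula::Top, |acc, attacker| {
        Formula::And(
            Box::new(acc),
            Box::new(Formula::Not(Box::new(Formula::Atom(attacker.to_string())))),
        )
    })
}

fn main() {
    let ac_5 = acceptance_condition(&[4, 5]);
    // Debug-print the nested and/not structure of the resulting formula.
    println!("{ac_5:?}");
}
```

Unattacked arguments (1 and 3 in the example) end up with the plain verum condition, which is why converted AFs behave like ADFs with purely negative links.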
@ -1,212 +1,99 @@
|
||||
<!doctype html>
|
||||
<html>
|
||||
<head>
|
||||
<title>ADF-BDD.dev - Legal Notice</title>
|
||||
<meta
|
||||
name="description"
|
||||
content="Impressum and Data Protection Regulation for adf-bdd.dev"
|
||||
/>
|
||||
<meta name="viewport" content="width=device-width, initial-scale=1" />
|
||||
<style>
|
||||
body {
|
||||
font-family: Helvetica;
|
||||
}
|
||||
h1 {
|
||||
text-align: center;
|
||||
}
|
||||
section {
|
||||
max-width: 1000px;
|
||||
margin: 0 auto 32px;
|
||||
padding: 16px;
|
||||
box-shadow: 0 0 10px 0px rgba(0, 0, 0, 0.4);
|
||||
}
|
||||
section > :first-child {
|
||||
margin-top: 0;
|
||||
}
|
||||
section > :last-child {
|
||||
margin-bottom: 0;
|
||||
}
|
||||
</style>
|
||||
</head>
|
||||
<body>
|
||||
<header>
|
||||
<h1>ADF-BDD.DEV Legal Notice</h1>
|
||||
</header>
|
||||
<!DOCTYPE html>
|
||||
<html>
|
||||
<head>
|
||||
<title>ADF-BDD.dev - Legal Notice</title>
|
||||
<meta name="description" content="Impressum and Data Protection Regulation for adf-bdd.dev">
|
||||
<meta name="viewport" content="width=device-width, initial-scale=1">
|
||||
<style>
|
||||
body {
|
||||
font-family: Helvetica;
|
||||
}
|
||||
h1 {
|
||||
text-align: center;
|
||||
}
|
||||
section {
|
||||
max-width: 1000px;
|
||||
margin: 0 auto 32px;
|
||||
padding: 16px;
|
||||
box-shadow: 0 0 10px 0px rgba(0,0,0,0.4);
|
||||
}
|
||||
section > :first-child {
|
||||
margin-top: 0;
|
||||
}
|
||||
section > :last-child {
|
||||
margin-bottom: 0;
|
||||
}
|
||||
</style>
|
||||
</head>
|
||||
<body>
|
||||
|
||||
<section>
|
||||
<h2>Impressum</h2>
|
||||
<header>
|
||||
<h1>ADF-BDD.DEV Legal Notice</h1>
|
||||
</header>
|
||||
|
||||
The
|
||||
<a
|
||||
href="https://tu-dresden.de/impressum?set_language=en"
|
||||
target="_blank"
|
||||
rel="noreferrer noopener"
|
||||
>Impressum of TU Dresden</a
|
||||
>
|
||||
applies with the following amendments:
|
||||
<section>
|
||||
<h2>Impressum</h2>
|
||||
|
||||
<h3>Responsibilities - Content and Technical Implementation</h3>
|
||||
The <a href="https://tu-dresden.de/impressum?set_language=en" target="_blank" rel="noreferrer noopener">Impressum of TU Dresden</a> applies with the following amendments:
|
||||
|
||||
<p>
|
||||
Dipl.-Inf. Lukas Gerlach<br />
|
||||
Technische Universität Dresden<br />
|
||||
Fakultät Informatik<br />
|
||||
Institut für Theoretische Informatik<br />
|
||||
Professur für Wissensbasierte Systeme<br />
|
||||
01062 Dresden<br />
|
||||
GERMANY
|
||||
</p>
|
||||
<p>
|
||||
Email: lukas.gerlach@tu-dresden.de<br />
|
||||
Phone: (+49) 351 / 463 43503
|
||||
</p>
|
||||
</section>
|
||||
<h3>Responsibilities - Content and Technical Implementation</h3>
|
||||
|
||||
<section>
|
||||
<h2>Data Protection Regulation</h2>
|
||||
<p>
|
||||
We process your personal data only in form of metadata that is
|
||||
send to us when you access the website. This is done to pursue
|
||||
our legitimate interest of providing and improving this publicly
|
||||
available website (https://adf-bdd.dev). To this aim, this
|
||||
metadata is also written to server log files. The data may
|
||||
contain the following of your personal information: public IP
|
||||
address, time of access, internet browser (e.g. user agent,
|
||||
version), operating system, referrer url, hostname of requesting
|
||||
machine. We only set cookies that are necessary for the
|
||||
provision of our service, i.e. to check if a user is logged in.
|
||||
</p>
|
||||
<h3>
|
||||
Data Processed for Website Provisioning and Log File Creation:
|
||||
Log Files for Website Provisioning
|
||||
</h3>
|
||||
<p>
|
||||
We use Cloudflare to resolve DNS requests for our website. To
|
||||
ensure the security and performance of our website, we log
|
||||
technical errors that may occur when accessing our website.
|
||||
Additionally, information that your device's browser
|
||||
automatically transmits to our server is collected. This
|
||||
information includes:
|
||||
</p>
|
||||
<p>
|
||||
Dipl.-Inf. Lukas Gerlach<br>
|
||||
Technische Universität Dresden<br>
|
||||
Fakultät Informatik<br>
|
||||
Institut für Theoretische Informatik<br>
|
||||
Professur für Wissensbasierte Systeme<br>
|
||||
01062 Dresden<br>
|
||||
GERMANY
|
||||
</p>
|
||||
<p>
|
||||
Email: lukas.gerlach@tu-dresden.de<br>
|
||||
Phone: (+49) 351 / 463 43503
|
||||
</p>
|
||||
</section>
|
||||
|
||||
<ul>
|
||||
<li>IP address and operating system of your device,</li>
|
||||
<li>Browser type, version, language,</li>
|
||||
<li>
|
||||
The website from which the access was made (referrer URL),
|
||||
</li>
|
||||
<li>The status code (e.g., 404), and</li>
|
||||
<li>The transmission protocol used (e.g., http/2).</li>
|
||||
</ul>
|
||||
<section>
|
||||
<h2>Data Protection Regulation</h2>
|
||||
<p>
|
||||
We process your personal data only in form of metadata that is send to us when you access the website.
|
||||
This is done to pursue our legitimate interest of providing and improving this publicly available website (https://adf-bdd.dev).
|
||||
To this aim, this metadata is also written to server log files.
|
||||
The data may contain the following of your personal information: public IP address, time of access, internet browser (e.g. user agent, version), operating system, referrer url, hostname of requesting machine.
|
||||
We only set cookies that are necessary for the provision of our service, i.e. to check if a user is logged in.
|
||||
We do not set any so-called tracking cookies and we do not use any third party analytics tools on this website.
|
||||
</p>
|
||||
<h3>Legal basis</h3>
|
||||
<p>
|
||||
The legal basis for the data processing is <a href="https://gdpr.eu/article-6-how-to-process-personal-data-legally/" target="_blank" rel="noreferrer noopener">Section §6 para.1 lit. f GDPR</a>.
|
||||
</p>
|
||||
<h3>Rights of data subjects</h3>
|
||||
<ul>
|
||||
<li>You have the right to obtain information from TU Dresden about the data stored about your person and/or to have incorrectly stored data corrected.</li>
|
||||
<li>You have the right to erasure or restriction of the processing and/or a right to object to the processing.</li>
|
||||
<li>You can contact TU Dresden's Data Protection Officer at any time.
|
||||
<p>
|
||||
Tel.: +49 351 / 463 32839<br>
|
||||
Fax: +49 351 / 463 39718<br>
|
||||
Email: informationssicherheit@tu-dresden.de<br>
|
||||
<a href="https://tu-dresden.de/informationssicherheit" target="_blank" rel="noreferrer noopener">https://tu-dresden.de/informationssicherheit</a>
|
||||
</p>
|
||||
</li>
|
||||
<li>
|
||||
You also have the right to complain to a supervisory authority if you are concerned that the processing of your personal data is an infringement of the law. The competent supervisory authority for data protection is:
|
||||
<p>
|
||||
Saxon Data Protection Commissioner<br>
|
||||
Ms. Dr. Juliane Hundert<br>
|
||||
Devrientstraße 5<br>
|
||||
01067 Dresden<br>
|
||||
Email: saechsdsb@slt.sachsen.de<br>
|
||||
Phone: + 49 351 / 85471 101<br>
|
||||
<a href="http://www.datenschutz.sachsen.de" target="_blank" rel="noreferrer noopener">www.datenschutz.sachsen.de</a>
|
||||
</p>
|
||||
</li>
|
||||
</ul>
|
||||
</section>
|
||||
|
||||
<p>
|
||||
The processing of this data is based on our legitimate interest
|
||||
according to Art. 6(1)(f) GDPR. Our legitimate interest lies in
|
||||
troubleshooting, optimizing, and ensuring the performance of our
|
||||
website, as well as guaranteeing the security of our network and
|
||||
systems. We do not use the data to personally identify
|
||||
individual users unless there is a legal reason to do so or
|
||||
explicit consent is obtained from you.
|
||||
</p>
|
||||
|
||||
<p>
|
||||
Cloudflare acts as an intermediary between your browser and our
|
||||
server. When a DNS record is set to "Proxied," Cloudflare
|
||||
answers DNS queries with a Cloudflare Anycast IP address instead
|
||||
of the actual IP address of our server. This directs HTTP/HTTPS
|
||||
requests to the Cloudflare network, which offers advantages in
|
||||
terms of security and performance. Cloudflare also hides the IP
|
||||
address of our origin server, making it more difficult for
|
||||
attackers to directly target it.
|
||||
</p>
|
||||
|
||||
<p>
|
||||
Cloudflare may store certain data related to DNS requests,
|
||||
including IP addresses. However, Cloudflare anonymizes IP
|
||||
addresses by truncating the last octets for IPv4 and the last 80
|
||||
bits for IPv6. The truncated IP addresses are deleted within 25
|
||||
hours. Cloudflare is committed to not selling or sharing users'
|
||||
personal data with third parties and not using the data for
|
||||
targeted advertising. For more information on data protection at
|
||||
Cloudflare, please see the Cloudflare Privacy Policy:
|
||||
<a href="https://www.cloudflare.com/de-de/privacypolicy/"
|
||||
>https://www.cloudflare.com/de-de/privacypolicy/</a
|
||||
>
|
||||
</p>
|
||||
|
||||
<p>
|
||||
To meet the requirements of the GDPR, we have entered into a
|
||||
Data Processing Agreement (DPA) with Cloudflare, which ensures
|
||||
that Cloudflare processes the data on our behalf and in
|
||||
accordance with applicable data protection regulations. You have
|
||||
the right to access, rectify, erase, restrict processing, and
|
||||
data portability of your personal data. Please contact us if you
|
||||
wish to exercise these rights.
|
||||
</p>
|
||||
|
||||
<p>
|
||||
Please note that our website is hosted on our own servers, and
|
||||
Cloudflare merely serves as a DNS provider and proxy. We
|
||||
implement appropriate technical and organizational measures to
|
||||
ensure the protection of your data.
|
||||
</p>
|
||||
<h3>Legal basis</h3>
|
||||
<p>
|
||||
The legal basis for the data processing is
|
||||
<a
|
||||
href="https://gdpr.eu/article-6-how-to-process-personal-data-legally/"
|
||||
target="_blank"
|
||||
rel="noreferrer noopener"
|
||||
>Section §6 para.1 lit. f GDPR</a
|
||||
>.
|
||||
</p>
|
||||
<h3>Rights of data subjects</h3>
|
||||
<ul>
|
||||
<li>
|
||||
You have the right to obtain information from TU Dresden
|
||||
about the data stored about your person and/or to have
|
||||
incorrectly stored data corrected.
|
||||
</li>
|
||||
<li>
|
||||
You have the right to erasure or restriction of the
|
||||
processing and/or a right to object to the processing.
|
||||
</li>
|
||||
<li>
|
||||
You can contact TU Dresden's Data Protection Officer at any
|
||||
time.
|
||||
<p>
|
||||
Tel.: +49 351 / 463 32839<br />
|
||||
Fax: +49 351 / 463 39718<br />
|
||||
Email: informationssicherheit@tu-dresden.de<br />
|
||||
<a
|
||||
href="https://tu-dresden.de/informationssicherheit"
|
||||
target="_blank"
|
||||
rel="noreferrer noopener"
|
||||
>https://tu-dresden.de/informationssicherheit</a
|
||||
>
|
||||
</p>
|
||||
</li>
|
||||
<li>
|
||||
You also have the right to complain to a supervisory
|
||||
authority if you are concerned that the processing of your
|
||||
personal data is an infringement of the law. The competent
|
||||
supervisory authority for data protection is:
|
||||
<p>
|
||||
Saxon Data Protection Commissioner<br />
|
||||
Ms. Dr. Juliane Hundert<br />
|
||||
Maternistraße 17<br />
|
||||
01067 Dresden<br />
|
||||
Email: post@sdtb.sachsen.de<br />
|
||||
Phone: + 49 351 / 85471 101<br />
|
||||
<a
|
||||
href="http://www.datenschutz.sachsen.de"
|
||||
target="_blank"
|
||||
rel="noreferrer noopener"
|
||||
>www.datenschutz.sachsen.de</a
|
||||
>
|
||||
</p>
|
||||
</li>
|
||||
</ul>
|
||||
</section>
|
||||
</body>
|
||||
</body>
|
||||
</html>
|
||||
|
||||
|
||||
frontend/yarn.lock — 2982 changed lines; file diff suppressed because it is too large.
@@ -28,7 +28,7 @@ nom = "7.1.3"
lexical-sort = "0.3.1"
serde = { version = "1.0", features = ["derive","rc"] }
serde_json = "1.0"
biodivine-lib-bdd = "0.5.0"
biodivine-lib-bdd = "0.4.2"
derivative = "2.2.0"
roaring = "0.10.1"
strum = { version = "0.24", features = ["derive"] }
@ -1133,7 +1133,12 @@ mod test {
|
||||
|
||||
let grounded = adf.grounded();
|
||||
let (s, r) = unbounded();
|
||||
adf.nogood_internal(&grounded, heuristics::heu_simple, Adf::stability_check, s);
|
||||
adf.nogood_internal(
|
||||
&grounded,
|
||||
crate::adf::heuristics::heu_simple,
|
||||
crate::adf::Adf::stability_check,
|
||||
s,
|
||||
);
|
||||
|
||||
assert_eq!(
|
||||
r.iter().collect::<Vec<_>>(),
|
||||
@ -1167,8 +1172,8 @@ mod test {
|
||||
let (s, r) = unbounded();
|
||||
adf.nogood_internal(
|
||||
&grounded,
|
||||
heuristics::heu_simple,
|
||||
Adf::stability_check,
|
||||
crate::adf::heuristics::heu_simple,
|
||||
crate::adf::Adf::stability_check,
|
||||
s.clone(),
|
||||
);
|
||||
let stable_result = r.try_iter().collect::<Vec<_>>();
|
||||
|
||||
@ -5,7 +5,6 @@
|
||||
//! - grounded
|
||||
//! - stable
|
||||
//! - complete
|
||||
//!
|
||||
//! semantics of ADFs.
|
||||
|
||||
use crate::{
|
||||
@ -90,7 +89,7 @@ impl Adf {
|
||||
|
||||
pub(crate) fn stm_rewriting(&mut self, parser: &AdfParser) {
|
||||
let expr = parser.formula_order().iter().enumerate().fold(
|
||||
BooleanExpression::Const(true),
|
||||
biodivine_lib_bdd::boolean_expression::BooleanExpression::Const(true),
|
||||
|acc, (insert_order, new_order)| {
|
||||
BooleanExpression::And(
|
||||
Box::new(acc),
|
||||
@ -322,15 +321,19 @@ impl Adf {
|
||||
fn stable_representation(&self) -> Bdd {
|
||||
log::debug!("[Start] stable representation rewriting");
|
||||
self.ac.iter().enumerate().fold(
|
||||
self.varset.eval_expression(&BooleanExpression::Const(true)),
|
||||
self.varset.eval_expression(
|
||||
&biodivine_lib_bdd::boolean_expression::BooleanExpression::Const(true),
|
||||
),
|
||||
|acc, (idx, formula)| {
|
||||
acc.and(
|
||||
&formula.iff(
|
||||
&self.varset.eval_expression(&BooleanExpression::Variable(
|
||||
self.ordering
|
||||
.name(crate::datatypes::Var(idx))
|
||||
.expect("Variable should exist"),
|
||||
)),
|
||||
&self.varset.eval_expression(
|
||||
&biodivine_lib_bdd::boolean_expression::BooleanExpression::Variable(
|
||||
self.ordering
|
||||
.name(crate::datatypes::Var(idx))
|
||||
.expect("Variable should exist"),
|
||||
),
|
||||
),
|
||||
),
|
||||
)
|
||||
},
|
||||
@@ -383,7 +386,7 @@ pub trait BddRestrict {

impl BddRestrict for Bdd {
fn var_restrict(&self, variable: biodivine_lib_bdd::BddVariable, value: bool) -> Bdd {
self.var_select(variable, value).var_exists(variable)
self.var_select(variable, value).var_project(variable)
}

fn restrict(&self, variables: &[(biodivine_lib_bdd::BddVariable, bool)]) -> Bdd {
@@ -391,7 +394,7 @@ impl BddRestrict for Bdd {
variables
.iter()
.for_each(|(var, _val)| variablelist.push(*var));
self.select(variables).exists(&variablelist)
self.select(variables).project(&variablelist)
}
}
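Both variants of this hunk compute the same restriction: fix a variable's value, then quantify that variable away, i.e. the Shannon cofactor. The apparent rename (var_project/project becoming var_exists/exists) lines up with the biodivine-lib-bdd bump from 0.4.2 to 0.5.0 elsewhere in this diff, though that correspondence is inferred from the diff rather than stated in it. A library-independent sanity-check sketch of the equivalence on plain booleans, not actual BDD code:

```rust
// Restricting f at x = b is "select x = b, then existentially quantify x",
// which is exactly the cofactor f(b, ·).
fn restrict_x<F>(f: F, b: bool) -> impl Fn(bool) -> bool
where
    F: Fn(bool, bool) -> bool,
{
    move |y| {
        // select x = b, then exists x:
        let selected = |x: bool| f(x, y) && x == b;
        selected(false) || selected(true)
    }
}

fn main() {
    let f = |x: bool, y: bool| x && !y; // f(x, y) = x ∧ ¬y
    let g = restrict_x(f, true); // f restricted at x = true is ¬y
    assert!(g(false) && !g(true));
}
```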
@ -427,8 +430,8 @@ mod test {
|
||||
let c = variables.eval_expression_string("c");
|
||||
let d = variables.eval_expression_string("a & b & c");
|
||||
let e = variables.eval_expression_string("a ^ b");
|
||||
let t = variables.eval_expression(&BooleanExpression::Const(true));
|
||||
let f = variables.eval_expression(&BooleanExpression::Const(false));
|
||||
let t = variables.eval_expression(&boolean_expression::BooleanExpression::Const(true));
|
||||
let f = variables.eval_expression(&boolean_expression::BooleanExpression::Const(false));
|
||||
|
||||
println!("{:?}", a.to_string());
|
||||
println!("{:?}", a.to_bytes());
|
||||
|
||||
@ -275,8 +275,8 @@ mod test {
|
||||
let term: Term = Term::from(value);
|
||||
let var = Var::from(value);
|
||||
// display
|
||||
assert_eq!(format!("{term}"), format!("Term({})", value));
|
||||
assert_eq!(format!("{var}"), format!("Var({})", value));
|
||||
assert_eq!(format!("{}", term), format!("Term({})", value));
|
||||
assert_eq!(format!("{}", var), format!("Var({})", value));
|
||||
//deref
|
||||
assert_eq!(value, *term);
|
||||
true
|
||||
|
||||
@ -342,6 +342,7 @@ let parsed_adf: Adf = parsed_custom_adf.into();
|
||||
#![deny(
|
||||
missing_debug_implementations,
|
||||
missing_copy_implementations,
|
||||
missing_copy_implementations,
|
||||
trivial_casts,
|
||||
trivial_numeric_casts,
|
||||
unsafe_code
|
||||
|
||||
@@ -1,6 +1,7 @@
//! Collection of all nogood-related structures.

use std::{
borrow::Borrow,
fmt::{Debug, Display},
ops::{BitAnd, BitOr, BitXor, BitXorAssign},
};
@@ -22,8 +23,11 @@ pub struct NoGood {
impl Eq for NoGood {}
impl PartialEq for NoGood {
fn eq(&self, other: &Self) -> bool {
(&self.active).bitxor(&other.active).is_empty()
&& (&self.value).bitxor(&other.value).is_empty()
self.active
.borrow()
.bitxor(other.active.borrow())
.is_empty()
&& self.value.borrow().bitxor(other.value.borrow()).is_empty()
}
}

@@ -109,11 +113,15 @@ impl NoGood {
/// Given a [NoGood] and another one, conclude a non-conflicting value which can be concluded on basis of the given one.
pub fn conclude(&self, other: &NoGood) -> Option<(usize, bool)> {
log::debug!("conclude: {:?} other {:?}", self, other);
let implication = (&self.active).bitxor(&other.active).bitand(&self.active);
let implication = self
.active
.borrow()
.bitxor(other.active.borrow())
.bitand(self.active.borrow());

let bothactive = (&self.active).bitand(&other.active);
let mut no_matches = (&bothactive).bitand(&other.value);
no_matches.bitxor_assign(bothactive.bitand(&self.value));
let bothactive = self.active.borrow().bitand(other.active.borrow());
let mut no_matches = bothactive.borrow().bitand(other.value.borrow());
no_matches.bitxor_assign(bothactive.bitand(self.value.borrow()));

if implication.len() == 1 && no_matches.is_empty() {
let pos = implication
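For readers skimming this hunk, the bit-level idea is: one nogood lets you conclude a literal from another exactly when it assigns one extra variable and the two agree everywhere they are both assigned. A minimal stand-alone sketch, assuming the `active`/`value` fields are `roaring::RoaringBitmap`s (the lib depends on `roaring` 0.10); the flipped return value mirrors what the code after this hunk presumably does and is an assumption here:

```rust
use roaring::RoaringBitmap;

// `active` marks which variables a nogood assigns, `value` their assigned truth values.
fn conclude(
    self_active: &RoaringBitmap,
    self_value: &RoaringBitmap,
    other_active: &RoaringBitmap,
    other_value: &RoaringBitmap,
) -> Option<(usize, bool)> {
    // Variables assigned by `self` but not by `other`.
    let implication = (self_active ^ other_active) & self_active;
    // Variables assigned by both, but with disagreeing values.
    let both_active = self_active & other_active;
    let no_matches = (&both_active & other_value) ^ (&both_active & self_value);

    if implication.len() == 1 && no_matches.is_empty() {
        let pos = implication.iter().next()?;
        // Assumption: the concluded literal is the flipped value at `pos`,
        // since a nogood forbids its own assignment.
        Some((pos as usize, !self_value.contains(pos)))
    } else {
        None
    }
}
```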
@@ -132,16 +140,16 @@ impl NoGood {

/// Updates the [NoGood] and a second one in a disjunctive (bitor) manner.
pub fn disjunction(&mut self, other: &NoGood) {
self.active = (&self.active).bitor(&other.active);
self.value = (&self.value).bitor(&other.value);
self.active = self.active.borrow().bitor(&other.active);
self.value = self.value.borrow().bitor(&other.value);
}

/// Returns [true] if the other [Interpretation] matches with all the assignments of the current [NoGood].
pub fn is_violating(&self, other: &Interpretation) -> bool {
let active = (&self.active).bitand(&other.active);
let active = self.active.borrow().bitand(other.active.borrow());
if self.active.len() == active.len() {
let lhs = (&active).bitand(&self.value);
let rhs = (&active).bitand(&other.value);
let lhs = active.borrow().bitand(self.value.borrow());
let rhs = active.borrow().bitand(other.value.borrow());
if lhs.bitxor(rhs).is_empty() {
return true;
}
@ -690,7 +690,7 @@ mod test {
|
||||
let a1 = bdd.and(v1, v2);
|
||||
let _a2 = bdd.or(a1, v3);
|
||||
|
||||
assert_eq!(format!("{bdd}"), " \n0 BddNode: Var(18446744073709551614), lo: Term(0), hi: Term(0)\n1 BddNode: Var(18446744073709551615), lo: Term(1), hi: Term(1)\n2 BddNode: Var(0), lo: Term(0), hi: Term(1)\n3 BddNode: Var(1), lo: Term(0), hi: Term(1)\n4 BddNode: Var(2), lo: Term(0), hi: Term(1)\n5 BddNode: Var(0), lo: Term(0), hi: Term(3)\n6 BddNode: Var(1), lo: Term(4), hi: Term(1)\n7 BddNode: Var(0), lo: Term(4), hi: Term(6)\n");
|
||||
assert_eq!(format!("{}", bdd), " \n0 BddNode: Var(18446744073709551614), lo: Term(0), hi: Term(0)\n1 BddNode: Var(18446744073709551615), lo: Term(1), hi: Term(1)\n2 BddNode: Var(0), lo: Term(0), hi: Term(1)\n3 BddNode: Var(1), lo: Term(0), hi: Term(1)\n4 BddNode: Var(2), lo: Term(0), hi: Term(1)\n5 BddNode: Var(0), lo: Term(0), hi: Term(3)\n6 BddNode: Var(1), lo: Term(4), hi: Term(1)\n7 BddNode: Var(0), lo: Term(4), hi: Term(6)\n");
|
||||
}
|
||||
|
||||
#[test]
|
||||
|
||||
@ -33,7 +33,6 @@ impl super::Bdd {
|
||||
/// # Attention
|
||||
/// - Constants for [`⊤`][crate::datatypes::Term::TOP] and [`⊥`][crate::datatypes::Term::BOT] concepts are not sent, as they are considered to be existing in every [Bdd][super::Bdd] structure.
|
||||
/// - Mixing manipulating operations and utilising the communication channel for a receiving [roBDD][super::Bdd] may end up in inconsistent data.
|
||||
///
|
||||
/// So far, only manipulate the [roBDD][super::Bdd] if no further [recv][Self::recv] will be called.
|
||||
pub fn with_sender_receiver(
|
||||
sender: crossbeam_channel::Sender<BddNode>,
|
||||
|
||||
@ -11,7 +11,7 @@ where
|
||||
V: Serialize + 'a,
|
||||
{
|
||||
let container: Vec<_> = target.into_iter().collect();
|
||||
Serialize::serialize(&container, ser)
|
||||
serde::Serialize::serialize(&container, ser)
|
||||
}
|
||||
|
||||
/// Deserialize from a [Vector][std::vec::Vec] to a [Map][std::collections::HashMap].
|
||||
@ -22,6 +22,6 @@ where
|
||||
K: Deserialize<'de>,
|
||||
V: Deserialize<'de>,
|
||||
{
|
||||
let container: Vec<_> = Deserialize::deserialize(des)?;
|
||||
Ok(T::from_iter(container))
|
||||
let container: Vec<_> = serde::Deserialize::deserialize(des)?;
|
||||
Ok(T::from_iter(container.into_iter()))
|
||||
}
|
||||
|
||||
@ -20,28 +20,28 @@ use crate::datatypes::adf::VarContainer;
|
||||
|
||||
/// A representation of a formula, still using the strings from the input.
|
||||
#[derive(Clone, PartialEq, Eq)]
|
||||
pub enum Formula {
|
||||
pub enum Formula<'a> {
|
||||
/// `c(f)` in the input format.
|
||||
Bot,
|
||||
/// `c(v)` in the input format.
|
||||
Top,
|
||||
/// Some atomic variable in the input format.
|
||||
Atom(String),
|
||||
Atom(&'a str),
|
||||
/// Negation of a subformula.
|
||||
Not(Box<Formula>),
|
||||
Not(Box<Formula<'a>>),
|
||||
/// Conjunction of two subformulae.
|
||||
And(Box<Formula>, Box<Formula>),
|
||||
And(Box<Formula<'a>>, Box<Formula<'a>>),
|
||||
/// Disjunction of two subformulae.
|
||||
Or(Box<Formula>, Box<Formula>),
|
||||
Or(Box<Formula<'a>>, Box<Formula<'a>>),
|
||||
/// Implication of two subformulae.
|
||||
Imp(Box<Formula>, Box<Formula>),
|
||||
Imp(Box<Formula<'a>>, Box<Formula<'a>>),
|
||||
/// Exclusive-Or of two subformulae.
|
||||
Xor(Box<Formula>, Box<Formula>),
|
||||
Xor(Box<Formula<'a>>, Box<Formula<'a>>),
|
||||
/// If and only if connective between two formulae.
|
||||
Iff(Box<Formula>, Box<Formula>),
|
||||
Iff(Box<Formula<'a>>, Box<Formula<'a>>),
|
||||
}
|
||||
|
||||
impl Formula {
|
||||
impl Formula<'_> {
|
||||
pub(crate) fn to_boolean_expr(
|
||||
&self,
|
||||
) -> biodivine_lib_bdd::boolean_expression::BooleanExpression {
|
||||
@ -90,29 +90,29 @@ impl Formula {
|
||||
}
|
||||
}
|
||||
|
||||
impl std::fmt::Debug for Formula {
|
||||
impl std::fmt::Debug for Formula<'_> {
|
||||
fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
|
||||
match self {
|
||||
Formula::Atom(a) => {
|
||||
write!(f, "{a}")?;
|
||||
write!(f, "{}", a)?;
|
||||
}
|
||||
Formula::Not(n) => {
|
||||
write!(f, "not({n:?})")?;
|
||||
write!(f, "not({:?})", n)?;
|
||||
}
|
||||
Formula::And(f1, f2) => {
|
||||
write!(f, "and({f1:?},{f2:?})")?;
|
||||
write!(f, "and({:?},{:?})", f1, f2)?;
|
||||
}
|
||||
Formula::Or(f1, f2) => {
|
||||
write!(f, "or({f1:?},{f2:?})")?;
|
||||
write!(f, "or({:?},{:?})", f1, f2)?;
|
||||
}
|
||||
Formula::Imp(f1, f2) => {
|
||||
write!(f, "imp({f1:?},{f2:?})")?;
|
||||
write!(f, "imp({:?},{:?})", f1, f2)?;
|
||||
}
|
||||
Formula::Xor(f1, f2) => {
|
||||
write!(f, "xor({f1:?},{f2:?})")?;
|
||||
write!(f, "xor({:?},{:?})", f1, f2)?;
|
||||
}
|
||||
Formula::Iff(f1, f2) => {
|
||||
write!(f, "iff({f1:?},{f2:?})")?;
|
||||
write!(f, "iff({:?},{:?})", f1, f2)?;
|
||||
}
|
||||
Formula::Bot => {
|
||||
write!(f, "Const(B)")?;
|
||||
@ -132,18 +132,14 @@ impl std::fmt::Debug for Formula {
|
||||
///
|
||||
/// Note that the parser can be utilised by an [ADF][`crate::adf::Adf`] to initialise it with minimal overhead.
|
||||
#[derive(Debug)]
|
||||
pub struct AdfParser {
|
||||
/// A name for each statement (identified by index in vector)
|
||||
pub namelist: Arc<RwLock<Vec<String>>>,
|
||||
/// Inverse mapping from name to index of statement in vector above
|
||||
pub dict: Arc<RwLock<HashMap<String, usize>>>,
|
||||
/// The formula (acceptance condition) for each statement identified by its index
|
||||
pub formulae: RefCell<Vec<Formula>>,
|
||||
/// The formula for each statement identified by its index
|
||||
pub formulaname: RefCell<Vec<String>>,
|
||||
pub struct AdfParser<'a> {
|
||||
namelist: Arc<RwLock<Vec<String>>>,
|
||||
dict: Arc<RwLock<HashMap<String, usize>>>,
|
||||
formulae: RefCell<Vec<Formula<'a>>>,
|
||||
formulaname: RefCell<Vec<String>>,
|
||||
}
|
||||
|
||||
impl Default for AdfParser {
|
||||
impl Default for AdfParser<'_> {
|
||||
fn default() -> Self {
|
||||
AdfParser {
|
||||
namelist: Arc::new(RwLock::new(Vec::new())),
|
||||
@ -154,7 +150,10 @@ impl Default for AdfParser {
|
||||
}
|
||||
}
|
||||
|
||||
impl<'a> AdfParser {
|
||||
impl<'a, 'b> AdfParser<'b>
|
||||
where
|
||||
'a: 'b,
|
||||
{
|
||||
#[allow(dead_code)]
|
||||
fn parse_statements(&'a self) -> impl FnMut(&'a str) -> IResult<&'a str, ()> {
|
||||
move |input| {
|
||||
@ -213,9 +212,9 @@ impl<'a> AdfParser {
|
||||
}
|
||||
}
|
||||
|
||||
impl AdfParser {
|
||||
impl<'a> AdfParser<'a> {
|
||||
/// Creates a new parser, utilising the already existing [VarContainer]
|
||||
pub fn with_var_container(var_container: VarContainer) -> AdfParser {
|
||||
pub fn with_var_container(var_container: VarContainer) -> AdfParser<'a> {
|
||||
AdfParser {
|
||||
namelist: var_container.names(),
|
||||
dict: var_container.mappings(),
|
||||
@ -225,7 +224,7 @@ impl AdfParser {
|
||||
}
|
||||
}
|
||||
|
||||
impl AdfParser {
|
||||
impl AdfParser<'_> {
|
||||
/// after an update to the namelist, all indizes are updated
|
||||
fn regenerate_indizes(&self) {
|
||||
self.namelist
|
||||
@ -285,7 +284,7 @@ impl AdfParser {
|
||||
}
|
||||
|
||||
fn atomic_term(input: &str) -> IResult<&str, Formula> {
|
||||
AdfParser::atomic(input).map(|(input, result)| (input, Formula::Atom(result.to_string())))
|
||||
AdfParser::atomic(input).map(|(input, result)| (input, Formula::Atom(result)))
|
||||
}
|
||||
|
||||
fn formula(input: &str) -> IResult<&str, Formula> {
|
||||
@ -481,7 +480,7 @@ mod test {
|
||||
let (_remain, result) = AdfParser::formula(input).unwrap();
|
||||
|
||||
assert_eq!(
|
||||
format!("{result:?}"),
|
||||
format!("{:?}", result),
|
||||
"and(or(not(a),iff( iff left ,b)),xor(imp(c,d),e))"
|
||||
);
|
||||
|
||||
@ -505,10 +504,7 @@ mod test {
|
||||
assert_eq!(parser.dict_value("b"), Some(2usize));
|
||||
assert_eq!(
|
||||
format!("{:?}", parser.ac_at(1).unwrap()),
|
||||
format!(
|
||||
"{:?}",
|
||||
Formula::Not(Box::new(Formula::Atom("a".to_string())))
|
||||
)
|
||||
format!("{:?}", Formula::Not(Box::new(Formula::Atom("a"))))
|
||||
);
|
||||
assert_eq!(parser.formula_count(), 3);
|
||||
assert_eq!(parser.formula_order(), vec![0, 2, 1]);
|
||||
|
||||
@@ -3,7 +3,7 @@ version: '3.1'
services:

mongo:
image: mongo:6
image: mongo
restart: always
ports:
- 27017:27017
@ -1,4 +1,3 @@
|
||||
use std::cell::RefCell;
|
||||
use std::collections::{HashMap, HashSet};
|
||||
use std::sync::{Arc, RwLock};
|
||||
#[cfg(feature = "mock_long_computations")]
|
||||
@ -22,7 +21,7 @@ use serde::{Deserialize, Serialize};
|
||||
use adf_bdd::adf::Adf;
|
||||
use adf_bdd::adfbiodivine::Adf as BdAdf;
|
||||
use adf_bdd::obdd::Bdd;
|
||||
use adf_bdd::parser::{AdfParser, Formula};
|
||||
use adf_bdd::parser::AdfParser;
|
||||
|
||||
use crate::config::{AppState, RunningInfo, Task, ADF_COLL, COMPUTE_TIME, DB_NAME, USER_COLL};
|
||||
use crate::user::{username_exists, User};
|
||||
@ -206,8 +205,6 @@ pub(crate) struct AdfProblem {
|
||||
pub(crate) username: String,
|
||||
pub(crate) code: String,
|
||||
pub(crate) parsing_used: Parsing,
|
||||
#[serde(default)]
|
||||
pub(crate) is_af: bool,
|
||||
pub(crate) adf: SimplifiedAdfOpt,
|
||||
pub(crate) acs_per_strategy: AcsPerStrategy,
|
||||
}
|
||||
@ -218,7 +215,6 @@ struct AddAdfProblemBodyMultipart {
|
||||
code: Option<Text<String>>, // Either Code or File is set
|
||||
file: Option<TempFile>, // Either Code or File is set
|
||||
parsing: Text<Parsing>,
|
||||
is_af: Text<bool>, // if its not an AF then it is an ADF
|
||||
}
|
||||
|
||||
#[derive(Clone)]
|
||||
@ -226,7 +222,6 @@ struct AddAdfProblemBodyPlain {
|
||||
name: String,
|
||||
code: String,
|
||||
parsing: Parsing,
|
||||
is_af: bool, // if its not an AF then it is an ADF
|
||||
}
|
||||
|
||||
impl TryFrom<AddAdfProblemBodyMultipart> for AddAdfProblemBodyPlain {
|
||||
@ -242,7 +237,6 @@ impl TryFrom<AddAdfProblemBodyMultipart> for AddAdfProblemBodyPlain {
|
||||
.and_then(|code| (!code.is_empty()).then_some(code))
|
||||
.ok_or("Either a file or the code has to be provided.")?,
|
||||
parsing: source.parsing.into_inner(),
|
||||
is_af: source.is_af.into_inner(),
|
||||
})
|
||||
}
|
||||
}
|
||||
@ -265,7 +259,6 @@ struct AdfProblemInfo {
|
||||
name: String,
|
||||
code: String,
|
||||
parsing_used: Parsing,
|
||||
is_af: bool,
|
||||
acs_per_strategy: AcsPerStrategy,
|
||||
running_tasks: Vec<Task>,
|
||||
}
|
||||
@ -276,7 +269,6 @@ impl AdfProblemInfo {
|
||||
name: adf.name.clone(),
|
||||
code: adf.code,
|
||||
parsing_used: adf.parsing_used,
|
||||
is_af: adf.is_af,
|
||||
acs_per_strategy: adf.acs_per_strategy,
|
||||
running_tasks: tasks
|
||||
.iter()
|
||||
@ -288,87 +280,6 @@ impl AdfProblemInfo {
|
||||
}
|
||||
}
|
||||
|
||||
struct AF(Vec<Vec<usize>>);
|
||||
|
||||
impl From<AF> for AdfParser {
|
||||
fn from(source: AF) -> Self {
|
||||
let names: Vec<String> = (0..source.0.len())
|
||||
.map(|val| (val + 1).to_string())
|
||||
.collect();
|
||||
let dict: HashMap<String, usize> = names
|
||||
.iter()
|
||||
.enumerate()
|
||||
.map(|(i, val)| (val.clone(), i))
|
||||
.collect();
|
||||
let formulae: Vec<Formula> = source
|
||||
.0
|
||||
.into_iter()
|
||||
.map(|attackers| {
|
||||
attackers.into_iter().fold(Formula::Top, |acc, attacker| {
|
||||
Formula::And(
|
||||
Box::new(acc),
|
||||
Box::new(Formula::Not(Box::new(Formula::Atom(
|
||||
(attacker + 1).to_string(),
|
||||
)))),
|
||||
)
|
||||
})
|
||||
})
|
||||
.collect();
|
||||
let formulanames = names.clone();
|
||||
|
||||
Self {
|
||||
namelist: Arc::new(RwLock::new(names)),
|
||||
dict: Arc::new(RwLock::new(dict)),
|
||||
formulae: RefCell::new(formulae),
|
||||
formulaname: RefCell::new(formulanames),
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
fn parse_af(code: String) -> Result<AdfParser, &'static str> {
|
||||
let mut lines = code.lines();
|
||||
|
||||
let Some(first_line) = lines.next() else {
|
||||
return Err("There must be at least one line in the AF input.");
|
||||
};
|
||||
|
||||
let first_line: Vec<_> = first_line.split(" ").collect();
|
||||
if first_line[0] != "p" || first_line[1] != "af" {
|
||||
return Err("Expected first line to be of the form: p af <n>");
|
||||
}
|
||||
|
||||
let Ok(num_arguments) = first_line[2].parse::<usize>() else {
|
||||
return Err("Could not convert number of arguments to u32; expected first line to be of the form: p af <n>");
|
||||
};
|
||||
|
||||
let attacks_opt: Option<Vec<(usize, usize)>> = lines
|
||||
.filter(|line| !line.starts_with('#') && !line.is_empty())
|
||||
.map(|line| {
|
||||
let mut line = line.split(" ");
|
||||
let a = line.next()?;
|
||||
let b = line.next()?;
|
||||
if line.next().is_some() {
|
||||
None
|
||||
} else {
|
||||
Some((a.parse::<usize>().ok()?, b.parse::<usize>().ok()?))
|
||||
}
|
||||
})
|
||||
.collect();
|
||||
let Some(attacks) = attacks_opt else {
|
||||
return Err("Line must be of the form: n m");
|
||||
};
|
||||
|
||||
// index in outer vector represents attacked element
|
||||
let mut is_attacked_by: Vec<Vec<usize>> = vec![vec![]; num_arguments];
|
||||
for (a, b) in attacks {
|
||||
is_attacked_by[b - 1].push(a - 1); // we normalize names to be zero-indexed
|
||||
}
|
||||
|
||||
let hacked_adf_parser = AdfParser::from(AF(is_attacked_by));
|
||||
|
||||
Ok(hacked_adf_parser)
|
||||
}
|
||||
|
||||
#[post("/add")]
|
||||
async fn add_adf_problem(
|
||||
req: HttpRequest,
|
||||
@ -470,7 +381,6 @@ async fn add_adf_problem(
|
||||
username: username.clone(),
|
||||
code: adf_problem_input.code.clone(),
|
||||
parsing_used: adf_problem_input.parsing,
|
||||
is_af: adf_problem_input.is_af,
|
||||
adf: SimplifiedAdfOpt::None,
|
||||
acs_per_strategy: AcsPerStrategy::default(),
|
||||
};
|
||||
@ -503,20 +413,9 @@ async fn add_adf_problem(
|
||||
#[cfg(feature = "mock_long_computations")]
|
||||
std::thread::sleep(Duration::from_secs(20));
|
||||
|
||||
let (parser, parse_result) = {
|
||||
if adf_problem_input.is_af {
|
||||
parse_af(adf_problem_input.code)
|
||||
.map(|p| (p, Ok(())))
|
||||
.unwrap_or_else(|e| (AdfParser::default(), Err(e)))
|
||||
} else {
|
||||
let parser = AdfParser::default();
|
||||
let parse_result = parser.parse()(&adf_problem_input.code)
|
||||
.map(|_| ())
|
||||
.map_err(|_| "ADF could not be parsed, double check your input!");
|
||||
|
||||
(parser, parse_result)
|
||||
}
|
||||
};
|
||||
let parser = AdfParser::default();
|
||||
let parse_result = parser.parse()(&adf_problem_input.code)
|
||||
.map_err(|_| "ADF could not be parsed, double check your input!");
|
||||
|
||||
let result = parse_result.map(|_| {
|
||||
let lib_adf = match adf_problem_input.parsing {
|
||||
@ -601,7 +500,7 @@ async fn solve_adf_problem(
|
||||
.collection(ADF_COLL);
|
||||
|
||||
let username = match identity.map(|id| id.id()) {
|
||||
Option::None => {
|
||||
None => {
|
||||
return HttpResponse::Unauthorized().body("You need to login to add an ADF problem.")
|
||||
}
|
||||
Some(Err(err)) => return HttpResponse::InternalServerError().body(err.to_string()),
|
||||
@ -613,7 +512,7 @@ async fn solve_adf_problem(
|
||||
.await
|
||||
{
|
||||
Err(err) => return HttpResponse::InternalServerError().body(err.to_string()),
|
||||
Ok(Option::None) => {
|
||||
Ok(None) => {
|
||||
return HttpResponse::NotFound()
|
||||
.body(format!("ADF problem with name {problem_name} not found."))
|
||||
}
|
||||
@ -745,7 +644,7 @@ async fn get_adf_problem(
|
||||
.collection(ADF_COLL);
|
||||
|
||||
let username = match identity.map(|id| id.id()) {
|
||||
Option::None => {
|
||||
None => {
|
||||
return HttpResponse::Unauthorized().body("You need to login to get an ADF problem.")
|
||||
}
|
||||
Some(Err(err)) => return HttpResponse::InternalServerError().body(err.to_string()),
|
||||
@ -757,7 +656,7 @@ async fn get_adf_problem(
|
||||
.await
|
||||
{
|
||||
Err(err) => return HttpResponse::InternalServerError().body(err.to_string()),
|
||||
Ok(Option::None) => {
|
||||
Ok(None) => {
|
||||
return HttpResponse::NotFound()
|
||||
.body(format!("ADF problem with name {problem_name} not found."))
|
||||
}
|
||||
@ -783,7 +682,7 @@ async fn delete_adf_problem(
|
||||
.collection(ADF_COLL);
|
||||
|
||||
let username = match identity.map(|id| id.id()) {
|
||||
Option::None => {
|
||||
None => {
|
||||
return HttpResponse::Unauthorized().body("You need to login to get an ADF problem.")
|
||||
}
|
||||
Some(Err(err)) => return HttpResponse::InternalServerError().body(err.to_string()),
|
||||
@ -818,7 +717,7 @@ async fn get_adf_problems_for_user(
|
||||
.collection(ADF_COLL);
|
||||
|
||||
let username = match identity.map(|id| id.id()) {
|
||||
Option::None => {
|
||||
None => {
|
||||
return HttpResponse::Unauthorized().body("You need to login to get an ADF problem.")
|
||||
}
|
||||
Some(Err(err)) => return HttpResponse::InternalServerError().body(err.to_string()),
|
||||
|
||||
@ -92,7 +92,7 @@ impl DoubleLabeledGraph {
|
||||
.iter()
|
||||
.enumerate()
|
||||
.filter(|(i, _)| node_indices.contains(i))
|
||||
.filter(|(_, node)| ![Var::TOP, Var::BOT].contains(&node.var()))
|
||||
.filter(|(_, node)| !vec![Var::TOP, Var::BOT].contains(&node.var()))
|
||||
.map(|(i, &node)| (i, node.lo().value()))
|
||||
.map(|(i, v)| (i.to_string(), v.to_string()))
|
||||
.collect();
|
||||
@ -103,7 +103,7 @@ impl DoubleLabeledGraph {
|
||||
.iter()
|
||||
.enumerate()
|
||||
.filter(|(i, _)| node_indices.contains(i))
|
||||
.filter(|(_, node)| ![Var::TOP, Var::BOT].contains(&node.var()))
|
||||
.filter(|(_, node)| !vec![Var::TOP, Var::BOT].contains(&node.var()))
|
||||
.map(|(i, &node)| (i, node.hi().value()))
|
||||
.map(|(i, v)| (i.to_string(), v.to_string()))
|
||||
.collect();
|
||||
|
||||