1
0
mirror of https://github.com/ellmau/adf-obdd.git synced 2025-12-20 09:39:38 +01:00

Compare commits

..

20 Commits
0.3.1 ... main

Author SHA1 Message Date
Lukas Gerlach
ff12d4fede
Add naive af support to webserver (#195)
* Update flake

* Remove cargo-kcov

* Make AdfParser publically accessible

* Fix mongodb version to 6

* Add naive AF support in Web

* Add missing doc strings

* Remove unused import

* Remove TODO comment

* Update DevSkim Action

* Upgrade flake

* Apply clippy suggestions

---------

Co-authored-by: monsterkrampe <monsterkrampe@users.noreply.github.com>
2025-06-30 08:51:23 +02:00
Lukas Gerlach
c9631346ab
Update Data Protection Commissioner address (#194)
Co-authored-by: monsterkrampe <monsterkrampe@users.noreply.github.com>
2025-05-22 10:55:50 +02:00
Stefan Ellmauthaler
ca6c94f2e5 Update versions and fix formatting and clippy issues
flake.lock: Update

Flake lock file updates:

• Updated input 'flake-utils':
    'github:gytis-ivaskevicius/flake-utils-plus/bfc53579db89de750b25b0c5e7af299e0c06d7d3?narHash=sha256-YkbRa/1wQWdWkVJ01JvV%2B75KIdM37UErqKgTf0L54Fk%3D' (2023-10-03)
  → 'github:gytis-ivaskevicius/flake-utils-plus/afcb15b845e74ac5e998358709b2b5fe42a948d1?narHash=sha256-4WNeriUToshQ/L5J%2BdTSWC5OJIwT39SEP7V7oylndi8%3D' (2025-02-03)
• Updated input 'rust-overlay':
    'github:oxalica/rust-overlay/2037779e018ebc2d381001a891e2a793fce7a74f?narHash=sha256-5bD6iSPDgVTLly2gy2oJVwzuyuFZOz2p4qt8c8UoYIE%3D' (2024-01-08)
  → 'github:oxalica/rust-overlay/f3cd1e0feb994188fe3ad9a5c3ab021ed433b8c8?narHash=sha256-HUtFcF4NLwvu7CAowWgqCHXVkNj0EOc/W6Ism4biV6I%3D' (2025-03-13)
• Removed input 'rust-overlay/flake-utils'

Signed-off-by: Stefan Ellmauthaler <ellmauthaler@echo-intelligence.at>
2025-03-13 18:30:16 +01:00
Stefan Ellmauthaler
54e57e3820 Update GDPR/legal document
Signed-off-by: Stefan Ellmauthaler <ellmauthaler@echo-intelligence.at>
2025-03-13 18:30:16 +01:00
f601d473cc Fix bug introduced by 530fb5c 2024-01-08 12:56:22 +01:00
530fb5c4e6 Fix clippy warnings 2024-01-08 12:01:10 +01:00
a85b0cb129 flake.lock: Update
Flake lock file updates:

• Updated input 'flake-utils':
    'github:gytis-ivaskevicius/flake-utils-plus/2bf0f91643c2e5ae38c1b26893ac2927ac9bd82a' (2022-07-07)
  → 'github:gytis-ivaskevicius/flake-utils-plus/bfc53579db89de750b25b0c5e7af299e0c06d7d3' (2023-10-03)
• Updated input 'flake-utils/flake-utils':
    'github:numtide/flake-utils/919d646de7be200f3bf08cb76ae1f09402b6f9b4' (2023-07-11)
  → 'github:numtide/flake-utils/ff7b65b44d01cf9ba6a71320833626af21126384' (2023-09-12)
• Updated input 'nixpkgs':
    'github:NixOS/nixpkgs/bd836ac5e5a7358dea73cb74a013ca32864ccb86' (2023-08-01)
  → 'github:NixOS/nixpkgs/70bdadeb94ffc8806c0570eb5c2695ad29f0e421' (2024-01-03)
• Updated input 'rust-overlay':
    'github:oxalica/rust-overlay/99df4908445be37ddb2d332580365fce512a7dcf' (2023-08-03)
  → 'github:oxalica/rust-overlay/2037779e018ebc2d381001a891e2a793fce7a74f' (2024-01-08)
2024-01-08 12:01:10 +01:00
Stefan Ellmauthaler
627a1a1810
Add flake app and packages for adf-bdd (#155)
* Add flake packages for adf-bdd
2023-08-04 16:00:39 +02:00
dependabot[bot]
fc0042fcd1
Bump serde from 1.0.180 to 1.0.181 (#166) 2023-08-04 09:28:10 +00:00
dependabot[bot]
bc403d10cb
Merge pull request #165 from ellmau/dependabot/npm_and_yarn/frontend/semver-6.3.1 2023-08-04 09:20:41 +00:00
dependabot[bot]
34d3af0c0d
Bump semver from 6.3.0 to 6.3.1 in /frontend
Bumps [semver](https://github.com/npm/node-semver) from 6.3.0 to 6.3.1.
- [Release notes](https://github.com/npm/node-semver/releases)
- [Changelog](https://github.com/npm/node-semver/blob/v6.3.1/CHANGELOG.md)
- [Commits](https://github.com/npm/node-semver/compare/v6.3.0...v6.3.1)

---
updated-dependencies:
- dependency-name: semver
  dependency-type: indirect
...

Signed-off-by: dependabot[bot] <support@github.com>
2023-08-04 09:11:03 +00:00
monsterkrampe
8cafca22eb Fix typo in info text 2023-08-04 11:09:08 +02:00
monsterkrampe
3979f77d03 Upgrade dependencies 2023-08-04 10:59:27 +02:00
eeef4729b6 flake.lock: Update
Flake lock file updates:

• Updated input 'flake-utils':
    'github:numtide/flake-utils/93a2b84fc4b70d9e089d029deacc3583435c2ed6' (2023-03-15)
  → 'github:numtide/flake-utils/a1720a10a6cfe8234c0e93907ffe81be440f4cef' (2023-05-31)
• Added input 'flake-utils/systems':
    'github:nix-systems/default/da67096a3b9bf56a91d16901293e51ba5b49a27e' (2023-04-09)
• Updated input 'nixpkgs-unstable':
    'github:NixOS/nixpkgs/4bb072f0a8b267613c127684e099a70e1f6ff106' (2023-03-27)
  → 'github:NixOS/nixpkgs/4729ffac6fd12e26e5a8de002781ffc49b0e94b7' (2023-06-06)
• Updated input 'rust-overlay':
    'github:oxalica/rust-overlay/c8d8d05b8100d451243b614d950fa3f966c1fcc2' (2023-03-29)
  → 'github:oxalica/rust-overlay/b4b71458b92294e8f1c3a112d972e3cff8a2ab71' (2023-06-08)
2023-06-08 09:44:30 +02:00
b68c0b3d3f NixOs 23.05 2023-06-08 09:44:30 +02:00
dependabot[bot]
6c0da967b9 Bump clap from 4.2.7 to 4.3.0
Bumps [clap](https://github.com/clap-rs/clap) from 4.2.7 to 4.3.0.
- [Release notes](https://github.com/clap-rs/clap/releases)
- [Changelog](https://github.com/clap-rs/clap/blob/master/CHANGELOG.md)
- [Commits](https://github.com/clap-rs/clap/compare/v4.2.7...clap_complete-v4.3.0)

---
updated-dependencies:
- dependency-name: clap
  dependency-type: direct:production
  update-type: version-update:semver-minor
...

Signed-off-by: dependabot[bot] <support@github.com>
2023-06-08 09:44:30 +02:00
dependabot[bot]
716d1194a6 Bump predicates from 2.1.5 to 3.0.3
Bumps [predicates](https://github.com/assert-rs/predicates-rs) from 2.1.5 to 3.0.3.
- [Changelog](https://github.com/assert-rs/predicates-rs/blob/master/CHANGELOG.md)
- [Commits](https://github.com/assert-rs/predicates-rs/compare/v2.1.5...v3.0.3)

---
updated-dependencies:
- dependency-name: predicates
  dependency-type: direct:production
  update-type: version-update:semver-major
...

Signed-off-by: dependabot[bot] <support@github.com>
2023-06-08 09:44:30 +02:00
dependabot[bot]
3445ae343f Bump serde from 1.0.162 to 1.0.163
Bumps [serde](https://github.com/serde-rs/serde) from 1.0.162 to 1.0.163.
- [Release notes](https://github.com/serde-rs/serde/releases)
- [Commits](https://github.com/serde-rs/serde/compare/v1.0.162...v1.0.163)

---
updated-dependencies:
- dependency-name: serde
  dependency-type: direct:production
  update-type: version-update:semver-patch
...

Signed-off-by: dependabot[bot] <support@github.com>
2023-06-08 09:44:30 +02:00
Lukas Gerlach
26e978ca47
Update Cargo.lock (#148)
* Update Cargo.lock

* Update biodivine-lib-bdd

---------

Co-authored-by: monsterkrampe <monsterkrampe@users.noreply.github.com>
Co-authored-by: Stefan Ellmauthaler <stefan.ellmauthaler@tu-dresden.de>
2023-05-05 15:37:34 +02:00
Lukas Gerlach
dd8524e02e
Update dependencies in yarn.lock (#146) 2023-05-05 11:07:50 +02:00
24 changed files with 3512 additions and 2595 deletions

View File

@ -16,19 +16,19 @@ on:
jobs:
lint:
name: DevSkim
runs-on: ubuntu-20.04
runs-on: ubuntu-latest
permissions:
actions: read
contents: read
security-events: write
steps:
- name: Checkout code
uses: actions/checkout@v3
uses: actions/checkout@v4
- name: Run DevSkim scanner
uses: microsoft/DevSkim-Action@v1
- name: Upload DevSkim scan results to GitHub Security tab
uses: github/codeql-action/upload-sarif@v2
uses: github/codeql-action/upload-sarif@v3
with:
sarif_file: devskim-results.sarif

2269
Cargo.lock generated

File diff suppressed because it is too large Load Diff

View File

@ -1,6 +1,6 @@
[package]
name = "adf-bdd-bin"
version = "0.3.0"
version = "0.3.0-dev"
authors = ["Stefan Ellmauthaler <stefan.ellmauthaler@tu-dresden.de>"]
edition = "2021"
homepage = "https://ellmau.github.io/adf-obdd"
@ -17,7 +17,7 @@ path = "src/main.rs"
[dependencies]
adf_bdd = { version="0.3.1", path="../lib", default-features = false }
clap = {version = "4.1.4", features = [ "derive", "cargo", "env" ]}
clap = {version = "4.3.0", features = [ "derive", "cargo", "env" ]}
log = { version = "0.4", features = [ "max_level_trace", "release_max_level_info" ] }
serde = { version = "1.0", features = ["derive","rc"] }
serde_json = "1.0"
@ -27,7 +27,7 @@ crossbeam-channel = "0.5"
[dev-dependencies]
assert_cmd = "2.0"
predicates = "2.1"
predicates = "3.0"
assert_fs = "1.0"
[features]

View File

@ -63,7 +63,6 @@ OPTIONS:
#![deny(
missing_debug_implementations,
missing_copy_implementations,
missing_copy_implementations,
trivial_casts,
trivial_numeric_casts,
unsafe_code
@ -181,7 +180,7 @@ impl App {
let input = std::fs::read_to_string(self.input.clone()).expect("Error Reading File");
match self.implementation.as_str() {
"hybrid" => {
let parser = adf_bdd::parser::AdfParser::default();
let parser = AdfParser::default();
match parser.parse()(&input) {
Ok(_) => log::info!("[Done] parsing"),
Err(e) => {
@ -205,14 +204,14 @@ impl App {
Some("nai") => {
let naive_adf = adf.hybrid_step_opt(false);
for ac_counts in naive_adf.formulacounts(false) {
print!("{:?} ", ac_counts);
print!("{ac_counts:?} ");
}
println!();
}
Some("mem") => {
let naive_adf = adf.hybrid_step_opt(false);
for ac_counts in naive_adf.formulacounts(true) {
print!("{:?}", ac_counts);
print!("{ac_counts:?}");
}
println!();
}
@ -283,7 +282,7 @@ impl App {
if self.counter.is_some() {
log::error!("Modelcounting not supported in biodivine mode");
}
let parser = adf_bdd::parser::AdfParser::default();
let parser = AdfParser::default();
match parser.parse()(&input) {
Ok(_) => log::info!("[Done] parsing"),
Err(e) => {
@ -384,13 +383,13 @@ impl App {
match self.counter.as_deref() {
Some("nai") => {
for ac_counts in adf.formulacounts(false) {
print!("{:?} ", ac_counts);
print!("{ac_counts:?} ");
}
println!();
}
Some("mem") => {
for ac_counts in adf.formulacounts(true) {
print!("{:?}", ac_counts);
print!("{ac_counts:?}");
}
println!();
}

87
flake.lock generated
View File

@ -1,64 +1,53 @@
{
"nodes": {
"flake-utils": {
"inputs": {
"flake-utils": "flake-utils_2"
},
"locked": {
"lastModified": 1678901627,
"narHash": "sha256-U02riOqrKKzwjsxc/400XnElV+UtPUQWpANPlyazjH0=",
"owner": "numtide",
"repo": "flake-utils",
"rev": "93a2b84fc4b70d9e089d029deacc3583435c2ed6",
"lastModified": 1738591040,
"narHash": "sha256-4WNeriUToshQ/L5J+dTSWC5OJIwT39SEP7V7oylndi8=",
"owner": "gytis-ivaskevicius",
"repo": "flake-utils-plus",
"rev": "afcb15b845e74ac5e998358709b2b5fe42a948d1",
"type": "github"
},
"original": {
"owner": "numtide",
"repo": "flake-utils",
"owner": "gytis-ivaskevicius",
"repo": "flake-utils-plus",
"type": "github"
}
},
"gitignoresrc": {
"flake": false,
"flake-utils_2": {
"inputs": {
"systems": "systems"
},
"locked": {
"lastModified": 1660459072,
"narHash": "sha256-8DFJjXG8zqoONA1vXtgeKXy68KdJL5UaXR8NtVMUbx8=",
"owner": "hercules-ci",
"repo": "gitignore.nix",
"rev": "a20de23b925fd8264fd7fad6454652e142fd7f73",
"lastModified": 1694529238,
"narHash": "sha256-zsNZZGTGnMOf9YpHKJqMSsa0dXbfmxeoJ7xHlrt+xmY=",
"owner": "numtide",
"repo": "flake-utils",
"rev": "ff7b65b44d01cf9ba6a71320833626af21126384",
"type": "github"
},
"original": {
"owner": "hercules-ci",
"repo": "gitignore.nix",
"owner": "numtide",
"repo": "flake-utils",
"type": "github"
}
},
"nixpkgs": {
"locked": {
"lastModified": 1679966490,
"narHash": "sha256-k0jV+y1jawE6w4ZvKgXDNg4+O9NNtcaWwzw8gufv0b4=",
"lastModified": 1750969886,
"narHash": "sha256-zW/OFnotiz/ndPFdebpo3X0CrbVNf22n4DjN2vxlb58=",
"owner": "NixOS",
"repo": "nixpkgs",
"rev": "5b7cd5c39befee629be284970415b6eb3b0ff000",
"rev": "a676066377a2fe7457369dd37c31fd2263b662f4",
"type": "github"
},
"original": {
"owner": "NixOS",
"ref": "nixos-22.11",
"repo": "nixpkgs",
"type": "github"
}
},
"nixpkgs-unstable": {
"locked": {
"lastModified": 1679944645,
"narHash": "sha256-e5Qyoe11UZjVfgRfwNoSU57ZeKuEmjYb77B9IVW7L/M=",
"owner": "NixOS",
"repo": "nixpkgs",
"rev": "4bb072f0a8b267613c127684e099a70e1f6ff106",
"type": "github"
},
"original": {
"owner": "NixOS",
"ref": "nixos-unstable",
"ref": "nixos-25.05",
"repo": "nixpkgs",
"type": "github"
}
@ -66,27 +55,22 @@
"root": {
"inputs": {
"flake-utils": "flake-utils",
"gitignoresrc": "gitignoresrc",
"nixpkgs": "nixpkgs",
"nixpkgs-unstable": "nixpkgs-unstable",
"rust-overlay": "rust-overlay"
}
},
"rust-overlay": {
"inputs": {
"flake-utils": [
"flake-utils"
],
"nixpkgs": [
"nixpkgs"
]
},
"locked": {
"lastModified": 1680056830,
"narHash": "sha256-WB4KD8oLSxAAtmXYSzwVwQusC2Gy5vTEln1uTt0GI2k=",
"lastModified": 1751251399,
"narHash": "sha256-y+viCuy/eKKpkX1K2gDvXIJI/yzvy6zA3HObapz9XZ0=",
"owner": "oxalica",
"repo": "rust-overlay",
"rev": "c8d8d05b8100d451243b614d950fa3f966c1fcc2",
"rev": "b22d5ee8c60ed1291521f2dde48784edd6bf695b",
"type": "github"
},
"original": {
@ -94,6 +78,21 @@
"repo": "rust-overlay",
"type": "github"
}
},
"systems": {
"locked": {
"lastModified": 1681028828,
"narHash": "sha256-Vy1rq5AaRuLzOxct8nz4T6wlgyUR7zLU309k9mBC768=",
"owner": "nix-systems",
"repo": "default",
"rev": "da67096a3b9bf56a91d16901293e51ba5b49a27e",
"type": "github"
},
"original": {
"owner": "nix-systems",
"repo": "default",
"type": "github"
}
}
},
"root": "root",

107
flake.nix
View File

@ -1,50 +1,89 @@
{
description = "basic rust flake";
rec {
description = "adf-bdd, Abstract Dialectical Frameworks solved by Binary Decision Diagrams; developed in Dresden";
inputs = {
nixpkgs.url = "github:NixOS/nixpkgs/nixos-22.11";
nixpkgs-unstable.url = "github:NixOS/nixpkgs/nixos-unstable";
nixpkgs.url = "github:NixOS/nixpkgs/nixos-25.05";
rust-overlay = {
url = "github:oxalica/rust-overlay";
inputs = {
nixpkgs.follows = "nixpkgs";
flake-utils.follows = "flake-utils";
flake-utils.follows = "flake-utils/flake-utils";
};
};
flake-utils.url = "github:numtide/flake-utils";
gitignoresrc = {
url = "github:hercules-ci/gitignore.nix";
flake = false;
};
flake-utils.url = "github:gytis-ivaskevicius/flake-utils-plus";
};
outputs = { self, nixpkgs, nixpkgs-unstable, flake-utils, gitignoresrc
, rust-overlay, ... }@inputs:
{
#overlay = import ./nix { inherit gitignoresrc; };
} // (flake-utils.lib.eachDefaultSystem (system:
let
unstable = import nixpkgs-unstable { inherit system; };
pkgs = import nixpkgs {
inherit system;
overlays = [ (import rust-overlay) ];
outputs = inputs @ {
self,
flake-utils,
rust-overlay,
...
}:
flake-utils.lib.mkFlake {
inherit self inputs;
channels.nixpkgs.overlaysBuilder = channels: [rust-overlay.overlays.default];
outputsBuilder = channels: let
pkgs = channels.nixpkgs;
toolchain = pkgs.rust-bin.stable.latest.default;
platform = pkgs.makeRustPlatform {
cargo = toolchain;
rustc = toolchain;
};
in rec {
devShell = pkgs.mkShell {
packages = let
cargoMetaBin = (builtins.fromTOML (builtins.readFile ./bin/Cargo.toml)).package;
cargoMetaLib = (builtins.fromTOML (builtins.readFile ./lib/Cargo.toml)).package;
meta = {
inherit description;
homepage = "https://github.com/ellmau/adf-obdd";
license = [pkgs.lib.licenses.mit];
nativeBuildInputs = with platform; [
cargoBuildHook
cargoCheckHook
];
};
in rec {
adf-bdd = platform.buildRustPackage {
pname = "adf-bdd";
inherit (cargoMetaBin) version;
inherit meta;
src = ./.;
cargoLock.lockFile = ./Cargo.lock;
buildAndTestSubdir = "bin";
};
adf_bdd = platform.buildRustPackage {
pname = "adf_bdd";
inherit (cargoMetaLib) version;
inherit meta;
src = ./.;
cargoLock.lockFile = ./Cargo.lock;
buildAndTestSubdir = "lib";
};
};
devShells.default = pkgs.mkShell {
RUST_LOG = "debug";
RUST_BACKTRACE = 1;
buildInputs = [
pkgs.rust-bin.stable.latest.rustfmt
pkgs.rust-bin.stable.latest.default
pkgs.rust-analyzer
pkgs.cargo-audit
pkgs.cargo-license
pkgs.cargo-tarpaulin
pkgs.cargo-kcov
pkgs.valgrind
pkgs.gnuplot
pkgs.kcov
];
shellHook = ''
export PATH=''${HOME}/.cargo/bin''${PATH+:''${PATH}}
'';
buildInputs = let
notOn = systems:
pkgs.lib.optionals (!builtins.elem pkgs.system systems);
in
[
toolchain
pkgs.rust-analyzer
pkgs.cargo-audit
pkgs.cargo-license
]
++ (notOn ["aarch64-darwin" "x86_64-darwin"] [pkgs.kcov pkgs.gnuplot pkgs.valgrind])
++ (notOn ["aarch64-linux" "aarch64-darwin" "i686-linux"] [pkgs.cargo-tarpaulin]);
};
}));
};
};
}

View File

@ -9,32 +9,32 @@
"build": "parcel build"
},
"devDependencies": {
"@parcel/transformer-inline-string": "2.8.2",
"@types/node": "^18.15.11",
"@types/react": "^18.0.26",
"@types/react-dom": "^18.0.10",
"@typescript-eslint/eslint-plugin": "^5.48.1",
"@typescript-eslint/parser": "^5.48.1",
"eslint": "^8.31.0",
"@parcel/transformer-inline-string": "2.9.3",
"@types/node": "^20.4.6",
"@types/react": "^18.2.18",
"@types/react-dom": "^18.2.7",
"@typescript-eslint/eslint-plugin": "^6.2.1",
"@typescript-eslint/parser": "^6.2.1",
"eslint": "^8.46.0",
"eslint-config-airbnb": "^19.0.4",
"eslint-config-airbnb-typescript": "^17.0.0",
"eslint-plugin-import": "^2.27.4",
"eslint-config-airbnb-typescript": "^17.1.0",
"eslint-plugin-import": "^2.28.0",
"eslint-plugin-jsx-a11y": "^6.7.1",
"eslint-plugin-react": "^7.32.0",
"parcel": "^2.8.2",
"eslint-plugin-react": "^7.33.1",
"parcel": "^2.9.3",
"process": "^0.11.10",
"typescript": "^4.9.4"
"typescript": "^5.1.6"
},
"dependencies": {
"@antv/g6": "^4.8.3",
"@emotion/react": "^11.10.5",
"@emotion/styled": "^11.10.5",
"@fontsource/roboto": "^4.5.8",
"@mui/icons-material": "^5.11.16",
"@mui/material": "^5.11.4",
"markdown-to-jsx": "^7.2.0",
"@antv/g6": "^4.8.20",
"@emotion/react": "^11.11.1",
"@emotion/styled": "^11.11.0",
"@fontsource/roboto": "^5.0.6",
"@mui/icons-material": "^5.14.3",
"@mui/material": "^5.14.3",
"markdown-to-jsx": "^7.2.1",
"react": "^18.2.0",
"react-dom": "^18.2.0",
"react-router-dom": "^6.10.0"
"react-router-dom": "^6.14.2"
}
}

View File

@ -40,6 +40,7 @@ function AdfNewForm({ fetchProblems }: { fetchProblems: () => void; }) {
const [code, setCode] = useState(PLACEHOLDER);
const [filename, setFilename] = useState('');
const [parsing, setParsing] = useState<Parsing>('Naive');
const [isAf, setIsAf] = useState(false);
const [name, setName] = useState('');
const fileRef = useRef<HTMLInputElement>(null);
@ -59,6 +60,7 @@ function AdfNewForm({ fetchProblems }: { fetchProblems: () => void; }) {
}
formData.append('parsing', parsing);
formData.append('is_af', isAf);
formData.append('name', name);
fetch(`${process.env.NODE_ENV === 'development' ? '//localhost:8080' : ''}/adf/add`, {
@ -119,10 +121,13 @@ function AdfNewForm({ fetchProblems }: { fetchProblems: () => void; }) {
label="Put your code here:"
helperText={(
<>
For more info on the syntax, have a
For more info on the ADF syntax, have a
look
{' '}
<Link href="https://github.com/ellmau/adf-obdd" target="_blank" rel="noopener noreferrer">here</Link>
. For the AF syntax, we currently only allow the ICCMA competition format, see for example
{' '}
<Link href="https://argumentationcompetition.org/2025/rules.html" target="_blank" rel="noopener noreferrer">here</Link>
.
</>
)}
@ -137,6 +142,20 @@ function AdfNewForm({ fetchProblems }: { fetchProblems: () => void; }) {
<Container sx={{ marginTop: 2 }}>
<Stack direction="row" justifyContent="center" spacing={2}>
<FormControl>
<FormLabel id="isAf-radio-group">ADF or AF?</FormLabel>
<RadioGroup
row
aria-labelledby="isAf-radio-group"
name="isAf"
value={isAf}
onChange={(e) => setIsAf(((e.target as HTMLInputElement).value))}
>
<FormControlLabel value={false} control={<Radio />} label="ADF" />
<FormControlLabel value={true} control={<Radio />} label="AF" />
</RadioGroup>
<span style={{ fontSize: "0.7em" }}>AFs are converted to ADFs internally.</span>
</FormControl>
<FormControl>
<FormLabel id="parsing-radio-group">Parsing Strategy</FormLabel>
<RadioGroup

View File

@ -3,7 +3,7 @@ The Web UI mimics many options of the CLI version of the [underlying adf-bdd too
In the below form, you can either type/paste your `code` or upload a file in the same format.
To put it briefly, an ADF consists of statements and acceptance conditions for these statements.
For instance, the following code indicates that `a,b,c,d` are statements, that `a` is assumed to be true (verum), `b` is true if is true (which is tautological), `c` is true if `a` and `b` are true, and `d` is true if `b` is false.
For instance, the following code indicates that `a,b,c,d` are statements, that `a` is assumed to be true (verum), `b` is true if `b` is true (which is self-supporting), `c` is true if `a` and `b` are true, and `d` is true if `b` is false.
```
s(a).
@ -21,3 +21,17 @@ The `Parsing Strategy` determines the internal implementation used for these. `N
You will get a view on the BDD in the detail view after you added the problem.
You can optionally set a name for your ADF problem. Otherwise a random name will be chosen. At the moment the name cannot be changed later (but you could remove and re-add the problem).
We also support adding AFs in the ICCMA competition format. They are converted to ADFs internally in the obvious way.
For example you can try the following code and change the option below from ADF to AF.
```
p af 5
# this is a comment
1 2
2 4
4 5
5 4
5 5
```

View File

@ -1,99 +1,212 @@
<!DOCTYPE html>
<html>
<head>
<title>ADF-BDD.dev - Legal Notice</title>
<meta name="description" content="Impressum and Data Protection Regulation for adf-bdd.dev">
<meta name="viewport" content="width=device-width, initial-scale=1">
<style>
body {
font-family: Helvetica;
}
h1 {
text-align: center;
}
section {
max-width: 1000px;
margin: 0 auto 32px;
padding: 16px;
box-shadow: 0 0 10px 0px rgba(0,0,0,0.4);
}
section > :first-child {
margin-top: 0;
}
section > :last-child {
margin-bottom: 0;
}
</style>
</head>
<body>
<!doctype html>
<html>
<head>
<title>ADF-BDD.dev - Legal Notice</title>
<meta
name="description"
content="Impressum and Data Protection Regulation for adf-bdd.dev"
/>
<meta name="viewport" content="width=device-width, initial-scale=1" />
<style>
body {
font-family: Helvetica;
}
h1 {
text-align: center;
}
section {
max-width: 1000px;
margin: 0 auto 32px;
padding: 16px;
box-shadow: 0 0 10px 0px rgba(0, 0, 0, 0.4);
}
section > :first-child {
margin-top: 0;
}
section > :last-child {
margin-bottom: 0;
}
</style>
</head>
<body>
<header>
<h1>ADF-BDD.DEV Legal Notice</h1>
</header>
<header>
<h1>ADF-BDD.DEV Legal Notice</h1>
</header>
<section>
<h2>Impressum</h2>
<section>
<h2>Impressum</h2>
The
<a
href="https://tu-dresden.de/impressum?set_language=en"
target="_blank"
rel="noreferrer noopener"
>Impressum of TU Dresden</a
>
applies with the following amendments:
The <a href="https://tu-dresden.de/impressum?set_language=en" target="_blank" rel="noreferrer noopener">Impressum of TU Dresden</a> applies with the following amendments:
<h3>Responsibilities - Content and Technical Implementation</h3>
<h3>Responsibilities - Content and Technical Implementation</h3>
<p>
Dipl.-Inf. Lukas Gerlach<br />
Technische Universität Dresden<br />
Fakultät Informatik<br />
Institut für Theoretische Informatik<br />
Professur für Wissensbasierte Systeme<br />
01062 Dresden<br />
GERMANY
</p>
<p>
Email: lukas.gerlach@tu-dresden.de<br />
Phone: (+49) 351 / 463 43503
</p>
</section>
<p>
Dipl.-Inf. Lukas Gerlach<br>
Technische Universität Dresden<br>
Fakultät Informatik<br>
Institut für Theoretische Informatik<br>
Professur für Wissensbasierte Systeme<br>
01062 Dresden<br>
GERMANY
</p>
<p>
Email: lukas.gerlach@tu-dresden.de<br>
Phone: (+49) 351 / 463 43503
</p>
</section>
<section>
<h2>Data Protection Regulation</h2>
<p>
We process your personal data only in form of metadata that is
send to us when you access the website. This is done to pursue
our legitimate interest of providing and improving this publicly
available website (https://adf-bdd.dev). To this aim, this
metadata is also written to server log files. The data may
contain the following of your personal information: public IP
address, time of access, internet browser (e.g. user agent,
version), operating system, referrer url, hostname of requesting
machine. We only set cookies that are necessary for the
provision of our service, i.e. to check if a user is logged in.
</p>
<h3>
Data Processed for Website Provisioning and Log File Creation:
Log Files for Website Provisioning
</h3>
<p>
We use Cloudflare to resolve DNS requests for our website. To
ensure the security and performance of our website, we log
technical errors that may occur when accessing our website.
Additionally, information that your device's browser
automatically transmits to our server is collected. This
information includes:
</p>
<section>
<h2>Data Protection Regulation</h2>
<p>
We process your personal data only in form of metadata that is send to us when you access the website.
This is done to pursue our legitimate interest of providing and improving this publicly available website (https://adf-bdd.dev).
To this aim, this metadata is also written to server log files.
The data may contain the following of your personal information: public IP address, time of access, internet browser (e.g. user agent, version), operating system, referrer url, hostname of requesting machine.
We only set cookies that are necessary for the provision of our service, i.e. to check if a user is logged in.
We do not set any so-called tracking cookies and we do not use any third party analytics tools on this website.
</p>
<h3>Legal basis</h3>
<p>
The legal basis for the data processing is <a href="https://gdpr.eu/article-6-how-to-process-personal-data-legally/" target="_blank" rel="noreferrer noopener">Section §6 para.1 lit. f GDPR</a>.
</p>
<h3>Rights of data subjects</h3>
<ul>
<li>You have the right to obtain information from TU Dresden about the data stored about your person and/or to have incorrectly stored data corrected.</li>
<li>You have the right to erasure or restriction of the processing and/or a right to object to the processing.</li>
<li>You can contact TU Dresden's Data Protection Officer at any time.
<p>
Tel.: +49 351 / 463 32839<br>
Fax: +49 351 / 463 39718<br>
Email: informationssicherheit@tu-dresden.de<br>
<a href="https://tu-dresden.de/informationssicherheit" target="_blank" rel="noreferrer noopener">https://tu-dresden.de/informationssicherheit</a>
</p>
</li>
<li>
You also have the right to complain to a supervisory authority if you are concerned that the processing of your personal data is an infringement of the law. The competent supervisory authority for data protection is:
<p>
Saxon Data Protection Commissioner<br>
Ms. Dr. Juliane Hundert<br>
Devrientstraße 5<br>
01067 Dresden<br>
Email: saechsdsb@slt.sachsen.de<br>
Phone: + 49 351 / 85471 101<br>
<a href="http://www.datenschutz.sachsen.de" target="_blank" rel="noreferrer noopener">www.datenschutz.sachsen.de</a>
</p>
</li>
</ul>
</section>
<ul>
<li>IP address and operating system of your device,</li>
<li>Browser type, version, language,</li>
<li>
The website from which the access was made (referrer URL),
</li>
<li>The status code (e.g., 404), and</li>
<li>The transmission protocol used (e.g., http/2).</li>
</ul>
</body>
<p>
The processing of this data is based on our legitimate interest
according to Art. 6(1)(f) GDPR. Our legitimate interest lies in
troubleshooting, optimizing, and ensuring the performance of our
website, as well as guaranteeing the security of our network and
systems. We do not use the data to personally identify
individual users unless there is a legal reason to do so or
explicit consent is obtained from you.
</p>
<p>
Cloudflare acts as an intermediary between your browser and our
server. When a DNS record is set to "Proxied," Cloudflare
answers DNS queries with a Cloudflare Anycast IP address instead
of the actual IP address of our server. This directs HTTP/HTTPS
requests to the Cloudflare network, which offers advantages in
terms of security and performance. Cloudflare also hides the IP
address of our origin server, making it more difficult for
attackers to directly target it.
</p>
<p>
Cloudflare may store certain data related to DNS requests,
including IP addresses. However, Cloudflare anonymizes IP
addresses by truncating the last octets for IPv4 and the last 80
bits for IPv6. The truncated IP addresses are deleted within 25
hours. Cloudflare is committed to not selling or sharing users'
personal data with third parties and not using the data for
targeted advertising. For more information on data protection at
Cloudflare, please see the Cloudflare Privacy Policy:
<a href="https://www.cloudflare.com/de-de/privacypolicy/"
>https://www.cloudflare.com/de-de/privacypolicy/</a
>
</p>
<p>
To meet the requirements of the GDPR, we have entered into a
Data Processing Agreement (DPA) with Cloudflare, which ensures
that Cloudflare processes the data on our behalf and in
accordance with applicable data protection regulations. You have
the right to access, rectify, erase, restrict processing, and
data portability of your personal data. Please contact us if you
wish to exercise these rights.
</p>
<p>
Please note that our website is hosted on our own servers, and
Cloudflare merely serves as a DNS provider and proxy. We
implement appropriate technical and organizational measures to
ensure the protection of your data.
</p>
<h3>Legal basis</h3>
<p>
The legal basis for the data processing is
<a
href="https://gdpr.eu/article-6-how-to-process-personal-data-legally/"
target="_blank"
rel="noreferrer noopener"
>Section §6 para.1 lit. f GDPR</a
>.
</p>
<h3>Rights of data subjects</h3>
<ul>
<li>
You have the right to obtain information from TU Dresden
about the data stored about your person and/or to have
incorrectly stored data corrected.
</li>
<li>
You have the right to erasure or restriction of the
processing and/or a right to object to the processing.
</li>
<li>
You can contact TU Dresden's Data Protection Officer at any
time.
<p>
Tel.: +49 351 / 463 32839<br />
Fax: +49 351 / 463 39718<br />
Email: informationssicherheit@tu-dresden.de<br />
<a
href="https://tu-dresden.de/informationssicherheit"
target="_blank"
rel="noreferrer noopener"
>https://tu-dresden.de/informationssicherheit</a
>
</p>
</li>
<li>
You also have the right to complain to a supervisory
authority if you are concerned that the processing of your
personal data is an infringement of the law. The competent
supervisory authority for data protection is:
<p>
Saxon Data Protection Commissioner<br />
Ms. Dr. Juliane Hundert<br />
Maternistraße 17<br />
01067 Dresden<br />
Email: post@sdtb.sachsen.de<br />
Phone: + 49 351 / 85471 101<br />
<a
href="http://www.datenschutz.sachsen.de"
target="_blank"
rel="noreferrer noopener"
>www.datenschutz.sachsen.de</a
>
</p>
</li>
</ul>
</section>
</body>
</html>

File diff suppressed because it is too large Load Diff

View File

@ -28,7 +28,7 @@ nom = "7.1.3"
lexical-sort = "0.3.1"
serde = { version = "1.0", features = ["derive","rc"] }
serde_json = "1.0"
biodivine-lib-bdd = "0.4.2"
biodivine-lib-bdd = "0.5.0"
derivative = "2.2.0"
roaring = "0.10.1"
strum = { version = "0.24", features = ["derive"] }

View File

@ -1133,12 +1133,7 @@ mod test {
let grounded = adf.grounded();
let (s, r) = unbounded();
adf.nogood_internal(
&grounded,
crate::adf::heuristics::heu_simple,
crate::adf::Adf::stability_check,
s,
);
adf.nogood_internal(&grounded, heuristics::heu_simple, Adf::stability_check, s);
assert_eq!(
r.iter().collect::<Vec<_>>(),
@ -1172,8 +1167,8 @@ mod test {
let (s, r) = unbounded();
adf.nogood_internal(
&grounded,
crate::adf::heuristics::heu_simple,
crate::adf::Adf::stability_check,
heuristics::heu_simple,
Adf::stability_check,
s.clone(),
);
let stable_result = r.try_iter().collect::<Vec<_>>();

View File

@ -5,6 +5,7 @@
//! - grounded
//! - stable
//! - complete
//!
//! semantics of ADFs.
use crate::{
@ -89,7 +90,7 @@ impl Adf {
pub(crate) fn stm_rewriting(&mut self, parser: &AdfParser) {
let expr = parser.formula_order().iter().enumerate().fold(
biodivine_lib_bdd::boolean_expression::BooleanExpression::Const(true),
BooleanExpression::Const(true),
|acc, (insert_order, new_order)| {
BooleanExpression::And(
Box::new(acc),
@ -321,19 +322,15 @@ impl Adf {
fn stable_representation(&self) -> Bdd {
log::debug!("[Start] stable representation rewriting");
self.ac.iter().enumerate().fold(
self.varset.eval_expression(
&biodivine_lib_bdd::boolean_expression::BooleanExpression::Const(true),
),
self.varset.eval_expression(&BooleanExpression::Const(true)),
|acc, (idx, formula)| {
acc.and(
&formula.iff(
&self.varset.eval_expression(
&biodivine_lib_bdd::boolean_expression::BooleanExpression::Variable(
self.ordering
.name(crate::datatypes::Var(idx))
.expect("Variable should exist"),
),
),
&self.varset.eval_expression(&BooleanExpression::Variable(
self.ordering
.name(crate::datatypes::Var(idx))
.expect("Variable should exist"),
)),
),
)
},
@ -386,7 +383,7 @@ pub trait BddRestrict {
impl BddRestrict for Bdd {
fn var_restrict(&self, variable: biodivine_lib_bdd::BddVariable, value: bool) -> Bdd {
self.var_select(variable, value).var_project(variable)
self.var_select(variable, value).var_exists(variable)
}
fn restrict(&self, variables: &[(biodivine_lib_bdd::BddVariable, bool)]) -> Bdd {
@ -394,7 +391,7 @@ impl BddRestrict for Bdd {
variables
.iter()
.for_each(|(var, _val)| variablelist.push(*var));
self.select(variables).project(&variablelist)
self.select(variables).exists(&variablelist)
}
}
@ -430,8 +427,8 @@ mod test {
let c = variables.eval_expression_string("c");
let d = variables.eval_expression_string("a & b & c");
let e = variables.eval_expression_string("a ^ b");
let t = variables.eval_expression(&boolean_expression::BooleanExpression::Const(true));
let f = variables.eval_expression(&boolean_expression::BooleanExpression::Const(false));
let t = variables.eval_expression(&BooleanExpression::Const(true));
let f = variables.eval_expression(&BooleanExpression::Const(false));
println!("{:?}", a.to_string());
println!("{:?}", a.to_bytes());

View File

@ -275,8 +275,8 @@ mod test {
let term: Term = Term::from(value);
let var = Var::from(value);
// display
assert_eq!(format!("{}", term), format!("Term({})", value));
assert_eq!(format!("{}", var), format!("Var({})", value));
assert_eq!(format!("{term}"), format!("Term({})", value));
assert_eq!(format!("{var}"), format!("Var({})", value));
//deref
assert_eq!(value, *term);
true

View File

@ -342,7 +342,6 @@ let parsed_adf: Adf = parsed_custom_adf.into();
#![deny(
missing_debug_implementations,
missing_copy_implementations,
missing_copy_implementations,
trivial_casts,
trivial_numeric_casts,
unsafe_code

View File

@ -1,7 +1,6 @@
//! Collection of all nogood-related structures.
use std::{
borrow::Borrow,
fmt::{Debug, Display},
ops::{BitAnd, BitOr, BitXor, BitXorAssign},
};
@ -23,11 +22,8 @@ pub struct NoGood {
impl Eq for NoGood {}
impl PartialEq for NoGood {
fn eq(&self, other: &Self) -> bool {
self.active
.borrow()
.bitxor(other.active.borrow())
.is_empty()
&& self.value.borrow().bitxor(other.value.borrow()).is_empty()
(&self.active).bitxor(&other.active).is_empty()
&& (&self.value).bitxor(&other.value).is_empty()
}
}
@ -113,15 +109,11 @@ impl NoGood {
/// Given a [NoGood] and another one, conclude a non-conflicting value which can be concluded on basis of the given one.
pub fn conclude(&self, other: &NoGood) -> Option<(usize, bool)> {
log::debug!("conclude: {:?} other {:?}", self, other);
let implication = self
.active
.borrow()
.bitxor(other.active.borrow())
.bitand(self.active.borrow());
let implication = (&self.active).bitxor(&other.active).bitand(&self.active);
let bothactive = self.active.borrow().bitand(other.active.borrow());
let mut no_matches = bothactive.borrow().bitand(other.value.borrow());
no_matches.bitxor_assign(bothactive.bitand(self.value.borrow()));
let bothactive = (&self.active).bitand(&other.active);
let mut no_matches = (&bothactive).bitand(&other.value);
no_matches.bitxor_assign(bothactive.bitand(&self.value));
if implication.len() == 1 && no_matches.is_empty() {
let pos = implication
@ -140,16 +132,16 @@ impl NoGood {
/// Updates the [NoGood] and a second one in a disjunctive (bitor) manner.
pub fn disjunction(&mut self, other: &NoGood) {
self.active = self.active.borrow().bitor(&other.active);
self.value = self.value.borrow().bitor(&other.value);
self.active = (&self.active).bitor(&other.active);
self.value = (&self.value).bitor(&other.value);
}
/// Returns [true] if the other [Interpretation] matches with all the assignments of the current [NoGood].
pub fn is_violating(&self, other: &Interpretation) -> bool {
let active = self.active.borrow().bitand(other.active.borrow());
let active = (&self.active).bitand(&other.active);
if self.active.len() == active.len() {
let lhs = active.borrow().bitand(self.value.borrow());
let rhs = active.borrow().bitand(other.value.borrow());
let lhs = (&active).bitand(&self.value);
let rhs = (&active).bitand(&other.value);
if lhs.bitxor(rhs).is_empty() {
return true;
}

View File

@ -690,7 +690,7 @@ mod test {
let a1 = bdd.and(v1, v2);
let _a2 = bdd.or(a1, v3);
assert_eq!(format!("{}", bdd), " \n0 BddNode: Var(18446744073709551614), lo: Term(0), hi: Term(0)\n1 BddNode: Var(18446744073709551615), lo: Term(1), hi: Term(1)\n2 BddNode: Var(0), lo: Term(0), hi: Term(1)\n3 BddNode: Var(1), lo: Term(0), hi: Term(1)\n4 BddNode: Var(2), lo: Term(0), hi: Term(1)\n5 BddNode: Var(0), lo: Term(0), hi: Term(3)\n6 BddNode: Var(1), lo: Term(4), hi: Term(1)\n7 BddNode: Var(0), lo: Term(4), hi: Term(6)\n");
assert_eq!(format!("{bdd}"), " \n0 BddNode: Var(18446744073709551614), lo: Term(0), hi: Term(0)\n1 BddNode: Var(18446744073709551615), lo: Term(1), hi: Term(1)\n2 BddNode: Var(0), lo: Term(0), hi: Term(1)\n3 BddNode: Var(1), lo: Term(0), hi: Term(1)\n4 BddNode: Var(2), lo: Term(0), hi: Term(1)\n5 BddNode: Var(0), lo: Term(0), hi: Term(3)\n6 BddNode: Var(1), lo: Term(4), hi: Term(1)\n7 BddNode: Var(0), lo: Term(4), hi: Term(6)\n");
}
#[test]

View File

@ -33,6 +33,7 @@ impl super::Bdd {
/// # Attention
/// - Constants for [``][crate::datatypes::Term::TOP] and [`⊥`][crate::datatypes::Term::BOT] concepts are not sent, as they are considered to be existing in every [Bdd][super::Bdd] structure.
/// - Mixing manipulating operations and utilising the communication channel for a receiving [roBDD][super::Bdd] may end up in inconsistent data.
///
/// So far, only manipulate the [roBDD][super::Bdd] if no further [recv][Self::recv] will be called.
pub fn with_sender_receiver(
sender: crossbeam_channel::Sender<BddNode>,

View File

@ -11,7 +11,7 @@ where
V: Serialize + 'a,
{
let container: Vec<_> = target.into_iter().collect();
serde::Serialize::serialize(&container, ser)
Serialize::serialize(&container, ser)
}
/// Deserialize from a [Vector][std::vec::Vec] to a [Map][std::collections::HashMap].
@ -22,6 +22,6 @@ where
K: Deserialize<'de>,
V: Deserialize<'de>,
{
let container: Vec<_> = serde::Deserialize::deserialize(des)?;
Ok(T::from_iter(container.into_iter()))
let container: Vec<_> = Deserialize::deserialize(des)?;
Ok(T::from_iter(container))
}

View File

@ -20,28 +20,28 @@ use crate::datatypes::adf::VarContainer;
/// A representation of a formula, still using the strings from the input.
#[derive(Clone, PartialEq, Eq)]
pub enum Formula<'a> {
pub enum Formula {
/// `c(f)` in the input format.
Bot,
/// `c(v)` in the input format.
Top,
/// Some atomic variable in the input format.
Atom(&'a str),
Atom(String),
/// Negation of a subformula.
Not(Box<Formula<'a>>),
Not(Box<Formula>),
/// Conjunction of two subformulae.
And(Box<Formula<'a>>, Box<Formula<'a>>),
And(Box<Formula>, Box<Formula>),
/// Disjunction of two subformulae.
Or(Box<Formula<'a>>, Box<Formula<'a>>),
Or(Box<Formula>, Box<Formula>),
/// Implication of two subformulae.
Imp(Box<Formula<'a>>, Box<Formula<'a>>),
Imp(Box<Formula>, Box<Formula>),
/// Exclusive-Or of two subformulae.
Xor(Box<Formula<'a>>, Box<Formula<'a>>),
Xor(Box<Formula>, Box<Formula>),
/// If and only if connective between two formulae.
Iff(Box<Formula<'a>>, Box<Formula<'a>>),
Iff(Box<Formula>, Box<Formula>),
}
impl Formula<'_> {
impl Formula {
pub(crate) fn to_boolean_expr(
&self,
) -> biodivine_lib_bdd::boolean_expression::BooleanExpression {
@ -90,29 +90,29 @@ impl Formula<'_> {
}
}
impl std::fmt::Debug for Formula<'_> {
impl std::fmt::Debug for Formula {
fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
match self {
Formula::Atom(a) => {
write!(f, "{}", a)?;
write!(f, "{a}")?;
}
Formula::Not(n) => {
write!(f, "not({:?})", n)?;
write!(f, "not({n:?})")?;
}
Formula::And(f1, f2) => {
write!(f, "and({:?},{:?})", f1, f2)?;
write!(f, "and({f1:?},{f2:?})")?;
}
Formula::Or(f1, f2) => {
write!(f, "or({:?},{:?})", f1, f2)?;
write!(f, "or({f1:?},{f2:?})")?;
}
Formula::Imp(f1, f2) => {
write!(f, "imp({:?},{:?})", f1, f2)?;
write!(f, "imp({f1:?},{f2:?})")?;
}
Formula::Xor(f1, f2) => {
write!(f, "xor({:?},{:?})", f1, f2)?;
write!(f, "xor({f1:?},{f2:?})")?;
}
Formula::Iff(f1, f2) => {
write!(f, "iff({:?},{:?})", f1, f2)?;
write!(f, "iff({f1:?},{f2:?})")?;
}
Formula::Bot => {
write!(f, "Const(B)")?;
@ -132,14 +132,18 @@ impl std::fmt::Debug for Formula<'_> {
///
/// Note that the parser can be utilised by an [ADF][`crate::adf::Adf`] to initialise it with minimal overhead.
#[derive(Debug)]
pub struct AdfParser<'a> {
namelist: Arc<RwLock<Vec<String>>>,
dict: Arc<RwLock<HashMap<String, usize>>>,
formulae: RefCell<Vec<Formula<'a>>>,
formulaname: RefCell<Vec<String>>,
pub struct AdfParser {
/// A name for each statement (identified by index in vector)
pub namelist: Arc<RwLock<Vec<String>>>,
/// Inverse mapping from name to index of statement in vector above
pub dict: Arc<RwLock<HashMap<String, usize>>>,
/// The formula (acceptance condition) for each statement identified by its index
pub formulae: RefCell<Vec<Formula>>,
/// The formula for each statement identified by its index
pub formulaname: RefCell<Vec<String>>,
}
impl Default for AdfParser<'_> {
impl Default for AdfParser {
fn default() -> Self {
AdfParser {
namelist: Arc::new(RwLock::new(Vec::new())),
@ -150,10 +154,7 @@ impl Default for AdfParser<'_> {
}
}
impl<'a, 'b> AdfParser<'b>
where
'a: 'b,
{
impl<'a> AdfParser {
#[allow(dead_code)]
fn parse_statements(&'a self) -> impl FnMut(&'a str) -> IResult<&'a str, ()> {
move |input| {
@ -212,9 +213,9 @@ where
}
}
impl<'a> AdfParser<'a> {
impl AdfParser {
/// Creates a new parser, utilising the already existing [VarContainer]
pub fn with_var_container(var_container: VarContainer) -> AdfParser<'a> {
pub fn with_var_container(var_container: VarContainer) -> AdfParser {
AdfParser {
namelist: var_container.names(),
dict: var_container.mappings(),
@ -224,7 +225,7 @@ impl<'a> AdfParser<'a> {
}
}
impl AdfParser<'_> {
impl AdfParser {
/// after an update to the namelist, all indizes are updated
fn regenerate_indizes(&self) {
self.namelist
@ -284,7 +285,7 @@ impl AdfParser<'_> {
}
fn atomic_term(input: &str) -> IResult<&str, Formula> {
AdfParser::atomic(input).map(|(input, result)| (input, Formula::Atom(result)))
AdfParser::atomic(input).map(|(input, result)| (input, Formula::Atom(result.to_string())))
}
fn formula(input: &str) -> IResult<&str, Formula> {
@ -480,7 +481,7 @@ mod test {
let (_remain, result) = AdfParser::formula(input).unwrap();
assert_eq!(
format!("{:?}", result),
format!("{result:?}"),
"and(or(not(a),iff( iff left ,b)),xor(imp(c,d),e))"
);
@ -504,7 +505,10 @@ mod test {
assert_eq!(parser.dict_value("b"), Some(2usize));
assert_eq!(
format!("{:?}", parser.ac_at(1).unwrap()),
format!("{:?}", Formula::Not(Box::new(Formula::Atom("a"))))
format!(
"{:?}",
Formula::Not(Box::new(Formula::Atom("a".to_string())))
)
);
assert_eq!(parser.formula_count(), 3);
assert_eq!(parser.formula_order(), vec![0, 2, 1]);

View File

@ -3,7 +3,7 @@ version: '3.1'
services:
mongo:
image: mongo
image: mongo:6
restart: always
ports:
- 27017:27017

View File

@ -1,3 +1,4 @@
use std::cell::RefCell;
use std::collections::{HashMap, HashSet};
use std::sync::{Arc, RwLock};
#[cfg(feature = "mock_long_computations")]
@ -21,7 +22,7 @@ use serde::{Deserialize, Serialize};
use adf_bdd::adf::Adf;
use adf_bdd::adfbiodivine::Adf as BdAdf;
use adf_bdd::obdd::Bdd;
use adf_bdd::parser::AdfParser;
use adf_bdd::parser::{AdfParser, Formula};
use crate::config::{AppState, RunningInfo, Task, ADF_COLL, COMPUTE_TIME, DB_NAME, USER_COLL};
use crate::user::{username_exists, User};
@ -205,6 +206,8 @@ pub(crate) struct AdfProblem {
pub(crate) username: String,
pub(crate) code: String,
pub(crate) parsing_used: Parsing,
#[serde(default)]
pub(crate) is_af: bool,
pub(crate) adf: SimplifiedAdfOpt,
pub(crate) acs_per_strategy: AcsPerStrategy,
}
@ -215,6 +218,7 @@ struct AddAdfProblemBodyMultipart {
code: Option<Text<String>>, // Either Code or File is set
file: Option<TempFile>, // Either Code or File is set
parsing: Text<Parsing>,
is_af: Text<bool>, // if its not an AF then it is an ADF
}
#[derive(Clone)]
@ -222,6 +226,7 @@ struct AddAdfProblemBodyPlain {
name: String,
code: String,
parsing: Parsing,
is_af: bool, // if its not an AF then it is an ADF
}
impl TryFrom<AddAdfProblemBodyMultipart> for AddAdfProblemBodyPlain {
@ -237,6 +242,7 @@ impl TryFrom<AddAdfProblemBodyMultipart> for AddAdfProblemBodyPlain {
.and_then(|code| (!code.is_empty()).then_some(code))
.ok_or("Either a file or the code has to be provided.")?,
parsing: source.parsing.into_inner(),
is_af: source.is_af.into_inner(),
})
}
}
@ -259,6 +265,7 @@ struct AdfProblemInfo {
name: String,
code: String,
parsing_used: Parsing,
is_af: bool,
acs_per_strategy: AcsPerStrategy,
running_tasks: Vec<Task>,
}
@ -269,6 +276,7 @@ impl AdfProblemInfo {
name: adf.name.clone(),
code: adf.code,
parsing_used: adf.parsing_used,
is_af: adf.is_af,
acs_per_strategy: adf.acs_per_strategy,
running_tasks: tasks
.iter()
@ -280,6 +288,87 @@ impl AdfProblemInfo {
}
}
/// Attack relation of an argumentation framework: the outer index is the
/// attacked argument (zero-based) and the inner vector lists its attackers
/// (also zero-based).
struct AF(Vec<Vec<usize>>);
impl From<AF> for AdfParser {
    /// Builds an [AdfParser] directly from an attack relation.
    ///
    /// Statement `i` is named `i + 1` (one-based, human-readable), and its
    /// acceptance condition is the conjunction of the negations of all its
    /// attackers — i.e. an argument is acceptable iff no attacker is.
    fn from(source: AF) -> Self {
        let argument_count = source.0.len();

        // One-based display names: "1", "2", ..., n.
        let mut names: Vec<String> = Vec::with_capacity(argument_count);
        for index in 0..argument_count {
            names.push((index + 1).to_string());
        }

        // Reverse lookup from a statement name back to its index.
        let mut dict: HashMap<String, usize> = HashMap::with_capacity(argument_count);
        for (index, name) in names.iter().enumerate() {
            dict.insert(name.clone(), index);
        }

        // Encode each attacker list as `true AND not(a1) AND not(a2) AND ...`,
        // folding left-to-right exactly as the attackers are listed.
        let mut formulae: Vec<Formula> = Vec::with_capacity(argument_count);
        for attackers in source.0 {
            let mut condition = Formula::Top;
            for attacker in attackers {
                let negated_attacker =
                    Formula::Not(Box::new(Formula::Atom((attacker + 1).to_string())));
                condition = Formula::And(Box::new(condition), Box::new(negated_attacker));
            }
            formulae.push(condition);
        }

        // Formula names coincide with the statement names.
        let formulaname = names.clone();

        Self {
            namelist: Arc::new(RwLock::new(names)),
            dict: Arc::new(RwLock::new(dict)),
            formulae: RefCell::new(formulae),
            formulaname: RefCell::new(formulaname),
        }
    }
}
/// Parses an AF instance in the DIMACS-like `p af <n>` format into an
/// [AdfParser] that encodes the framework as an ADF.
///
/// Expected input:
/// - first line: `p af <n>`, where `<n>` is the number of arguments,
/// - every following non-empty line that does not start with `#`: `a b`,
///   meaning argument `a` attacks argument `b` (both one-based, in `1..=n`).
///
/// Returns a static error message when the input violates this format; all
/// malformed inputs yield `Err` instead of panicking.
fn parse_af(code: String) -> Result<AdfParser, &'static str> {
    let mut lines = code.lines();
    let Some(first_line) = lines.next() else {
        return Err("There must be at least one line in the AF input.");
    };

    let first_line: Vec<_> = first_line.split(' ').collect();
    // Check the token count before indexing: indexing a short header (e.g.
    // just "p" or "p af") would otherwise panic.
    if first_line.len() < 3 || first_line[0] != "p" || first_line[1] != "af" {
        return Err("Expected first line to be of the form: p af <n>");
    }

    let Ok(num_arguments) = first_line[2].parse::<usize>() else {
        return Err("Could not convert number of arguments to usize; expected first line to be of the form: p af <n>");
    };

    let attacks_opt: Option<Vec<(usize, usize)>> = lines
        .filter(|line| !line.starts_with('#') && !line.is_empty())
        .map(|line| {
            let mut line = line.split(' ');
            let a = line.next()?;
            let b = line.next()?;
            if line.next().is_some() {
                None
            } else {
                Some((a.parse::<usize>().ok()?, b.parse::<usize>().ok()?))
            }
        })
        .collect();

    let Some(attacks) = attacks_opt else {
        return Err("Line must be of the form: n m");
    };

    // index in outer vector represents attacked element
    let mut is_attacked_by: Vec<Vec<usize>> = vec![vec![]; num_arguments];

    for (a, b) in attacks {
        // Reject out-of-range arguments: `b - 1` previously underflowed for
        // `b == 0`, and the indexing panicked for `b > num_arguments`.
        if a == 0 || a > num_arguments || b == 0 || b > num_arguments {
            return Err("Attack line refers to an argument outside the range 1..=<n>.");
        }
        is_attacked_by[b - 1].push(a - 1); // we normalize names to be zero-indexed
    }

    let hacked_adf_parser = AdfParser::from(AF(is_attacked_by));

    Ok(hacked_adf_parser)
}
#[post("/add")]
async fn add_adf_problem(
req: HttpRequest,
@ -381,6 +470,7 @@ async fn add_adf_problem(
username: username.clone(),
code: adf_problem_input.code.clone(),
parsing_used: adf_problem_input.parsing,
is_af: adf_problem_input.is_af,
adf: SimplifiedAdfOpt::None,
acs_per_strategy: AcsPerStrategy::default(),
};
@ -413,9 +503,20 @@ async fn add_adf_problem(
#[cfg(feature = "mock_long_computations")]
std::thread::sleep(Duration::from_secs(20));
let parser = AdfParser::default();
let parse_result = parser.parse()(&adf_problem_input.code)
.map_err(|_| "ADF could not be parsed, double check your input!");
let (parser, parse_result) = {
if adf_problem_input.is_af {
parse_af(adf_problem_input.code)
.map(|p| (p, Ok(())))
.unwrap_or_else(|e| (AdfParser::default(), Err(e)))
} else {
let parser = AdfParser::default();
let parse_result = parser.parse()(&adf_problem_input.code)
.map(|_| ())
.map_err(|_| "ADF could not be parsed, double check your input!");
(parser, parse_result)
}
};
let result = parse_result.map(|_| {
let lib_adf = match adf_problem_input.parsing {
@ -500,7 +601,7 @@ async fn solve_adf_problem(
.collection(ADF_COLL);
let username = match identity.map(|id| id.id()) {
None => {
Option::None => {
return HttpResponse::Unauthorized().body("You need to login to add an ADF problem.")
}
Some(Err(err)) => return HttpResponse::InternalServerError().body(err.to_string()),
@ -512,7 +613,7 @@ async fn solve_adf_problem(
.await
{
Err(err) => return HttpResponse::InternalServerError().body(err.to_string()),
Ok(None) => {
Ok(Option::None) => {
return HttpResponse::NotFound()
.body(format!("ADF problem with name {problem_name} not found."))
}
@ -644,7 +745,7 @@ async fn get_adf_problem(
.collection(ADF_COLL);
let username = match identity.map(|id| id.id()) {
None => {
Option::None => {
return HttpResponse::Unauthorized().body("You need to login to get an ADF problem.")
}
Some(Err(err)) => return HttpResponse::InternalServerError().body(err.to_string()),
@ -656,7 +757,7 @@ async fn get_adf_problem(
.await
{
Err(err) => return HttpResponse::InternalServerError().body(err.to_string()),
Ok(None) => {
Ok(Option::None) => {
return HttpResponse::NotFound()
.body(format!("ADF problem with name {problem_name} not found."))
}
@ -682,7 +783,7 @@ async fn delete_adf_problem(
.collection(ADF_COLL);
let username = match identity.map(|id| id.id()) {
None => {
Option::None => {
return HttpResponse::Unauthorized().body("You need to login to get an ADF problem.")
}
Some(Err(err)) => return HttpResponse::InternalServerError().body(err.to_string()),
@ -717,7 +818,7 @@ async fn get_adf_problems_for_user(
.collection(ADF_COLL);
let username = match identity.map(|id| id.id()) {
None => {
Option::None => {
return HttpResponse::Unauthorized().body("You need to login to get an ADF problem.")
}
Some(Err(err)) => return HttpResponse::InternalServerError().body(err.to_string()),

View File

@ -92,7 +92,7 @@ impl DoubleLabeledGraph {
.iter()
.enumerate()
.filter(|(i, _)| node_indices.contains(i))
.filter(|(_, node)| !vec![Var::TOP, Var::BOT].contains(&node.var()))
.filter(|(_, node)| ![Var::TOP, Var::BOT].contains(&node.var()))
.map(|(i, &node)| (i, node.lo().value()))
.map(|(i, v)| (i.to_string(), v.to_string()))
.collect();
@ -103,7 +103,7 @@ impl DoubleLabeledGraph {
.iter()
.enumerate()
.filter(|(i, _)| node_indices.contains(i))
.filter(|(_, node)| !vec![Var::TOP, Var::BOT].contains(&node.var()))
.filter(|(_, node)| ![Var::TOP, Var::BOT].contains(&node.var()))
.map(|(i, &node)| (i, node.hi().value()))
.map(|(i, v)| (i.to_string(), v.to_string()))
.collect();