mirror of https://github.com/ellmau/adf-obdd.git synced 2025-12-20 09:39:38 +01:00

Compare commits

..

17 Commits

Author SHA1 Message Date
2754c45bb7
Use unoptimised hybrid step for better presentation 2022-09-06 11:16:57 +02:00
769758f997
Use standard example for adfs on the frontend 2022-09-05 11:36:24 +02:00
monsterkrampe
4ec6d5cc39
Support more options and multiple models 2022-09-02 15:03:07 +02:00
monsterkrampe
d3b5039131
Fix code formatting 2022-09-02 12:01:35 +02:00
monsterkrampe
7f59d038d9
Introduce custom node type for nicer layout 2022-09-02 11:46:15 +02:00
monsterkrampe
af9bc2855e
Support stable model semantics with nogoods 2022-09-02 11:45:52 +02:00
monsterkrampe
65b751150f
Support multiple solving strategies 2022-09-02 08:22:15 +02:00
monsterkrampe
7919ae177e
Add Dockerfile as example for server with frontend 2022-09-02 08:22:15 +02:00
monsterkrampe
c89743a051
Serve static files from './assets' directory 2022-09-02 08:22:15 +02:00
monsterkrampe
282a7af274
Introduce features flag for localhost cors support 2022-09-02 08:22:15 +02:00
monsterkrampe
71ba617982
Make fetch endpoint depend on environment 2022-09-02 08:22:15 +02:00
monsterkrampe
c36ef00c67
Finish basic visualization of solve response 2022-09-02 08:22:15 +02:00
monsterkrampe
e9f692a4cd
Continue implementing basic solving endpoint 2022-09-02 08:22:15 +02:00
monsterkrampe
7689d03375
Build basic ui with mui 2022-09-02 08:22:14 +02:00
monsterkrampe
f178084c16
Start implementing endpoint to solve ADF on demand 2022-09-02 08:22:14 +02:00
monsterkrampe
beedc9c90c
Introduce separate server package 2022-09-02 08:22:14 +02:00
monsterkrampe
19e0f4b235
Experiment with basic graph visualization 2022-09-02 08:22:02 +02:00
15 changed files with 1435 additions and 1784 deletions

Cargo.lock (generated, 662 changed lines)

File diff suppressed because it is too large.

View File

@@ -17,7 +17,7 @@ path = "src/main.rs"
 [dependencies]
 adf_bdd = { version="0.3.1", path="../lib", default-features = false }
-clap = {version = "4.0.32", features = [ "derive", "cargo", "env" ]}
+clap = {version = "3.2.16", features = [ "derive", "cargo", "env" ]}
 log = { version = "0.4", features = [ "max_level_trace", "release_max_level_info" ] }
 serde = { version = "1.0", features = ["derive","rc"] }
 serde_json = "1.0"

View File

@@ -87,70 +87,70 @@ use crossbeam_channel::unbounded;
 use strum::VariantNames;
 #[derive(Parser, Debug)]
-#[command(author, version, about)]
+#[clap(author, version, about)]
 struct App {
 /// Input filename
-#[arg(value_parser)]
+#[clap(parse(from_os_str))]
 input: PathBuf,
 /// Sets the verbosity to 'warn', 'info', 'debug' or 'trace' if -v and -q are not use
-#[arg(long = "rust_log", env)]
+#[clap(long = "rust_log", env)]
 rust_log: Option<String>,
 /// Choose the bdd implementation of either 'biodivine', 'naive', or hybrid
-#[arg(long = "lib", default_value = "hybrid")]
+#[clap(long = "lib", default_value = "hybrid")]
 implementation: String,
 /// Sets log verbosity (multiple times means more verbose)
-#[arg(short, action = clap::builder::ArgAction::Count, group = "verbosity")]
+#[clap(short, parse(from_occurrences), group = "verbosity")]
 verbose: u8,
 /// Sets log verbosity to only errors
-#[arg(short, group = "verbosity")]
+#[clap(short, group = "verbosity")]
 quiet: bool,
 /// Sorts variables in an lexicographic manner
-#[arg(long = "lx", group = "sorting")]
+#[clap(long = "lx", group = "sorting")]
 sort_lex: bool,
 /// Sorts variables in an alphanumeric manner
-#[arg(long = "an", group = "sorting")]
+#[clap(long = "an", group = "sorting")]
 sort_alphan: bool,
 /// Compute the grounded model
-#[arg(long = "grd")]
+#[clap(long = "grd")]
 grounded: bool,
 /// Compute the stable models
-#[arg(long = "stm")]
+#[clap(long = "stm")]
 stable: bool,
 /// Compute the stable models with the help of modelcounting using heuristics a
-#[arg(long = "stmca")]
+#[clap(long = "stmca")]
 stable_counting_a: bool,
 /// Compute the stable models with the help of modelcounting using heuristics b
-#[arg(long = "stmcb")]
+#[clap(long = "stmcb")]
 stable_counting_b: bool,
 /// Compute the stable models with a pre-filter (only hybrid lib-mode)
-#[arg(long = "stmpre")]
+#[clap(long = "stmpre")]
 stable_pre: bool,
 /// Compute the stable models with a single-formula rewriting (only hybrid lib-mode)
-#[arg(long = "stmrew")]
+#[clap(long = "stmrew")]
 stable_rew: bool,
 /// Compute the stable models with a single-formula rewriting on internal representation(only hybrid lib-mode)
-#[arg(long = "stmrew2")]
+#[clap(long = "stmrew2")]
 stable_rew2: bool,
 /// Compute the stable models with the nogood-learning based approach
-#[arg(long = "stmng")]
+#[clap(long = "stmng")]
 stable_ng: bool,
 /// Choose which heuristics shall be used by the nogood-learning approach
-#[arg(long, value_parser = clap::builder::PossibleValuesParser::new(adf_bdd::adf::heuristics::Heuristic::VARIANTS.iter().filter(|&v| v != &"Custom").collect::<Vec<_>>()))]
+#[clap(long, possible_values = adf_bdd::adf::heuristics::Heuristic::VARIANTS.iter().filter(|&v| v != &"Custom"))]
 heu: Option<adf_bdd::adf::heuristics::Heuristic<'static>>,
 /// Compute the two valued models with the nogood-learning based approach
-#[arg(long = "twoval")]
+#[clap(long = "twoval")]
 two_val: bool,
 /// Compute the complete models
-#[arg(long = "com")]
+#[clap(long = "com")]
 complete: bool,
 /// Import an adf- bdd state instead of an adf
-#[arg(long)]
+#[clap(long)]
 import: bool,
 /// Export the adf-bdd state after parsing and BDD instantiation to the given filename
-#[arg(long)]
+#[clap(long)]
 export: Option<PathBuf>,
 /// Set if the (counter-)models shall be computed and printed, possible values are 'nai' and 'mem' for naive and memoization repectively (only works in hybrid and naive mode)
-#[arg(long)]
+#[clap(long)]
 counter: Option<String>,
 }
@@ -369,7 +369,7 @@ impl App {
 export.to_string_lossy()
 );
 } else {
-let export_file = match File::create(export) {
+let export_file = match File::create(&export) {
 Err(reason) => {
 panic!("couldn't create {}: {}", export.to_string_lossy(), reason)
 }
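The hunks above switch the CLI definition between the clap 4 derive attributes (#[command], #[arg], ArgAction::Count, value_parser) and the clap 3 #[clap] syntax that matches the clap = "3.2.16" pin in the Cargo.toml hunk further up. A minimal sketch of the clap 3 form, with the clap 4 spelling in comments; the struct below is a cut-down illustration, not the crate's full App struct:

use clap::Parser;

#[derive(Parser, Debug)]
#[clap(author, version, about)] // clap 4: #[command(author, version, about)]
struct Cli {
    /// Sets log verbosity (multiple occurrences mean more verbose)
    #[clap(short, parse(from_occurrences), group = "verbosity")]
    // clap 4: #[arg(short, action = clap::builder::ArgAction::Count, group = "verbosity")]
    verbose: u8,
    /// Sets log verbosity to only errors
    #[clap(short, group = "verbosity")] // clap 4: #[arg(short, group = "verbosity")]
    quiet: bool,
    /// Choose the bdd implementation
    #[clap(long = "lib", default_value = "hybrid")] // clap 4: #[arg(long = "lib", default_value = "hybrid")]
    implementation: String,
}

fn main() {
    let cli = Cli::parse();
    println!("verbose={} quiet={} lib={}", cli.verbose, cli.quiet, cli.implementation);
}

The heu flag is the only option that needs more than a mechanical rename: clap 3's possible_values takes an iterator of allowed strings, while clap 4 expects a value_parser such as PossibleValuesParser.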

View File

@@ -19,9 +19,9 @@ fn arguments() -> Result<(), Box<dyn std::error::Error>> {
 cmd = Command::cargo_bin("adf-bdd")?;
 cmd.arg("-h");
-cmd.assert()
-.success()
-.stdout(predicate::str::contains("adf-bdd [OPTIONS] <INPUT>"));
+cmd.assert().success().stdout(predicate::str::contains(
+"stefan.ellmauthaler@tu-dresden.de",
+));
 cmd = Command::cargo_bin("adf-bdd")?;
 cmd.arg("--version");
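The help-text assertion tracks the clap version: with the author attribute set, clap 3's default help prints the author's e-mail address, which clap 4's default help no longer does, so the usage-string check is swapped for the e-mail check. A self-contained sketch of such a test with assert_cmd and predicates, the two crates already used in this hunk (the asserted substring below is illustrative):

// Integration-test sketch driving the compiled binary; assumes
// dev-dependencies on assert_cmd and predicates.
use assert_cmd::Command;
use predicates::prelude::*;

#[test]
fn help_mentions_usage() -> Result<(), Box<dyn std::error::Error>> {
    let mut cmd = Command::cargo_bin("adf-bdd")?;
    cmd.arg("-h");
    cmd.assert()
        .success()
        .stdout(predicate::str::contains("USAGE")); // illustrative substring, not the repo's exact check
    Ok(())
}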

flake.lock (generated, 32 changed lines)
View File

@@ -2,11 +2,11 @@
 "nodes": {
 "flake-utils": {
 "locked": {
-"lastModified": 1667395993,
-"narHash": "sha256-nuEHfE/LcWyuSWnS8t12N1wc105Qtau+/OdUAjtQ0rA=",
+"lastModified": 1656928814,
+"narHash": "sha256-RIFfgBuKz6Hp89yRr7+NR5tzIAbn52h8vT6vXkYjZoM=",
 "owner": "numtide",
 "repo": "flake-utils",
-"rev": "5aed5285a952e0b949eb3ba02c12fa4fcfef535f",
+"rev": "7e2a3b3dfd9af950a856d66b0a7d01e3c18aa249",
 "type": "github"
 },
 "original": {
@@ -18,11 +18,11 @@
 "gitignoresrc": {
 "flake": false,
 "locked": {
-"lastModified": 1660459072,
-"narHash": "sha256-8DFJjXG8zqoONA1vXtgeKXy68KdJL5UaXR8NtVMUbx8=",
+"lastModified": 1658402513,
+"narHash": "sha256-wk38v/mbLsOo6+IDmmH1H0ADR87iq9QTTD1BP9X2Ags=",
 "owner": "hercules-ci",
 "repo": "gitignore.nix",
-"rev": "a20de23b925fd8264fd7fad6454652e142fd7f73",
+"rev": "f2ea0f8ff1bce948ccb6b893d15d5ea3efaf1364",
 "type": "github"
 },
 "original": {
@@ -33,27 +33,27 @@
 },
 "nixpkgs": {
 "locked": {
-"lastModified": 1672441588,
-"narHash": "sha256-jx5kxOyeObnVD44HRebKYL3cjWrcKhhcDmEYm0/naDY=",
+"lastModified": 1659342832,
+"narHash": "sha256-ePnxG4hacRd6oZMk+YeCSYMNUnHCe+qPLI0/+VaTu48=",
 "owner": "NixOS",
 "repo": "nixpkgs",
-"rev": "6a0d2701705c3cf6f42c15aa92b7885f1f8a477f",
+"rev": "e43cf1748462c81202a32b26294e9f8eefcc3462",
 "type": "github"
 },
 "original": {
 "owner": "NixOS",
-"ref": "nixos-22.11",
+"ref": "nixos-22.05",
 "repo": "nixpkgs",
 "type": "github"
 }
 },
 "nixpkgs-unstable": {
 "locked": {
-"lastModified": 1672350804,
-"narHash": "sha256-jo6zkiCabUBn3ObuKXHGqqORUMH27gYDIFFfLq5P4wg=",
+"lastModified": 1659219666,
+"narHash": "sha256-pzYr5fokQPHv7CmUXioOhhzDy/XyWOIXP4LZvv/T7Mk=",
 "owner": "NixOS",
 "repo": "nixpkgs",
-"rev": "677ed08a50931e38382dbef01cba08a8f7eac8f6",
+"rev": "7b9be38c7250b22d829ab6effdee90d5e40c6e5c",
 "type": "github"
 },
 "original": {
@@ -82,11 +82,11 @@
 ]
 },
 "locked": {
-"lastModified": 1672626196,
-"narHash": "sha256-BfdLrMqxqa4YA1I1wgPBQyu4FPzL0Tp4WI2C5S6BuYo=",
+"lastModified": 1659409092,
+"narHash": "sha256-OBY2RCYZeeOA3FTYUb86BPMUBEyKEwpwhpU2QKboRJQ=",
 "owner": "oxalica",
 "repo": "rust-overlay",
-"rev": "c8bf9c162bb3f734cf357846e995eb70b94e2bcd",
+"rev": "9055cb4f33f062c0dd33aa7e3c89140da8f70057",
 "type": "github"
 },
 "original": {

View File

@@ -2,7 +2,7 @@
 description = "basic rust flake";
 inputs = {
-nixpkgs.url = "github:NixOS/nixpkgs/nixos-22.11";
+nixpkgs.url = "github:NixOS/nixpkgs/nixos-22.05";
 nixpkgs-unstable.url = "github:NixOS/nixpkgs/nixos-unstable";
 rust-overlay = {
 url = "github:oxalica/rust-overlay";
@@ -18,8 +18,7 @@
 };
 };
-outputs = { self, nixpkgs, nixpkgs-unstable, flake-utils, gitignoresrc
-, rust-overlay, ... }@inputs:
+outputs = { self, nixpkgs, nixpkgs-unstable, flake-utils, gitignoresrc, rust-overlay, ... }@inputs:
 {
 #overlay = import ./nix { inherit gitignoresrc; };
 } // (flake-utils.lib.eachDefaultSystem (system:
@@ -27,24 +26,27 @@
 unstable = import nixpkgs-unstable { inherit system; };
 pkgs = import nixpkgs {
 inherit system;
-overlays = [ (import rust-overlay) ];
+overlays = [ (import rust-overlay)];
 };
-in rec {
-devShell = pkgs.mkShell {
-RUST_LOG = "debug";
-RUST_BACKTRACE = 1;
-buildInputs = [
-pkgs.rust-bin.stable.latest.rustfmt
-pkgs.rust-bin.stable.latest.default
-pkgs.rust-analyzer
-pkgs.cargo-audit
-pkgs.cargo-license
-pkgs.cargo-tarpaulin
-pkgs.cargo-kcov
-pkgs.valgrind
-pkgs.gnuplot
-pkgs.kcov
-];
-};
-}));
+in
+rec {
+devShell =
+pkgs.mkShell {
+RUST_LOG = "debug";
+RUST_BACKTRACE = 1;
+buildInputs = [
+pkgs.rust-bin.stable.latest.rustfmt
+pkgs.rust-bin.stable.latest.default
+pkgs.rust-analyzer
+pkgs.cargo-audit
+pkgs.cargo-license
+pkgs.cargo-tarpaulin
+pkgs.cargo-kcov
+pkgs.valgrind
+pkgs.gnuplot
+pkgs.kcov
+];
+};
+}
+));
 }

View File

@@ -8,26 +8,26 @@
 "build": "parcel build"
 },
 "devDependencies": {
-"@types/react": "^18.0.26",
-"@types/react-dom": "^18.0.10",
-"@typescript-eslint/eslint-plugin": "^5.48.1",
-"@typescript-eslint/parser": "^5.48.1",
-"eslint": "^8.31.0",
+"@types/react": "^18.0.12",
+"@types/react-dom": "^18.0.5",
+"@typescript-eslint/eslint-plugin": "^5.28.0",
+"@typescript-eslint/parser": "^5.28.0",
+"eslint": "^8.17.0",
 "eslint-config-airbnb": "^19.0.4",
-"eslint-plugin-import": "^2.27.4",
-"eslint-plugin-jsx-a11y": "^6.7.1",
-"eslint-plugin-react": "^7.32.0",
-"parcel": "^2.8.2",
+"eslint-plugin-import": "^2.26.0",
+"eslint-plugin-jsx-a11y": "^6.5.1",
+"eslint-plugin-react": "^7.30.0",
+"parcel": "^2.6.0",
 "process": "^0.11.10",
-"typescript": "^4.9.4"
+"typescript": "^4.7.3"
 },
 "dependencies": {
-"@antv/g6": "^4.8.3",
-"@emotion/react": "^11.10.5",
-"@emotion/styled": "^11.10.5",
+"@antv/g6": "^4.6.4",
+"@emotion/react": "^11.10.0",
+"@emotion/styled": "^11.10.0",
 "@fontsource/roboto": "^4.5.8",
-"@mui/material": "^5.11.4",
-"react": "^18.2.0",
-"react-dom": "^18.2.0"
+"@mui/material": "^5.10.2",
+"react": "^18.1.0",
+"react-dom": "^18.1.0"
 }
 }

View File

@@ -1,8 +0,0 @@
-{ pkgs ? import <nixpkgs> {} }:
-pkgs.mkShell {
-buildInputs = [
-pkgs.yarn
-];
-}

View File

@@ -19,7 +19,7 @@ import {
 TextField,
 } from '@mui/material';
-import GraphG6 from './graph-g6.tsx';
+import Graph from './graph.tsx';
 const { useState, useCallback } = React;
@@ -155,7 +155,7 @@ function App() {
 {graphs.length > 0
 && (
 <Paper elevation={3} square sx={{ marginTop: 4, marginBottom: 4 }}>
-<GraphG6 graph={graphs[graphIndex]} />
+<Graph graph={graphs[graphIndex]} />
 </Paper>
 )}
 {graphs.length === 0

View File

@@ -131,45 +131,16 @@ G6.registerNode('nodeWithFlag', {
 },
 });
-interface GraphProps {
-lo_edges: [number, number][],
-hi_edges: [number, number][],
-node_labels: { [key: number]: string },
-tree_root_labels: { [key: number]: string[] },
-}
-function nodesAndEdgesFromGraphProps(graphProps: GraphProps) {
-const nodes = Object.keys(graphProps.node_labels).map((id) => {
-const mainLabel = graphProps.node_labels[id];
-const subLabel = graphProps.tree_root_labels[id].length > 0 ? `Root for: ${graphProps.tree_root_labels[id].join(' ; ')}` : undefined;
-// const label = subLabel.length > 0 ? `${mainLabel}\n${subLabel}` : mainLabel;
-return {
-id: id.toString(),
-mainLabel,
-subLabel,
-// style: {
-// height: subLabel.length > 0 ? 60 : 30,
-// width: Math.max(30, 5 * mainLabel.length + 10, 5 * subLabel.length + 10),
-// },
-};
-});
-const edges = graphProps.lo_edges.map(([source, target]) => ({
-id: `LO_${source}_${target}`, source: source.toString(), target: target.toString(), style: { stroke: '#ed6c02', lineWidth: 2 },
-}))
-.concat(graphProps.hi_edges.map(([source, target]) => ({
-id: `HI_${source}_${target}`, source: source.toString(), target: target.toString(), style: { stroke: '#1976d2', lineWidth: 2 },
-})));
-return { nodes, edges };
-}
 interface Props {
-graph: GraphProps,
+graph: {
+lo_edges: [number, number][],
+hi_edges: [number, number][],
+node_labels: { [key: number]: string },
+tree_root_labels: { [key: number]: string[] },
+}
 }
-function GraphG6(props: Props) {
+function Graph(props: Props) {
 const { graph: graphProps } = props;
 const ref = useRef(null);
@ -227,11 +198,6 @@ function GraphG6(props: Props) {
 opacity: 0.3,
 },
 },
-animate: true,
-animateCfg: {
-duration: 500,
-easing: 'easePolyInOut',
-},
 });
 }
@ -349,12 +315,34 @@ function GraphG6(props: Props) {
 () => {
 const graph = graphRef.current;
-const { nodes, edges } = nodesAndEdgesFromGraphProps(graphProps);
+const nodes = Object.keys(graphProps.node_labels).map((id) => {
+const mainLabel = graphProps.node_labels[id];
+const subLabel = graphProps.tree_root_labels[id].length > 0 ? `Root for: ${graphProps.tree_root_labels[id].join(' ; ')}` : undefined;
-graph.changeData({
+// const label = subLabel.length > 0 ? `${mainLabel}\n${subLabel}` : mainLabel;
+return {
+id: id.toString(),
+mainLabel,
+subLabel,
+// style: {
+// height: subLabel.length > 0 ? 60 : 30,
+// width: Math.max(30, 5 * mainLabel.length + 10, 5 * subLabel.length + 10),
+// },
+};
+});
+const edges = graphProps.lo_edges.map(([source, target]) => ({
+id: `LO_${source}_${target}`, source: source.toString(), target: target.toString(), style: { stroke: '#ed6c02', lineWidth: 2 },
+}))
+.concat(graphProps.hi_edges.map(([source, target]) => ({
+id: `HI_${source}_${target}`, source: source.toString(), target: target.toString(), style: { stroke: '#1976d2', lineWidth: 2 },
+})));
+graph.data({
 nodes,
 edges,
 });
+graph.render();
 },
 [graphProps],
 );
@ -371,4 +359,4 @@ function GraphG6(props: Props) {
 );
 }
-export default GraphG6;
+export default Graph;

File diff suppressed because it is too large.

View File

@@ -28,9 +28,9 @@ nom = "7.1.1"
 lexical-sort = "0.3.1"
 serde = { version = "1.0", features = ["derive","rc"] }
 serde_json = "1.0"
-biodivine-lib-bdd = "0.4.1"
+biodivine-lib-bdd = "0.4.0"
 derivative = "2.2.0"
-roaring = "0.10.1"
+roaring = "0.9.0"
 strum = { version = "0.24", features = ["derive"] }
 crossbeam-channel = "0.5"
 rand = {version = "0.8.5", features = ["std_rng"]}

View File

@@ -15,7 +15,7 @@ fn main() {
 fn gen_tests() {
 let out_dir = env::var("OUT_DIR").unwrap();
 let destination = Path::new(&out_dir).join("tests.rs");
-let mut test_file = File::create(destination).unwrap();
+let mut test_file = File::create(&destination).unwrap();
 if let Ok(test_data_directory) = read_dir("../res/adf-instances/instances/") {
 // write test file header, put `use`, `const` etc there
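The only change in this file (and the matching one in the --export handling of the CLI above) is File::create(&destination) versus File::create(destination). File::create is generic over AsRef<Path>, so both forms compile; the borrow only matters when the PathBuf is still needed afterwards, and dropping it is what newer clippy suggests via needless_borrow. A small sketch with an illustrative temp path:

use std::fs::File;
use std::path::PathBuf;

fn main() -> std::io::Result<()> {
    let destination: PathBuf = std::env::temp_dir().join("tests.rs");

    let _a = File::create(&destination)?; // borrowed: `destination` stays usable below
    println!("created {}", destination.display());

    let _b = File::create(destination)?; // by value: the PathBuf is moved into the call
    Ok(())
}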

View File

@@ -11,7 +11,7 @@ use strum::{EnumString, EnumVariantNames};
 /// Return value for heuristics.
 pub type RetVal = Option<(Var, Term)>;
 /// Signature for heuristics functions.
-pub type HeuristicFn = dyn Fn(&Adf, &[Term]) -> RetVal + Sync;
+pub type HeuristicFn = dyn Fn(&Adf, &[Term]) -> RetVal;
 pub(crate) fn heu_simple(_adf: &Adf, interpr: &[Term]) -> Option<(Var, Term)> {
 for (idx, term) in interpr.iter().enumerate() {
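The HeuristicFn alias differs only in the + Sync bound. With the bound, a reference to a heuristic trait object is Send and can be handed to worker threads (fitting the crossbeam-channel machinery seen elsewhere in the crate); without it, the alias is a plain single-threaded trait object. A minimal sketch with illustrative types rather than the crate's Adf/Term signature:

// `&T` is `Send` exactly when `T: Sync`, so the `+ Sync` bound is what lets a
// shared heuristic closure cross thread boundaries.
type Heuristic = dyn Fn(u32) -> u32 + Sync;

fn apply_in_parallel(heu: &Heuristic, inputs: &[u32]) -> Vec<u32> {
    std::thread::scope(|s| {
        let handles: Vec<_> = inputs
            .iter()
            .map(|&x| s.spawn(move || heu(x))) // requires `heu` to be sendable to the worker
            .collect();
        handles.into_iter().map(|h| h.join().unwrap()).collect()
    })
}

fn main() {
    let double = |x: u32| x * 2;
    // Dropping `+ Sync` from the alias makes this call fail to compile.
    println!("{:?}", apply_in_parallel(&double, &[1, 2, 3]));
}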

View File

@@ -148,7 +148,7 @@ impl TwoValuedInterpretationsIterator {
 let indexes = term
 .iter()
 .enumerate()
-.filter_map(|(idx, &v)| (!v.is_truth_value()).then_some(idx))
+.filter_map(|(idx, &v)| (!v.is_truth_value()).then(|| idx))
 .rev()
 .collect::<Vec<_>>();
 let current = term
@@ -212,7 +212,7 @@ impl ThreeValuedInterpretationsIterator {
 let indexes = term
 .iter()
 .enumerate()
-.filter_map(|(idx, &v)| (!v.is_truth_value()).then_some(idx))
+.filter_map(|(idx, &v)| (!v.is_truth_value()).then(|| idx))
 .rev()
 .collect::<Vec<_>>();
 let current = vec![2; indexes.len()];
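Both hunks in this file trade bool::then_some(idx) for the closure form .then(|| idx). The two are equivalent here: then_some (stable since Rust 1.62) is simply the eager variant and avoids the redundant closure that clippy flags. A small self-contained sketch with illustrative data:

fn main() {
    let terms = [0u8, 1, 7, 1, 3];

    // Indices of entries that are not the designated "truth value" (here: != 1), eager form.
    let eager: Vec<usize> = terms
        .iter()
        .enumerate()
        .filter_map(|(idx, &v)| (v != 1).then_some(idx))
        .collect();

    // The same selection with the closure form.
    let lazy: Vec<usize> = terms
        .iter()
        .enumerate()
        .filter_map(|(idx, &v)| (v != 1).then(|| idx))
        .collect();

    assert_eq!(eager, lazy);
    println!("{eager:?}"); // [0, 2, 4]
}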