mirror of https://github.com/ellmau/adf-obdd.git synced 2025-12-20 09:39:38 +01:00

Add API for uploading and solving ADF problems

monsterkrampe 2023-03-30 17:35:49 +02:00
parent 620e86e10b
commit e562631f1c
No known key found for this signature in database
GPG Key ID: B8ADC1F5A5CE5057
9 changed files with 907 additions and 319 deletions

Cargo.lock (generated, 87 changed lines)

@ -261,7 +261,7 @@ dependencies = [
"adf_bdd",
"assert_cmd",
"assert_fs",
"clap",
"clap 4.1.13",
"crossbeam-channel",
"env_logger 0.10.0",
"log",
@ -282,10 +282,10 @@ dependencies = [
"actix-web",
"adf_bdd",
"argon2",
"derive_more",
"env_logger 0.9.3",
"log",
"mongodb",
"names",
"serde",
]
@ -695,6 +695,23 @@ dependencies = [
"inout",
]
[[package]]
name = "clap"
version = "3.2.23"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "71655c45cb9845d3270c9d6df84ebe72b4dad3c2ba3f7023ad47c144e4e473a5"
dependencies = [
"atty",
"bitflags",
"clap_derive 3.2.18",
"clap_lex 0.2.4",
"indexmap",
"once_cell",
"strsim",
"termcolor",
"textwrap",
]
[[package]]
name = "clap"
version = "4.1.13"
@ -702,14 +719,27 @@ source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "3c911b090850d79fc64fe9ea01e28e465f65e821e08813ced95bced72f7a8a9b"
dependencies = [
"bitflags",
"clap_derive",
"clap_lex",
"clap_derive 4.1.12",
"clap_lex 0.3.3",
"is-terminal",
"once_cell",
"strsim",
"termcolor",
]
[[package]]
name = "clap_derive"
version = "3.2.18"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "ea0c8bce528c4be4da13ea6fead8965e95b6073585a2f05204bd8f4119f82a65"
dependencies = [
"heck",
"proc-macro-error",
"proc-macro2",
"quote",
"syn 1.0.109",
]
[[package]]
name = "clap_derive"
version = "4.1.12"
@ -722,6 +752,15 @@ dependencies = [
"syn 2.0.8",
]
[[package]]
name = "clap_lex"
version = "0.2.4"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "2850f2f5a82cbf437dd5af4d49848fbdfc27c157c3d010345776f952765261c5"
dependencies = [
"os_str_bytes",
]
[[package]]
name = "clap_lex"
version = "0.3.3"
@ -1734,6 +1773,16 @@ dependencies = [
"webpki-roots",
]
[[package]]
name = "names"
version = "0.14.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "7bddcd3bf5144b6392de80e04c347cd7fab2508f6df16a85fc496ecd5cec39bc"
dependencies = [
"clap 3.2.23",
"rand",
]
[[package]]
name = "nom"
version = "7.1.3"
@ -1941,6 +1990,30 @@ dependencies = [
"termtree",
]
[[package]]
name = "proc-macro-error"
version = "1.0.4"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "da25490ff9892aab3fcf7c36f08cfb902dd3e71ca0f9f9517bea02a73a5ce38c"
dependencies = [
"proc-macro-error-attr",
"proc-macro2",
"quote",
"syn 1.0.109",
"version_check",
]
[[package]]
name = "proc-macro-error-attr"
version = "1.0.4"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "a1be40180e52ecc98ad80b184934baf3d0d29f979574e439af5a55274b35f869"
dependencies = [
"proc-macro2",
"quote",
"version_check",
]
[[package]]
name = "proc-macro2"
version = "1.0.53"
@ -2489,6 +2562,12 @@ dependencies = [
"syn 1.0.109",
]
[[package]]
name = "textwrap"
version = "0.16.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "222a222a5bfe1bba4a77b45ec488a741b3cb8872e5e499451fd7d0129c9c7c3d"
[[package]]
name = "thiserror"
version = "1.0.40"


@ -9,6 +9,7 @@ pub mod heuristics;
use std::cell::RefCell;
use std::collections::{HashMap, HashSet};
use crate::datatypes::BddNode;
use crate::{
datatypes::{
adf::{
@ -31,23 +32,23 @@ use self::heuristics::Heuristic;
///
/// Please note that due to the nature of the underlying reduced and ordered Bdd the concept of a [`Term`][crate::datatypes::Term] represents one (sub) formula as well as truth-values.
pub struct Adf {
ordering: VarContainer,
bdd: Bdd,
ac: Vec<Term>,
pub ordering: VarContainer,
pub bdd: Bdd,
pub ac: Vec<Term>,
#[serde(skip, default = "Adf::default_rng")]
rng: RefCell<StdRng>,
}
#[derive(Serialize, Debug)]
#[derive(Clone, Deserialize, Serialize, Debug)]
/// This is a DTO for the graph output
pub struct DoubleLabeledGraph {
// number of nodes equals the number of node labels
// nodes implicitly have their index as their ID
node_labels: HashMap<usize, String>,
node_labels: HashMap<String, String>,
// every node gets this label containing multiple entries (it might be empty)
tree_root_labels: HashMap<usize, Vec<String>>,
lo_edges: Vec<(usize, usize)>,
hi_edges: Vec<(usize, usize)>,
tree_root_labels: HashMap<String, Vec<String>>,
lo_edges: Vec<(String, String)>,
hi_edges: Vec<(String, String)>,
}
impl Default for Adf {
@ -62,6 +63,21 @@ impl Default for Adf {
}
impl Adf {
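/// Rebuilds an [`Adf`] from a variable ordering, the raw [`BddNode`]s of its roBDD, and the
/// acceptance conditions, e.g. when restoring a previously serialized instance.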
pub fn from_ord_nodes_and_ac(
ordering: VarContainer,
bdd_nodes: Vec<BddNode>,
ac: Vec<Term>,
) -> Self {
let bdd = Bdd::from_nodes(bdd_nodes);
Adf {
ordering,
bdd,
ac,
rng: Self::default_rng(),
}
}
/// Instantiates a new ADF, based on the [parser-data][crate::parser::AdfParser].
pub fn from_parser(parser: &AdfParser) -> Self {
log::info!("[Start] instantiating BDD");
@ -960,7 +976,7 @@ impl Adf {
}
}
let node_labels: HashMap<usize, String> = self
let node_labels: HashMap<String, String> = self
.bdd
.nodes
.iter()
@ -975,11 +991,11 @@ impl Adf {
),
};
(i, value_part)
(i.to_string(), value_part)
})
.collect();
let tree_root_labels: HashMap<usize, Vec<String>> = ac.iter().enumerate().fold(
let tree_root_labels_with_usize: HashMap<usize, Vec<String>> = ac.iter().enumerate().fold(
self.bdd
.nodes
.iter()
@ -998,7 +1014,12 @@ impl Adf {
},
);
let lo_edges: Vec<(usize, usize)> = self
let tree_root_labels: HashMap<String, Vec<String>> = tree_root_labels_with_usize
.into_iter()
.map(|(i, vec)| (i.to_string(), vec))
.collect();
let lo_edges: Vec<(String, String)> = self
.bdd
.nodes
.iter()
@ -1006,9 +1027,10 @@ impl Adf {
.filter(|(i, _)| node_indices.contains(i))
.filter(|(_, node)| !vec![Var::TOP, Var::BOT].contains(&node.var()))
.map(|(i, &node)| (i, node.lo().value()))
.map(|(i, v)| (i.to_string(), v.to_string()))
.collect();
let hi_edges: Vec<(usize, usize)> = self
let hi_edges: Vec<(String, String)> = self
.bdd
.nodes
.iter()
@ -1016,6 +1038,7 @@ impl Adf {
.filter(|(i, _)| node_indices.contains(i))
.filter(|(_, node)| !vec![Var::TOP, Var::BOT].contains(&node.var()))
.map(|(i, &node)| (i, node.hi().value()))
.map(|(i, v)| (i.to_string(), v.to_string()))
.collect();
log::debug!("{:?}", node_labels);
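Taken together, the now-public fields and the new `from_ord_nodes_and_ac` constructor let callers take an `Adf` apart into plain data and rebuild it later, which is what the server does when persisting problems. A minimal sketch of that round trip (the input instance is hypothetical; it assumes the `adf_bdd` API exactly as shown in this diff):

use adf_bdd::adf::Adf;
use adf_bdd::parser::AdfParser;

fn main() {
    // Hypothetical three-statement ADF instance.
    let input = "s(a).s(b).s(c).ac(a,c(v)).ac(b,neg(a)).ac(c,and(a,b)).";

    let parser = AdfParser::default();
    parser.parse()(input).expect("example instance should parse");
    let adf = Adf::from_parser(&parser);

    // Take the ADF apart into plain data (roughly what the server's `SimplifiedAdf` stores).
    let (ordering, nodes, ac) = (adf.ordering, adf.bdd.nodes, adf.ac);

    // Rebuild an equivalent Adf later, e.g. after loading a stored problem ...
    let mut restored = Adf::from_ord_nodes_and_ac(ordering, nodes, ac);

    // ... and produce the graph DTO, here for the grounded interpretation.
    let grounded = restored.grounded();
    let graph = restored.into_double_labeled_graph(Some(&grounded));
    println!("{graph:?}");
}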


@ -26,7 +26,7 @@ impl Default for VarContainer {
}
impl VarContainer {
pub(crate) fn from_parser(
pub fn from_parser(
names: Arc<RwLock<Vec<String>>>,
mapping: Arc<RwLock<HashMap<String, usize>>>,
) -> VarContainer {
@ -51,11 +51,11 @@ impl VarContainer {
.and_then(|name| name.get(var.value()).cloned())
}
pub(crate) fn names(&self) -> Arc<RwLock<Vec<String>>> {
pub fn names(&self) -> Arc<RwLock<Vec<String>>> {
Arc::clone(&self.names)
}
pub(crate) fn mappings(&self) -> Arc<RwLock<HashMap<String, usize>>> {
pub fn mappings(&self) -> Arc<RwLock<HashMap<String, usize>>> {
Arc::clone(&self.mapping)
}
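With `from_parser`, `names`, and `mappings` public, code outside the library can snapshot the variable ordering and restore it later; the server's `VarContainerDb` conversions do exactly this. A small sketch against the API shown here (the helper names are made up for the sketch):

use std::collections::HashMap;
use std::sync::{Arc, RwLock};

use adf_bdd::datatypes::adf::VarContainer;

// Clone the statement names and the name-to-index mapping out of the container.
fn snapshot(ordering: &VarContainer) -> (Vec<String>, HashMap<String, usize>) {
    let names = ordering.names().read().unwrap().clone();
    let mapping = ordering.mappings().read().unwrap().clone();
    (names, mapping)
}

// Rebuild a VarContainer from previously stored data.
fn restore(names: Vec<String>, mapping: HashMap<String, usize>) -> VarContainer {
    VarContainer::from_parser(Arc::new(RwLock::new(names)), Arc::new(RwLock::new(mapping)))
}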


@ -13,7 +13,7 @@ use std::{cell::RefCell, cmp::min, collections::HashMap, fmt::Display};
/// Each roBDD is identified by its corresponding [`Term`], which implicitly identifies the root node of a roBDD.
#[derive(Debug, Serialize, Deserialize)]
pub struct Bdd {
pub(crate) nodes: Vec<BddNode>,
pub nodes: Vec<BddNode>,
#[cfg(feature = "variablelist")]
#[serde(skip)]
var_deps: Vec<HashSet<Var>>,
@ -100,6 +100,16 @@ impl Bdd {
RefCell::new(HashMap::new())
}
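/// Builds a roBDD from a plain list of [`BddNode`]s by re-inserting each node, which also
/// rebuilds the internal lookup structures.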
pub fn from_nodes(nodes: Vec<BddNode>) -> Self {
let mut bdd = Self::new();
for node in nodes {
bdd.node(node.var(), node.lo(), node.hi());
}
bdd
}
/// Instantiates a [variable][crate::datatypes::Var] and returns the representing roBDD as a [`Term`][crate::datatypes::Term].
pub fn variable(&mut self, var: Var) -> Term {
self.node(var, Term::BOT, Term::TOP)


@ -16,7 +16,6 @@ adf_bdd = { version="0.3.1", path="../lib", features = ["frontend"] }
actix-web = "4"
actix-cors = "0.6"
actix-files = "0.6"
derive_more = "0.99.17"
env_logger = "0.9"
log = "0.4"
serde = "1"
@ -24,6 +23,7 @@ mongodb = "2.4.0"
actix-identity = "0.5.2"
argon2 = "0.5.0"
actix-session = { version="0.7.2", features = ["cookie-session"] }
names = "0.14.0"
[features]
cors_for_local_development = []

server/src/adf.rs (new file, 484 added lines)

@ -0,0 +1,484 @@
use std::collections::HashMap;
use std::sync::{Arc, RwLock};
use actix_identity::Identity;
use actix_web::rt::task::spawn_blocking;
use actix_web::rt::time::timeout;
use actix_web::{post, put, web, HttpMessage, HttpRequest, HttpResponse, Responder};
use adf_bdd::datatypes::adf::VarContainer;
use adf_bdd::datatypes::{BddNode, Term, Var};
use mongodb::bson::doc;
use mongodb::bson::{to_bson, Bson};
use names::{Generator, Name};
use serde::{Deserialize, Serialize};
use adf_bdd::adf::{Adf, DoubleLabeledGraph};
use adf_bdd::adfbiodivine::Adf as BdAdf;
use adf_bdd::parser::AdfParser;
use crate::config::{AppState, RunningInfo, Task, ADF_COLL, COMPUTE_TIME, DB_NAME, USER_COLL};
use crate::user::{username_exists, User};
type Ac = Vec<Term>;
type AcDb = Vec<String>;
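// The `*Db` types below mirror the library types but store numeric IDs (terms, variables,
// BDD node links) as strings, presumably so they round-trip safely through BSON, which has
// no unsigned 64-bit integer type.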
#[derive(Clone, Deserialize, Serialize)]
pub(crate) enum Parsing {
Naive,
Hybrid,
}
#[derive(Clone, PartialEq, Eq, Hash, Deserialize, Serialize)]
pub(crate) enum Strategy {
Ground,
Complete,
Stable,
StableCountingA,
StableCountingB,
StableNogood,
}
#[derive(Clone, Deserialize, Serialize)]
pub(crate) struct AcAndGraph {
pub(crate) ac: AcDb,
pub(crate) graph: DoubleLabeledGraph,
}
impl Into<Bson> for AcAndGraph {
fn into(self) -> Bson {
to_bson(&self).expect("Serialization should work")
}
}
type AcsAndGraphsOpt = Option<Vec<AcAndGraph>>;
#[derive(Default, Deserialize, Serialize)]
pub(crate) struct AcsPerStrategy {
pub(crate) parse_only: AcsAndGraphsOpt,
pub(crate) ground: AcsAndGraphsOpt,
pub(crate) complete: AcsAndGraphsOpt,
pub(crate) stable: AcsAndGraphsOpt,
pub(crate) stable_counting_a: AcsAndGraphsOpt,
pub(crate) stable_counting_b: AcsAndGraphsOpt,
pub(crate) stable_nogood: AcsAndGraphsOpt,
}
#[derive(Deserialize, Serialize)]
pub(crate) struct VarContainerDb {
names: Vec<String>,
mapping: HashMap<String, String>,
}
impl From<VarContainer> for VarContainerDb {
fn from(source: VarContainer) -> Self {
Self {
names: source.names().read().unwrap().clone(),
mapping: source
.mappings()
.read()
.unwrap()
.iter()
.map(|(k, v)| (k.clone(), v.to_string()))
.collect(),
}
}
}
impl From<VarContainerDb> for VarContainer {
fn from(source: VarContainerDb) -> Self {
Self::from_parser(
Arc::new(RwLock::new(source.names)),
Arc::new(RwLock::new(
source
.mapping
.into_iter()
.map(|(k, v)| (k, v.parse().unwrap()))
.collect(),
)),
)
}
}
#[derive(Deserialize, Serialize)]
pub(crate) struct BddNodeDb {
var: String,
lo: String,
hi: String,
}
impl From<BddNode> for BddNodeDb {
fn from(source: BddNode) -> Self {
Self {
var: source.var().0.to_string(),
lo: source.lo().0.to_string(),
hi: source.hi().0.to_string(),
}
}
}
impl From<BddNodeDb> for BddNode {
fn from(source: BddNodeDb) -> Self {
Self::new(
Var(source.var.parse().unwrap()),
Term(source.lo.parse().unwrap()),
Term(source.hi.parse().unwrap()),
)
}
}
type SimplifiedBdd = Vec<BddNodeDb>;
#[derive(Deserialize, Serialize)]
pub(crate) struct SimplifiedAdf {
pub(crate) ordering: VarContainerDb,
pub(crate) bdd: SimplifiedBdd,
pub(crate) ac: AcDb,
}
impl SimplifiedAdf {
fn from_lib_adf(adf: Adf) -> Self {
SimplifiedAdf {
ordering: adf.ordering.into(),
bdd: adf.bdd.nodes.into_iter().map(Into::into).collect(),
ac: adf.ac.into_iter().map(|t| t.0.to_string()).collect(),
}
}
}
#[derive(Deserialize, Serialize)]
pub(crate) struct AdfProblem {
pub(crate) name: String,
pub(crate) username: String,
pub(crate) code: String,
pub(crate) parsing_used: Parsing,
pub(crate) adf: SimplifiedAdf,
pub(crate) acs_per_strategy: AcsPerStrategy,
}
#[derive(Clone, Deserialize)]
struct AddAdfProblemBody {
name: Option<String>,
code: String,
parse_strategy: Parsing,
}
async fn adf_problem_exists(
adf_coll: &mongodb::Collection<AdfProblem>,
name: &str,
username: &str,
) -> bool {
adf_coll
.find_one(doc! { "name": name, "username": username }, None)
.await
.ok()
.flatten()
.is_some()
}
#[post("/add")]
async fn add_adf_problem(
req: HttpRequest,
app_state: web::Data<AppState>,
identity: Option<Identity>,
req_body: web::Json<AddAdfProblemBody>,
) -> impl Responder {
let adf_problem_input: AddAdfProblemBody = req_body.into_inner();
let adf_coll: mongodb::Collection<AdfProblem> = app_state
.mongodb_client
.database(DB_NAME)
.collection(ADF_COLL);
let user_coll: mongodb::Collection<User> = app_state
.mongodb_client
.database(DB_NAME)
.collection(USER_COLL);
let username = match identity.map(|id| id.id()) {
None => {
// Create and log in temporary user
let gen = Generator::with_naming(Name::Numbered);
let candidates = gen.take(10);
let mut name: Option<String> = None;
for candidate in candidates {
if name.is_some() {
continue;
}
if !(username_exists(&user_coll, &candidate).await) {
name = Some(candidate);
}
}
let username = match name {
Some(name) => name,
None => {
return HttpResponse::InternalServerError().body("Could not generate new name.")
}
};
match user_coll
.insert_one(
User {
username: username.clone(),
password: None,
},
None,
)
.await
{
Ok(_) => (),
Err(err) => return HttpResponse::InternalServerError().body(err.to_string()),
}
Identity::login(&req.extensions(), username.clone()).unwrap();
username
}
Some(Err(err)) => return HttpResponse::InternalServerError().body(err.to_string()),
Some(Ok(username)) => username,
};
let problem_name = match &adf_problem_input.name {
Some(name) => {
if adf_problem_exists(&adf_coll, name, &username).await {
return HttpResponse::Conflict()
.body("ADF Problem with that name already exists. Please pick another one!");
}
name.clone()
}
None => {
let gen = Generator::with_naming(Name::Numbered);
let candidates = gen.take(10);
let mut name: Option<String> = None;
for candidate in candidates {
if name.is_some() {
continue;
}
if !(adf_problem_exists(&adf_coll, &candidate, &username).await) {
name = Some(candidate);
}
}
match name {
Some(name) => name,
None => {
return HttpResponse::InternalServerError().body("Could not generate new name.")
}
}
}
};
let adf_problem_input_clone = adf_problem_input.clone();
let username_clone = username.clone();
let problem_name_clone = problem_name.clone();
let adf_res = timeout(
COMPUTE_TIME,
spawn_blocking(move || {
let running_info = RunningInfo {
username: username_clone,
adf_name: problem_name_clone,
task: Task::Parse,
};
app_state
.currently_running
.lock()
.unwrap()
.insert(running_info.clone());
let parser = AdfParser::default();
parser.parse()(&adf_problem_input_clone.code)
.map_err(|_| "ADF could not be parsed, double check your input!")?;
let lib_adf = match adf_problem_input_clone.parse_strategy {
Parsing::Naive => Adf::from_parser(&parser),
Parsing::Hybrid => {
let bd_adf = BdAdf::from_parser(&parser);
let naive_adf = bd_adf.hybrid_step_opt(false);
naive_adf
}
};
app_state
.currently_running
.lock()
.unwrap()
.remove(&running_info);
let ac_and_graph = AcAndGraph {
ac: lib_adf.ac.iter().map(|t| t.0.to_string()).collect(),
graph: lib_adf.into_double_labeled_graph(None),
};
Ok::<_, &str>((SimplifiedAdf::from_lib_adf(lib_adf), ac_and_graph))
}),
)
.await;
match adf_res {
Err(err) => HttpResponse::InternalServerError().body(err.to_string()),
Ok(Err(err)) => HttpResponse::InternalServerError().body(err.to_string()),
Ok(Ok(Err(err))) => HttpResponse::InternalServerError().body(err.to_string()),
Ok(Ok(Ok((adf, ac_and_graph)))) => {
let mut acs = AcsPerStrategy::default();
acs.parse_only = Some(vec![ac_and_graph]);
let adf_problem: AdfProblem = AdfProblem {
name: problem_name,
username,
code: adf_problem_input.code,
parsing_used: adf_problem_input.parse_strategy,
adf,
acs_per_strategy: acs,
};
let result = adf_coll.insert_one(&adf_problem, None).await;
match result {
Ok(_) => HttpResponse::Ok().json(adf_problem), // TODO: return name of problem here (especially since it may be generated)
Err(err) => HttpResponse::InternalServerError().body(err.to_string()),
}
}
}
}
#[derive(Deserialize)]
struct SolveAdfProblemBody {
strategy: Strategy,
}
#[put("/{problem_name}/solve")]
async fn solve_adf_problem(
app_state: web::Data<AppState>,
identity: Option<Identity>,
path: web::Path<String>,
req_body: web::Json<SolveAdfProblemBody>,
) -> impl Responder {
let problem_name = path.into_inner();
let adf_problem_input: SolveAdfProblemBody = req_body.into_inner();
let adf_coll: mongodb::Collection<AdfProblem> = app_state
.mongodb_client
.database(DB_NAME)
.collection(ADF_COLL);
let username = match identity.map(|id| id.id()) {
None => {
return HttpResponse::Unauthorized().body("You need to login to add an ADF problem.")
}
Some(Err(err)) => return HttpResponse::InternalServerError().body(err.to_string()),
Some(Ok(username)) => username,
};
let adf_problem = match adf_coll
.find_one(doc! { "name": &problem_name, "username": &username }, None)
.await
{
Err(err) => return HttpResponse::InternalServerError().body(err.to_string()),
Ok(None) => {
return HttpResponse::NotFound()
.body(format!("ADF problem with name {problem_name} not found."))
}
Ok(Some(prob)) => prob,
};
let has_been_solved = match adf_problem_input.strategy {
Strategy::Complete => adf_problem.acs_per_strategy.complete.is_some(),
Strategy::Ground => adf_problem.acs_per_strategy.ground.is_some(),
Strategy::Stable => adf_problem.acs_per_strategy.stable.is_some(),
Strategy::StableCountingA => adf_problem.acs_per_strategy.stable_counting_a.is_some(),
Strategy::StableCountingB => adf_problem.acs_per_strategy.stable_counting_b.is_some(),
Strategy::StableNogood => adf_problem.acs_per_strategy.stable_nogood.is_some(),
};
// NOTE: we could also return the result here instead of throwing an error but I think the canonical way should just be to call the get endpoint for the problem.
if has_been_solved {
return HttpResponse::Conflict()
.body("The ADF problem has already been solved with this strategy. You can just get the solution from the problem data directly.");
}
let username_clone = username.clone();
let problem_name_clone = problem_name.clone();
let strategy_clone = adf_problem_input.strategy.clone();
let acs_and_graphs_res = timeout(
COMPUTE_TIME,
spawn_blocking(move || {
let running_info = RunningInfo {
username: username_clone,
adf_name: problem_name_clone,
task: Task::Solve(strategy_clone.clone()),
};
app_state
.currently_running
.lock()
.unwrap()
.insert(running_info.clone());
let mut adf: Adf = Adf::from_ord_nodes_and_ac(
adf_problem.adf.ordering.into(),
adf_problem.adf.bdd.into_iter().map(Into::into).collect(),
adf_problem
.adf
.ac
.into_iter()
.map(|t| Term(t.parse().unwrap()))
.collect(),
);
let acs: Vec<Ac> = match strategy_clone {
Strategy::Complete => adf.complete().collect(),
Strategy::Ground => vec![adf.grounded()],
Strategy::Stable => adf.stable().collect(),
// TODO: INPUT VALIDATION: only allow this for hybrid parsing
Strategy::StableCountingA => adf.stable_count_optimisation_heu_a().collect(),
// TODO: INPUT VALIDATION: only allow this for hybrid parsing
Strategy::StableCountingB => adf.stable_count_optimisation_heu_b().collect(),
// TODO: support more than just default heuristics
Strategy::StableNogood => adf
.stable_nogood(adf_bdd::adf::heuristics::Heuristic::default())
.collect(),
};
let acs_and_graphs: Vec<AcAndGraph> = acs
.iter()
.map(|ac| AcAndGraph {
ac: ac.iter().map(|t| t.0.to_string()).collect(),
graph: adf.into_double_labeled_graph(Some(ac)),
})
.collect();
app_state
.currently_running
.lock()
.unwrap()
.remove(&running_info);
acs_and_graphs
}),
)
.await;
match acs_and_graphs_res {
Err(err) => HttpResponse::InternalServerError().body(err.to_string()),
Ok(Err(err)) => HttpResponse::InternalServerError().body(err.to_string()),
Ok(Ok(acs_and_graphs)) => {
let result = adf_coll.update_one(doc! { "name": &problem_name, "username": &username }, match adf_problem_input.strategy {
Strategy::Complete => doc! { "$set": { "acs_per_strategy.complete": Some(&acs_and_graphs) } },
Strategy::Ground => doc! { "$set": { "acs_per_strategy.ground": Some(&acs_and_graphs) } },
Strategy::Stable => doc! { "$set": { "acs_per_strategy.stable": Some(&acs_and_graphs) } },
Strategy::StableCountingA => doc! { "$set": { "acs_per_strategy.stable_counting_a": Some(&acs_and_graphs) } },
Strategy::StableCountingB => doc! { "$set": { "acs_per_strategy.stable_counting_b": Some(&acs_and_graphs) } },
Strategy::StableNogood => doc! { "$set": { "acs_per_strategy.stable_nogood": Some(&acs_and_graphs) } },
}, None).await;
match result {
Ok(_) => HttpResponse::Ok().json(acs_and_graphs),
Err(err) => HttpResponse::InternalServerError().body(err.to_string()),
}
}
}
}
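For reference, the JSON bodies the two new endpoints accept, matching `AddAdfProblemBody` and `SolveAdfProblemBody` above. This is a sketch using `serde_json`, which is assumed to be available; the problem name and ADF code are hypothetical:

use serde_json::json;

fn main() {
    // POST /adf/add -- `name` may be omitted (or null); the server then generates one.
    let add_body = json!({
        "name": "my-first-adf",
        "code": "s(a).s(b).ac(a,c(v)).ac(b,neg(a)).",
        "parse_strategy": "Hybrid" // or "Naive"
    });

    // PUT /adf/my-first-adf/solve -- one of the `Strategy` variants.
    let solve_body = json!({ "strategy": "Ground" });

    println!("{add_body}\n{solve_body}");
}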

server/src/config.rs (new file, 35 added lines)

@ -0,0 +1,35 @@
use std::collections::HashSet;
use std::sync::Mutex;
use std::time::Duration;
use mongodb::Client;
use crate::adf::Strategy;
pub(crate) const COOKIE_DURATION: actix_web::cookie::time::Duration =
actix_web::cookie::time::Duration::minutes(30);
pub(crate) const COMPUTE_TIME: Duration = Duration::from_secs(120);
pub(crate) const ASSET_DIRECTORY: &str = "./assets";
pub(crate) const DB_NAME: &str = "adf-obdd";
pub(crate) const USER_COLL: &str = "users";
pub(crate) const ADF_COLL: &str = "adf-problems";
#[derive(Clone, PartialEq, Eq, Hash)]
pub(crate) enum Task {
Parse,
Solve(Strategy),
}
#[derive(Clone, PartialEq, Eq, Hash)]
pub(crate) struct RunningInfo {
pub(crate) username: String,
pub(crate) adf_name: String,
pub(crate) task: Task,
}
pub(crate) struct AppState {
pub(crate) mongodb_client: Client,
pub(crate) currently_running: Mutex<HashSet<RunningInfo>>,
}


@ -1,292 +1,25 @@
use std::time::Duration;
use std::collections::HashSet;
use std::sync::Mutex;
use actix_files as fs;
use actix_identity::{Identity, IdentityMiddleware};
use actix_identity::IdentityMiddleware;
use actix_session::config::PersistentSession;
use actix_session::storage::CookieSessionStore;
use actix_session::SessionMiddleware;
use actix_web::cookie::Key;
use actix_web::rt::task::spawn_blocking;
use actix_web::rt::time::timeout;
use actix_web::{
delete, post, web, App, HttpMessage, HttpRequest, HttpResponse, HttpServer, Responder,
ResponseError,
};
use adf_bdd::datatypes::Term;
use argon2::password_hash::rand_core::OsRng;
use argon2::password_hash::SaltString;
use argon2::{Argon2, PasswordHash, PasswordHasher, PasswordVerifier};
use mongodb::results::DeleteResult;
use mongodb::{bson::doc, options::IndexOptions, Client, IndexModel};
use serde::{Deserialize, Serialize};
use derive_more::{Display, Error};
use actix_web::{web, App, HttpServer};
use mongodb::Client;
#[cfg(feature = "cors_for_local_development")]
use actix_cors::Cors;
use adf_bdd::adf::{Adf, DoubleLabeledGraph};
use adf_bdd::adfbiodivine::Adf as BdAdf;
use adf_bdd::parser::AdfParser;
mod adf;
mod config;
mod user;
const THIRTY_MINUTES: actix_web::cookie::time::Duration =
actix_web::cookie::time::Duration::minutes(30);
const ASSET_DIRECTORY: &str = "./assets";
const DB_NAME: &str = "adf-obdd";
const USER_COLL: &str = "users";
const ADF_COLL: &str = "adf-problems";
#[derive(Deserialize, Serialize)]
struct User {
username: String,
password: String,
}
// Creates an index on the "username" field to force the values to be unique.
async fn create_username_index(client: &Client) {
let options = IndexOptions::builder().unique(true).build();
let model = IndexModel::builder()
.keys(doc! { "username": 1 })
.options(options)
.build();
client
.database(DB_NAME)
.collection::<User>(USER_COLL)
.create_index(model, None)
.await
.expect("creating an index should succeed");
}
// Add new user
#[post("/register")]
async fn register(client: web::Data<Client>, user: web::Json<User>) -> impl Responder {
let mut user: User = user.into_inner();
let user_coll = client.database(DB_NAME).collection(USER_COLL);
let user_exists: bool = user_coll
.find_one(doc! { "username": &user.username }, None)
.await
.ok()
.flatten()
.is_some();
if user_exists {
return HttpResponse::Conflict()
.body("Username is already taken. Please pick another one!");
}
let pw = &user.password;
let salt = SaltString::generate(&mut OsRng);
let hashed_pw = Argon2::default()
.hash_password(pw.as_bytes(), &salt)
.expect("Error while hashing password!")
.to_string();
user.password = hashed_pw;
let result = user_coll.insert_one(user, None).await;
match result {
Ok(_) => HttpResponse::Ok().body("Registration successful!"),
Err(err) => HttpResponse::InternalServerError().body(err.to_string()),
}
}
// Remove user
#[delete("/delete")]
async fn delete_account(client: web::Data<Client>, identity: Option<Identity>) -> impl Responder {
let user_coll: mongodb::Collection<User> = client.database(DB_NAME).collection(USER_COLL);
match identity.map(|id| id.id()) {
None => HttpResponse::Unauthorized().body("You need to login to delete your account."),
Some(Err(err)) => HttpResponse::InternalServerError().body(err.to_string()),
Some(Ok(username)) => {
match user_coll
.delete_one(doc! { "username": username }, None)
.await
{
Ok(DeleteResult {
deleted_count: 0, ..
}) => HttpResponse::InternalServerError().body("Account could not be deleted."),
Ok(DeleteResult {
deleted_count: 1, ..
}) => HttpResponse::Ok().body("Account deleted."),
Ok(_) => unreachable!(
"delete_one removes at most one entry so all cases are covered already"
),
Err(err) => HttpResponse::InternalServerError().body(err.to_string()),
}
}
}
}
// Login
#[post("/login")]
async fn login(
req: HttpRequest,
client: web::Data<Client>,
user_data: web::Json<User>,
) -> impl Responder {
let username = &user_data.username;
let pw = &user_data.password;
let user_coll: mongodb::Collection<User> = client.database(DB_NAME).collection(USER_COLL);
match user_coll
.find_one(doc! { "username": username }, None)
.await
{
Ok(Some(user)) => {
let stored_hash = PasswordHash::new(&user.password).unwrap();
let pw_valid = Argon2::default()
.verify_password(pw.as_bytes(), &stored_hash)
.is_ok();
if pw_valid {
Identity::login(&req.extensions(), username.to_string()).unwrap();
HttpResponse::Ok().body("Login successful!")
} else {
HttpResponse::BadRequest().body("Invalid email or password")
}
}
Ok(None) => HttpResponse::NotFound().body(format!(
"No user found with username {}",
&user_data.username
)),
Err(err) => HttpResponse::InternalServerError().body(err.to_string()),
}
}
#[delete("/logout")]
async fn logout(id: Identity) -> impl Responder {
id.logout();
HttpResponse::Ok().body("Logout successful!")
}
type Ac = Option<Vec<Term>>;
#[derive(Deserialize, Serialize)]
enum Parsing {
Naive,
Hybrid,
}
#[derive(Deserialize, Serialize)]
enum Strategy {
ParseOnly,
Ground,
Complete,
Stable,
StableCountingA,
StableCountingB,
StableNogood,
}
#[derive(Serialize)]
struct AcsPerStrategy {
parse_only: Ac,
ground: Ac,
complete: Ac,
stable: Ac,
stable_counting_a: Ac,
stable_counting_b: Ac,
stable_nogood: Ac,
}
#[derive(Serialize)]
struct AdfProblem {
code: String,
parsing_used: Parsing,
adf: Adf,
acs_per_strategy: AcsPerStrategy,
}
// #[get("/")]
// fn index() -> impl Responder {
// }
#[derive(Deserialize)]
struct SolveReqBody {
code: String,
parsing: Parsing,
strategy: Strategy,
}
fn solve(req_body: web::Json<SolveReqBody>) -> impl Responder {
let input = &req_body.code;
let parsing = &req_body.parsing;
let strategy = &req_body.strategy;
let parser = AdfParser::default();
match parser.parse()(input) {
Ok(_) => log::info!("[Done] parsing"),
Err(e) => {
log::error!("Error during parsing:\n{} \n\n cannot continue, panic!", e);
panic!("Parsing failed, see log for further details")
}
}
let mut adf = match parsing {
Parsing::Naive => Adf::from_parser(&parser),
Parsing::Hybrid => {
let bd_adf = BdAdf::from_parser(&parser);
log::info!("[Start] translate into naive representation");
let naive_adf = bd_adf.hybrid_step_opt(false);
log::info!("[Done] translate into naive representation");
naive_adf
}
};
log::debug!("{:?}", adf);
let acs: Vec<Ac> = match strategy {
Strategy::ParseOnly => vec![None],
Strategy::Ground => vec![Some(adf.grounded())],
Strategy::Complete => adf.complete().map(Some).collect(),
Strategy::Stable => adf.stable().map(Some).collect(),
// TODO: INPUT VALIDATION: only allow this for hybrid parsing
Strategy::StableCountingA => adf.stable_count_optimisation_heu_a().map(Some).collect(),
// TODO: INPUT VALIDATION: only allow this for hybrid parsing
Strategy::StableCountingB => adf.stable_count_optimisation_heu_b().map(Some).collect(),
// TODO: support more than just default heuristics
Strategy::StableNogood => adf
.stable_nogood(adf_bdd::adf::heuristics::Heuristic::default())
.map(Some)
.collect(),
};
let dto: Vec<DoubleLabeledGraph> = acs
.iter()
.map(|ac| adf.into_double_labeled_graph(ac.as_ref()))
.collect();
web::Json(dto)
}
#[derive(Debug, Display, Error)]
#[display(
fmt = "Endpoint {} timed out. Probably your ADF problem is too complicated :(",
endpoint
)]
struct Timeout {
endpoint: &'static str,
}
impl ResponseError for Timeout {}
#[post("/solve")]
async fn solve_with_timeout(req_body: web::Json<SolveReqBody>) -> impl Responder {
timeout(Duration::from_secs(20), spawn_blocking(|| solve(req_body)))
.await
.map(|ok| {
ok.expect(
"An error in the spawned solve thread occurred. Timeouts are handled separately.",
)
})
.map_err(|_| Timeout { endpoint: "/solve" })
}
use adf::{add_adf_problem, solve_adf_problem};
use config::{AppState, ASSET_DIRECTORY, COOKIE_DURATION};
use user::{create_username_index, delete_account, login, logout, register};
#[actix_web::main]
async fn main() -> std::io::Result<()> {
@ -323,25 +56,32 @@ async fn main() -> std::io::Result<()> {
#[cfg(not(feature = "cors_for_local_development"))]
let cookie_secure = true;
app.app_data(web::Data::new(client.clone()))
.wrap(IdentityMiddleware::default())
.wrap(
SessionMiddleware::builder(CookieSessionStore::default(), secret_key.clone())
.cookie_name("adf-obdd-service-auth".to_owned())
.cookie_secure(cookie_secure)
.session_lifecycle(PersistentSession::default().session_ttl(THIRTY_MINUTES))
.build(),
)
.service(
web::scope("/users")
.service(register)
.service(delete_account)
.service(login)
.service(logout),
)
.service(solve_with_timeout)
// this must be last so it does not override anything
.service(fs::Files::new("/", ASSET_DIRECTORY).index_file("index.html"))
app.app_data(web::Data::new(AppState {
mongodb_client: client.clone(),
currently_running: Mutex::new(HashSet::new()),
}))
.wrap(IdentityMiddleware::default())
.wrap(
SessionMiddleware::builder(CookieSessionStore::default(), secret_key.clone())
.cookie_name("adf-obdd-service-auth".to_owned())
.cookie_secure(cookie_secure)
.session_lifecycle(PersistentSession::default().session_ttl(COOKIE_DURATION))
.build(),
)
.service(
web::scope("/users")
.service(register)
.service(delete_account)
.service(login)
.service(logout),
)
.service(
web::scope("/adf")
.service(add_adf_problem)
.service(solve_adf_problem),
)
// this must be last so it does not override anything
.service(fs::Files::new("/", ASSET_DIRECTORY).index_file("index.html"))
})
.bind(("0.0.0.0", 8080))?
.run()

server/src/user.rs (new file, 217 added lines)

@ -0,0 +1,217 @@
use actix_identity::Identity;
use actix_web::{delete, post, web, HttpMessage, HttpRequest, HttpResponse, Responder};
use argon2::password_hash::rand_core::OsRng;
use argon2::password_hash::SaltString;
use argon2::{Argon2, PasswordHash, PasswordHasher, PasswordVerifier};
use mongodb::results::DeleteResult;
use mongodb::{bson::doc, options::IndexOptions, Client, IndexModel};
use serde::{Deserialize, Serialize};
use crate::adf::AdfProblem;
use crate::config::{AppState, ADF_COLL, DB_NAME, USER_COLL};
#[derive(Deserialize, Serialize)]
pub(crate) struct User {
pub(crate) username: String,
pub(crate) password: Option<String>, // NOTE: Password being None indicates a temporary user
}
#[derive(Deserialize, Serialize)]
struct UserPayload {
username: String,
password: String,
}
// Creates an index on the "username" field to force the values to be unique.
pub(crate) async fn create_username_index(client: &Client) {
let options = IndexOptions::builder().unique(true).build();
let model = IndexModel::builder()
.keys(doc! { "username": 1 })
.options(options)
.build();
client
.database(DB_NAME)
.collection::<User>(USER_COLL)
.create_index(model, None)
.await
.expect("creating an index should succeed");
}
pub(crate) async fn username_exists(user_coll: &mongodb::Collection<User>, username: &str) -> bool {
user_coll
.find_one(doc! { "username": username }, None)
.await
.ok()
.flatten()
.is_some()
}
// Add new user
#[post("/register")]
async fn register(app_state: web::Data<AppState>, user: web::Json<UserPayload>) -> impl Responder {
let mut user: UserPayload = user.into_inner();
let user_coll = app_state
.mongodb_client
.database(DB_NAME)
.collection(USER_COLL);
if username_exists(&user_coll, &user.username).await {
return HttpResponse::Conflict()
.body("Username is already taken. Please pick another one!");
}
let pw = &user.password;
let salt = SaltString::generate(&mut OsRng);
let hashed_pw = Argon2::default()
.hash_password(pw.as_bytes(), &salt)
.expect("Error while hashing password!")
.to_string();
user.password = hashed_pw;
let result = user_coll
.insert_one(
User {
username: user.username,
password: Some(user.password),
},
None,
)
.await;
match result {
Ok(_) => HttpResponse::Ok().body("Registration successful!"),
Err(err) => HttpResponse::InternalServerError().body(err.to_string()),
}
}
// Remove user
#[delete("/delete")]
async fn delete_account(
app_state: web::Data<AppState>,
identity: Option<Identity>,
) -> impl Responder {
let user_coll: mongodb::Collection<User> = app_state
.mongodb_client
.database(DB_NAME)
.collection(USER_COLL);
let adf_coll: mongodb::Collection<AdfProblem> = app_state
.mongodb_client
.database(DB_NAME)
.collection(ADF_COLL);
match identity.map(|id| id.id()) {
None => HttpResponse::Unauthorized().body("You need to login to delete your account."),
Some(Err(err)) => HttpResponse::InternalServerError().body(err.to_string()),
Some(Ok(username)) => {
// Delete all adfs created by user
match adf_coll
.delete_many(doc! { "username": &username }, None)
.await
{
Err(err) => HttpResponse::InternalServerError().body(err.to_string()),
// Note: zero deleted problems is fine; the user may simply not have created any yet.
Ok(DeleteResult { .. }) => {
// Delete actual user
match user_coll
.delete_one(doc! { "username": &username }, None)
.await
{
Ok(DeleteResult {
deleted_count: 0, ..
}) => HttpResponse::InternalServerError()
.body("Account could not be deleted."),
Ok(DeleteResult {
deleted_count: 1, ..
}) => HttpResponse::Ok().body("Account deleted."),
Ok(_) => unreachable!(
"delete_one removes at most one entry so all cases are covered already"
),
Err(err) => HttpResponse::InternalServerError().body(err.to_string()),
}
}
}
}
}
}
// Login
#[post("/login")]
async fn login(
req: HttpRequest,
app_state: web::Data<AppState>,
user_data: web::Json<UserPayload>,
) -> impl Responder {
let username = &user_data.username;
let pw = &user_data.password;
let user_coll: mongodb::Collection<User> = app_state
.mongodb_client
.database(DB_NAME)
.collection(USER_COLL);
match user_coll
.find_one(doc! { "username": username }, None)
.await
{
Ok(Some(user)) => {
let stored_password = match &user.password {
None => return HttpResponse::BadRequest().body("Invalid username or password"), // NOTE: logging in as a temporary user is not allowed
Some(password) => password,
};
let stored_hash = PasswordHash::new(stored_password).unwrap();
let pw_valid = Argon2::default()
.verify_password(pw.as_bytes(), &stored_hash)
.is_ok();
if pw_valid {
Identity::login(&req.extensions(), username.to_string()).unwrap();
HttpResponse::Ok().body("Login successful!")
} else {
HttpResponse::BadRequest().body("Invalid username or password")
}
}
Ok(None) => HttpResponse::NotFound().body(format!(
"No user found with username {}",
&user_data.username
)),
Err(err) => HttpResponse::InternalServerError().body(err.to_string()),
}
}
#[delete("/logout")]
async fn logout(app_state: web::Data<AppState>, id: Option<Identity>) -> impl Responder {
let user_coll: mongodb::Collection<User> = app_state
.mongodb_client
.database(DB_NAME)
.collection(USER_COLL);
match id {
None => HttpResponse::Unauthorized().body("You are not logged in."),
Some(id) => match id.id() {
Err(err) => HttpResponse::InternalServerError().body(err.to_string()),
Ok(username) => {
let user: User = match user_coll
.find_one(doc! { "username": &username }, None)
.await
{
Ok(Some(user)) => user,
Ok(None) => {
return HttpResponse::NotFound()
.body(format!("No user found with username {}", &username))
}
Err(err) => return HttpResponse::InternalServerError().body(err.to_string()),
};
if user.password.is_none() {
HttpResponse::BadRequest().body("You are logged in as a temporary user, so we won't log you out: you would not be able to log in again. If you want to log in again later, set a password. Otherwise your session will expire automatically.")
} else {
id.logout();
HttpResponse::Ok().body("Logout successful!")
}
}
},
}
}
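The `register` and `login` handlers above hash passwords with Argon2 and verify candidates against the stored PHC string. A self-contained sketch of that round trip, using the same `argon2` 0.5 calls as in `user.rs` (the password literal is hypothetical):

use argon2::password_hash::rand_core::OsRng;
use argon2::password_hash::SaltString;
use argon2::{Argon2, PasswordHash, PasswordHasher, PasswordVerifier};

fn main() {
    let password = "correct horse battery staple"; // hypothetical password

    // Hashing, as in `register`: a fresh salt plus Argon2 with default parameters.
    let salt = SaltString::generate(&mut OsRng);
    let hashed = Argon2::default()
        .hash_password(password.as_bytes(), &salt)
        .expect("hashing should succeed")
        .to_string();

    // Verification, as in `login`: parse the stored PHC string and check the candidate.
    let parsed = PasswordHash::new(&hashed).expect("stored hash should be a valid PHC string");
    let ok = Argon2::default()
        .verify_password(password.as_bytes(), &parsed)
        .is_ok();
    assert!(ok);
}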