Commit efaea921 authored by Ryan Wisnesky's avatar Ryan Wisnesky
Browse files

add pivot

parent 520e2496
......@@ -346,6 +346,11 @@ Note: AQL now requires mapping literals to be a list of mappings $m_1, \ldots$ o
\subsection{{\tt options}}
Allowable options are {\tt timeout} and {\tt dont\_validate\_unsafe}, which disables checking that equations are preserved.
\section{{\tt pivot <instance> <options>}}
Computes the schema mapping $pivot(I) \to I$.
\chapter{Kind {\tt query}}
A query $S \to T$ assigns to each entity $en$ in $S$ a (``frozen'') instance on $T$, which we will write as $[en]$, and to each foreign key $fk : e \to e'$ a transform $[fk] : [e'] \to [e]$ (note the reversal), and to each attribute $att : e \to \tau$ a term of type $\tau$ in context $[e]$.
......@@ -380,6 +385,10 @@ A {\tt <instance>, ctx(<attribute>, <term>)}, i.e., a single from/where/return (
The expression ${\tt getMapping} \ C \ X$ gets the mapping $X \to C$ associated with schema $X$ in the colimit of schemas $C$.
\section{{\tt pivot <instance> <options>}}
Computes the `schema of elements' for an instance $I$. Each row in $I$ is an entity in $pivot(I)$.
\chapter{Kind {\tt constraints}}
\section{{\tt literal : <schema>}}
......@@ -573,6 +582,11 @@ If the chase succeeds, the result instance will satisfy the constraints. The op
Allowable options are the same as for an instance.
\section{{\tt pivot <instance> <options>}}
Computes the canonical instance $J$ associated with the `schema of elements' for an instance $I$. Each row in $I$ is an entity in $pivot(I)$, and we have a mapping $pivot(I) : pivot(I) \to I$ as well. This instance $J$ has the property that $\Sigma_{pivot(I)}(J)=I$.
\section{{\tt random : <schema>}}
Constructs a random instance with the specified number of generators per entity or type. Then for each generator $e:s$ and fk/att $f : s \to t$, it adds the equation $f(e) = y$, where $y:t$ is a randomly selected generator (or no equation if $t$ has no generators).
......@@ -941,6 +955,12 @@ Applies only to weakly orthogonal theories. Interprets all equations $p = q$ as
Interprets all equations $p = q$ as rewrite rules $p \to q$ regardless of termination behavior. Can diverge.
\subsubsection{{\tt program\_allow\_nonconfluence\_unsafe}}
Interprets all equations $p = q$ as rewrite rules $p \to q$ regardless of confluence. Can diverge.
\subsection{{\tt completion}}
Applies unfailing (ordered) Knuth-Bendix completion specialized to lexicographic path ordering. If no completion precedence is given, attempts to infer a precedence using constraint-satisfaction techniques.
......
......@@ -42,8 +42,9 @@ public final class AqlOptions {
public enum AqlOption {
quotient_use_chase,
chase_style,
maedmax_allow_empty_sorts_unsafe,
allow_empty_sorts_unsafe,
maedmax_path,
program_allow_nonconfluence_unsafe,
gui_sample,
gui_sample_size,
import_dont_check_closure_unsafe,
......@@ -202,6 +203,8 @@ public final class AqlOptions {
//@SuppressWarnings("static-method")
private static Object getDefault(AqlOption option) {
switch (option) {
case program_allow_nonconfluence_unsafe:
return false;
case quotient_use_chase:
return true;
case jdbc_no_distinct_unsafe:
......@@ -332,7 +335,7 @@ public final class AqlOptions {
return 16*1024;
case maedmax_path:
return "/home/ryan/maedmax/maedmax";
case maedmax_allow_empty_sorts_unsafe:
case allow_empty_sorts_unsafe:
return false;
case chase_style:
return "parallel";
......@@ -383,6 +386,8 @@ public final class AqlOptions {
private static Object getFromMap(Map<String, String> map, Collage<Ty, En, Sym, Fk, Att, Gen, Sk> col, AqlOption op) {
switch (op) {
case program_allow_nonconfluence_unsafe:
return op.getBoolean(map);
case quotient_use_chase:
return op.getBoolean(map);
case jdbc_query_export_convert_type:
......@@ -509,7 +514,7 @@ public final class AqlOptions {
return op.getInteger(map);
case maedmax_path:
return op.getString(map);
case maedmax_allow_empty_sorts_unsafe:
case allow_empty_sorts_unsafe:
return op.getBoolean(map);
case chase_style:
return op.getString(map);
......
package catdata.aql;
import java.util.HashMap;
import java.util.HashSet;
import java.util.LinkedList;
import java.util.List;
import java.util.Map;
import java.util.Set;
import catdata.Chc;
import catdata.Ctx;
import catdata.Pair;
import catdata.Triple;
import catdata.Util;
import catdata.aql.AqlOptions.AqlOption;
import catdata.aql.It.ID;
import catdata.aql.exp.SchExpRaw.Att;
import catdata.aql.exp.SchExpRaw.En;
import catdata.aql.exp.SchExpRaw.Fk;
import catdata.aql.fdm.InitialAlgebra;
import catdata.aql.fdm.LiteralInstance;
/**
 * Computes the "pivot" (schema-of-elements) of an instance {@code I}.
 * Per the accompanying manual text: each row of {@code I} becomes an entity
 * of the pivot schema {@code intI}, {@code F} is a schema mapping
 * {@code intI -> I.schema()}, and {@code J} is an instance on {@code intI}
 * intended to satisfy Sigma_F(J) = I.
 *
 * Type parameters follow the AQL convention: En0/Fk0/Att0 are the
 * entities/fks/atts of the *input* schema, while the pivot schema uses the
 * raw-syntax En/Fk/Att imported from SchExpRaw.
 */
public class AqlPivot<Ty, En0, Sym, Fk0, Att0, Gen, Sk, X, Y> {

	// The input instance being pivoted.
	public final Instance<Ty, En0, Sym, Fk0, Att0, Gen, Sk, X, Y> I;

	// The pivot ("schema of elements"): one entity per row of I.
	public final Schema<Ty, En, Sym, Fk, Att> intI;

	// Schema mapping intI -> I.schema(), sending each row-entity to the
	// entity of I it came from.
	public final Mapping<Ty, En, Sym, Fk, Att, En0, Fk0, Att0> F;

	// The canonical instance on intI; validated at the end of the constructor.
	public Instance<Ty, En, Sym, Fk, Att, X, Y, ID, Chc<Y, Pair<ID, Att>>> J;

	/**
	 * Builds the pivot schema, mapping, and instance from {@code i}.
	 *
	 * @param i     the instance to pivot
	 * @param strat options used for the initial-algebra construction and for
	 *              the consistency/java-eqs flags of the literal instance
	 */
	public AqlPivot(Instance<Ty, En0, Sym, Fk0, Att0, Gen, Sk, X, Y> i, AqlOptions strat) {
		I = i;

		// Components of the pivot schema under construction.
		Set<En> ens = new HashSet<>();
		Map<Att, Pair<En, Ty>> atts = new HashMap<>();
		Map<Fk, Pair<En, En>> fks = new HashMap<>();

		// Components of the mapping F : intI -> I.schema().
		Map<En, En0> ens0 = new HashMap<>();
		Map<Att, Triple<Var, En0, Term<Ty, En0, Sym, Fk0, Att0, Void, Void>>> atts0 = new HashMap<>();
		Map<Fk, Pair<En0, List<Fk0>>> fks0 = new HashMap<>();

		// Presentation equations for the instance J (mirrors col.eqs below,
		// but kept separately because LiteralInstance wants pairs).
		Set<Pair<Term<Ty, En, Sym, Fk, Att, X, Y>, Term<Ty, En, Sym, Fk, Att, X, Y>>>
		eqs0 = new HashSet<>();

		// Collage accumulating generators/equations for the initial algebra.
		Collage<Ty, En, Sym, Fk, Att, X, Y> col = new Collage<>();
		// Start from the type algebra of I (converted to the pivot's sorts).
		col.addAll(I.algebra().talg().convert());

		for (En0 en : I.schema().ens) {
			for (X x0 : I.algebra().en(en)) {
				// Each row x0 of entity en becomes its own entity named by
				// its string form, and a generator of that entity.
				String x = x0.toString();
				ens.add(new En(x));
				ens0.put(new En(x), en);
				col.gens.put(x0, new En(x));

				// One attribute per (row, attribute-of-en), recording its
				// observed value as an equation.
				for (Att0 att : I.schema().attsFrom(en)) {
					Att xxx = new Att(new En(x),att.toString());
					atts.put(xxx, new Pair<>(new En(x),I.schema().atts.get(att).second));
					// F sends this attribute to "att(x)" in a one-variable context.
					atts0.put(xxx, new Triple<>(new Var("x"), en, Term.Att(att, Term.Var(new Var("x")))));
					Term<Ty, En, Sym, Fk, Att, X, Y>
					l = Term.Att(xxx, Term.Gen(x0));
					Term<Ty, En, Sym, Fk, Att, X, Y>
					r = I.algebra().att(att, x0).convert();
					col.eqs.add(new Eq<Ty, En, Sym, Fk, Att, X, Y>(new Ctx<>(), l, r));
					eqs0.add(new Pair<>(l,r));
				}

				// One foreign key per (row, fk-of-en), targeting the entity
				// of the row this fk points to, again with a defining equation.
				for (Fk0 fk : I.schema().fksFrom(en)) {
					Fk xxx = new Fk(new En(x),fk.toString());
					fks.put(xxx, new Pair<>(new En(x), new En(I.algebra().fk(fk, x0).toString())));
					fks0.put(xxx, new Pair<>(en,Util.singList(fk)));
					Term<Ty, En, Sym, Fk, Att, X, Y>
					l = Term.Fk(xxx, Term.Gen(x0));
					Term<Ty, En, Sym, Fk, Att, X, Y>
					r = Term.Gen(I.algebra().fk(fk, x0));
					col.eqs.add(new Eq<Ty, En, Sym, Fk, Att, X, Y>(new Ctx<>(), l, r));
					eqs0.add(new Pair<>(l,r));
				}
			}
		}

		// Equate each labelled null (Sk) with its representative term,
		// translated into pivot vocabulary via foo().
		for (Y y : I.algebra().talg().sks.map.keySet()) {
			Term<Ty, En, Sym, Fk, Att, X, Y> l = Term.Sk(y);
			Term<Ty, En, Sym, Fk, Att, X, Y> r = foo(I.algebra().reprT(Term.Sk(y)));
			col.eqs.add(new Eq<Ty, En, Sym, Fk, Att, X, Y>(new Ctx<>(), l, r));
			eqs0.add(new Pair<>(l,r));
		}

		// The pivot schema has no equations of its own, so a trivial
		// syntactic-equality prover suffices as its decision procedure.
		DP<Ty, En, Sym, Fk, Att, Void, Void> dp = new DP<>() {
			@Override
			public String toStringProver() {
				return "Pivot prover";
			}
			@Override
			public boolean eq(Ctx<Var, Chc<Ty, En>> ctx, Term<Ty, En, Sym, Fk, Att, Void, Void> lhs,
					Term<Ty, En, Sym, Fk, Att, Void, Void> rhs) {
				return lhs.equals(rhs);
			}
		};

		intI = new Schema<>(I.schema().typeSide, ens , atts, fks, new HashSet<>(), dp, I.allowUnsafeJava());
		// false: do not re-check the mapping (it is correct by construction
		// here) — TODO(review) confirm the flag's meaning against Mapping.
		F = new Mapping<Ty, En, Sym, Fk, Att, En0, Fk0, Att0>(ens0, atts0, fks0, intI, I.schema(), false);

		// Add the schema's own collage, then build the initial algebra of
		// the combined presentation; J is the resulting literal instance.
		col.addAll(intI.collage());
		InitialAlgebra<Ty, En, Sym, Fk, Att, X, Y, ID> initial = new InitialAlgebra<>(strat, intI, col, new It(),
				Object::toString, Object::toString);
		J = new LiteralInstance<Ty, En, Sym, Fk, Att, X, Y, ID, Chc<Y, Pair<ID, Att>>>
				(intI, col.gens.map, col.sks.map, eqs0, initial.dp(), initial,
				(Boolean) strat.getOrDefault(AqlOption.require_consistency),
				(Boolean) strat.getOrDefault(AqlOption.allow_java_eqs_unsafe));
		J.validate();
	}

	/**
	 * Evaluates a pure entity-side term (generators and fks only) to the
	 * element of I it denotes.  Anomaly if the term is neither a generator
	 * nor an fk application.
	 */
	private X bar(Term<Void, En0, Void, Fk0, Void, Gen, Void> t) {
		if (t.gen != null) {
			return I.algebra().gen(t.gen);
		} else if (t.fk != null) {
			X x = bar(t.arg);
			return I.algebra().fk(t.fk, x);
		}
		return Util.anomaly();
	}

	/**
	 * Translates a type-side term over I's schema into the pivot vocabulary:
	 * java objects and symbols are kept (recursing on arguments), labelled
	 * nulls are resolved through I's algebra, and attribute applications are
	 * rewritten to the per-row attribute of the pivot schema.  Anomaly on
	 * any other term shape.
	 */
	private Term<Ty, En, Sym, Fk, Att, X, Y> foo(Term<Ty, En0, Sym, Fk0, Att0, Gen, Sk> t) {
		if (t.obj != null) {
			return Term.Obj(t.obj, t.ty);
		} else if (t.sym != null) {
			List<Term<Ty, En, Sym, Fk, Att, X, Y>> l = new LinkedList<>();
			for (Term<Ty, En0, Sym, Fk0, Att0, Gen, Sk> s : t.args) {
				l.add(foo(s));
			}
			return Term.Sym(t.sym, l);
		} else if (t.sk != null) {
			return I.algebra().sk(t.sk).convert();
		} else if (t.att != null) {
			// Evaluate the argument to a concrete row, then use that row's
			// entity-local attribute.
			X x = bar(t.arg.convert());
			return Term.Att(new Att(new En(x.toString()),t.att.toString()), Term.Gen(x));
		}
		return Util.anomaly();
	}
}
......@@ -51,6 +51,8 @@ public class AqlProver {
return new KBtoDP<>(js, col1.simplify().second, new CongruenceProver<>(col1.simplify().first.toKB()));
case program:
boolean check = !(Boolean) ops.getOrDefault(AqlOption.dont_verify_is_appropriate_for_prover_unsafe);
boolean check2 = !(Boolean) ops.getOrDefault(AqlOption.program_allow_nonconfluence_unsafe);
check = check && check2;
boolean allowNonTerm = (Boolean) ops.getOrDefault(AqlOption.program_allow_nontermination_unsafe);
try {
if (!allowNonTerm) {
......@@ -60,13 +62,15 @@ public class AqlProver {
throw new RuntimeException(ex.getMessage() + "\n\nPossible solution: add options program_allow_nontermination_unsafe=true, or prover=completion");
}
return new KBtoDP<>(js, col1.simplify().second, new ProgramProver<>(check, Var.it, col1.simplify().first.toKB())); // use
//return new KBtoDP<>(js, col1.simplify().second, new ProgramProver<>(check, Var.it, col1.simplify().first.toKB())); // use
case completion:
return new KBtoDP<>(js, col1.simplify().second, new CompletionProver<>(col1.toKB().syms.keySet(), ops, col1.simplify().first));
case monoidal:
return new MonoidalFreeDP<>(js, col1.simplify().second, col1.simplify().first); // use // simplified
case maedmax:
String exePath = (String) ops.getOrDefault(AqlOption.maedmax_path);
Boolean b = (Boolean) ops.getOrDefault(AqlOption.maedmax_allow_empty_sorts_unsafe);
Boolean b = (Boolean) ops.getOrDefault(AqlOption.allow_empty_sorts_unsafe);
return new KBtoDP<>(js, col1.simplify().second, new MaedmaxProver<>(exePath, col1.simplify().first.toKB(), b, timeout)); // use
default:
......
......@@ -222,7 +222,8 @@ public abstract class Instance<Ty, En, Sym, Fk, Att, Gen, Sk, X, Y> implements S
@Override
public String toString() {
	// Delegates to the two-argument overload, labeling the printed sections
	// "generators" and "equations".
	return toString("generators", "equations");
}
}
......
......@@ -39,7 +39,6 @@ import catdata.aql.exp.EdsExp.EdsExpVar;
import catdata.aql.exp.EdsExpRaw.EdExpRaw;
import catdata.aql.exp.GraphExp.GraphExpRaw;
import catdata.aql.exp.GraphExp.GraphExpVar;
import catdata.aql.exp.InstExpQueryQuotient;
import catdata.aql.exp.InstExp.InstExpAnonymize;
import catdata.aql.exp.InstExp.InstExpChase;
import catdata.aql.exp.InstExp.InstExpCoEq;
......@@ -55,11 +54,13 @@ import catdata.aql.exp.InstExp.InstExpEmpty;
import catdata.aql.exp.InstExp.InstExpEval;
import catdata.aql.exp.InstExp.InstExpFrozen;
import catdata.aql.exp.InstExp.InstExpPi;
import catdata.aql.exp.InstExp.InstExpPivot;
import catdata.aql.exp.InstExp.InstExpSigma;
import catdata.aql.exp.InstExp.InstExpSigmaChase;
import catdata.aql.exp.InstExp.InstExpVar;
import catdata.aql.exp.MapExp.MapExpComp;
import catdata.aql.exp.MapExp.MapExpId;
import catdata.aql.exp.MapExp.MapExpPivot;
import catdata.aql.exp.MapExp.MapExpVar;
import catdata.aql.exp.PragmaExp.PragmaExpCheck;
import catdata.aql.exp.PragmaExp.PragmaExpConsistent;
......@@ -84,6 +85,7 @@ import catdata.aql.exp.QueryExpRaw.Trans;
import catdata.aql.exp.SchExp.SchExpCod;
import catdata.aql.exp.SchExp.SchExpEmpty;
import catdata.aql.exp.SchExp.SchExpInst;
import catdata.aql.exp.SchExp.SchExpPivot;
import catdata.aql.exp.SchExp.SchExpVar;
import catdata.aql.exp.SchExpRaw.Att;
import catdata.aql.exp.SchExpRaw.En;
......@@ -199,10 +201,12 @@ public class CombinatorParser implements IAqlParser {
Parser<SchExp<?, ?, ?, ?, ?>>
var = ident.map(SchExpVar::new),
empty = Parsers.tuple(token("empty"), token(":"), ty_ref.get()).map(x -> new SchExpEmpty<>(x.c)),
pivot = Parsers.tuple(token("pivot"), inst_ref.lazy(), options.between(token("{"), token("}")).optional()).map(x -> new SchExpPivot<>(x.b, x.c == null ? new LinkedList<>() : x.c)),
inst = Parsers.tuple(token("schemaOf"), inst_ref.lazy()).map(x -> new SchExpInst<>(x.b)),
colim = Parsers.tuple(token("getSchema"), colim_ref.lazy()).map(x -> new SchExpColim<>(x.b)),
cod = Parsers.tuple(token("dst"), query_ref.lazy()).map(x -> new SchExpCod<>(x.b)),
ret = Parsers.or(inst, empty, schExpRaw(), var, colim, parens(sch_ref), cod);
ret = Parsers.or(inst, empty, schExpRaw(), var, colim, parens(sch_ref), pivot, cod);
sch_ref.set(ret);
}
......@@ -301,7 +305,8 @@ public class CombinatorParser implements IAqlParser {
.map(x -> new InstExpDelta(x.b, x.c)),
distinct = Parsers.tuple(token("distinct"), inst_ref.lazy()).map(x -> new InstExpDistinct(x.b)),
anon = Parsers.tuple(token("anonymize"), inst_ref.lazy()).map(x -> new InstExpAnonymize(x.b)),
pivot = Parsers.tuple(token("pivot"), inst_ref.lazy(), options.between(token("{"), token("}")).optional()).map(x -> new InstExpPivot<>(x.b, x.c == null ? new LinkedList<>() : x.c)),
eval = Parsers
.tuple(token("eval"), query_ref.lazy(), inst_ref.lazy(),
options.between(token("{"), token("}")).optional())
......@@ -321,7 +326,7 @@ public class CombinatorParser implements IAqlParser {
Parser ret = Parsers.or(queryQuotientExpRaw(), sigma_chase, l2, pi, frozen, instExpCsvQuot(), instExpJdbcQuot(), instExpCoProd(), instExpRand(),
instExpCoEq(), instExpJdbcAll(), chase, instExpJdbc(), empty, instExpRaw(), var, sigma, delta, distinct,
eval, colimInstExp(), dom, anon, cod, instExpCsv(), coeval, parens(inst_ref), instExpQuotient());
eval, colimInstExp(), dom, anon, pivot, cod, instExpCsv(), coeval, parens(inst_ref), instExpQuotient());
inst_ref.set(ret);
}
......@@ -342,8 +347,9 @@ public class CombinatorParser implements IAqlParser {
colim = Parsers.tuple(token("getMapping"), colim_ref.lazy(), ident).map(x -> new MapExpColim(x.c, x.b)),
comp = Parsers.tuple(token("["), map_ref.lazy(), token(";"), map_ref.lazy(), token("]"))
.map(x -> new MapExpComp(x.b, x.d)),
ret = Parsers.or(id, mapExpRaw(), var, colim, comp, parens(map_ref));
pivot = Parsers.tuple(token("pivot"), inst_ref.lazy(), options.between(token("{"), token("}")).optional()).map(x -> new MapExpPivot<>(x.b, x.c == null ? new LinkedList<>() : x.c)),
ret = Parsers.or(id, mapExpRaw(), var, pivot, colim, comp, parens(map_ref));
map_ref.set(ret);
}
......
......@@ -18,7 +18,7 @@ public interface IAqlParser {
"*)", "+", "[", "]", "<-" };
public static final String[] res = new String[] {
"quotient_query",
"quotient_query", "pivot",
"sigma_chase", "entity", "md", "quotient_jdbc", "random", "sql",
"chase", "check", "import_csv", "quotient_csv", "coproduct", "simple", "assert_consistent",
"coproduct_sigma", "coequalize", "html", "quotient", "entity_equations", "schema_colimit", "exists",
......
......@@ -17,6 +17,7 @@ import catdata.Pair;
import catdata.Util;
import catdata.aql.Anonymized;
import catdata.aql.AqlOptions;
import catdata.aql.AqlPivot;
import catdata.aql.AqlOptions.AqlOption;
import catdata.aql.Collage;
import catdata.aql.CoprodInstance;
......@@ -36,6 +37,7 @@ import catdata.aql.Var;
import catdata.aql.exp.MapExp.MapExpId;
import catdata.aql.exp.QueryExp.QueryExpDeltaCoEval;
import catdata.aql.exp.SchExp.SchExpLit;
import catdata.aql.exp.SchExp.SchExpPivot;
import catdata.aql.fdm.CoEvalInstance;
import catdata.aql.fdm.ColimitInstance;
import catdata.aql.fdm.DeltaInstance;
......@@ -62,6 +64,71 @@ public abstract class InstExp<Ty, En, Sym, Fk, Att, Gen, Sk, X, Y>
///////////////////////////////////////////////////////////////////////
/**
 * Instance expression {@code pivot <instance> <options>}: evaluates to the
 * canonical instance J on the "schema of elements" of the inner instance
 * (see {@link catdata.aql.AqlPivot}, field {@code J}).
 */
public static final class InstExpPivot<Ty, En0, Sym, Fk0, Att0, Gen, Sk, X, Y>
		extends InstExp<Ty, SchExpRaw.En, Sym, SchExpRaw.Fk, SchExpRaw.Att, X, Y, ID, Chc<Y, Pair<ID, catdata.aql.exp.SchExpRaw.Att>>> {

	// The instance expression being pivoted.
	public final InstExp<Ty, En0, Sym, Fk0, Att0, Gen, Sk, X, Y> I;

	// Options for evaluation, keyed by option name.
	public final Map<String, String> ops;

	/**
	 * @param i   the inner instance expression
	 * @param ops option (name, value) pairs; duplicates rejected by
	 *            {@code Util.toMapSafely}
	 */
	public InstExpPivot(InstExp<Ty, En0, Sym, Fk0, Att0, Gen, Sk, X, Y> i, List<Pair<String, String>> ops) {
		I = i;
		this.ops = Util.toMapSafely(ops);
	}

	@Override
	public SchExp<Ty, SchExpRaw.En, Sym, SchExpRaw.Fk, SchExpRaw.Att> type(
			AqlTyping G) {
		// The type of a pivoted instance is the pivot of the inner
		// instance's schema; options are irrelevant for typing.
		return new SchExpPivot<>(I, Collections.emptyList());
	}

	@Override
	protected Map<String, String> options() {
		return ops;
	}

	@Override
	public Instance<Ty, catdata.aql.exp.SchExpRaw.En, Sym, catdata.aql.exp.SchExpRaw.Fk, catdata.aql.exp.SchExpRaw.Att, X, Y, ID, Chc<Y, Pair<ID, catdata.aql.exp.SchExpRaw.Att>>> eval(
			AqlEnv env) {
		// Evaluate the inner instance, run the pivot construction, and
		// return its canonical instance J.
		AqlOptions strat = new AqlOptions(ops, null, env.defaults);
		Instance<Ty, SchExpRaw.En, Sym, SchExpRaw.Fk, SchExpRaw.Att, X, Y, ID, Chc<Y, Pair<ID, SchExpRaw.Att>>> l = new AqlPivot<>(I.eval(env), strat).J;
		return l;
	}

	@Override
	public int hashCode() {
		final int prime = 31;
		int result = 1;
		result = prime * result + ((I == null) ? 0 : I.hashCode());
		result = prime * result + ((ops == null) ? 0 : ops.hashCode());
		return result;
	}

	@Override
	public boolean equals(Object obj) {
		if (this == obj)
			return true;
		if (obj == null)
			return false;
		if (getClass() != obj.getClass())
			return false;
		// Wildcard cast instead of a raw type: getClass() equality above
		// guarantees the runtime class matches, and the field comparisons
		// below do not depend on the type arguments.
		InstExpPivot<?, ?, ?, ?, ?, ?, ?, ?, ?> other = (InstExpPivot<?, ?, ?, ?, ?, ?, ?, ?, ?>) obj;
		if (I == null) {
			if (other.I != null)
				return false;
		} else if (!I.equals(other.I))
			return false;
		if (ops == null) {
			if (other.ops != null)
				return false;
		} else if (!ops.equals(other.ops))
			return false;
		return true;
	}

	@Override
	public Collection<Pair<String, Kind>> deps() {
		// Dependencies are exactly those of the inner expression.
		return I.deps();
	}
}
///////////////////////////////////////////////////////////////////////
public static final class InstExpCoEq<Ty, En, Sym, Fk, Att, Gen1, Sk1, Gen2, Sk2, X1, Y1, X2, Y2>
......
......@@ -161,7 +161,7 @@ public class InstExpCsv
File file = new File(f, op.getOrDefault(AqlOption.csv_import_prefix) + en.toString() + "."
+ op.getOrDefault(AqlOption.csv_file_extension));
if (file.exists()) {
m.put(en.str, file.getAbsolutePath());
m.put(en.convert(), file.getAbsolutePath());
} else if (!(boolean) op.getOrDefault(AqlOption.import_missing_is_empty)) {
throw new RuntimeException("Missing file: " + file.getAbsolutePath()
+ ". \n\nPossible options to consider: " + AqlOption.import_missing_is_empty + " and "
......@@ -180,7 +180,7 @@ public class InstExpCsv
protected void joinedEn(Map<En, List<String[]>> rows, En en0,
Pair<List<Pair<String, String>>, List<Pair<String, String>>> s, Schema<Ty, En, Sym, Fk, Att> sch)
throws Exception {
String en = en0.str;
String en = en0.convert();
Map<String, String> inner;
if (s == null) {
inner = new HashMap<>();
......@@ -248,7 +248,7 @@ public class InstExpCsv
if (!fks0.containsKey(l0)) {
fks0.put(l0, new Ctx<>());
}
Gen g = toGen(sch.fks.get(fk).second, row[m.get(mediate.apply(fk.str))]);
Gen g = toGen(sch.fks.get(fk).second, row[m.get(mediate.apply(fk.convert()))]);
fks0.get(l0).put(fk, g);
}
......@@ -256,7 +256,7 @@ public class InstExpCsv
if (!atts0.containsKey(l0)) {
atts0.put(l0, new Ctx<>());
}
String zz = mediate.apply(att.str);
String zz = mediate.apply(att.convert());
if (!m.containsKey(zz)) {
throw new RuntimeException("No column " + att + " in file for " + en + " nor explicit mapping for "
+ att + " given. Tried " + zz + " and options are " + m.keySet());
......
......@@ -231,7 +231,7 @@ public class InstExpJdbc extends InstExpImport<Connection, String> {
ens0.get(en).add(g1); //store strings
for (Fk fk : sch.fksFrom(en)) {
Object rhs = rs.getObject(fk.str);
Object rhs = rs.getObject(fk.convert());
if (rhs == null) {
stmt.close();
rs.close();
......@@ -246,7 +246,7 @@ public class InstExpJdbc extends InstExpImport<Connection, String> {
fks0.get(g1).put(fk, g2);
}
for (Att att : sch.attsFrom(en)) {
Object rhs = rs.getObject(att.str);
Object rhs = rs.getObject(att.convert());
if (!atts0.map.containsKey(g1)) {
atts0.put(g1, new Ctx<>());
}
......
......@@ -146,7 +146,7 @@ extends InstExp<Ty,En,Sym,Fk,Att,Pair<Integer,En>, Pair<Integer, Att>,Pair<Integ
Ctx<Fk, Pair<Integer, En>> ctx0 = new Ctx<>();
for (Fk fk : schema.fksFrom(new En(en))) {
int size0 = ens.get(schema.fks.get(fk).second.str);
int size0 = ens.get(schema.fks.get(fk).second.convert());
Integer k = rand.nextInt(size0);
ctx0.put(fk, new Pair<>(k, schema.fks.get(fk).second));
}
......@@ -179,8 +179,7 @@ extends InstExp<Ty,En,Sym,Fk,Att,Pair<Integer,En>, Pair<Integer, Att>,Pair<Integ
};
SaturatedInstance<Ty, En, Sym, Fk, Att, Pair<Integer, En>, Pair<Integer, Att>, Pair<Integer, En>, Pair<Integer, Att>> x = new SaturatedInstance
<Ty, En, Sym, Fk, Att, Pair<Integer,En>, Pair<Integer, Att>, Pair<Integer,En>, Pair<Integer, Att>>
(alg, dp, false, true, false, new Ctx<>());
<Ty, En, Sym, Fk, Att, Pair<Integer,En>, Pair<Integer, Att>, Pair<Integer,En>, Pair<Integer, Att>> (alg, dp, false, true, false, new Ctx<>());
//x.validate();
x.checkSatisfaction();
return x;
......
......@@ -405,8 +405,12 @@ public final class InstExpRaw extends InstExp<Ty, En, Sym, Fk, Att, Gen, Sk, ID,
atts0.get(lhs.arg.gen).put(lhs.att, Term.Obj(rhs.obj, rhs.ty));
} else if (lhs.obj != null && rhs.att != null && rhs.arg.gen != null) {
atts0.get(rhs.arg.gen).put(rhs.att, Term.Obj(lhs.obj, lhs.ty));
} else if (rhs.sym != null && rhs.args.isEmpty() && lhs.att != null && lhs.arg.gen != null) {
atts0.get(lhs.arg.gen).put(lhs.att, Term.Sym(rhs.sym, Collections.emptyList()));
} else if (lhs.sym != null && lhs.args.isEmpty() && rhs.att != null && rhs.arg.gen != null) {
atts0.get(rhs.arg.gen).put(rhs.att, Term.Sym(lhs.sym, Collections.emptyList()));
} else {
throw new RuntimeException("import_as_theory not compatible with equation " + lhs + " = " + rhs
throw new RuntimeException("interpret_as_algebra not compatible with equation " + lhs + " = " + rhs
+ "; each equation must be of the form gen.fk=gen or gen.att=javaobject");
}
}
......
......@@ -2,13 +2,21 @@ package catdata.aql.exp;
import java.util.Collection;
import java.util.Collections;
import java.util.List;
import java.util.Map;
import catdata.Pair;
import catdata.Util;
import catdata.aql.AqlOptions;
import catdata.aql.AqlPivot;
import catdata.aql.Kind;
import catdata.aql.Mapping;
import catdata.aql.Schema;
import catdata.aql.exp.SchExp.SchExpLit;
import catdata.aql.exp.SchExp.SchExpPivot;
import catdata.aql.exp.SchExpRaw.Att;
import catdata.aql.exp.SchExpRaw.En;
import catdata.aql.exp.SchExpRaw.Fk;
//TODO aql move back to presentation / tables distinction?
public abstract class MapExp<Ty,En1,Sym,Fk1,Att1,En2,Fk2,Att2> extends Exp<Mapping<Ty,En1,Sym,Fk1,Att1,En2,Fk2,Att2>> {
......@@ -23,6 +31,89 @@ public abstract class MapExp<Ty,En1,Sym,Fk1,Att1,En2,Fk2,Att2> extends Exp<Mappi
//////////////////////////////////////////////////////////////////////