```
% Simple illustration of the use of multi-instance lazy evaluation:
%       a predicate to construct a line is called during
%       the search to obtain a linear equation.
% For realistic examples a proper regression predicate will be needed
%       (usually better to implement this in C, and call it from Prolog).
% To run do the following:
%       c. sat(1).
%       d. reduce.
```

```
/** <examples>
?- sat(1), reduce(A).
*/
```
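To try this in SWI-Prolog with the Aleph library available (e.g. the `aleph` pack), consult the program and then run steps c and d from the header. A minimal session might look like the following; the file name `mi.pl` is an assumption, so substitute whatever name the program is saved under:

```
?- [mi].         % loading the program also runs the directives below
?- sat(1).       % saturate the first positive example
?- reduce(A).    % search for a reduction; A is bound to the result
```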

```
:- use_module(library(aleph)).
:- if(current_predicate(use_rendering/1)).
:- use_rendering(prolog).
:- endif.
:- aleph.

% p(Bag,X,Y): the instance with independent value X and dependent
% value Y belongs to bag Bag
:- mode(1,p(+bag,+indep,+dep)).
:- mode(1,lin_regress1(+bag,+dep,+indep,#number,#number,#number)).

:- determination(p/3,lin_regress1/6).

% lin_regress1/6 is evaluated lazily during the search, on bindings
% collected from the examples
:- lazy_evaluate(lin_regress1/6).

% this predicate does not need substitutions from negative examples
:- positive_only(lin_regress1/6).

:- aleph_set(portray_literals,true).
aleph_portray(p(Bag,A,B)):-
    format("~q",[p(Bag,A,B)]).
aleph_portray(lin_regress1(B,Y,X,M,C,E)):-
    write(' exists '), format("~q",[X]),
    write(' in bag '), format("~q",[B]), write(' such that '),
    format("~q",[Y]), write(' is '),
    format("~q",[M]), write(' * '),
    format("~q",[X]), write(' + '),
    format("~q",[C]), write(' +/- '),
    format("~q",[E]).

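% With portray_literals set, a lin_regress1/6 literal in a clause is
% displayed by the second aleph_portray/1 clause above as, for example
% (illustrative values, not output from an actual run):
%
%   exists 1 in bag b1 such that 2 is 2.0 * 1 + 0.0 +/- 0.0
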
:-begin_bg.

% nlist/1 recognises a (possibly empty) list; used below to detect
% the binding lists passed in during lazy evaluation
nlist([]).
nlist([_|_]).

% definition for use during normal evaluation:
% check that the instance (X,Y) lies on the line Y = M*X + C
% within tolerance Error
lin_regress1(_,Y,X,M,C,Error):-
    number(X), number(Y),
    nonvar(M), nonvar(C), nonvar(Error), !,
    Y1 is M*X + C,
    Diff is (Y - Y1),
    (Diff > 0 -> E1 = Diff; E1 is -1*Diff),
    E1 =< Error.

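% For example, with the line y = 2x and a small tolerance, the
% instance (X,Y) = (2,4) is accepted (an illustrative query, not
% part of the original file):
%
%   ?- lin_regress1(b2, 4, 2, 2.0, 0.0, 0.001).
%   true.
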
% definition for use during lazy evaluation:
% B, Y and X arrive as lists of bindings collected by Aleph from the
% positive examples; fit a line through the closest instance per bag
lin_regress1(B,Y,X,M,C,Error):-
    nlist(B), nlist(X), nlist(Y),
    var(M), var(C), !,
    em_regress(B,Y,X,M,C,Error).

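% An illustrative direct call in the lazy format, built from the
% positive examples below (note the argument order: bag, dependent Y,
% independent X; the exact binding-list structure is produced by
% Aleph, and this clause only uses the first element of each list):
%
%   ?- lin_regress1([[b1,b1,b1,b2,b3,b4]],
%                   [[2,3,4,4,6,8]],
%                   [[1,1,1,2,3,4]], M, C, E).
%
% Up to the random restarts, this should settle on M = 2.0, C = 0.0,
% E = 0.0: the line through one instance of every positive bag.
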
% otherwise (bottom-clause construction): supply the degenerate
% zero-slope line through the single instance (X,Y)
lin_regress1(_,Y,X,0.0,Y,0.0):-
    X \= Y.

% Approximate EM algorithm for estimating the best-fitting line
% through one instance per bag; returns the best line found after
% 10 random restarts
em_regress([Bags|_],[YVals|_],[XVals|_],M,C,Error):-
    Inf is inf,
    em_regress(10,Bags,YVals,XVals,Inf,Inf,Inf,M,C,Error).

% em_regress(N,Bags,YVals,XVals,M0,C0,E0,M,C,E): run up to N random
% restarts, threading the best error found so far; stop early on a
% zero-error fit
em_regress(0,_,_,_,M,C,E,M,C,E):- !.
em_regress(N,Bags,YVals,XVals,_,_,E0,M,C,E):-
    random_vals([Mr,Cr]),
    em_regress(Bags,YVals,XVals,Mr,Cr,E0,M1,C1,E1),
    (E1 > 0.0 ->
        N1 is N - 1,
        em_regress(N1,Bags,YVals,XVals,M1,C1,E1,M,C,E);
        M = M1,
        C = C1,
        E = E1).

% one EM run: repeatedly pick the closest instance in each bag for the
% current line (E-step) and refit the line to those instances (M-step),
% while the total error keeps decreasing
em_regress(Bags,YVals,XVals,M0,C0,E0,M,C,E):-
    find_new_instances(Bags,YVals,XVals,M0,C0,0.0,YVals1,XVals1,E1),
    E1 < E0, !,
    (E1 > 0.0 ->
        l_regress1(YVals1,XVals1,M1,C1),
        em_regress(Bags,YVals,XVals,M1,C1,E1,M,C,E);
        M = M0,
        C = C0,
        E = E1).
em_regress(_,_,_,M,C,E,M,C,E).

% fit an exact line through the first two selected instances;
% a vertical line is approximated by a very large slope
l_regress1(YVals,XVals,M,C):-
    YVals = [Y1,Y2|_],
    XVals = [X1,X2|_],
    (X2 =:= X1 ->
        M is (Y2-Y1)*1e10
    ;
        M is (Y2-Y1)/(X2-X1)
    ),
    C is Y1 - M*X1.

% E-step: select the closest instance in each bag and return their
% Y and X values together with the total squared error
find_new_instances(Bags,YVals,XVals,M,C,E,YVals1,XVals1,E1):-
    get_closest_instances(Bags,YVals,XVals,M,C,E,[],E1,Instances),
    get_bag_instances(Instances,YVals1,XVals1).

% keep, for each bag, the instance with the smallest squared residual
% w.r.t. the line y = M0*x + C0, accumulating the total error over bags
get_closest_instances([],_,_,_,_,E,I,E,I).
get_closest_instances([Bag|Bags],[Y|YVals],[X|XVals],M0,C0,E0,I0,E,I):-
    Error is (Y - M0*X - C0)^2,
    (aleph_delete(Bag-[OldE,_],I0,Left) ->
        (Error < OldE ->
            I1 = [Bag-[Error,[Y,X]]|Left],
            E1 is E0 + Error - OldE;
            I1 = I0,
            E1 = E0);
        I1 = [Bag-[Error,[Y,X]]|I0],
        E1 is E0 + Error),
    get_closest_instances(Bags,YVals,XVals,M0,C0,E1,I1,E,I).

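% For example, with the line y = 2x, bag b1's instances (1,2), (1,3)
% and (1,4) have squared residuals 0, 1 and 4 respectively, so the
% instance (1,2) is the one kept for b1.
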
get_bag_instances([],[],[]).
get_bag_instances([_-[_,[Y,X]]|Instances],[Y|YVals],[X|XVals]):-
    get_bag_instances(Instances,YVals,XVals).

% fill a list with random initial values (using Aleph's aleph_random/1)
random_vals([]).
random_vals([A|B]):-
    aleph_random(A),
    random_vals(B).

:-end_bg.

:-begin_in_pos.
p(b1,1,2).
p(b1,1,3).
p(b1,1,4).
p(b2,2,4).
p(b3,3,6).
p(b4,4,8).
:-end_in_pos.

:-begin_in_neg.
p(b,1,0).
p(b,2,5).
p(b,3,9).
:-end_in_neg.
```
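For reference, every positive bag above contains an instance on the line y = 2x: bag b1 through p(b1,1,2), and bags b2, b3 and b4 exactly, while none of the negative instances (1,0), (2,5), (3,9) lies on it. A successful run should therefore be able to construct a clause along the lines of the following (illustrative; the fitted values may vary slightly because the regression is randomized):

```
p(A,B,C) :-
    lin_regress1(A,C,B,2.0,0.0,0.0).
```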