File: IntegrationDB.cpp

package info (click to toggle)
latte-int 1.7.6%2Bds-3
  • links: PTS, VCS
  • area: main
  • in suites: forky, sid, trixie
  • size: 38,260 kB
  • sloc: cpp: 32,231; sh: 4,413; makefile: 811; perl: 300
file content (1468 lines) | stat: -rw-r--r-- 55,283 bytes parent folder | download
1
2
3
4
5
6
7
8
9
10
11
12
13
14
15
16
17
18
19
20
21
22
23
24
25
26
27
28
29
30
31
32
33
34
35
36
37
38
39
40
41
42
43
44
45
46
47
48
49
50
51
52
53
54
55
56
57
58
59
60
61
62
63
64
65
66
67
68
69
70
71
72
73
74
75
76
77
78
79
80
81
82
83
84
85
86
87
88
89
90
91
92
93
94
95
96
97
98
99
100
101
102
103
104
105
106
107
108
109
110
111
112
113
114
115
116
117
118
119
120
121
122
123
124
125
126
127
128
129
130
131
132
133
134
135
136
137
138
139
140
141
142
143
144
145
146
147
148
149
150
151
152
153
154
155
156
157
158
159
160
161
162
163
164
165
166
167
168
169
170
171
172
173
174
175
176
177
178
179
180
181
182
183
184
185
186
187
188
189
190
191
192
193
194
195
196
197
198
199
200
201
202
203
204
205
206
207
208
209
210
211
212
213
214
215
216
217
218
219
220
221
222
223
224
225
226
227
228
229
230
231
232
233
234
235
236
237
238
239
240
241
242
243
244
245
246
247
248
249
250
251
252
253
254
255
256
257
258
259
260
261
262
263
264
265
266
267
268
269
270
271
272
273
274
275
276
277
278
279
280
281
282
283
284
285
286
287
288
289
290
291
292
293
294
295
296
297
298
299
300
301
302
303
304
305
306
307
308
309
310
311
312
313
314
315
316
317
318
319
320
321
322
323
324
325
326
327
328
329
330
331
332
333
334
335
336
337
338
339
340
341
342
343
344
345
346
347
348
349
350
351
352
353
354
355
356
357
358
359
360
361
362
363
364
365
366
367
368
369
370
371
372
373
374
375
376
377
378
379
380
381
382
383
384
385
386
387
388
389
390
391
392
393
394
395
396
397
398
399
400
401
402
403
404
405
406
407
408
409
410
411
412
413
414
415
416
417
418
419
420
421
422
423
424
425
426
427
428
429
430
431
432
433
434
435
436
437
438
439
440
441
442
443
444
445
446
447
448
449
450
451
452
453
454
455
456
457
458
459
460
461
462
463
464
465
466
467
468
469
470
471
472
473
474
475
476
477
478
479
480
481
482
483
484
485
486
487
488
489
490
491
492
493
494
495
496
497
498
499
500
501
502
503
504
505
506
507
508
509
510
511
512
513
514
515
516
517
518
519
520
521
522
523
524
525
526
527
528
529
530
531
532
533
534
535
536
537
538
539
540
541
542
543
544
545
546
547
548
549
550
551
552
553
554
555
556
557
558
559
560
561
562
563
564
565
566
567
568
569
570
571
572
573
574
575
576
577
578
579
580
581
582
583
584
585
586
587
588
589
590
591
592
593
594
595
596
597
598
599
600
601
602
603
604
605
606
607
608
609
610
611
612
613
614
615
616
617
618
619
620
621
622
623
624
625
626
627
628
629
630
631
632
633
634
635
636
637
638
639
640
641
642
643
644
645
646
647
648
649
650
651
652
653
654
655
656
657
658
659
660
661
662
663
664
665
666
667
668
669
670
671
672
673
674
675
676
677
678
679
680
681
682
683
684
685
686
687
688
689
690
691
692
693
694
695
696
697
698
699
700
701
702
703
704
705
706
707
708
709
710
711
712
713
714
715
716
717
718
719
720
721
722
723
724
725
726
727
728
729
730
731
732
733
734
735
736
737
738
739
740
741
742
743
744
745
746
747
748
749
750
751
752
753
754
755
756
757
758
759
760
761
762
763
764
765
766
767
768
769
770
771
772
773
774
775
776
777
778
779
780
781
782
783
784
785
786
787
788
789
790
791
792
793
794
795
796
797
798
799
800
801
802
803
804
805
806
807
808
809
810
811
812
813
814
815
816
817
818
819
820
821
822
823
824
825
826
827
828
829
830
831
832
833
834
835
836
837
838
839
840
841
842
843
844
845
846
847
848
849
850
851
852
853
854
855
856
857
858
859
860
861
862
863
864
865
866
867
868
869
870
871
872
873
874
875
876
877
878
879
880
881
882
883
884
885
886
887
888
889
890
891
892
893
894
895
896
897
898
899
900
901
902
903
904
905
906
907
908
909
910
911
912
913
914
915
916
917
918
919
920
921
922
923
924
925
926
927
928
929
930
931
932
933
934
935
936
937
938
939
940
941
942
943
944
945
946
947
948
949
950
951
952
953
954
955
956
957
958
959
960
961
962
963
964
965
966
967
968
969
970
971
972
973
974
975
976
977
978
979
980
981
982
983
984
985
986
987
988
989
990
991
992
993
994
995
996
997
998
999
1000
1001
1002
1003
1004
1005
1006
1007
1008
1009
1010
1011
1012
1013
1014
1015
1016
1017
1018
1019
1020
1021
1022
1023
1024
1025
1026
1027
1028
1029
1030
1031
1032
1033
1034
1035
1036
1037
1038
1039
1040
1041
1042
1043
1044
1045
1046
1047
1048
1049
1050
1051
1052
1053
1054
1055
1056
1057
1058
1059
1060
1061
1062
1063
1064
1065
1066
1067
1068
1069
1070
1071
1072
1073
1074
1075
1076
1077
1078
1079
1080
1081
1082
1083
1084
1085
1086
1087
1088
1089
1090
1091
1092
1093
1094
1095
1096
1097
1098
1099
1100
1101
1102
1103
1104
1105
1106
1107
1108
1109
1110
1111
1112
1113
1114
1115
1116
1117
1118
1119
1120
1121
1122
1123
1124
1125
1126
1127
1128
1129
1130
1131
1132
1133
1134
1135
1136
1137
1138
1139
1140
1141
1142
1143
1144
1145
1146
1147
1148
1149
1150
1151
1152
1153
1154
1155
1156
1157
1158
1159
1160
1161
1162
1163
1164
1165
1166
1167
1168
1169
1170
1171
1172
1173
1174
1175
1176
1177
1178
1179
1180
1181
1182
1183
1184
1185
1186
1187
1188
1189
1190
1191
1192
1193
1194
1195
1196
1197
1198
1199
1200
1201
1202
1203
1204
1205
1206
1207
1208
1209
1210
1211
1212
1213
1214
1215
1216
1217
1218
1219
1220
1221
1222
1223
1224
1225
1226
1227
1228
1229
1230
1231
1232
1233
1234
1235
1236
1237
1238
1239
1240
1241
1242
1243
1244
1245
1246
1247
1248
1249
1250
1251
1252
1253
1254
1255
1256
1257
1258
1259
1260
1261
1262
1263
1264
1265
1266
1267
1268
1269
1270
1271
1272
1273
1274
1275
1276
1277
1278
1279
1280
1281
1282
1283
1284
1285
1286
1287
1288
1289
1290
1291
1292
1293
1294
1295
1296
1297
1298
1299
1300
1301
1302
1303
1304
1305
1306
1307
1308
1309
1310
1311
1312
1313
1314
1315
1316
1317
1318
1319
1320
1321
1322
1323
1324
1325
1326
1327
1328
1329
1330
1331
1332
1333
1334
1335
1336
1337
1338
1339
1340
1341
1342
1343
1344
1345
1346
1347
1348
1349
1350
1351
1352
1353
1354
1355
1356
1357
1358
1359
1360
1361
1362
1363
1364
1365
1366
1367
1368
1369
1370
1371
1372
1373
1374
1375
1376
1377
1378
1379
1380
1381
1382
1383
1384
1385
1386
1387
1388
1389
1390
1391
1392
1393
1394
1395
1396
1397
1398
1399
1400
1401
1402
1403
1404
1405
1406
1407
1408
1409
1410
1411
1412
1413
1414
1415
1416
1417
1418
1419
1420
1421
1422
1423
1424
1425
1426
1427
1428
1429
1430
1431
1432
1433
1434
1435
1436
1437
1438
1439
1440
1441
1442
1443
1444
1445
1446
1447
1448
1449
1450
1451
1452
1453
1454
1455
1456
1457
1458
1459
1460
1461
1462
1463
1464
1465
1466
1467
1468

#include "IntegrationDB.h"
#include <sstream>
#include <cstdlib> //for atoi
#include <cassert>
#include <iostream>
//Default constructor: intentionally empty, no members are initialized here.
IntegrationDB::IntegrationDB()
{
}


/**
 * Decides whether the test class "dim-vertexCount-degree-dual" is expected to
 * finish within the time limit, based on timing data already in the database.
 *
 * @parm alg: which algorithm's time column to look at (Lawrence or triangulation).
 * @parm dim: dimension of the polytope.
 * @parm vertexCount: vertex count of the polytope (or of its dual if useDual is true).
 * @parm degree: degree of the polynomial.
 * @parm useDual: true if we are testing the dual polytopes.
 * @parm secondsLimit: time limit for how long each polytope should take.
 * @return bool: true if the test case "dim-vertexCount-degree-dual" should finish within secondsLimit.
 */
bool IntegrationDB::canTestFinish(AlgorithemUsed alg, int dim, int vertexCount, int degree, bool useDual, int secondsLimit)
{
	stringstream sql;
	vector<vector<string> > result;
	//if we have data for this test case, great, find the average.

	//Step 1: was any test in this class manually marked "skip" (time = -2)?
	if ( useDual == true)
	{
		sql << "select count(*)"
			<< " from polynomial as p, integrate as i, polytope as dualP, polytope as orgP "
			<< " where i.polytopeID = dualP.rowid and i.polynomialID = p.rowid " //join dual-integrate-polynomial
			<< " and dualP.dual = orgP.rowid and dualP.dual is not null" //join the dual and org. polytope.
			<< " and orgP.dim = " << dim
			<< " and orgP.vertexCount = " << vertexCount
			<< " and p.degree <= " << degree
			<< " and " << (alg == Lawrence ? " i.timeLawrence " : " i.timeTriangulate ") << " = -2";//-2 = I manually said skip this test case

	}
	else
	{
		sql << "select count(*)"
			<< " from polynomial as p, integrate as i, polytope as t "
			<< " where i.polytopeID = t.rowid and i.polynomialID = p.rowid "
			<< " and t.dim = " << dim
			<< " and t.vertexCount = " << vertexCount
			<< " and p.degree <= " << degree
			<< " and " << (alg == Lawrence ? " i.timeLawrence " : " i.timeTriangulate ") << " = -2";
	}//else not using the dual.

	if ( queryAsInteger(sql.str().c_str()) )
		return false; //skip this test case.

	//Step 2: if finished tests exist for this exact degree, decide by their average time.
	sql.str("");
	if ( useDual == true)
	{
		sql << "select avg(" << (alg == Lawrence ? " i.timeLawrence " : " i.timeTriangulate ") << "), count(*)"
			<< " from polynomial as p, integrate as i, polytope as dualP, polytope as orgP "
			<< " where i.polytopeID = dualP.rowid and i.polynomialID = p.rowid " //join dual-integrate-polynomial
			<< " and dualP.dual = orgP.rowid and dualP.dual is not null" //join the dual and org. polytope.
			<< " and orgP.dim = " << dim
			<< " and orgP.vertexCount = " << vertexCount
			<< " and p.degree = " << degree
			<< " and " << (alg == Lawrence ? " i.timeLawrence " : " i.timeTriangulate ") << " >= 0"; //>= 0 means the test finished.

	}
	else
	{
		sql << "select avg(" << (alg == Lawrence ? " i.timeLawrence " : " i.timeTriangulate ") << "), count(*)"
			<< " from polynomial as p, integrate as i, polytope as t "
			<< " where i.polytopeID = t.rowid and i.polynomialID = p.rowid "
			<< " and t.dim = " << dim
			<< " and t.vertexCount = " << vertexCount
			<< " and p.degree = " << degree
			<< " and " << (alg == Lawrence ? " i.timeLawrence " : " i.timeTriangulate ") << " >= 0";
	}//else not using the dual.

	result = query(sql.str().c_str());
	if ( result[0][0] != "NULL" && atoi(result[0][1].c_str()) >= 1 )
		return (atof(result[0][0].c_str()) < secondsLimit); //if there are at least 1 test case

	//darn, if we are here, then there were no test cases of this class.
	//now look at the previous tests and see if they did not finish.
	if (vertexCount <= dim + 3)
		return true; //always do the basic cases!

	//Step 3: find the max time for all polynomial degrees less than or equal to
	//the current setting, for this fixed dim and vertexCount.
	sql.str("");
	if ( useDual == true)
	{
		sql << "select max(" << (alg == Lawrence ? " i.timeLawrence " : " i.timeTriangulate ") << ")"
			<< " from polynomial as p, integrate as i, polytope as dualP, polytope as orgP "
			<< " where i.polytopeID = dualP.rowid and i.polynomialID = p.rowid " //join dual-integrate-polynomial
			<< " and dualP.dual = orgP.rowid and dualP.dual is not null" //join the dual and org. polytope.
			<< " and orgP.dim = " << dim
			<< " and orgP.vertexCount = " << vertexCount //the difference here with the above is the
			<< " and p.degree <= " << degree              // <=  on degree.
			<< " and " << (alg == Lawrence ? " i.timeLawrence " : " i.timeTriangulate ") << " >= 0";

	}
	else
	{
		sql << "select max(" << (alg == Lawrence ? " i.timeLawrence " : " i.timeTriangulate ") << ")"
			<< " from polynomial as p, integrate as i, polytope as t "
			<< " where i.polytopeID = t.rowid and i.polynomialID = p.rowid "
			<< " and t.dim = " << dim
			<< " and t.vertexCount = " << vertexCount
			<< " and p.degree <= " << degree
			<< " and " << (alg == Lawrence ? " i.timeLawrence " : " i.timeTriangulate ") << " >= 0";
	}//else not using the dual.
	result = query(sql.str().c_str());
	if ( result[0][0] == "NULL")
		return true; //we have no data to say one way or the other.


	return (atof(result[0][0].c_str()) < secondsLimit);

}//canTestFinish


/**
 * Decides whether integrating one specific polytope file at the given degree
 * is expected to finish within the time limit, based on timing data in the db.
 *
 * @parm alg: which algorithm's time column to look at (Lawrence or triangulation).
 * @parm polymakeFile: a .polymake file, not a .dual.polymake file.
 * @parm degree: of the polynomial to test.
 * @parm useDual: non-zero if we are testing the dual polytope.
 *        NOTE(review): declared int but used as a bool throughout — confirm callers.
 * @parm secondsLimit: time limit in seconds.
 * @return bool. true = we think we can finish this file on the current degree.
 */
bool IntegrationDB::canSpecficFileFinish(AlgorithemUsed alg, const char *polymakeFile, int degree, int useDual, int secondsLimit)
{
	stringstream sql;
	vector<vector<string> > result;

	//first check that I did not manually want to skip this test case.
	if ( useDual == true)
	{
		sql << "select count(*)"
			<< " from polynomial as p, integrate as i, polytope as dualP, polytope as orgP "
			<< " where i.polytopeID = dualP.rowid and i.polynomialID = p.rowid " //join dual-integrate-polynomial
			<< " and dualP.dual = orgP.rowid and dualP.dual is not null" //join the dual and org. polytope.
			<< " and orgP.polymakeFilePath = '" << polymakeFile << "'"
			<< " and p.degree <= " << degree
			<< " and " << (alg == Lawrence ? " i.timeLawrence " : " i.timeTriangulate ") << " = -2"; // -2 means "skip this"
	}
	else
	{
		sql << "select count(*)"
			<< " from polynomial as p, integrate as i, polytope as t "
			<< " where i.polytopeID = t.rowid and i.polynomialID = p.rowid "
			<< " and t.polymakeFilePath = '" << polymakeFile << "'"
			<< " and p.degree <= " << degree
			<< " and " << (alg == Lawrence ? " i.timeLawrence " : " i.timeTriangulate ") << " = -2";
	}//else not using the dual.

	//cout << "check -2: " << sql.str().c_str() << "\n" << endl;
	result = query(sql.str().c_str());
	//cout << "check-2 ans: " << result[0][0].c_str() << '\n' << endl;
	if ( atoi(result[0][0].c_str()) )
		return false; //skip this test case.

	sql.str("");
	//find the average time on the exact requested degree (finished tests only).
	if ( useDual == true)
	{
		sql << "select avg(" << (alg == Lawrence ? " i.timeLawrence " : " i.timeTriangulate ") << ")"
			<< " from polynomial as p, integrate as i, polytope as dualP, polytope as orgP "
			<< " where i.polytopeID = dualP.rowid and i.polynomialID = p.rowid " //join dual-integrate-polynomial
			<< " and dualP.dual = orgP.rowid and dualP.dual is not null" //join the dual and org. polytope.
			<< " and orgP.polymakeFilePath = '" << polymakeFile << "'"
			<< " and p.degree = " << degree // exact degree here (vs. <= below).
			<< " and " << (alg == Lawrence ? " i.timeLawrence " : " i.timeTriangulate ") << " >= 0"; //>= 0 means the test finished.
	}
	else
	{
		sql << "select avg(" << (alg == Lawrence ? " i.timeLawrence " : " i.timeTriangulate ") << ")"
			<< " from polynomial as p, integrate as i, polytope as t "
			<< " where i.polytopeID = t.rowid and i.polynomialID = p.rowid "
			<< " and t.polymakeFilePath = '" << polymakeFile << "'"
			<< " and p.degree = " << degree
			<< " and " << (alg == Lawrence ? " i.timeLawrence " : " i.timeTriangulate ") << " >= 0";
	}//else not using the dual.

	//cout << "get avg: " << sql.str().c_str() << '\n' << endl;
	result = query(sql.str().c_str());
	//cout << "get avg: ans " << result[0][0].c_str() << endl;
	/*
	if ( !strcmp(polymakeFile, "./Various/3simp3simp.polymake") && degree == 50)
	{
		cout << "BEFORE file: " << polymakeFile
			 << "\ndegree: " << degree
			 << "\ndual: " << useDual
			 << "\ntime: " << (result[0][0].c_str())
			 << "\n" << (atof(result[0][0].c_str()) < secondsLimit) << endl;

		if ( result[0][0] != "NULL")
			exit(1);
	}*/


	if ( result[0][0] != "NULL")
		return (atof(result[0][0].c_str()) < secondsLimit);

	//if we got here, we never saw this file with this degree before.
	//now look at the previous degrees and see what was the last degree it finished at.
	if (degree <= 3)
		return true; //always do the basic cases!

	//find the max time over any degree already done (degree <= current).
	sql.str("");
	if ( useDual == true)
	{
		sql << "select max(" << (alg == Lawrence ? " i.timeLawrence " : " i.timeTriangulate ") << ")"
			<< " from polynomial as p, integrate as i, polytope as dualP, polytope as orgP "
			<< " where i.polytopeID = dualP.rowid and i.polynomialID = p.rowid " //join dual-integrate-polynomial
			<< " and dualP.dual = orgP.rowid and dualP.dual is not null" //join the dual and org. polytope.
			<< " and orgP.polymakeFilePath = '" << polymakeFile << "'"
			<< " and p.degree <= " << degree // <=  on degree.
			<< " and " << (alg == Lawrence ? " i.timeLawrence " : " i.timeTriangulate ") << " >= 0";
	}
	else
	{
		sql << "select max(" << (alg == Lawrence ? " i.timeLawrence " : " i.timeTriangulate ") << ")"
			<< " from polynomial as p, integrate as i, polytope as t "
			<< " where i.polytopeID = t.rowid and i.polynomialID = p.rowid "
			<< " and t.polymakeFilePath = '" << polymakeFile << "'"
			<< " and p.degree <= " << degree
			<< " and " << (alg == Lawrence ? " i.timeLawrence " : " i.timeTriangulate ") << " >= 0";
	}//else not using the dual.


	//cout << "avg anything: " << sql.str().c_str() << '\n' << endl;
	result = query(sql.str().c_str());

	/*
	if ( !strcmp(polymakeFile, "./Various/3simp3simp.polymake") && degree == 50)
	{
		cout << "AFTER file: " << polymakeFile
			 << "\ndegree: " << degree
			 << "\ndual: " << useDual
			 << "\ntime: " << (result[0][0].c_str())
			 << "\n"<< (atof(result[0][0].c_str()) < secondsLimit) << endl;
		exit(1);
	}*/
	if ( result[0][0] == "NULL")
		return true; //we have no data to say one way or the other.

	return (atof(result[0][0].c_str()) < secondsLimit);
}//canSpecficTestFinish

void IntegrationDB::deletePolynomial(int id)
{
	stringstream sql;
	sql << "delete from polynomial where rowid = " << id;
	query(sql.str().c_str());
}
/**
 * @parm p: polynomial filename to search by
 * @return int: rowid of matching filename or 0.
 * We assume all the rowid's are positive ( you never manually insert a non-positive!)
 */
int IntegrationDB::doesPolynomialExist(const char * p)
{
	stringstream sql;
	sql << "select rowid from polynomial where filePath = '" << p << "' limit 1";
	vector<vector<string> > result = query(sql.str().c_str());

	if (result.size())
	{
		return atoi(result[0][0].c_str());
	}
	return 0;
}//doesPolynomialExist

/**
 * Searches for polytope rowid by polymake file path.
 */
int IntegrationDB::doesPolytopeExist(const char *polymake)
{
	stringstream sql;
	sql << "select rowid from polytope where polymakeFilePath = '" << polymake << "'";

	vector<vector<string> > result = query(sql.str().c_str());

	if (result.size())
	{
		return atoi(result[0][0].c_str());
	}
	return 0;
}//doesPolytopeExist

/**
 * Returns all the distinct non-dual polymake file paths,
 * ordered by dimension and then vertex count.
 */
vector<vector<string> > IntegrationDB::getAllPolymakeFiles()
{
	const char *listSql =
		"select distinct polymakeFilePath from polytope where dual is null order by dim, vertexCount asc";
	return query(listSql);
}//getAllPolymakeFiles

/*
 * Returns true if this test class (dim, vertexCount, degree, dual flag)
 * contains a "-2" as the time value for a test. A time of -2 marks a case
 * that was manually limited/skipped.
 */
bool IntegrationDB::getLimit(AlgorithemUsed alg, int dim, int vertexCount, int degree, bool useDual)
{
	stringstream sql;
	string strAlg;

	if (alg == Lawrence)
		strAlg = "timeLawrence";
	else
		strAlg = "timeTriangulate";


	if (useDual == true)
	{
		sql << "select count(i." << strAlg << ")"
			<< " from integrate as i"
			<< " join polytope as dualP on dualP.rowid = i.polytopeID"
			<< " join polytope as orgP on orgP.rowid = dualP.dual"
			<< " join polynomial as p on p.rowid = i.polynomialID"
			<< " where dualP.dual is not null " //and with orgp
			<< " and orgP.dim = " << dim
			<< " and orgP.vertexCount = " << vertexCount
			<< " and p.degree = " << degree
			<< " and i." << strAlg << " = -2 ";
	}//if dual
	else
	{
		//BUG FIX: this branch used avg(i.<col>) instead of count(...). The avg
		//of rows whose time is -2 is -2 (and NULL when no rows match), so
		//queryAsInteger(...) > 0 was never true and the non-dual case always
		//returned false. count(...) matches the dual branch above and
		//getLimitByFile().
		sql << "select count(i." << strAlg << ") "
			<< " from integrate as i"
			<< " join polynomial as p on p.rowid = i.polynomialID"
			<< " join polytope as t on t.rowid = i.polytopeID"
			<< " where t.dual is null"
			<< " and t.dim = " << dim
			<< " and t.vertexCount = " << vertexCount
			<< " and p.degree = " << degree
			<< " and i." << strAlg << " = -2 ";
	}//regular

	return (queryAsInteger(sql.str().c_str()) > 0);
}

/*
 * Returns true if this test case (polymake file, degree, dual flag) has a -2
 * recorded as a time for a test result (i.e. it was manually skipped).
 */
bool IntegrationDB::getLimitByFile(AlgorithemUsed alg, const string &polymakeFile, int degree, bool useDual)
{
	//pick the time column for the requested algorithm.
	string strAlg;
	if (alg == Lawrence)
		strAlg = "timeLawrence";
	else
		strAlg = "timeTriangulate";

	stringstream countSql;
	if (!useDual)
	{
		countSql << "select count(i." << strAlg << ") "
			<< " from polynomial as p, polytope as t, integrate as i "
			<< " where i.polynomialID = p.rowid and i.polytopeID = t.rowid " //join with p, i, dualP
			<< " and t.dual is null"
			<< " and t.polymakeFilePath = '" << polymakeFile << "'"
			<< " and p.degree = " << degree
			<< " and i." << strAlg << " = -2 ";
	}//regular polytope
	else
	{
		countSql << "select count(i." << strAlg << ") "
			<< " from polynomial as p, polytope as dualP, polytope as orgP, integrate as i "
			<< " where i.polynomialID = p.rowid and i.polytopeID = dualP.rowid " //join with p, i, dualP
			<< " and dualP.dual is not null and dualP.dual = orgP.rowid" //and with orgp
			<< " and orgP.polymakeFilePath = '" << polymakeFile << "'"
			<< " and p.degree = " << degree
			<< " and i." << strAlg << " = -2 ";
	}//dual polytope

	return 0 < queryAsInteger(countSql.str().c_str());
}


/**
 * Returns the number of polynomials in the polynomials table with this dim and degree
 */
int IntegrationDB::getNumberPolynomials(int dim, int degree)
{
	stringstream sql;
	sql << "select count(*) from polynomial where dim = " << dim << " and degree = " << degree;
	return queryAsInteger(sql.str().c_str());
}//getNumberPolynomials

/**
 * Return the number of polytopes with set dim and vertexCount and dual value.
 *
 * When the column for dual is null, then the polytope is not a dual.
 * When the column for dual is NOT null, then the polytope is a dual
 * (dualP.dual holds the rowid of the original polytope).
 */
int IntegrationDB::getNumberPolytopes(int dim, int vertexCount, bool useDuals)
{
	stringstream sql;
	if (useDuals == false)
		sql << "select count(*) from polytope where dim = " << dim
			<< " and vertexCount = " << vertexCount
			<< " and dual is  null";
	else
		sql << "select count(*) from polytope as orgP, polytope as dualP"
			<< " where orgP.dim = " << dim
			<< "  and orgP.rowid = dualP.dual"
			<< "  and orgP.vertexCount = " << vertexCount;

	//FIX: removed a leftover debug print of the sql statement to stdout.
	return queryAsInteger(sql.str().c_str());
}//getNumberPolytopes

/**
 * Returns the number of integration tests that have this dim, vertex count, and polynomial degree (and is or isn't a dual)
 */
int IntegrationDB::getNumberIntegrationTest(int dim, int vertexCount, int degree, bool useDuals)
{
	stringstream sql; //we are going to join the tables.
	if ( useDuals == false)
	{
		sql << "select count(*) from polynomial as p, polytope as t, integrate as i "
			<< "where i.polynomialID = p.rowid and i.polytopeID = t.rowid " //join
			<< " and t.dim = " << dim << " and t.vertexCount = " << vertexCount
			<< " and p.degree = " << degree
			<< " and t.dual is null ";
	}
	else
	{
		sql << "select count(*) from polynomial as p, polytope as orgP, polytope as dualP, integrate as i "
				<< "where i.polynomialID = p.rowid and i.polytopeID = dualP.rowid " //join
				<< " and dualP.dual = orgP.rowid "
				<< " and orgP.dim = " << dim << " and orgP.vertexCount = " << vertexCount
				<< " and p.degree = " << degree
				<< " and dualP.dual is not null ";
	}
	return queryAsInteger(sql.str().c_str());
}//getNumberIntegrationTest


int IntegrationDB::getNumberIntegrationTest(const string &polymakeFile, int degree, bool useDual)
{
	stringstream sql; //we are going to join the tables.
	if ( useDual == false)
	{
		sql << "select count(*) from polynomial as p, polytope as t, integrate as i "
			<< "where i.polynomialID = p.rowid and i.polytopeID = t.rowid " //join
			<< " and t.polymakeFilePath = '" << polymakeFile << "'"
			<< " and p.degree = " << degree
			<< " and t.dual is null ";
	}
	else
	{
		sql << "select count(*) from polynomial as p, polytope as orgP, polytope as dualP, integrate as i "
				<< "where i.polynomialID = p.rowid and i.polytopeID = dualP.rowid " //join
				<< " and dualP.dual = orgP.rowid "
				<< " and orgP.polymakeFilePath = '" << polymakeFile << "'"
				<< " and p.degree = " << degree
				<< " and dualP.dual is not null ";
	}
	return queryAsInteger(sql.str().c_str());
}//getNumberIntegrationTest

/**
 * @polytopeID: rowid of a polytope.
 * @degree: of a polynomial
 * @return: number of integration tests in the integrate table with this polytope and a polynomial of this degree.
 */
int IntegrationDB::getNumberIntegrationTest(int polytopeID, int degree)
{
	stringstream sql;
	sql << "select count(*) from integrate as i, polytope as t, polynomial as p"
		<< " where i.polytopeID = t.rowid and i.polynomialID = p.rowid " //join
		<< " and t.rowid = " << polytopeID
		<< " and p.degree = " << degree;

	//FIX: removed a leftover debug print of the sql statement to stdout.
	return queryAsInteger(sql.str().c_str());
}//getNumberIntegrationTest


/**
 * Gets all the integrate rows that have a set dim, vertex count, degree, and dual values.
 *
 * Each returned row contains, in order:
 *   polynomial filePath, polytope latteFilePath, timeLawrence,
 *   timeTriangulate, integral, and the integrate-table rowid.
 * At most `limit` rows are returned.
 */
vector<vector<string> > IntegrationDB::getRowsToIntegrate(int dim, int vertex, int degree, bool useDual, int limit)
{
	stringstream sql;
	if (useDual == false)
	{
		sql << "select p.filePath, t.latteFilePath, i.timeLawrence, i.timeTriangulate, i.integral, i.rowid"
			<< " from polynomial as p, polytope as t, integrate as i "
			<< " where p.rowid = i.polynomialID and t.rowid = i.polytopeID "
			<< " and p.degree = " << degree
			<< " and t.dim = " << dim
			<< " and t.vertexCount =" << vertex
			<< " and t.dual is null"
			<< " order by t.latteFilePath, p.degree"
			<< " limit " << limit;
	}
	else
	{
		//for duals, the vertexCount filter is applied to the original polytope.
		sql << "select p.filePath, t.latteFilePath, i.timeLawrence, i.timeTriangulate, i.integral, i.rowid"
			<< " from polynomial as p, polytope as t, polytope as orgP, integrate as i "
			<< " where p.rowid = i.polynomialID and t.rowid = i.polytopeID "
			<< " and orgP.rowid = t.dual"
			<< " and p.degree = " << degree
			<< " and t.dim = " << dim
			<< " and orgP.vertexCount =" << vertex
			<< " and t.dual is not null"
			<< " order by t.latteFilePath, p.degree"
			<< " limit " << limit;
	}
	return query(sql.str().c_str());
}//getRowsToIntegrate

/**
 * Given an org. polymake file, find all the tests (or its dual's tests) with a set degree.
 *
 * Each returned row contains, in order:
 *   polynomial filePath, polytope latteFilePath, timeLawrence,
 *   timeTriangulate, integral, and the integrate-table rowid.
 * At most `limit` rows are returned.
 */
vector<vector<string> > IntegrationDB::getRowsToIntegrateGivenSpecficFile(char *polymakeFile, int degree, bool useDual, int limit)
{
	stringstream sql;
	if (useDual == false)
	{
		sql << "select p.filePath, t.latteFilePath, i.timeLawrence, i.timeTriangulate, i.integral, i.rowid"
			<< " from polynomial as p, polytope as t, integrate as i "
			<< " where p.rowid = i.polynomialID and t.rowid = i.polytopeID "
			<< " and p.degree = " << degree
			<< " and t.dual is null"
			<< " and t.polymakeFilePath = '" << polymakeFile << "'"
			<< " limit " << limit;
	}
	else
	{
		//the file path identifies the original polytope; results come from its dual.
		sql << "select p.filePath, dualP.latteFilePath, i.timeLawrence, i.timeTriangulate, i.integral, i.rowid"
			<< " from polynomial as p, polytope as dualP, polytope as orgP, integrate as i "
			<< " where p.rowid = i.polynomialID and dualP.rowid = i.polytopeID "
			<< " and orgP.rowid = dualP.dual"
			<< " and p.degree = " << degree
			<< " and dualP.dual is not null"
			<< " and orgP.polymakeFilePath = '" << polymakeFile << "'"
			<< " limit " << limit;
	}
	//cout << "getRowsToIntegrateGivenSpecficFile:: " << sql.str().c_str() << endl;
	return query(sql.str().c_str());

}//getRowsToIntegrateGivenSpecficFile

/**
 * Given a set dim, will find what polynomial degrees and vertex counts exist in the db
 * and give the results/stats in a 2d matrix.
 *
 * Assumes the db only contains a few different polynomial degrees and vertex counts.
 * (That is, we didn't blindly insert non-dual polynomials of any degree, only of degree 2,3,4,5,15,20,30,...etc.
 * If we did, then we would get a new column in the matrix for each different degree!!!)
 *
 * @parm dim: dimension of polytope.
 * @parm useDual: true if we want to use the dual polytopes.
 * @return: a 2d matrix of polytope test stats.
 * answer[i][k] contains information on polytope vertex-count case i and
 *   polynomial degree case k.
 */
vector<vector<ValuationDBStatistics> >  IntegrationDB::getStatisticsByDim(int dim, bool useDual)
{
	vector<vector<ValuationDBStatistics> > ans;
	vector<vector<string> > polynomialDegrees;
	vector<vector<string> > vertexCounts;
	stringstream sql;

	//distinct polynomial degrees present for this dimension.
	sql << "select distinct p.degree from polynomial as p where p.dim = " << dim;
	polynomialDegrees = query(sql.str().c_str());


	//distinct vertex counts of the non-dual polytopes of this dimension.
	sql.str("");
	sql << "select distinct t.vertexCount from polytope as t "
		<< " where t.dual is null"
		<< " and t.dim = " << dim;
	vertexCounts = query(sql.str().c_str());

	ans.resize(vertexCounts.size()); //make room for each row.
	//now loop over every vertexCount and polynomial degree and get
	//the statistics for the class dim-vertexCount-degree-dual
	cerr << "table is " << vertexCounts.size() << " by " << polynomialDegrees.size();
	for(int row = 0; row < (int)vertexCounts.size(); ++row)
	{

		for(int col = 0; col < (int)polynomialDegrees.size(); ++col)
		{
			cerr << "row, col=" << row << ", " << col << endl;
			ans[row].push_back(getStatisticsByDimVertexDegree(dim, atoi(vertexCounts[row][0].c_str()), atoi(polynomialDegrees[col][0].c_str()), useDual));
		}//for
	}//for row. vertexCounts

	return ans;
}//getResultsByDim

//given an array of double's, computes the average, min, max, standard
//deviation, and totals. `data` is the only input parameter, the rest are
//output parameters.
//Time encoding in data: a value >= 0 is a finished test; negative values mark
//unfinished tests, and a value <= -2 marks a manually limited/skipped test.
//min/max/avg/sd are computed over the finished (>= 0) entries only; when no
//entry finished, min = max = avg = sd = 0.
void IntegrationDB::getStatistics(const vector<double> &data, double &avg, double &min, double &max, double &sd, int &totalFinished, int &totalExist, bool &manuallyLimited)
{
	//set initial values.
	avg = 0.0;
	sd  = 0.0;
	totalFinished = 0;
	totalExist = (int) data.size();
	manuallyLimited = false;

	//seed min/max with the first finished (non-negative) time, if any.
	bool foundNonNeg = false;
	for (size_t i = 0; i < data.size() && !foundNonNeg; ++i)
	{
		if (data[i] >= 0)
		{
			min = data[i];
			max = data[i];
			foundNonNeg = true;
		}
	}
	if (foundNonNeg == false)
	{
		//so every number is -1 or -2. That is, not one integration test finished.
		max = 0;
		min = 0;
		//the next for loop will not change min to -1 or -2.
	}

	//we can compute everything but the standard deviation in one pass of the array.
	for (size_t i = 0; i < data.size(); ++i)
	{
		if (data[i] <= -2)
			manuallyLimited = true;
		if (data[i] < 0)
			continue; //unfinished tests do not enter the time statistics.

		//so data[i] >= 0

		++totalFinished;

		if (min > data[i])
			min = data[i];
		if (max < data[i])
			max = data[i];

		avg += data[i];
	}//for i

	if (totalFinished != 0)
	{
		avg /= totalFinished; //otherwise, avg is still zero.

		//now find the (population) standard deviation of the finished times.
		for (size_t i = 0; i < data.size(); ++i)
		{
			if (data[i] < 0)
				continue;
			double diff = data[i] - avg;
			sd += diff * diff; //diff*diff instead of pow(diff, 2): same value, no libm call.
		}//for i

		sd /= totalFinished;
		sd = sqrt(sd);
	}//if totalFinished != 0

}//getStatistics

/**
 * For every given polymake file and every distinct polynomial degree in the
 * db, gathers the statistics for the class polymakefile-degree-dual.
 *
 * @parm polymakeFile: rows of query results; column 0 of each row is a polymake file path.
 * @parm useDual: true if we want the statistics of the dual polytopes.
 * @return ans[i][k]: statistics for file i and degree case k.
 */
vector<vector<ValuationDBStatistics> > IntegrationDB::getStatisticsByFile(const vector<vector<string> > &polymakeFile, bool useDual)
{
	vector<vector<ValuationDBStatistics> > ans;
	vector<vector<string> > polynomialDegrees;
	stringstream sql;

	if ( polymakeFile.size() == 0)
		return ans; //return it empty

	//get list of different degrees.
	sql << "select distinct p.degree from polynomial as p";
	polynomialDegrees = query(sql.str().c_str());


	ans.resize(polymakeFile.size()); //make room for each row.

	//now loop over every file and polynomial degree and get
	//the statistics for the class polymakefile-degree-dual
	//BUG FIX: the progress message printed the size of an unused, always-empty
	//vertexCounts vector (copy-paste from getStatisticsByDim), so it always
	//reported "table is 0 by ...". Report the real row count and drop the
	//unused local.
	cerr << "table is " << polymakeFile.size() << " by " << polynomialDegrees.size();
	for(int row = 0; row < (int) polymakeFile.size(); ++row)
	{

		for(int col = 0; col < (int)polynomialDegrees.size(); ++col)
		{
			cerr << "row, col=" << row << ", " << col << endl;
			ans[row].push_back(getStatisticsByFileDegree(polymakeFile[row][0], atoi(polynomialDegrees[col][0].c_str()), useDual));
		}//for
	}//for row
	return ans;
}//getStatisticsByFile

/**
 * Given the (dual) polytope dim, vertex count, and polynomial degree,
 * Will find and return basic statistics (avg time, min/max, totals, etc) about this test class.
 */
ValuationDBStatistics IntegrationDB::getStatisticsByDimVertexDegree(int dim, int vertexCount, int degree, bool useDual)
{
	ValuationDBStatistics stats;
	string timeQuery[2]; //[0] = Lawrence query, [1] = triangulation query.

	//remember which test class these statistics describe.
	stats.dim = dim;
	stats.vertexCount = vertexCount;
	stats.degree = degree;
	stats.useDual = useDual;

	//build one select statement per timing column.
	for(int k = 0; k < 2; ++k)
	{
		string column = (k == 0 ? "timeLawrence" : "timeTriangulate");
		stringstream s;

		if (useDual == true)
		{
			//dim/vertexCount describe the ORIGINAL polytope, so walk from the
			//dual polytope used in the test back to its original via dualP.dual.
			s << "select i." << column
				<< " from integrate as i"
				<< " join polytope as dualP on dualP.rowid = i.polytopeID"
				<< " join polytope as orgP on orgP.rowid = dualP.dual"
				<< " join polynomial as p on p.rowid = i.polynomialID"
				<< " where dualP.dual is not null "
				<< " and orgP.dim = " << dim
				<< " and orgP.vertexCount = " << vertexCount
				<< " and p.degree = " << degree;
		}//dual polytopes
		else
		{
			s << "select i." << column
				<< " from integrate as i"
				<< " join polynomial as p on p.rowid = i.polynomialID"
				<< " join polytope as t on t.rowid = i.polytopeID"
				<< " where t.dual is null"
				<< " and t.dim = " << dim
				<< " and t.vertexCount = " << vertexCount
				<< " and p.degree = " << degree;
		}//original polytopes

		timeQuery[k] = s.str();
	}//for k

	//fetch the raw times. A time is never stored as NULL, so a zero time
	//really means "super fast".
	vector<double> lawrenceTimes    = queryAsFloatArray(timeQuery[0].c_str());
	vector<double> triangulateTimes = queryAsFloatArray(timeQuery[1].c_str());

	double avg, low, high, dev;
	int existCount, finishedCount;
	bool limited;

	//summarize the Lawrence times.
	getStatistics(lawrenceTimes, avg, low, high, dev, finishedCount, existCount, limited);
	stats.avgLawrenceTime = avg;
	stats.minLawrenceTime = low;
	stats.maxLawrenceTime = high;
	stats.stdDeviationLawrence = dev;
	stats.totalFinishedLawrenceTestCases = finishedCount;
	stats.totalTestCases = existCount;
	stats.manuallyLimitedLawrence = limited;

	//summarize the triangulation times.
	getStatistics(triangulateTimes, avg, low, high, dev, finishedCount, existCount, limited);
	stats.avgTriangulationTime = avg;
	stats.minTriangulationTime = low;
	stats.maxTriangulationTime = high;
	stats.stdDeviationTriangulation = dev;
	stats.totalFinishedTriangulationTestCases = finishedCount;
	assert(stats.totalTestCases == existCount); //both queries cover the same test rows.
	stats.manuallyLimitedTriangulation = limited;

	return stats;
}//getStatisticsByDimVertexDegree


/**
 * Given a polymake file name and a polynomial degree, finds and returns basic
 * statistics (avg/min/max time, std. deviation, totals, manual-limit flags)
 * for the polymakeFile-degree-dual test class.
 *
 * @parm polymakeFile: value of polymakeFilePath in the polytope table.
 * @parm degree: polynomial degree of the test class.
 * @parm useDual: if true, statistics describe the dual-polytope tests.
 */
ValuationDBStatistics IntegrationDB::getStatisticsByFileDegree(const string & polymakeFile, int degree, bool useDual)
{
	ValuationDBStatistics vdbs;
	vector<double> avgMinMaxCountLawrence, avgMinMaxCountTriangulate;

	//save how this function was called.
	stringstream d, v;
	d << "select dim from polytope where polymakeFilePath = '" << polymakeFile << "'";
	v << "select vertexCount from polytope where polymakeFilePath = '" << polymakeFile << "'";
	vdbs.dim = queryAsInteger(d.str().c_str());
	vdbs.vertexCount = queryAsInteger(v.str().c_str());
	vdbs.degree = degree;
	vdbs.useDual = useDual;

	//get avg, min, man, and number finished
	avgMinMaxCountLawrence    = getStatisticsAvgMinMaxCount(Lawrence, polymakeFile, degree, useDual);
	avgMinMaxCountTriangulate = getStatisticsAvgMinMaxCount(Triangulate, polymakeFile, degree, useDual);


	//layout of each returned vector (see getStatisticsAvgMinMaxCount):
//	ans.push_back(avg); //avg 0
//	ans.push_back(min); //min 1
//	ans.push_back(max); //max 2
//	ans.push_back(sd) ;//sd 3
//	ans.push_back(totalFinished); //totalexist 4
//	ans.push_back(totalExist);//5 
//	ans.push_back(manuallyLimited);//6

	vdbs.avgTriangulationTime = avgMinMaxCountTriangulate[0];
	vdbs.minTriangulationTime = avgMinMaxCountTriangulate[1];
	vdbs.maxTriangulationTime = avgMinMaxCountTriangulate[2];
	vdbs.stdDeviationTriangulation = avgMinMaxCountTriangulate[3];
	vdbs.totalFinishedTriangulationTestCases = avgMinMaxCountTriangulate[4];
	vdbs.totalTestCases = avgMinMaxCountTriangulate[5];
	vdbs.manuallyLimitedTriangulation = avgMinMaxCountTriangulate[6];

	vdbs.avgLawrenceTime      = avgMinMaxCountLawrence[0];
	vdbs.minLawrenceTime      = avgMinMaxCountLawrence[1];
	vdbs.maxLawrenceTime      = avgMinMaxCountLawrence[2];
	vdbs.stdDeviationLawrence = avgMinMaxCountLawrence[3];	
	vdbs.totalFinishedLawrenceTestCases      = avgMinMaxCountLawrence[4];
	//NOTE(review): avgMinMaxCountLawrence[5] (Lawrence total) is unused;
	//totalTestCases is taken only from the Triangulate vector. The underlying
	//queries filter on "time >= 0", so the two totals may legitimately differ
	//per algorithm -- confirm before assuming they agree.
	vdbs.manuallyLimitedLawrence = avgMinMaxCountLawrence[6];
//back here
/*
	vdbs.avgTriangulationTime = avgMinMaxCountTriangulate[0];
	vdbs.avgLawrenceTime      = avgMinMaxCountLawrence[0];

	vdbs.minTriangulationTime = avgMinMaxCountTriangulate[1];
	vdbs.minLawrenceTime      = avgMinMaxCountLawrence[1];

	vdbs.maxTriangulationTime = avgMinMaxCountTriangulate[2];
	vdbs.maxLawrenceTime      = avgMinMaxCountLawrence[2];

	vdbs.totalFinishedTriangulationTestCases = avgMinMaxCountTriangulate[3];
	vdbs.totalFinishedLawrenceTestCases      = avgMinMaxCountLawrence[3];

	vdbs.totalTestCases = getNumberIntegrationTest(polymakeFile, degree, useDual);

	vdbs.manuallyLimitedLawrence = getLimitByFile(Lawrence, polymakeFile, degree, useDual);
	vdbs.manuallyLimitedTriangulation = getLimitByFile(Triangulate, polymakeFile, degree, useDual);
*/
	return vdbs;
}///getStatisticsByFileDegree


/**
 * Lets sqlite compute avg/min/max/count of the finished (time >= 0) tests for
 * one algorithm in the dim-vertexCount-degree-dual test class.
 *
 * @return [0]=avg, [1]=min, [2]=max, [3]=count of finished tests.
 */
vector<double> IntegrationDB::getStatisticsAvgMinMaxCount(AlgorithemUsed alg, int dim, int vertexCount, int degree, bool useDual)
{
	vector<double> summary;
	vector<vector<string> > rows;
	stringstream sql;
	string timeColumn;

	timeColumn = (alg == Lawrence ? "timeLawrence" : "timeTriangulate");

	if (useDual == true)
	{
		//dim/vertexCount describe the original polytope; follow dualP.dual back to it.
		sql << "select avg(i." << timeColumn << "), min(i." << timeColumn << "), max(i." << timeColumn << "), count(*) "
			<< " from integrate as i"
			<< " join polytope as dualP on dualP.rowid = i.polytopeID"
			<< " join polytope as orgP on orgP.rowid = dualP.dual"
			<< " join polynomial as p on p.rowid = i.polynomialID"
			<< " where dualP.dual is not null "
			<< " and orgP.dim = " << dim
			<< " and orgP.vertexCount = " << vertexCount
			<< " and p.degree = " << degree
			<< " and i." << timeColumn << " >= 0 ";
	}//dual polytopes
	else
	{

		sql << "select avg(i." << timeColumn << "), min(i." << timeColumn << "), max(i." << timeColumn << "), count(*) "
			<< " from integrate as i"
			<< " join polynomial as p on p.rowid = i.polynomialID"
			<< " join polytope as t on t.rowid = i.polytopeID"
			<< " where t.dual is null"
			<< " and t.dim = " << dim
			<< " and t.vertexCount = " << vertexCount
			<< " and p.degree = " << degree
			<< " and i." << timeColumn << " >= 0 ";
	}//original polytopes

	//run the query; the single result row holds the columns avg, min, max, count.
	rows = query(sql.str().c_str());

	for(int col = 0; col < 4; ++col)
		summary.push_back(atof(rows[0][col].c_str()));

	return summary;
}//	getStatisticsAvgMinMaxCount

//finds the standard deviation (note that sqlite does not have a build in std deviation function :(  )
/**
 * Finds the (population) standard deviation of the finished integration times
 * for one algorithm in the dim-vertexCount-degree-dual test class.
 * (sqlite does not have a built-in std. deviation function, so it is computed here.)
 *
 * @return 0.0 when there are fewer than two samples, otherwise sqrt(sum((x-avg)^2)/n).
 */
double IntegrationDB::getStdDeviation(AlgorithemUsed alg, int dim, int vertexCount, int degree, bool useDual)
{
	vector<vector<string> > strAns;
	stringstream sql;
	string strAlg;
	double sd = 0.0;
	double avg = 0.0;
	vector<double> numbers;


	strAlg = (alg == Lawrence ? "timeLawrence" : "timeTriangulate");

	if (useDual == true)
	{
		//dim/vertexCount describe the original polytope; follow dualP.dual back to it.
		sql << "select i." << strAlg
			<< " from integrate as i"
			<< " join polytope as dualP on dualP.rowid = i.polytopeID"
			<< " join polytope as orgP on orgP.rowid = dualP.dual"
			<< " join polynomial as p on p.rowid = i.polynomialID"
			<< " where dualP.dual is not null " //and with orgp
			<< " and orgP.dim = " << dim
			<< " and orgP.vertexCount = " << vertexCount
			<< " and p.degree = " << degree
			<< " and i." << strAlg << " >= 0 ";
	}//if dual
	else
	{

		sql << "select i." << strAlg
			<< " from integrate as i"
			<< " join polynomial as p on p.rowid = i.polynomialID"
			<< " join polytope as t on t.rowid = i.polytopeID"
			<< " where t.dual is null"
			<< " and t.dim = " << dim
			<< " and t.vertexCount = " << vertexCount
			<< " and p.degree = " << degree
			<< " and i." << strAlg << " >= 0 ";
	}//regular

	//get the data and save it
	strAns = query(sql.str().c_str());
	cerr << "got here "<< strAlg.c_str() << " " << strAns.size() << endl;
	if (strAns.size() <= 1)
		return 0.0; //deviation of 0 or 1 samples is zero.

	//ok, find the standard deviation manually.
	numbers.resize(strAns.size());

	//BUG FIX: the old loop ran i < strAlg.size() -- the LENGTH OF THE COLUMN
	//NAME (12-15 chars) -- instead of the number of result rows, reading past
	//the end of strAns for small result sets and truncating large ones.
	for(size_t i = 0; i < strAns.size(); ++i)
	{
		numbers[i] = atof(strAns[i][0].c_str());
		avg += numbers[i];
	}
	avg /= numbers.size();

	//population variance, then its square root.
	for(size_t i = 0; i < numbers.size(); ++i)
		sd = sd + (numbers[i] - avg)*(numbers[i] - avg);
	sd /= numbers.size();
	cerr << "sd^2=" << sd << endl;
	sd = sqrt(sd);

	return sd;
}


/**
 * Gathers statistics for one algorithm over the tests that use the given
 * polymake file and polynomial degree. Only finished tests (time >= 0) are
 * selected; getStatistics() does the number crunching.
 *
 * @return [0]=avg, [1]=min, [2]=max, [3]=std. deviation,
 *         [4]=finished count, [5]=total count, [6]=manually-limited flag.
 */
vector<double> IntegrationDB::getStatisticsAvgMinMaxCount(AlgorithemUsed alg, const string &polymakeFile, int degree, bool useDual)
{

	vector<double> summary;
	vector<double > times;
	stringstream sql;
	string timeColumn;

	timeColumn = (alg == Lawrence ? "timeLawrence" : "timeTriangulate");

	if (useDual == true)
	{
		//the polymake file names the original polytope; follow dualP.dual back to it.
		sql << "select i." << timeColumn 
			<< " from polynomial as p, polytope as dualP, polytope as orgP, integrate as i "
			<< " where i.polynomialID = p.rowid and i.polytopeID = dualP.rowid "
			<< " and dualP.dual is not null and dualP.dual = orgP.rowid"
			<< " and orgP.polymakeFilePath = '" << polymakeFile << "'"
			<< " and p.degree = " << degree
			<< " and i." << timeColumn << " >= 0 ";
	}//dual polytopes
	else
	{

		sql << "select i." << timeColumn 
			<< " from polynomial as p, polytope as t, integrate as i "
			<< " where i.polynomialID = p.rowid and i.polytopeID = t.rowid "
			<< " and t.dual is null"
			<< " and t.polymakeFilePath = '" << polymakeFile << "'"
			<< " and p.degree = " << degree
			<< " and i." << timeColumn << " >= 0 ";
	}//original polytopes

	//pull the raw time values.
	times = queryAsFloatArray(sql.str().c_str());

	double avg, low, high, dev;
	int existCount, finishedCount;
	bool limited;

	getStatistics(times, avg, low, high, dev, finishedCount, existCount, limited);

	summary.push_back(avg);           //[0] average time
	summary.push_back(low);           //[1] min time
	summary.push_back(high);          //[2] max time
	summary.push_back(dev);           //[3] std. deviation
	summary.push_back(finishedCount); //[4] finished tests
	summary.push_back(existCount);    //[5] total tests
	summary.push_back(limited);       //[6] manually limited?

	return summary;
}//getStatisticsAvgMinMaxCount


/**
 * Within all polynomials of set dim and degree, find my the ones that are NOT being used in a test with a polytope of set vertexCount and dual values.
 */
vector<vector<string> > IntegrationDB::getUnusedPolynomials(int dim, int degree, int vertexCount, bool useDual)
{
	stringstream sql;
	//GOAL: 1) Find all the polynomials that are of a set degree and dim.
	//      2) Find all the tests where the polynomials are of a set degree dim, the polytope is a set dim, and vertexCount (or came from a polytope of a set vertexCount if useDual=ture),
	//      3) Return the polynomials that are in the set 1 but NOT set 2.
	//@return a one-column result set of polynomial rowid's.
	if ( useDual == false)
	{//for every polynomial of set dim and degree ans ask the question
		//Is this polynomial already being used in a test with a non-dual polytope of set dim and set vertexCount?
		//if not, then return it.
		sql << "select distinct rowid from polynomial "
				<< " where rowid not in"
				<< " ( select p.rowid from polynomial as p, integrate as i, polytope as t "
				<< "     where i.polytopeID = t.rowid and i.polynomialID = p.rowid " //join
				<< "       and t.dim = " << dim
				<< "       and t.vertexCount = " << vertexCount
				<< "       and p.degree = " << degree
				<< "       and t.dual is null "
				<< " ) "
				<< " and dim = " << dim << " and degree = " << degree;
	}
	else
	{//for every polynomial of set dim and degree ask the question:
		//Is this polynomial already being used in a test with a DUAL polytope where the org. polytope had set dim and vertexCount?
		//if not, then return it.
		sql << "select distinct rowid from polynomial where rowid not in"
			<< "  (select p.rowid from polynomial as p, integrate as i, polytope as dualPolytope, polytope as orgPolytope "
			<< "     where i.polytopeID = dualPolytope.rowid and i.polynomialID = p.rowid "
			<< " 	   and dualPolytope.dual is not null "				//make sure we have dual
			<< "       and dualPolytope.dual = orgPolytope.rowid "		//find the org. polytope
			<< "       and orgPolytope.dim = " << dim					//look at the dim of the test case
			<< "       and orgPolytope.vertexCount = " << vertexCount	//look at the vertex count of the org polytope
			<< "       and p.degree = " << degree						//polynomial degree.
			<< "  ) "
			<< " and dim = " << dim << " and degree = " << degree;
	}
	return query(sql.str().c_str());
}//getUnusedPolynomials


/**
 * Within all polynomials of set dim and degree, finds the ones that are NOT
 * already paired with the given polytope in the integrate table.
 *
 * @parm polytopeID: rowid of the polytope in the polytope table.
 * @return a one-column result set of polynomial rowid's.
 */
vector<vector<string> > IntegrationDB::getUnusedPolynomials(int dim, int degree, int polytopeID)
{
	stringstream sql;
	sql << "select distinct rowid from polynomial where rowid not in "
			<< " ( select p.rowid from polynomial as p, integrate as i, polytope as t "
			<< "     where i.polytopeID = t.rowid and i.polynomialID = p.rowid " //join
			<< "       and t.rowid = " << polytopeID
			<< "       and p.degree = " << degree
			<< " ) "
			<< " and dim = " << dim << " and degree = " << degree;
	return query(sql.str().c_str());
}//getUnusedPolynomials


/**
 * Given a set dim, degree, vertexCount and dual values, find me all polytopes that are not used in the integrate table.
 */
vector<vector<string> > IntegrationDB::getUnusedPolytopes(int dim, int degree, int vertexCount, bool useDual)
{
	stringstream sql;
	//GOAL: 1) Find all the polytopes that are of a set dim and vertex count (or game from a polytope of a set vertex count if useDual = true).
	//      2) Find all the tests where the polynomials are of a set degree dim, the polytope is a set dim, and vertexCount (or came from a polytope of a set vertexCount if useDual=ture),
	//      3) Return the polytopes that are in the set 1 but NOT set 2.
	//@return a one-column result set of polytope rowid's.
	if (useDual == false)
	{//for every non-dual polytope of set dim and vertexCount ask the question:
		//Is this polytope already being used in a test with a polynomial of set degree and vertexCount?
		//if not, then return it
		sql << "select distinct rowid from polytope where rowid not in"
			<< " ( select t.rowid from polynomial as p, integrate as i, polytope as t "
			<< "     where i.polytopeID = t.rowid and i.polynomialID = p.rowid " //join
			<< "       and t.dim = " << dim
			<< "       and t.vertexCount = " << vertexCount
			<< "       and p.degree = " << degree
			<< "       and t.dual is  null "
			<< " ) "
			<< " and dim = " << dim << " and vertexCount = " << vertexCount
			<< " and dual is  null";
	}
	else
	{//for every dual polytope of set dim and that came from an org. polytope of set vertexCount ask the question:
		//Is this polytope already being used in a test with a polynomial of set degree and vertexCount?
		//if not, then return it
		sql << "select distinct dualP.rowid from polytope as dualP, polytope as orgP "
			<< " where dualP.rowid not in "
			<< " ( select dualPoly.rowid from polynomial as p, integrate as i, polytope as dualPoly, polytope as orgPoly "
			<< "     where i.polytopeID = dualPoly.rowid and i.polynomialID = p.rowid " //join
			<< "       and dualPoly.dual is not null"
			<< "       and dualPoly.dual = orgPoly.rowid"
			<< "       and orgPoly.dim = " << dim
			<< "       and orgPoly.vertexCount = " << vertexCount
			<< "       and p.degree = " << degree
			<< " ) "
			<< " and dualP.dim = " << dim
			<< " and dualP.dual is not null"
			<< " and dualP.dual = orgP.rowid" //join the orgPoly and dualPoly.
			<< " and orgP.vertexCount = " << vertexCount;
	}
	return query(sql.str().c_str());
}//getUnusedPolytopes


/**
 * No longer used....to delte.
 *
 *	Inserts a polynomial, a polytope, and the dual polytope in the polynomial/polytope tables.
 * Also, inserts two integration tests (one for the org. polytope and one for the dual polytope).
 *
 * @parm polynomialPath:	file name to the latte-style polynomial file
 * @parm dim: dim of both polytopes and the polynomial.
 * @parm degree: of the polynomial
 * @parm (dual)polytopePath: file name of the (dual) polytope
 * @parm (dual)polymakePath: file name of the (dual) polymake file.
 * @parm (dual)vertexCount: number of vertices in the (dual) polytope
 * @parm (dual)simple:	is the (dual) polytope simple? Yes=true=1.
 */
void IntegrationDB::insertEmptyIntegrationTest(
			const char* polynomialPath, int dim, int degree,
			const char* polytopePath, const char* polymakePath, int vertexCount, int simple,
			const char* dualPolytopePath, const char* dualPolymakePath, int dualVertexCount, int dualSimple)
{
	//the dual polytope must go in first so the original polytope's row
	//can record its rowid in the "dual" column. The dual itself points nowhere (-1).
	int dualID = insertPolytope(dim, dualVertexCount, dualSimple, -1    , dualPolytopePath, dualPolymakePath);
	int orgID  = insertPolytope(dim, vertexCount    , simple    , dualID, polytopePath, polymakePath);

	//register the polynomial.
	int polyID = insertPolynomial(dim, degree, polynomialPath);

	//pair the polynomial with both the original and the dual polytope.
	insertIntegrationTest(polyID, orgID);
	insertIntegrationTest(polyID, dualID);
}//insertEmptyIntegrationTest

/**
 *	Inserts 1 empty row in the integrate table.
 *
 * @parm polynomialID: rowid of a polynomial from the polynomial table.
 * @parm polytopeID: rowid of a polytope from the polytope table.
 * @return rowid of the new just-inserted integrate row.
 */
int IntegrationDB::insertIntegrationTest(int polynomialID, int polytopeID)
{
	stringstream sql;
	sql << "insert into integrate (polynomialID, polytopeID, timeLawrence, timeTriangulate, integral) " 
	    << "values (" << polynomialID << ", " << polytopeID << ", " << "-1, -1, 'NA')";
	query(sql.str().c_str());
	return last_insert_rowid();
}

/**
 * Pre conditions: the polynomial and polytope tables have enough rows of a
 * set dim/degree/vertex count to make 'count' many integration tests
 * and dual integration tests.
 *
 * If there is enough, we will then pick unused polytopes and polynomials and
 * insert there combination into the integrate table.
 */
void IntegrationDB::insertIntegrationTest(int dim, int degree, int vertexCount, int count)
{
	//find how many of each we have.
	int numPolynomials          = getNumberPolynomials(dim, degree);
	int numPolytopes            = getNumberPolytopes(dim, vertexCount, false);
	int numDualPolytopes        = getNumberPolytopes(dim, vertexCount, true);
	int numIntegrationTests     = getNumberIntegrationTest(dim, vertexCount, degree, false);
	int numDualIntegrationTests = getNumberIntegrationTest(dim, vertexCount, degree, true);


	//check if we really can make 'count' many integration tests and dual integration tests.
	if (numPolynomials < count)
		throw SqliteDBexception("insertIntegrationTest::Not enough polynomials exist");
	//bug fix: this guard used to re-test numPolynomials, so a polytope
	//shortage was never detected.
	if (numPolytopes < count)
		throw SqliteDBexception("insertIntegrationTest::Not enough polytopes exist");
	if (numDualPolytopes < count)
		throw SqliteDBexception("insertIntegrationTest::not enough dual polytopes exist");

	//ok, now we know we can make count many integration tests! so lets do it.

	if ( count - numIntegrationTests  <= 0)
	{
		cout << "There already exist " << numIntegrationTests << " tests; in fact, there might be more." << endl;
		return;
	}

	if (numIntegrationTests < count)
		makeMoreIntegrationTests(dim, degree, vertexCount, false, count, numIntegrationTests);
	//bug fix: the dual branch used to pass numIntegrationTests (the non-dual
	//count) as the existing count, creating the wrong number of dual tests.
	if (numDualIntegrationTests < count)
		makeMoreIntegrationTests(dim, degree, vertexCount, true, count, numDualIntegrationTests);
}//insertIntegrationTest

/**
 * Ensures that 'count' integration tests exist for the named polytope at the
 * given polynomial degree, creating new (polynomial, polytope) pairs if needed.
 *
 * @throws SqliteDBexception if the polytope is unknown or there are not
 *         enough polynomials of the matching dim/degree.
 */
void  IntegrationDB::insertSpecficPolytopeIntegrationTest(string polymakeFile, int degree, int count)
{
	//robustness fix: validate the polytope FIRST. The old code called
	//getNumberIntegrationTest(rowid, degree) with a possibly-invalid rowid
	//before checking it.
	int rowid = doesPolytopeExist(polymakeFile.c_str());
	if ( rowid <= 0)
		throw SqliteDBexception("insertSpecficPolytopeIntegrationTest::polytope does not exist");

	stringstream sql;
	sql << "select dim from polytope where polymakeFilePath = '" << polymakeFile << "'";

	int dim = queryAsInteger(sql.str().c_str());
	int numPolynomials = getNumberPolynomials(dim, degree);
	int numIntegrationTests = getNumberIntegrationTest(rowid, degree);

	if (count > numPolynomials)
		throw SqliteDBexception("insertSpecficPolytopeIntegrationTest::Not enough polynomials exist");
	if ( count - numIntegrationTests <= 0)
	{
		cout << "polytope dim: " << dim
			 << "\n number of polynomials: " << numPolynomials
			 << "\n rowid of polytope (" << polymakeFile.c_str() << "): " << rowid
			 << "\n number of current integration tests: " << numIntegrationTests << endl;
		cout << "There already exist " << numIntegrationTests << " tests." << endl;

		return;
	}

	//add enough new (polynomial, polytope) rows to reach 'count' tests.
	makeMoreIntegrationTests(rowid, dim, degree, count, numIntegrationTests);

}//insertSpecficPolytopeIntegrationTest



/**
 * Inserts 1 row in the polynomial table.
 *
 * @parm dim: dim. of the polynomial (number of variables).
 * @parm degree: of the polynomial
 * @parm filePath: to the latte-style polynomial.
 * @return rowid of the inserted row.
 */
int IntegrationDB::insertPolynomial(int dim, int degree, const char*filePath) throw(SqliteDBexception)
{
	if ( doesPolynomialExist(filePath))
		throw SqliteDBexception(string("insertPolynomial::Polynomial ")+filePath+" already exist");

	stringstream sql;
	sql << "insert into polynomial (dim, degree, filepath) values (" << dim << ", " << degree << ", '" << filePath << "')";
	query(sql.str().c_str());	
	return last_insert_rowid();
}//insertPolynomial


/**
 * Inserts 1 row in the polytope table.
 *
 * @parm dim: dim. of polytope = dime of polynomial.
 * @parm vertexCount: what do you think?
 * @parm simple: true if the polytope is simple
 * @parm dualRowID: if positive, it is a "pointer" to the polytope table for the dual polytope. The dual does not point back to its "parent"
 * @parm latteFilePath: file name of the latte file.
 * @parm polymakeFilePath: polymake file path. could be null.
 */
int IntegrationDB::insertPolytope(int dim, int vertexCount, int simple, int dualRowID, const char* latteFilePath, const char* polymakeFilePath)
{
	stringstream sql;
	sql << "insert into polytope (dim, vertexCount, simple, latteFilePath, polymakeFilePath, dual) values (" 
	    << dim << ", " << vertexCount << ", " << simple  << ", '" << latteFilePath << "', '" << polymakeFilePath
	    << "', ";
	if (dualRowID > 0 )
		sql << dualRowID;
	else
		sql << "NULL";
	sql <<")";
	query(sql.str().c_str());
	return last_insert_rowid();
}//insertPolytope

//no longer used.
//to delete one day....
int IntegrationDB::insertPolytopeAndPickIntegrationTest(int dim, int vertexCount,     int simple    , const char * latteFile    , const char * polymakeFile
												               , int dualVertexCount, int dualSimple, const char * dualLatteFile, const char * dualPolymakeFile)
{
	stringstream sql;
	//make sure neither polymake file was inserted before.
	//bug fix: the old check compared the polymakeFilePath column against the
	//LATTE file names, so existing polymake files were never detected.
	sql << "select * from polytope where polymakeFilePath = '" << polymakeFile << "' or polymakeFilePath = '" << dualPolymakeFile << "'";
	if ( query(sql.str().c_str()).size() )
	{
		throw SqliteDBexception("insertPolytopeAndPickIntegrationTest::Database already contains those polymake files!!!");
	}//if error.

	sql.str("");//clear query string.

	//now find 1 polynomial that have not been used on a polytope of this dim and vertex count.
	sql << "select rowid from polynomial where rowid not in "
			<<"(select p.rowid from polynomial as p, integrate as i, polytope as t "
			<<         "where i.polytopeID = t.rowid and i.polynomialID = p.rowid " // join the tables
			<<         "   and t.dim = " << dim << " and t.vertexCount = " << vertexCount  //restrict to the current dim/vertexcount
			<<") limit 1";
	//get this unused rowid.
	vector<vector<string> > result;
	result = query(sql.str().c_str());
	if ( ! result.size())
		throw SqliteDBexception("insertPolytopeAndPickIntegrationTest::There are not enough unique unused polynomials");
	int polynomialID = atoi(result[0][0].c_str());
	assert(polynomialID > 0);

	//insert two polytopes; the dual row points back at the original one.
	int polytopeID     = insertPolytope(dim, vertexCount    , simple    , -1        , latteFile    , polymakeFile);
	int dualPolytopeID = insertPolytope(dim, dualVertexCount, dualSimple, polytopeID, dualLatteFile, dualPolymakeFile);

	//insert two tests with the same polynomial.
	insertIntegrationTest(polynomialID, polytopeID);
	insertIntegrationTest(polynomialID, dualPolytopeID);

	//bug fix: the function is declared to return int but never returned a value
	//(undefined behavior); report the rowid of the original (non-dual) polytope.
	return polytopeID;
}//insertPolytopeAndPickIntegrationTest


/**
 * Returns the number of completed test in the integrate table for the specfic test class
 */
int IntegrationDB::testCasesCompleted(AlgorithemUsed alg, int dim, int vertexCount, int degree, bool useDual)
{
	//a finished test has a non-negative time in the algorithm's column
	//(times are initialized to -1 when a test row is created).
	stringstream sql;
	if ( useDual == true)
	{
		//dim/vertexCount describe the ORIGINAL polytope; walk from the dual
		//polytope used in the test back to its original via dualP.dual.
		sql << "select count(*) from polynomial as p, integrate as i, polytope as dualP, polytope as orgP "
			<< " where i.polynomialID = p.rowid and i.polytopeID = dualP.rowid "
			<< " and dualP.dual is not null and dualP.dual = orgP.rowid "
			<< " and orgP.vertexCount = " << vertexCount
			<< " and orgP.dim = " << dim
			<< " and p.degree = " << degree;
	}
	else
	{
		sql << "select count(*) from polynomial as p, integrate as i, polytope as t "
			<< " where i.polynomialID = p.rowid and i.polytopeID = t.rowid "
			<< " and t.vertexCount = " << vertexCount
			<< " and t.dim = " << dim
			<< " and p.degree = " << degree
			<< " and t.dual is null ";
	}//else not dual polytopes.

	//only count rows whose time column shows the test actually ran.
	sql << " and " << (alg == Lawrence  ? " i.timeLawrence " : " i.timeTriangulate ") << " >= 0";
	return queryAsInteger(sql.str().c_str());
}//isTestCaseFinished

/**
 *
 * Private function.
 *
 * Get a list of unused polynomials and polytopes, and add them to the integrate table.
 */
void IntegrationDB::makeMoreIntegrationTests(int dim, int degree, int vertexCount, bool useDual, int requestedCount, int existingCount)
{
	int newRows = requestedCount - existingCount;
	//robustness fix: bail out when nothing needs to be added. This also keeps
	//the size comparisons below safe -- size() is unsigned, and the old
	//signed/unsigned comparison would treat a negative newRows as huge.
	if (newRows <= 0)
		return;

	//unusedPolynomials and unusedPolytopes has 1 column of rowid's that do not already exist in the integrate table
	//again, unusedPolynomials[i] is a vector with 1 element.
	vector<vector<string> > unusedPolynomials = getUnusedPolynomials(dim, degree, vertexCount, useDual);
	vector<vector<string> > unusedPolytopes   = getUnusedPolytopes(dim, degree, vertexCount, useDual);

	if ( (int) unusedPolynomials.size() < newRows || (int) unusedPolytopes.size() < newRows)
		throw SqliteDBexception("makeMoreIntegrationTests: there are not enough free polynomials or polytopes"); //I think this should never be true...

	//pair unused polynomials and polytopes one-for-one.
	for(int i = 0; i < newRows; ++i)
	{
		insertIntegrationTest(atoi(unusedPolynomials[i][0].c_str()), atoi(unusedPolytopes[i][0].c_str()));
	}//for i
}//makeMoreIntegrationTests

/**
 * Private function.
 * get a list of unused polynomials for this set polytope and add them to the integrate table.
 */
/**
 * Private function.
 * Gets a list of unused polynomials for this one polytope and pairs each with
 * it in the integrate table until 'requestedCount' tests exist.
 */
void IntegrationDB::makeMoreIntegrationTests(int polytopeID, int dim, int degree, int requestedCount, int existingCount)
{
	int newRows = requestedCount - existingCount;
	//robustness fix: bail out when nothing needs to be added; also avoids the
	//old signed/unsigned comparison, which would treat a negative newRows as
	//a huge unsigned value and throw spuriously.
	if (newRows <= 0)
		return;

	vector<vector<string> > unusedPolynomials = getUnusedPolynomials(dim, degree, polytopeID);

	if ( (int) unusedPolynomials.size() < newRows)
		throw SqliteDBexception("makeMoreIntegrationTests: there are not enough free polynomials"); //I think this should never be true...

	for(int i = 0; i < newRows; ++i)
	{
		insertIntegrationTest(atoi(unusedPolynomials[i][0].c_str()), polytopeID);
	}//for i
}//makeMoreIntegrationTests

/**
 * Updates the integral table with time and valuation results.
 *
 * @alg: what column should be updated in the integrate table?
 * @time: time from calling the mainValuationDriver
 * @currentValue: new value for the integral column. Checks to see if this is different from any previous values.
 * @previousValue: current value in the integral column, or the string "NA"
 * @rowid: which row are we updating?
 */
void IntegrationDB::updateIntegrationTimeAndValue(AlgorithemUsed alg, double time, RationalNTL computedValue, string previousValueStr,  string rowid)
{
	stringstream sql;

	//update only the time column that belongs to the algorithm that just ran.
	sql << "update integrate set ";
	if ( alg == Lawrence)
		sql << " timeLawrence = " << time;
	else //Triangulate};
		sql << " timeTriangulate = " << time;


	if ( previousValueStr == "NA")
	{
		//first result for this row: record the integral.
		//NOTE(review): the "' " literal stores a leading space inside the
		//integral column; presumably RationalNTL tolerates it when the value
		//is read back -- confirm.
		sql << " , integral = ' " << computedValue << "' ";
	}
	else
	{
		//an integral was recorded before (by the other algorithm or an earlier
		//run); the two values must agree, otherwise something is very wrong.
		RationalNTL previousValue(previousValueStr);
		if ( previousValue != computedValue)
		{
			throw SqliteDBexception(string("updateIntegrationTimeAndValue::The integrals differ")
					+"\n\tpreviousValue: " + previousValueStr
					+"\n\tcomputedValue: " + computedValue.str()
					+"\n\tcurrent sql stm:" + sql.str()
					+"\n\trowid: " + rowid);
		}
	}
	//stamp the row with the integrator version; flagValue is always reset to -1 here.
	sql << " , flagValue = '" << -1 << "', flagType = '" << LAWRECE_INTEGRATE_VERSION << "'";
	//endl only appends '\n' to the statement text (no flush on a stringstream); harmless to sqlite.
	sql << " where rowid = " << rowid << endl;

	//cout << "QUERY:: " << sql.str().c_str() << endl;

	query(sql.str().c_str());

}//updateIntegrationTimeAndValue