File: Makefile.am

package info (click to toggle)
nut 2.8.4%2Breally-2
  • links: PTS, VCS
  • area: main
  • in suites: forky, sid
  • size: 25,720 kB
  • sloc: ansic: 132,030; sh: 17,256; cpp: 12,566; makefile: 5,646; python: 1,114; perl: 856; xml: 47
file content (1487 lines) | stat: -rw-r--r-- 71,074 bytes parent folder | download
1
2
3
4
5
6
7
8
9
10
11
12
13
14
15
16
17
18
19
20
21
22
23
24
25
26
27
28
29
30
31
32
33
34
35
36
37
38
39
40
41
42
43
44
45
46
47
48
49
50
51
52
53
54
55
56
57
58
59
60
61
62
63
64
65
66
67
68
69
70
71
72
73
74
75
76
77
78
79
80
81
82
83
84
85
86
87
88
89
90
91
92
93
94
95
96
97
98
99
100
101
102
103
104
105
106
107
108
109
110
111
112
113
114
115
116
117
118
119
120
121
122
123
124
125
126
127
128
129
130
131
132
133
134
135
136
137
138
139
140
141
142
143
144
145
146
147
148
149
150
151
152
153
154
155
156
157
158
159
160
161
162
163
164
165
166
167
168
169
170
171
172
173
174
175
176
177
178
179
180
181
182
183
184
185
186
187
188
189
190
191
192
193
194
195
196
197
198
199
200
201
202
203
204
205
206
207
208
209
210
211
212
213
214
215
216
217
218
219
220
221
222
223
224
225
226
227
228
229
230
231
232
233
234
235
236
237
238
239
240
241
242
243
244
245
246
247
248
249
250
251
252
253
254
255
256
257
258
259
260
261
262
263
264
265
266
267
268
269
270
271
272
273
274
275
276
277
278
279
280
281
282
283
284
285
286
287
288
289
290
291
292
293
294
295
296
297
298
299
300
301
302
303
304
305
306
307
308
309
310
311
312
313
314
315
316
317
318
319
320
321
322
323
324
325
326
327
328
329
330
331
332
333
334
335
336
337
338
339
340
341
342
343
344
345
346
347
348
349
350
351
352
353
354
355
356
357
358
359
360
361
362
363
364
365
366
367
368
369
370
371
372
373
374
375
376
377
378
379
380
381
382
383
384
385
386
387
388
389
390
391
392
393
394
395
396
397
398
399
400
401
402
403
404
405
406
407
408
409
410
411
412
413
414
415
416
417
418
419
420
421
422
423
424
425
426
427
428
429
430
431
432
433
434
435
436
437
438
439
440
441
442
443
444
445
446
447
448
449
450
451
452
453
454
455
456
457
458
459
460
461
462
463
464
465
466
467
468
469
470
471
472
473
474
475
476
477
478
479
480
481
482
483
484
485
486
487
488
489
490
491
492
493
494
495
496
497
498
499
500
501
502
503
504
505
506
507
508
509
510
511
512
513
514
515
516
517
518
519
520
521
522
523
524
525
526
527
528
529
530
531
532
533
534
535
536
537
538
539
540
541
542
543
544
545
546
547
548
549
550
551
552
553
554
555
556
557
558
559
560
561
562
563
564
565
566
567
568
569
570
571
572
573
574
575
576
577
578
579
580
581
582
583
584
585
586
587
588
589
590
591
592
593
594
595
596
597
598
599
600
601
602
603
604
605
606
607
608
609
610
611
612
613
614
615
616
617
618
619
620
621
622
623
624
625
626
627
628
629
630
631
632
633
634
635
636
637
638
639
640
641
642
643
644
645
646
647
648
649
650
651
652
653
654
655
656
657
658
659
660
661
662
663
664
665
666
667
668
669
670
671
672
673
674
675
676
677
678
679
680
681
682
683
684
685
686
687
688
689
690
691
692
693
694
695
696
697
698
699
700
701
702
703
704
705
706
707
708
709
710
711
712
713
714
715
716
717
718
719
720
721
722
723
724
725
726
727
728
729
730
731
732
733
734
735
736
737
738
739
740
741
742
743
744
745
746
747
748
749
750
751
752
753
754
755
756
757
758
759
760
761
762
763
764
765
766
767
768
769
770
771
772
773
774
775
776
777
778
779
780
781
782
783
784
785
786
787
788
789
790
791
792
793
794
795
796
797
798
799
800
801
802
803
804
805
806
807
808
809
810
811
812
813
814
815
816
817
818
819
820
821
822
823
824
825
826
827
828
829
830
831
832
833
834
835
836
837
838
839
840
841
842
843
844
845
846
847
848
849
850
851
852
853
854
855
856
857
858
859
860
861
862
863
864
865
866
867
868
869
870
871
872
873
874
875
876
877
878
879
880
881
882
883
884
885
886
887
888
889
890
891
892
893
894
895
896
897
898
899
900
901
902
903
904
905
906
907
908
909
910
911
912
913
914
915
916
917
918
919
920
921
922
923
924
925
926
927
928
929
930
931
932
933
934
935
936
937
938
939
940
941
942
943
944
945
946
947
948
949
950
951
952
953
954
955
956
957
958
959
960
961
962
963
964
965
966
967
968
969
970
971
972
973
974
975
976
977
978
979
980
981
982
983
984
985
986
987
988
989
990
991
992
993
994
995
996
997
998
999
1000
1001
1002
1003
1004
1005
1006
1007
1008
1009
1010
1011
1012
1013
1014
1015
1016
1017
1018
1019
1020
1021
1022
1023
1024
1025
1026
1027
1028
1029
1030
1031
1032
1033
1034
1035
1036
1037
1038
1039
1040
1041
1042
1043
1044
1045
1046
1047
1048
1049
1050
1051
1052
1053
1054
1055
1056
1057
1058
1059
1060
1061
1062
1063
1064
1065
1066
1067
1068
1069
1070
1071
1072
1073
1074
1075
1076
1077
1078
1079
1080
1081
1082
1083
1084
1085
1086
1087
1088
1089
1090
1091
1092
1093
1094
1095
1096
1097
1098
1099
1100
1101
1102
1103
1104
1105
1106
1107
1108
1109
1110
1111
1112
1113
1114
1115
1116
1117
1118
1119
1120
1121
1122
1123
1124
1125
1126
1127
1128
1129
1130
1131
1132
1133
1134
1135
1136
1137
1138
1139
1140
1141
1142
1143
1144
1145
1146
1147
1148
1149
1150
1151
1152
1153
1154
1155
1156
1157
1158
1159
1160
1161
1162
1163
1164
1165
1166
1167
1168
1169
1170
1171
1172
1173
1174
1175
1176
1177
1178
1179
1180
1181
1182
1183
1184
1185
1186
1187
1188
1189
1190
1191
1192
1193
1194
1195
1196
1197
1198
1199
1200
1201
1202
1203
1204
1205
1206
1207
1208
1209
1210
1211
1212
1213
1214
1215
1216
1217
1218
1219
1220
1221
1222
1223
1224
1225
1226
1227
1228
1229
1230
1231
1232
1233
1234
1235
1236
1237
1238
1239
1240
1241
1242
1243
1244
1245
1246
1247
1248
1249
1250
1251
1252
1253
1254
1255
1256
1257
1258
1259
1260
1261
1262
1263
1264
1265
1266
1267
1268
1269
1270
1271
1272
1273
1274
1275
1276
1277
1278
1279
1280
1281
1282
1283
1284
1285
1286
1287
1288
1289
1290
1291
1292
1293
1294
1295
1296
1297
1298
1299
1300
1301
1302
1303
1304
1305
1306
1307
1308
1309
1310
1311
1312
1313
1314
1315
1316
1317
1318
1319
1320
1321
1322
1323
1324
1325
1326
1327
1328
1329
1330
1331
1332
1333
1334
1335
1336
1337
1338
1339
1340
1341
1342
1343
1344
1345
1346
1347
1348
1349
1350
1351
1352
1353
1354
1355
1356
1357
1358
1359
1360
1361
1362
1363
1364
1365
1366
1367
1368
1369
1370
1371
1372
1373
1374
1375
1376
1377
1378
1379
1380
1381
1382
1383
1384
1385
1386
1387
1388
1389
1390
1391
1392
1393
1394
1395
1396
1397
1398
1399
1400
1401
1402
1403
1404
1405
1406
1407
1408
1409
1410
1411
1412
1413
1414
1415
1416
1417
1418
1419
1420
1421
1422
1423
1424
1425
1426
1427
1428
1429
1430
1431
1432
1433
1434
1435
1436
1437
1438
1439
1440
1441
1442
1443
1444
1445
1446
1447
1448
1449
1450
1451
1452
1453
1454
1455
1456
1457
1458
1459
1460
1461
1462
1463
1464
1465
1466
1467
1468
1469
1470
1471
1472
1473
1474
1475
1476
1477
1478
1479
1480
1481
1482
1483
1484
1485
1486
1487
# top-level Makefile for NUT
# (automake template: the configure script substitutes the @...@ tokens)

# Export certain values for ccache which NUT ci_build.sh can customize,
# to facilitate developer iteration re-runs of "make" later.
# At least GNU and BSD make implementations are okay with this syntax.
# Each line is prefixed by configure-substituted guard tokens (presumably
# expanding to a comment marker to disable a line, or to nothing to enable
# it - confirm against configure.ac).
@NUT_AM_MAKE_CAN_EXPORT@@NUT_AM_EXPORT_CCACHE_NAMESPACE@export CCACHE_NAMESPACE=@CCACHE_NAMESPACE@
@NUT_AM_MAKE_CAN_EXPORT@@NUT_AM_EXPORT_CCACHE_BASEDIR@export CCACHE_BASEDIR=@CCACHE_BASEDIR@
@NUT_AM_MAKE_CAN_EXPORT@@NUT_AM_EXPORT_CCACHE_DIR@export CCACHE_DIR=@CCACHE_DIR@
@NUT_AM_MAKE_CAN_EXPORT@@NUT_AM_EXPORT_CCACHE_PATH@export CCACHE_PATH=@CCACHE_PATH@
# NOTE(review): the PATH export reuses the CCACHE_PATH guard rather than a
# dedicated one - presumably intentional, to restore the configure-time PATH
# together with ccache settings; confirm against configure.ac.
@NUT_AM_MAKE_CAN_EXPORT@@NUT_AM_EXPORT_CCACHE_PATH@export PATH=@PATH_DURING_CONFIGURE@

# include directory for aclocal: project-local autoconf macros live in m4/
ACLOCAL_AMFLAGS = -I m4

# Autotools' SUBDIRS (our values are listed below) allow for powerful recursive
# recipe automation, with one notable weakness: the dirs are processed in a
# loop sequentially, even in parallel builds (each such sub-make is parallel
# then). In our case, the HTML/PDF render of ChangeLog can take a minute of
# work in "docs" while we are not building anything in other dirs. On the up
# side, that approach does allow for dirs with dependencies to get built first
# deterministically. For more details search for "am__recursive_targets" in the
# generated Makefile.
#
# The commonly suggested way out of this predicament is to consolidate numerous
# Makefile.am recipes into one, which alone properly defines all the needed
# interdependencies that are "known" to one instance of the `make` process.
# This however loses the ability to quickly e.g. `cd tests && make check`, so
# the next layer is to re-introduce Makefiles in sub-directories that define
# a few popular targets to perform via the one big top-level Makefile.
#
# Our approach here is to merge the two solutions: do use SUBDIRS the way
# autotools handle them for the hordes of `*-recursive` targets for us, but
# define more explicitly the targets for hot code paths (all, check) so that
# they can run first for certain different directories (in parallel if asked
# to) with "knowledge" of dependencies, and then a bit wastefully maybe re-run
# those directories via autotools integration. They should be quick no-ops by
# then in the anticipated common use-cases.
#
# List of source subdirectories to build and distribute, used to spawn automake
# (alas sequential) target recipes. The order matters, as several subdirectories
# depend on stuff in "common" or tools being built first! Also "data" depends
# (during "dist" time) on scripts in "tools".
# (sequential automake recursion; see SUBDIRS_ALL_RECURSIVE below for the
# parallel fanout equivalents of these same directories)
SUBDIRS = include common clients conf drivers tools data \
  lib scripts server tests docs/man docs

# Note: not generated from SUBDIRS, because not all are recursive:
# Most entries are "TARGET/DIR" tokens that SUBDIR_TGT_RULE (below) splits at
# the first slash into a make target and the sub-directory to run it in;
# "all-drivers" has no slash and is a plain local target with its own
# (conditional) rule further down.
SUBDIRS_ALL_RECURSIVE = \
	all/include \
	all/common \
	all/clients \
	all/conf \
	all-recursive/data \
	all-drivers \
	all/tools/nut-scanner \
	all/tools/nutconf \
	all-recursive/tools \
	all/lib \
	all-recursive/scripts \
	all/server \
	all/tests/NIT \
	all/tests \
	all/docs \
	all/docs/man \
	all-recursive/docs \
	all-recursive/tests

# Library creation happens in a number of subdirectories, may be optional
# (e.g. C++ ones are not built without a suitable compiler and enablement).
# List maintenance is aided by this query:
#   git grep -E 'LTLIBRARIES' '*.am'
# Each token maps to an "all-libs-local/DIR" rule below (same "TARGET/DIR"
# splitting convention as SUBDIRS_ALL_RECURSIVE).
SUBDIRS_ALL_LIBS_LOCAL = \
	all-libs-local/include \
	all-libs-local/common  \
	all-libs-local/clients \
	all-libs-local/drivers \
	all-libs-local/tests   \
	all-libs-local/tools   \
	all-libs-local/tools/nut-scanner

# First target often defines default behavior, and in automake is always at least:
#   all: all-recursive
# with maybe custom dependencies of "all:" from a Makefile.am tacked on too
# (which used to cause us a lot of headache, building same things twice at
# the same time).

#all all-recursive all-am-local all-local: all-fanout-maybe
# Hook into the standard automake chain: "all" depends on "all-recursive",
# which now also triggers our (conditional) parallel fanout attempt first.
all-recursive: all-fanout-maybe

# Verbosity for fanout rule tracing; 0/1 (or "default" that may auto-set
# to 0 or 1 in some rules below)
# Note: recipes only compare this against a literal "0", so any other value
# (including "default") currently enables the tracing messages.
SUBDIR_MAKE_VERBOSE = default

# Run the standard build if going sequential (or with unknown MAKEFLAGS),
# or fanout if parallel (presuming GNU/BSD/Sun make at least):
# Recipe notes: the "+" prefix lets the sub-make run (and share a jobserver)
# even under "make -n"; "@" silences the echo of the long script. The shell
# "case" patterns below heuristically detect -j/-l style options in the
# MAKEFLAGS/AM_MAKEFLAGS strings to decide whether we run in parallel.
# NUT_MAKE_SKIP_FANOUT=true is an escape hatch to disable the optimization.
all-fanout-maybe:
	+@if [ x"$(NUT_MAKE_SKIP_FANOUT)" = xtrue ] ; then \
		if [ x"$(SUBDIR_MAKE_VERBOSE)" != x0 ] ; then \
			echo "  SUBDIR-MAKE	$@: skip optimization for parallel make - NUT_MAKE_SKIP_FANOUT is set" ; \
		fi ; \
		exit 0 ; \
	  fi ; \
	  case "-$(MAKEFLAGS) $(AM_MAKEFLAGS)" in \
		*-j|*-j" "*|*-{j,l}{0,1,2,3,4,5,6,7,8,9}*|*-[jl][0123456789]*|*{-l,--jobs,--load-average,--max-load}" "{-,0,1,2,3,4,5,6,7,8,9}*|*--jobserver*|*--jobs" "[0123456789]*|*--load-average" "[0123456789]*|*--max-load" "[0123456789]*) \
			if [ x"$(SUBDIR_MAKE_VERBOSE)" != x0 ] ; then \
				echo "  SUBDIR-MAKE	$@: implement optimization for parallel make as 'make all-fanout-subdirs'" ; \
			fi ; \
			$(MAKE) $(AM_MAKEFLAGS) all-fanout-subdirs ;; \
		*) \
			if [ x"$(SUBDIR_MAKE_VERBOSE)" != x0 ] ; then \
				echo "  SUBDIR-MAKE	$@: skip optimization for parallel make - we seem to run sequentially now, seen MAKEFLAGS='$(MAKEFLAGS)' AM_MAKEFLAGS='$(AM_MAKEFLAGS)'" ; \
			fi ;; \
	  esac

# We start with a pass to `make all` in `common` dir because our wild recipes
# (with other subdirs ensuring the libraries they need have been built) can
# sometimes cause parallel compilation and library generation for same files
# driven by different make processes that do not know they aim for same goal,
# with some "make" implementations...
# Just in case we followed up with "make doc", since our wild recipes could end
# up writing into same files and so corrupting them (fixes applied, but...)

# FIXME: Alas, we still tend to step on our toes when making everything at
# once from scratch, so still do benefit from pre-making the libraries:
# Staged build: generated headers first, then common/, then all libraries,
# and only then the (possibly parallel) fanout across all sub-directories.
all-fanout-staged:
	+$(MAKE) $(AM_MAKEFLAGS) all/include
	+$(MAKE) $(AM_MAKEFLAGS) all/common
	+$(MAKE) $(AM_MAKEFLAGS) all-fanout-libs
	+$(MAKE) $(AM_MAKEFLAGS) all-fanout-subdirs

# Umbrella target: build every "TARGET/DIR" token (in parallel if asked to).
all-fanout-subdirs: $(SUBDIRS_ALL_RECURSIVE)

# Umbrella for library-producing sub-directories; "all-libs-local" is the
# spelling other rules in this file use as a prerequisite.
all-fanout-libs all-libs-local: $(SUBDIRS_ALL_LIBS_LOCAL)

#all all-am-local all-local:
#	+@cd common && $(MAKE) $(AM_MAKEFLAGS) all
#	+@$(MAKE) $(AM_MAKEFLAGS) all-recursive
#	+@$(MAKE) $(AM_MAKEFLAGS) doc
#	+@$(MAKE) $(AM_MAKEFLAGS) doc

# Installation directories, substituted by the configure script:
bindir = @bindir@
sbindir = @sbindir@
driverexecdir = @driverexecdir@
cgiexecdir = @cgiexecdir@

# Automatically update the libtool script if it becomes out-of-date
# See https://www.gnu.org/software/libtool/manual/html_node/LT_005fINIT.html
# (config.status regenerates ./libtool in place - standard LT_INIT idiom)
LIBTOOL_DEPS = @LIBTOOL_DEPS@
libtool: $(LIBTOOL_DEPS)
	$(SHELL) ./config.status libtool

# COPYING and other autotools-standard files are included automatically
# by automake. Note that the INSTALL file is (re-)imposed by autotools
# runs and is essentially a manual on configure script general usage, so
# NUT's actual installation notes have had to use a different filename.
EXTRA_DIST = LICENSE-GPL2 LICENSE-GPL3 LICENSE-DCO MAINTAINERS

# Since the renaming of documentation to `*.adoc` extension to help IDE
# and GitHub UIs to render the source files in a pretty fashion, we need
# to list them:
EXTRA_DIST += INSTALL.nut.adoc UPGRADING.adoc TODO.adoc NEWS.adoc README.adoc

# The document is now part of qa-guide; the script might be a bit git-oriented,
# but can be useful in builds from tarball, probably. Anyhow, having the doc
# without the tool which it documents is odd.
EXTRA_DIST += ci_build.sh ci_build.adoc

# Tarballs created by `make dist` include the `configure.ac` and `m4/*` sources
# but lack NUT magic logic to recreate the `configure` script if someone would
# want to adapt it to their autotools or locally fix a tarball-based build.
EXTRA_DIST += autogen.sh

# Optionally install the feature-report log produced by the configure run
# along with other (non-dist) data files:
if KEEP_NUT_REPORT
nodist_data_DATA = config.nut_report_feature.log
endif KEEP_NUT_REPORT

# Not too different from automake generated recursive rules at first sight,
# but here we do not loop all subdirs sequentially - instead, a sub-make
# (maybe parallel itself and with parallel flags passed) with a certain
# target in specified dir is the goal, all as separate targets for this
# level's Makefile:
# Parsing convention: unless the caller pre-set TGT and/or DIR in the
# environment, TGT becomes the token of "$@" before the first "/" and DIR
# the remainder. An optional SUBDIR_TGT_MAKEFLAGS environment variable can
# carry extra variable assignments into the sub-make invocation.
SUBDIR_TGT_RULE = ( \
	[ x"$${TGT-}" != x ] || TGT="`echo '$@' | awk -F/ '{print $$1}'`" ; \
	[ x"$${DIR-}" != x ] || DIR="`echo '$@' | sed 's,^[^/]*/,,'`" ; \
	if [ x"$(SUBDIR_MAKE_VERBOSE)" != x0 ] ; then \
		echo "  SUBDIR-MAKE	STARTING: 'make $$TGT' in $$DIR ..." ; \
	fi ; \
	cd "$(abs_builddir)/$${DIR}" && \
	$(MAKE) $(AM_MAKEFLAGS) $${SUBDIR_TGT_MAKEFLAGS-} "$${TGT}" || { RES=$$?; echo "  SUBDIR-MAKE	FAILURE: 'make $$TGT' in $$DIR" >&2 ; exit $$RES ; } ; \
	if [ x"$(SUBDIR_MAKE_VERBOSE)" != x0 ] ; then \
		echo "  SUBDIR-MAKE	SUCCESS: 'make $$TGT' in $$DIR" ; \
	fi ; \
	)

# A way to quickly handle SUBDIRS_ALL_LIBS_LOCAL as dependency for all others
# (aka `make all-libs-local` also in root dir). Libs themselves have complex
# inter-dependencies which we do not spell out here and let one recipe handle
# the intimate details of other directories' deliverables (so far?). Query:
#   git grep -E '(LTLIBRARIES|\.la([ :'"`printf '\t'`"']|$))' '*.am'
# The "### Delivers/Requires" annotations below document which libraries and
# headers each sub-directory produces and consumes, mirroring each rule's
# declared prerequisites.

### Delivers: nut_version.h
all-libs-local/include:
	+@$(SUBDIR_TGT_RULE)

### Delivers: libcommon.la libcommonclient.la libcommonstr.la
###           (consume only one of these at a time!)
### Delivers: libcommonversion.la (only version methods)
### Delivers: libparseconf.la libnutconf.la libnutwincompat.la
### Requires-ext: include/nut_version.h
### Requires-int: libparseconf.la libcommonclient.la
all-libs-local/common: all-libs-local/include
	+@$(SUBDIR_TGT_RULE)

### Delivers: libupsclient.la libnutclient.la libnutclientstub.la
### Delivers: libupsclient-version.h
### LIB-Requires-ext: common/libcommonclient.la
### Requires-ext: common/libcommon.la common/libcommonclient.la
### Requires-ext: common/libcommonversion.la
### Requires-ext: common/libparseconf.la
### Requires-int: libupsclient.la
all-libs-local/clients: all-libs-local/common
	+@$(SUBDIR_TGT_RULE)

### Delivers: libdummy.la libdummy_serial.la libdummy_upsdrvquery.la
### Delivers: libdummy_mockdrv.la libserial-nutscan.la
### LIB-Requires-ext: common/libcommon.la common/libparseconf.la
### Requires-ext: common/libcommon.la common/libparseconf.la
### Requires-ext: clients/libupsclient.la (dummy-ups only)
### Requires-int: libdummy.la libdummy_upsdrvquery.la
### Requires-int: libdummy_serial.la
all-libs-local/drivers: all-libs-local/common
	+@$(SUBDIR_TGT_RULE)

### (continued: library deliverables per sub-directory, as above)
### Delivers: libdriverstubusb.la
### LIB-Requires-ext: #COMMENTED-AWAY# common/libcommon.la
### Requires-ext: common/libcommon.la common/libnutconf.la
### Requires-ext: clients/libnutclient.la clients/libnutclientstub.la
### Requires-ext: drivers/libdummy_mockdrv.la
### Requires-int: libdriverstubusb.la
all-libs-local/tests: all-libs-local/common
	+@$(SUBDIR_TGT_RULE)

### Delivers: generated sources and/or headers for nut-scanner
### No dependencies: actually runs as part of autogen.sh but may be
### re-run during development when USB or SNMP driver sources change.
all-libs-local/tools:
	+@$(SUBDIR_TGT_RULE)

### Delivers: libnutscan.la
### LIB-Requires-ext: drivers/libserial-nutscan.la
### LIB-Requires-ext: common/libnutwincompat.la common/libcommonstr.la
### LIB-Requires-ext: common/libcommonversion.la
### HDR-Requires-ext: clients/libupsclient-version.h
### HDR-Requires-ext: nut-scanner/nutscan-snmp.h nut-scanner/nutscan-usb.h
###       (generated by nut-scanner-deps/tools aliased as all-libs-local/tools)
### Requires-int: libnutscan.la
### Note: indirectly (ltdl) may use installed libupsclient.so
###       however does directly use libupsclient-version.h
###       for hints to find it at run-time
all-libs-local/tools/nut-scanner: all-libs-local/drivers all-libs-local/common all-libs-local/clients all-libs-local/tools
	+@$(SUBDIR_TGT_RULE)

# Handle all SUBDIRS_ALL_RECURSIVE in a way that dependencies can be specified,
# and portably to different make program implementations. Note we may revisit
# some dirs via "all-recursive" of a parent after "all" in them first, but it is
# expected to be a quick no-op (beneficial overall in parallel make situation).
# NOTE: "lib" dir only delivers pkg-config metadata or legacy scripts for any
# third-party development to integrate with NUT libs, no library recipes there.

# These targets have no local prerequisites here; each simply forwards its
# own "TARGET/DIR" token to SUBDIR_TGT_RULE:
all/conf \
all/lib \
.ChangeLog.adoc-parsed.latest/docs \
ChangeLog.adoc-parsed/docs \
all-recursive/data:
	+@$(SUBDIR_TGT_RULE)

# Full "all" in include/ after its generated headers were made:
all/include: all-libs-local/include
	+@$(SUBDIR_TGT_RULE)

# Prepare documentation sources in docs/ and docs/man before the full doc
# builds; extra settings travel into the sub-make via SUBDIR_TGT_MAKEFLAGS
# or exported environment variables (both supported by SUBDIR_TGT_RULE).
prep-src-docs/docs/man:
	+@SUBDIR_TGT_MAKEFLAGS='MAINTAINER_DOCS_PREP_MAN_DELAY=3'; export SUBDIR_TGT_MAKEFLAGS; $(SUBDIR_TGT_RULE)

prep-src-docs/docs:
	+@DOCS_NO_MAN=true; export DOCS_NO_MAN; $(SUBDIR_TGT_RULE)

all/docs/man: prep-src-docs/docs/man
	+@$(SUBDIR_TGT_RULE)

# Note: we optionally sort of depend on ChangeLog.adoc so it is pre-made and
# pre-processed for html/pdf renders (if any are requested), so they surely
# do not compete for it to be made by independent "make" processes later on.
# BUT we do not want to (re-)build ChangeLog if no (relevant) DOC_BUILD_LIST
# types are enabled.
# The @DOC_BUILD_LIST@ token is substituted by configure with the document
# types selected at that time; the case below only pre-generates ChangeLog
# artifacts when a pdf/html render was requested.
MAINTAINER_ASCIIDOCS_CHANGELOG_DELAY = 0
all/docs: prep-src-docs/docs/man
	+@case "@DOC_BUILD_LIST@" in \
		*pdf*|*html-single*|*html-chunked*) \
			echo "  DOC-CHANGELOG-ASCIIDOC	Pre-generate ChangeLog artifacts before the bulk of $@ ..." ; \
			MAINTAINER_ASCIIDOCS_CHANGELOG_DELAY="$(MAINTAINER_ASCIIDOCS_CHANGELOG_DELAY)" \
			export MAINTAINER_ASCIIDOCS_CHANGELOG_DELAY && \
			$(MAKE) $(AM_MAKEFLAGS) MAINTAINER_ASCIIDOCS_CHANGELOG_DELAY="$(MAINTAINER_ASCIIDOCS_CHANGELOG_DELAY)" ChangeLog.adoc && \
			$(MAKE) $(AM_MAKEFLAGS) MAINTAINER_ASCIIDOCS_CHANGELOG_DELAY="$(MAINTAINER_ASCIIDOCS_CHANGELOG_DELAY)" .ChangeLog.adoc-parsed.latest/docs && \
			echo "  DOC-CHANGELOG-ASCIIDOC	Pre-generate ChangeLog artifacts before the bulk of $@ : SUCCESS" ;; \
		*) ;; \
	  esac
	+@$(MAKE) $(AM_MAKEFLAGS) prep-src-docs/docs
	+@DOCS_NO_MAN=true; export DOCS_NO_MAN; $(SUBDIR_TGT_RULE)

# Recursive docs pass only after both non-man and man docs were built:
all-recursive/docs: all/docs all/docs/man
	+@$(SUBDIR_TGT_RULE)

# Dependencies below are dictated by who needs whose library from another dir
# (generated by a sub-make there, so we pre-emptively ensure it exists to avoid
# conflicts of several make's writing to same files). Aided by this query:
#   git grep -E '/[^ ]*\.la([ :]|$)' '*.am'
# It does help to spell out all dependencies, even if transitive, to ensure
# that the top-level make completes needed (all-libs*) targets before drilling.
# (annotations use the same Delivers/Requires vocabulary as the libs rules above)

### Requires-int: libparseconf.la libcommonclient.la
all/common: all/include all-libs-local/common
	+@$(SUBDIR_TGT_RULE)

### Requires-ext: common/libcommon.la common/libcommonclient.la
### Requires-ext: common/libparseconf.la
### Requires-ext: common/libcommonversion.la
### Requires-int: libupsclient.la
all/clients: all/common all-libs-local/clients
	+@$(SUBDIR_TGT_RULE)

### Summary of drivers/ subdir dependencies:
### Requires-ext: common/libcommon.la common/libparseconf.la
### Requires-ext: common/libcommonversion.la
### Requires-ext: clients/libupsclient.la (dummy-ups only)
### Requires-int: libdummy.la libdummy_upsdrvquery.la
### Requires-int: libdummy_serial.la

# TODO in the future: propagate the knowledge of whether we are building
# dummy-ups by default (if only SOME_DRIVERS are requested) from configure.ac,
# and so decide if a goal to build it would conflict or not with "all/drivers"
# (or alternately if it should be or not be part of "all-drivers", keeping the
# current web of definitions in place). Primarily for the benefit of tests/NIT.

# NOTE: The dummy-ups driver program relies on both libupsclient (ext) and
# libdummy_upsdrvquery.la (int) - and so requires all-libs-local/drivers too.
if SOME_DRIVERS
# Here we do wholesale subdir all-libs-local at the moment, to
# build whichever drivers are enabled (no idea if dummy-ups is
# in the default list - FIXME: configure.ac could tell us, so
# we could provide it for tests/NIT anyway...)
all/drivers: all-libs-local/clients all-libs-local/common all-libs-local/drivers
	+@$(SUBDIR_TGT_RULE)

all-drivers: all/drivers
else !SOME_DRIVERS
# We build all drivers, let dummy-ups be built with respect for
# libupsclient while not blocking other driver builds on that.
# NUTSW_DRIVERLIST_DUMMY_UPS acts as an equivalent of DOCS_NO_MAN
# in a way, to let other (non dummy-ups) drivers get built in a
# separate target with separate dependency trail, then "all-drivers"
# should depend on both "dummy-ups" and the rest in so constrained
# "all/drivers". This allows to ultimately not order one after another.
dummy-ups$(EXEEXT)/drivers: all-libs-local/clients all-libs-local/common all-libs-local/drivers
	+@$(SUBDIR_TGT_RULE)

all/drivers: all/common all-libs-local/drivers
	+@SUBDIR_TGT_MAKEFLAGS='NUTSW_DRIVERLIST_DUMMY_UPS=dummy'; export SUBDIR_TGT_MAKEFLAGS; $(SUBDIR_TGT_RULE)

# Umbrella: both the dedicated dummy-ups build and the constrained bulk build:
all-drivers: dummy-ups$(EXEEXT)/drivers all/drivers
endif !SOME_DRIVERS

### Requires-ext: common/libcommon.la common/libparseconf.la
### Requires-ext: common/libcommonversion.la
all/server: all-libs-local/common
	+@$(SUBDIR_TGT_RULE)

### LIB-Requires-ext: drivers/libserial-nutscan.la
### LIB-Requires-ext: common/libnutwincompat.la common/libcommonstr.la
### LIB-Requires-ext: common/libcommonversion.la
### Requires-ext: clients/libupsclient-version.h
### Requires-int: libnutscan.la
all/tools/nut-scanner: all-libs-local/include all-libs-local/common \
	all-libs-local/drivers all-libs-local/clients \
	all-libs-local/tools/nut-scanner
	+@$(SUBDIR_TGT_RULE)

# only libnutscan is needed for nutconf,
# but we do wholesale subdir all-libs-local at the moment...
### Requires-ext: common/libcommon.la common/libnutconf.la
### Requires-ext: common/libcommonversion.la
### Requires-ext: tools/nut-scanner/libnutscan.la
all/tools/nutconf: all-libs-local/tools/nut-scanner all-libs-local/common
	+@$(SUBDIR_TGT_RULE)

# Fully build both tool sub-directories, then revisit tools/ recursively:
all-recursive/tools: all/tools/nutconf all/tools/nut-scanner
	+@$(SUBDIR_TGT_RULE)

# Prereqs for NIT are runnable upsd, upsc, upsmon, dummy-ups, sample configs...
# For the actual "make check-NIT" runs - also python scripts and/or compiled
# tests/cppnit (if available).
# FIXME: technically of all drivers we need dummy-ups here;
# if it is not enabled among SOME_DRIVERS, things can get funny...
# But then we should also consider what is enabled by configure and what is not.
# Maybe we are doing a quick build not to be tested at all? :-/
all/tests/NIT: all/clients all/server all-drivers all-recursive/tools all-recursive/data
	+@$(SUBDIR_TGT_RULE)

### LIB-Requires-ext: #COMMENTED-AWAY# common/libcommon.la
### Requires-ext: common/libcommon.la common/libnutconf.la
### Requires-ext: clients/libnutclient.la clients/libnutclientstub.la
### Requires-ext: drivers/libdummy_mockdrv.la
### Requires-int: libdriverstubusb.la
all/tests: all-libs-local/tests all-libs-local/drivers all-libs-local/common all-libs-local/clients
	+@$(SUBDIR_TGT_RULE)

# Revisit tests/ recursively after both NIT prereqs and unit-test builds:
all-recursive/tests: all/tests/NIT all/tests
	+@$(SUBDIR_TGT_RULE)

# The Windows helper scripts need NUT common libraries only when building
# for Windows with a resource compiler available; the other two branches are
# identical no-dependency forwarders (NOTE(review): automake conditionals can
# not be OR-combined, hence the duplication).
if HAVE_MINGW_RESGEN
if HAVE_WINDOWS
### Requires-ext: common/libcommon.la
### Requires-ext: common/libcommonversion.la
all/scripts/Windows: all-libs-local/common
	+@$(SUBDIR_TGT_RULE)
else !HAVE_WINDOWS
all/scripts/Windows:
	+@$(SUBDIR_TGT_RULE)
endif !HAVE_WINDOWS
else !HAVE_MINGW_RESGEN
all/scripts/Windows:
	+@$(SUBDIR_TGT_RULE)
endif !HAVE_MINGW_RESGEN

all-recursive/scripts: all/scripts/Windows
	+@$(SUBDIR_TGT_RULE)

# ----------------------------------------------------------------------
# flags to pass to ./configure when calling "make distcheck" and "make
# distcheck-light". Try to check as many features as possible! Also
# need to give augeas-lenses-dir, hotplug-dir and udev-dir, and request
# PyNUT to be installed near the NUT-Monitor app (if feasible) so that
# staged install does not fail. Note that by default PyNUT tries to go
# into the system Python site-packages location, and autotools does not
# tweak paths not using ${prefix} so `make distcheck` fails for it as
# it does not play with a `DESTDIR` either.
# Variants: full-featured, relaxed ("light"), relaxed-but-with-real-man-pages,
# and a valgrind run with debug info and docs skipped:

DISTCHECK_FLAGS = --with-all --with-ssl --with-doc=auto --enable-docs-man-for-progs-built-only=no --with-pynut=app --with-nut_monitor=force CXXFLAGS='@NUT_CONFIG_CXXFLAGS@' CFLAGS='@NUT_CONFIG_CFLAGS@' CPPFLAGS='@NUT_CONFIG_CPPFLAGS@' LDFLAGS='@NUT_CONFIG_LDFLAGS@'
DISTCHECK_LIGHT_FLAGS = --with-all=auto --with-ssl=auto --with-doc=auto --enable-docs-man-for-progs-built-only=no --with-pynut=app --with-nut_monitor=force CXXFLAGS='@NUT_CONFIG_CXXFLAGS@' CFLAGS='@NUT_CONFIG_CFLAGS@' CPPFLAGS='@NUT_CONFIG_CPPFLAGS@' LDFLAGS='@NUT_CONFIG_LDFLAGS@'
DISTCHECK_LIGHT_MAN_FLAGS = --with-all=auto --with-ssl=auto --with-doc=man --enable-docs-man-for-progs-built-only=no --with-pynut=app --with-nut_monitor=force CXXFLAGS='@NUT_CONFIG_CXXFLAGS@' CFLAGS='@NUT_CONFIG_CFLAGS@' CPPFLAGS='@NUT_CONFIG_CPPFLAGS@' LDFLAGS='@NUT_CONFIG_LDFLAGS@'
DISTCHECK_VALGRIND_FLAGS = --with-all=auto --with-ssl=auto --with-doc=skip --with-valgrind CXXFLAGS='@NUT_CONFIG_CXXFLAGS@ -g' CFLAGS='@NUT_CONFIG_CFLAGS@ -g' CPPFLAGS='@NUT_CONFIG_CPPFLAGS@' LDFLAGS='@NUT_CONFIG_LDFLAGS@' --with-pynut=app --with-nut_monitor=force

# Note: this rule uses envvar DISTCHECK_FLAGS expanded at run-time
# The '$${prefix}' spellings below survive make expansion as literal
# "${prefix}" tokens, so configure expands them against the distcheck
# staging prefix (keeping the staged install self-contained).
DISTCHECK_CONFIGURE_FLAGS = ${DISTCHECK_FLAGS}		\
 PKG_CONFIG_PATH='@PKG_CONFIG_PATH@'			\
 --with-systemdsystemunitdir='$${prefix}/lib/systemd/system' \
 --with-systemdsystempresetdir='$${prefix}/usr/lib/systemd/system-preset' \
 --with-systemdshutdowndir='$${prefix}/lib/systemd/system-shutdown' \
 --with-systemdtmpfilesdir='$${prefix}/usr/lib/tmpfiles.d' \
 --with-augeas-lenses-dir='$${prefix}/usr/share/augeas/lenses'		\
 --with-hotplug-dir='$${prefix}/etc/hotplug'		\
 --with-udev-dir='$${prefix}/etc/udev'			\
 --with-devd-dir='$${prefix}/etc/devd'			\
 --with-pynut=app --with-nut_monitor=force

# Note: trickery with prefix below is needed to expand it from
# DISTCHECK_CONFIGURE_FLAGS defaults defined above in a manner
# that is meaningful for sub-make program (gets stripped away
# otherwise and breaks custom distchecks).

# Helper for CI runs: ensure presence of pre-built man pages
# (even if faked where lack and can not build them), so that
# the majority of distcheck logic can pass.
# See also docs/man/Makefile.am
# Decision table below: fall back to the *-fake-man variants only when man
# pages can neither be generated locally nor installed pre-built from the
# dist tarball; otherwise the plain distcheck/dist recipes suffice.
if KEEP_NUT_REPORT
endif KEEP_NUT_REPORT
if KNOWN_UNABLE_MANS
if DOC_INSTALL_DISTED_MANS
distcheck-ci: distcheck
dist-ci: dist
else !DOC_INSTALL_DISTED_MANS
distcheck-ci: distcheck-fake-man
dist-ci: dist-fake-man
endif !DOC_INSTALL_DISTED_MANS
else !KNOWN_UNABLE_MANS
if WITH_MANS
distcheck-ci: distcheck
dist-ci: dist
else !WITH_MANS
if HAVE_ASCIIDOC
distcheck-ci: distcheck
dist-ci: dist
else !HAVE_ASCIIDOC
if DOC_INSTALL_DISTED_MANS
distcheck-ci: distcheck
dist-ci: dist
else !DOC_INSTALL_DISTED_MANS
distcheck-ci: distcheck-fake-man
dist-ci: dist-fake-man
endif !DOC_INSTALL_DISTED_MANS
endif !HAVE_ASCIIDOC
endif !WITH_MANS
endif !KNOWN_UNABLE_MANS

# Helper for a number of recipes below that explicitly agree to not require
# always real man pages (but require them to dist => distcheck-something)
# for CI or developer iterations on environments with incomplete tool kits:
# (the heavy lifting / possible faking happens in docs/man - see its Makefile.am)
distcheck-light-DIST_ALL_PAGES:
	@echo "Starting $@" >&2
	+@cd "$(abs_builddir)/docs/man" && $(MAKE) $(AM_MAKEFLAGS) all
	+@cd "$(abs_builddir)/docs/man" && $(MAKE) $(AM_MAKEFLAGS) distcheck-light-DIST_ALL_PAGES
	@echo "Completed $@: preparation of pre-built man pages for dist tarball, possibly faked" >&2

# In some recipes we `configure --with-docs=skip`, so "make dist" should not
# hiccup on lack of the page files (nor try to make them); not using simple
# distcheck-light-DIST_ALL_PAGES step due to custom logic!
# Note: the directory argument of `cd` is quoted (consistently with the
# sibling rule above) so build trees whose absolute path contains spaces
# do not break or mis-resolve the change of directory.
distcheck-light-DIST_ALL_PAGES-docs-skipped:
	@echo "Starting $@" >&2
	+@cd "$(abs_builddir)/docs/man" && $(MAKE) $(AM_MAKEFLAGS) prep-src-docs
	+@cd "$(abs_builddir)/docs/man" && $(MAKE) $(AM_MAKEFLAGS) FAKE_PAGES_BUMP_SRC=false distcheck-light-DIST_ALL_PAGES
	@echo "Completed $@: preparation of pre-built man pages for dist tarball, possibly faked" >&2

# Here we generate man pages (if absent) or fake them
# Require other dependencies as usual distcheck does;
# be sure to pass through caller's DISTCHECK_FLAGS (if any)
# The prefix='$${prefix}' assignment keeps a literal ${prefix} token alive
# for the sub-make (see the "trickery" note above DISTCHECK_CONFIGURE_FLAGS).
distcheck-fake-man: distcheck-light-DIST_ALL_PAGES
	@echo "Starting $@" >&2
	+prefix='$${prefix}'; if test x"$(DISTCHECK_FLAGS)" = x ; then \
		$(MAKE) $(AM_MAKEFLAGS) distcheck ; \
	 else \
		$(MAKE) $(AM_MAKEFLAGS) DISTCHECK_FLAGS="$(DISTCHECK_FLAGS)" distcheck ; \
	 fi
	@echo "Completed $@: strict distcheck, but with possibly faked pre-built man pages" >&2

dist-fake-man: distcheck-light-DIST_ALL_PAGES
	@echo "Starting $@" >&2
	+prefix='$${prefix}'; $(MAKE) $(AM_MAKEFLAGS) dist
	@echo "Completed $@: strict dist, but with possibly faked pre-built man pages" >&2

# Here we allow to skip docs if tools are absent, so "make dist"
# should not hiccup on lack of the page files (but MAY make them
# if it can); be relaxed toward other dependencies.
distcheck-light: distcheck-light-DIST_ALL_PAGES
	@echo "Starting $@" >&2
	+prefix='$${prefix}'; $(MAKE) $(AM_MAKEFLAGS) DISTCHECK_FLAGS="$(DISTCHECK_LIGHT_FLAGS)" distcheck
	@echo "Completed $@: relaxed distcheck, with possibly faked pre-built man pages" >&2

# Require man pages to be built (or fail trying), but not other docs;
# be relaxed toward other dependencies.
# (no distcheck-light-DIST_ALL_PAGES prerequisite here - presumably because
# faked pages would defeat the point of requiring real ones; confirm)
distcheck-light-man:
	@echo "Starting $@" >&2
	+prefix='$${prefix}'; $(MAKE) $(AM_MAKEFLAGS) DISTCHECK_FLAGS="$(DISTCHECK_LIGHT_MAN_FLAGS)" distcheck
	@echo "Completed $@: relaxed distcheck, with real man pages" >&2

if HAVE_VALGRIND
# Make the check in current build, if possible
# (the directory argument of `cd` is quoted, consistently with other rules
# in this file, so build trees with spaces in their path do not break)
memcheck:
	@echo "Starting $@" >&2
	@echo "See also scripts/valgrind in NUT sources for a helper tool"
	+@cd "$(builddir)/tests" && $(MAKE) $(AM_MAKEFLAGS) -s $@

# Make a distcheck (and check in particular) with enabled valgrind and debug info
# Here we skip docs so "make dist" should not hiccup on lack of the page files
# (nor try to make them); not using simple distcheck-light-DIST_ALL_PAGES step
# due to custom logic!
distcheck-valgrind: distcheck-light-DIST_ALL_PAGES-docs-skipped
	@echo "Starting $@" >&2
	@echo "See also scripts/valgrind in NUT sources for a helper tool"
	+prefix='$${prefix}'; $(MAKE) $(AM_MAKEFLAGS) DISTCHECK_FLAGS="$(DISTCHECK_VALGRIND_FLAGS)" distcheck
	@echo "Completed $@: relaxed distcheck, without man pages ('pre-built' placeholders in dist archive), running tests under valgrind" >&2
else !HAVE_VALGRIND
# Without valgrind detected at configure time, both targets still exist
# (so callers do not fail on an unknown target) but only report the skip:
memcheck distcheck-valgrind:
	@echo "Starting $@" >&2
	@echo "See also scripts/valgrind in NUT sources for a helper tool"
	@echo "  SKIP	$@ : valgrind was not detected on this system by configure script" >&2
endif !HAVE_VALGRIND

# workaround the dist generated files that are also part of the distribution
# Note that distcleancheck is disabled for now, while waiting for a proper
# solution that does not break older Unix systems
#distcleancheck_listfiles = \
#	find . -type f -exec sh -c 'test -f $(srcdir)/{} || echo {}' ';'
# Override automake's distcleancheck with a no-op (":" is the shell builtin
# that does nothing and always succeeds):
distcleancheck:
	@:

# Quick alias for root dir recipe:
realclean: maintainer-clean

# Files made by our targets:
CLEANFILES = *-spellchecked *.adoc-parsed cppcheck*.xml config.log.inplace-outer
# ChangeLog is (re-)generated from Git metadata, see rules further below:
DISTCLEANFILES = ChangeLog

# Most of the files generated by custom rules in the configure script
# or by autogen.sh are cleaned by the Makefile.am in their directories.
# Files below are re-created by running `configure` script and may be
# wiped by a `make distclean`:
DISTCLEANFILES += config.log configure~
#???# configure.ac~
DISTCLEANFILES += include/config.h.in~

# Files made by autotools and common rituals of the configure script,
# these are needed to run the configure script itself so are not wiped
# by a mere `make distclean`; most of these are copied by autotools
# from their installation, or made by `automake` etc. on the system
# which generates `configure`; rebuilding NUT after deleting these
# requires `autogen.sh` script to be re-run (and tools available):
MAINTAINERCLEANFILES = INSTALL
MAINTAINERCLEANFILES += aclocal.m4 config.guess config.sub
MAINTAINERCLEANFILES += configure
MAINTAINERCLEANFILES += depcomp install-sh ltmain.sh test-driver ar-lib
MAINTAINERCLEANFILES += m4/libtool.m4 m4/ltoptions.m4 m4/ltsugar.m4 m4/ltversion.m4 m4/lt~obsolete.m4
MAINTAINERCLEANFILES += Makefile.in .dirstamp include/config.h.in

# Executed after default rules
# Removes the autotools-provided "missing" helper script; "|| true"
# keeps the target succeeding if the file is absent or undeletable.
maintainer-clean-local:
	$(AM_V_at)rm -f missing || true

# Do not let $SUBDIRS/Makefile rules delete their local .deps because
# this breaks our ability to clean up (e.g. some common/.../*.Plo files
# are included by generated Makefiles from other subdirectories, so they
# should be available during their clean-up). Just in case, we make sure
# here that their sub-distcleans complete first.
# The trailing find removes any leftover .deps dirs only after all
# subdirectory distcleans have finished.
distclean-local:
	+@for DIR in $(SUBDIRS) ; do \
		if test -f "$${DIR}/Makefile" ; then \
			echo "  DISTCLEAN	in $${DIR}" >&2 ; \
			( cd "$${DIR}" && $(MAKE) $(AM_MAKEFLAGS) -s distclean ) || exit ; \
		fi ; \
	 done
	$(AM_V_at)rm -rf .inst tmp autom4te.cache
	$(AM_V_at)find "$(builddir)" -type d -name '.deps' | while read DIR ; do rm -rf "$${DIR}" ; done

# Hook the documentation building and validating recipes
# Note: these are optionally available (as determined during configure runs)
# Only require SPELLCHECK_REPORT_MAYBE_UPDATED_DICT=yes for the last entry
# (reduce noise for spellcheck-interactive)
# Maint: grep -l 'SPELLCHECK_' `git grep -lw spellcheck '*.am'`
# Each "spellcheck/<dir>" token below is a phony fan-out target name;
# the "spellcheck/" prefix is stripped before cd'ing into <dir>.
SPELLCHECK_DIRS_MOST = \
	spellcheck/docs \
	spellcheck/docs/man \
	spellcheck/conf \
	spellcheck/data \
	spellcheck/data/html \
	spellcheck/scripts \
	spellcheck/scripts/Solaris \
	spellcheck/scripts/Windows \
	spellcheck/scripts/devd \
	spellcheck/scripts/external_apis \
	spellcheck/scripts/hotplug \
	spellcheck/scripts/installer \
	spellcheck/scripts/python \
	spellcheck/scripts/systemd \
	spellcheck/scripts/udev \
	spellcheck/scripts/upsdrvsvcctl

# Same but with an info notice, so runs alone last
SPELLCHECK_DIRS_LAST = spellcheck/tests/NIT

SPELLCHECK_DIRS = $(SPELLCHECK_DIRS_MOST) $(SPELLCHECK_DIRS_LAST)

# Fan-out rules: each spellcheck/<dir> pseudo-target delegates to <dir>
# via SUBDIR_TGT_RULE, running the make target named in TGT
# (set to 'spellcheck' or 'spellcheck-interactive' by the caller below).
$(SPELLCHECK_DIRS_MOST): prep-src-docs/docs/man prep-src-docs/docs
	+@TGT="$(SPELLCHECK_TGT)"; export TGT; $(SUBDIR_TGT_RULE)

# The last dir also gets SPELLCHECK_REPORT_MAYBE_UPDATED_DICT=yes so only
# one (final) run reports a possibly updated dictionary:
$(SPELLCHECK_DIRS_LAST): prep-src-docs/docs/man prep-src-docs/docs $(SPELLCHECK_DIRS_MOST)
	+@SUBDIR_TGT_MAKEFLAGS="SPELLCHECK_REPORT_MAYBE_UPDATED_DICT=yes"; \
	  export SUBDIR_TGT_MAKEFLAGS; \
	  TGT="$(SPELLCHECK_TGT)"; export TGT; \
	  $(SUBDIR_TGT_RULE)

# We want to check all files even if some have errors, so sub-make with "-k":
# Tricky TGT to pass sort-of-same rules (and dir list) as spellcheck,
# but using the correct make target for this goal:
# FIXME: fanned-out recipes tend to fail early despite "make -ks", so for
#  now we retry with a not-fanned-out attempt to cover most touch-files
# Two modes: with NUT_MAKE_SKIP_FANOUT=true a plain sequential loop over
# the spellcheck dirs is used; otherwise the parallel fan-out targets
# above are tried first, falling back to the sequential mode on failure.
spellcheck spellcheck-interactive:
	+@SUBDIR_TGT_MAKEFLAGS="$${SUBDIR_TGT_MAKEFLAGS-} -k -s " ; export SUBDIR_TGT_MAKEFLAGS ; \
	  if [ x"$(NUT_MAKE_SKIP_FANOUT)" = xtrue ] ; then \
		RES=0 ; \
		if [ x"$(SUBDIR_MAKE_VERBOSE)" != x0 ] ; then \
			echo "  SUBDIR-MAKE	$@: skip optimization for parallel make - NUT_MAKE_SKIP_FANOUT is set" ; \
		fi ; \
		(cd $(builddir)/docs && $(MAKE) $(AM_MAKEFLAGS) -k -s $(abs_top_builddir)/docs/.prep-src-docs) || RES=$$? ; \
		(cd $(builddir)/docs/man && $(MAKE) $(AM_MAKEFLAGS) -k -s $(abs_top_builddir)/docs/man/.prep-src-docs) || RES=$$? ; \
		for D in $(SPELLCHECK_DIRS_MOST) ; do \
			D="`echo "$$D" | sed 's,^spellcheck/,,'`" ; \
			(cd "$(builddir)/$$D" && $(MAKE) $(AM_MAKEFLAGS) -k -s $@) || RES=$$? ; \
		done ; \
		for D in $(SPELLCHECK_DIRS_LAST) ; do \
			D="`echo "$$D" | sed 's,^spellcheck/,,'`" ; \
			(cd "$(builddir)/$$D" && $(MAKE) $(AM_MAKEFLAGS) SPELLCHECK_REPORT_MAYBE_UPDATED_DICT=yes -k -s $@) || RES=$$? ; \
		done ; \
		exit $$RES ; \
	  fi ; \
	  SUBDIR_MAKE_VERBOSE="$(SUBDIR_MAKE_VERBOSE)" ; \
	  if [ x"$(SUBDIR_MAKE_VERBOSE)" = xdefault ] ; then \
		SUBDIR_MAKE_VERBOSE=0 ; \
	  fi ; \
	  export SUBDIR_MAKE_VERBOSE ; \
	  $(MAKE) $(AM_MAKEFLAGS) SPELLCHECK_TGT='$@' SUBDIR_MAKE_VERBOSE="$${SUBDIR_MAKE_VERBOSE}" -k -s $(SPELLCHECK_DIRS) && exit ; \
	  echo "WARNING: FAILED fanned-out attempt in $@, retrying with NUT_MAKE_SKIP_FANOUT" >&2 ; \
	  $(MAKE) $(AM_MAKEFLAGS) NUT_MAKE_SKIP_FANOUT=true SPELLCHECK_TGT='$@' -k -s $(SPELLCHECK_DIRS)

# Auto-parallel recipe (if current 'make' implementation supports the "-j N"
# syntax; the optional MAXPARMAKES may be set in NUT CI farm style builds):
# This variable expands to a shell snippet that sets the *shell* variable
# PARMAKES_OPT to "-j N" (default N=8) unless MAKEFLAGS/AM_MAKEFLAGS already
# carry a "j" (i.e. parallelism was requested by the caller); consumers must
# reference it as $${PARMAKES_OPT} so the shell, not make, expands it.
SET_PARMAKES_OPT = \
	+@PARMAKES_OPT=""; \
	  case " $(MAKEFLAGS) $(AM_MAKEFLAGS)" in \
		*"j"*) ;; \
		*) \
			if ! [ "$${MAXPARMAKES-}" -gt 1 ] 2>/dev/null ; then \
				MAXPARMAKES=8 ; \
			fi ; \
			PARMAKES_OPT="-j $${MAXPARMAKES}" ; \
		;; \
	  esac

# Run "spellcheck" with auto-parallelism when possible (see SET_PARMAKES_OPT).
# FIX: PARMAKES_OPT is a *shell* variable assigned by the expanded
# $(SET_PARMAKES_OPT) snippet, so it must be spelled $${PARMAKES_OPT} for
# make to pass "$PARMAKES_OPT" through to the shell. The previous
# ${PARMAKES_OPT} was expanded by make itself as an (undefined) make
# variable into an empty string, silently dropping the computed "-j N".
spellcheck-quick:
	+@$(SET_PARMAKES_OPT); \
	  $(MAKE) $(AM_MAKEFLAGS) -k -s $${PARMAKES_OPT} spellcheck

# Run auto-parallel recipe, and if something fails - re-run interactively:
# (the interactive retry forces SUBDIR_MAKE_VERBOSE=1 unless the caller
# already overrode the "default" setting)
spellcheck-interactive-quick:
	+@$(MAKE) $(AM_MAKEFLAGS) -k -s spellcheck-quick && exit ; \
	  echo "WARNING: in $@: make spellcheck-quick failed, retrying with spellcheck-interactive" >&2 ; \
	  if [ x"$(SUBDIR_MAKE_VERBOSE)" = xdefault ] ; then \
		SUBDIR_MAKE_VERBOSE=1 ; export SUBDIR_MAKE_VERBOSE ; \
	  fi ; \
	  $(MAKE) $(AM_MAKEFLAGS) -k -s spellcheck-interactive

# Note: the "all-docs" and "check-docs" targets may require tools not
# found by `configure` script (and so avoided by conventional recipes)
# such as PDF generators, so it should only be called at developer's
# discretion, choice and risk. The "check-man" targets covers source
# texts, man pages and HTML rendering of man pages, as enabled by tools.
# All of these first refresh the .prep-src-docs stamps in docs/ and
# docs/man/, then delegate the named target to docs/.
doc spellcheck-sortdict spellcheck-report-dict-usage \
all-docs check-docs \
man all-man man-man check-man check-man-man html-man all-html:
	+cd $(abs_top_builddir)/docs && $(MAKE) $(AM_MAKEFLAGS) -s $(abs_top_builddir)/docs/.prep-src-docs
	+cd $(abs_top_builddir)/docs/man && $(MAKE) $(AM_MAKEFLAGS) -s $(abs_top_builddir)/docs/man/.prep-src-docs
	+cd $(abs_top_builddir)/docs && $(MAKE) $(AM_MAKEFLAGS) $@

# Top-level text files are produced by asciidoc-parsing their sources in
# docs/ ($(@F) is the bare file name of whichever target was requested):
INSTALL.nut UPGRADING NEWS README:
	+cd $(abs_top_builddir)/docs && $(MAKE) $(AM_MAKEFLAGS) ../$(@F).adoc-parsed && cp -f ../$(@F).adoc-parsed ../$(@F)

# Workarounds for https://github.com/github/markup/issues/1095
# require direct definition of our attributes in each source
# document, in order for GitHub Web-UI to render them nicely
# (unfortunately, asciidoc configs and includes are not handled
# at this time). Hopefully this will go away at some point.
# The following rule updates definitions in source asciidoc files
# between GH_MARKUP_1095_INCLUDE_BEGIN/END tags with contents of
# current docs/asciidoc-vars.conf file. It is intended to be used
# by maintainers (or brave contributors who would dare edit those
# definitions), to apply them into the committed document sources.
# Not bothering with "make dist" constraints etc. - this changes
# the contents of srcdir directly and intentionally.
# NOTE: Using `read -r` per POSIX standard to avoid backslashes
# being treated as escape characters:
# https://pubs.opengroup.org/onlinepubs/9699919799/utilities/read.html
MAINTAINER_ASCIIDOCS_RECIPE_DEBUG_STREAM = /dev/null
#MAINTAINER_ASCIIDOCS_RECIPE_DEBUG_STREAM = &2

# Implementation: each candidate file is split into three temp parts
# (1=before BEGIN tag, 2=between tags, 3=after END tag); part 2 is
# compared to the current conf and, if different, re-assembled around
# the fresh conf contents plus a provenance line with the Git revision.
maintainer-asciidocs:
	@USEDREV="`git log -1 --oneline --pretty=format:'%h (%cs) %s' docs/asciidoc-vars.conf`" || exit ; \
	 USEDREV_NOSUBJ="`git log -1 --oneline --pretty=format:'%h (%cs)' docs/asciidoc-vars.conf`" || exit ; \
	 echo "$@: Updating asciidoc text sources with docs/asciidoc-vars.conf as of commit: $${USEDREV}"; \
	 echo "//GH_MARKUP_1095_INCLUDE_BEGIN//$${USEDREV}" > docs/asciidoc-vars.conf.lastrev.tmp || exit ; \
	 find . -name '*.adoc' -or -name '*.txt' | ( \
	  FILES=""; \
	  while read F ; do \
	    grep -E '^//+GH_MARKUP_1095_INCLUDE_(BEGIN|END)' "$$F" >/dev/null \
	    || { echo "$@: SKIP: no GH_MARKUP_1095_INCLUDE_* tags: $$F"; continue ; } ; \
	    rm -f "$${F}"*.tmp || exit ; \
	    EXT="1.tmp"; \
	    while IFS='' read -r LINE ; do \
	        case "$${LINE}" in \
	            "//GH_MARKUP_1095_INCLUDE_BEGIN"*) EXT="2.tmp" ; continue ;; \
	            "//GH_MARKUP_1095_INCLUDE_END"*|"////GH_MARKUP_1095_INCLUDE_END"*) EXT="3.tmp" ; continue ;; \
	        esac ; \
	        printf '%s\n' "$${LINE}" >> "$${F}.$${EXT}" || exit ; \
	    done < "$$F" || { echo "$@: FAILED injection for $${F}" >&2; exit 1; } ; \
	    if test -s "$${F}.2.tmp" && test -z "`diff "$${F}.2.tmp" docs/asciidoc-vars.conf | tr -d '\n'`" ; then \
	        rm -f "$${F}"*.tmp ; \
	        echo "$@: SKIP: no changes: $$F"; continue ; \
	    fi; \
	    cat "$${F}.1.tmp" docs/asciidoc-vars.conf.lastrev.tmp docs/asciidoc-vars.conf > "$${F}.tmp" \
	    && echo '//GH_MARKUP_1095_INCLUDE_END//' >> "$${F}.tmp" \
	    && cat "$${F}.3.tmp" >> "$${F}.tmp" \
	    && mv -f "$${F}.tmp" "$${F}" \
	    || { echo "$@: FAILED injection for $${F}" >&2; exit 1; } ; \
	    echo "$@: UPDATED: $$F"; \
	    FILES="$${FILES} $${F}"; \
	    rm -f "$${F}"*.tmp ; \
	  done; \
	  rm -f docs/asciidoc-vars.conf.lastrev.tmp; \
	  if test -z "$${FILES}" ; then \
	    echo "$@: OVERALL-SKIP: No text files found with GH_MARKUP_1095_INCLUDE_ tags, or obsoleted docs/asciidoc-vars.conf contents";\
	  else \
	    echo "$@: OVERALL-UPDATED: You may now want to:"; \
	    echo "    make spellcheck-interactive-quick"; \
	    echo "    git add -p $${FILES} && git commit -sm 'Update NUT documentation sources with current docs/asciidoc-vars.conf: $${USEDREV_NOSUBJ}'"; \
	  fi; \
	 )

# NUT Integration Test suite variants, all delegated to tests/NIT:
check-NIT check-NIT-devel check-NIT-sandbox check-NIT-sandbox-devel:
	+cd $(builddir)/tests/NIT && $(MAKE) $(AM_MAKEFLAGS) $@

# Always (re-)derive the version file from Git metadata; depends on the
# never-created dummy-stamp so the script runs on every invocation.
VERSION_DEFAULT: dummy-stamp
	@abs_top_srcdir='$(abs_top_srcdir)' ; \
	 abs_top_builddir='$(abs_top_builddir)' ; \
	 export abs_top_srcdir ; export abs_top_builddir ; \
	 NUT_VERSION_QUERY=UPDATE_FILE '$(abs_top_srcdir)/tools/gitlog2version.sh'

CLEANFILES += VERSION_DEFAULT.tmp
EXTRA_DIST += VERSION_DEFAULT

# Best-effort delivery for (overly?) customized distros, e.g. via
#   echo NUT_VERSION_FORCED_SEMVER=1.1.1 > VERSION_FORCED_SEMVER
# Copies any non-empty VERSION_FORCED* override files (from srcdir or
# builddir) into the dist tree; failures are deliberately ignored.
dist-hook:
	for D in "$(abs_top_srcdir)" "$(abs_top_builddir)" ; do \
	    for F in VERSION_FORCED VERSION_FORCED_SEMVER ; do \
	        if [ -s "$$D/$$F" ] ; then \
	            cat "$$D/$$F" > "$(top_distdir)/$$F" || true ; \
	        fi ; \
	    done ; \
	done

# This target adds syntax-checking for committed shell script files,
# to avoid surprises and delays in finding fatal typos after packaging
###
### Note: currently, shellcheck target calls check-scripts-syntax
### so when both are invoked at once, in the end the check is only
### executed once. Later it is anticipated that shellcheck would
### be implemented by requiring, configuring and calling the tool
### named "shellcheck" for even more code inspection and details.
### Still, there remains value in also checking the script syntax
### by the very version of the shell interpreter that would run
### these scripts in production usage of the resulting packages.
###
# Scripts are classified by `file` output; bash scripts get "bash -n"
# (preferring /bin/bash 4+), others "/bin/sh -n". First failure aborts.
check-scripts-syntax:
	@echo 'NOTE: modern bash complains about scripts using backticks (warning not error), which we ignore in NUT codebase for portability reasons: `...` obsolete, use $$(...)'
	@RUNBASH=bash; if [ -x /bin/bash ] && /bin/bash -c 'echo $${BASH_VERSION}' | grep -E '^[456789]\.' ; then RUNBASH=/bin/bash ; else if [ -x /usr/bin/env ] ; then RUNBASH="/usr/bin/env bash"; fi; fi ; \
	 for F in `git ls-files || find . -type f` ; do \
	    case "`file "$$F"`" in \
	        *"Bourne-Again shell script"*) ( set -x ; $$RUNBASH -n "$$F" ; ) ;; \
	        *"POSIX shell script"*|*"shell script"*) ( set -x ; /bin/sh -n "$$F" ; ) ;; \
	    esac || { RES=$$? ; echo "ERROR: Syntax check failed for script file: $$F" >&2 ; exit $$RES ; } ; \
	done
	@echo 'SUCCESS: Shell scripts syntax is acceptable, no fatal issues were found'

# Print an informational banner explaining the current "shellcheck" alias.
shellcheck-disclaimer:
	@{ echo "==============================================================================="; \
	   echo "NOTICE: 'make shellcheck' is currently an alias for 'make check-scripts-syntax'"; \
	   echo "Later it may become a call to the real shellcheck tool (if available on the"; \
	   echo "build system during the configure phase)"; \
	   echo "==============================================================================="; \
	 }

# Note: currently not part of shellcheck target, because the script below
# can test the logic with numerous SHELL_PROGS in a CI setting, and because
# check-scripts-syntax probably has checked the basic syntax above already.
shellcheck-nde:
	cd $(srcdir)/tests && SERVICE_FRAMEWORK="selftest" ./nut-driver-enumerator-test.sh

# See the big note above: "shellcheck" is currently just an alias.
shellcheck: shellcheck-disclaimer check-scripts-syntax

CPPCHECK = @CPPCHECK@
if HAVE_CPPCHECK
# Static-analysis reports for both C++ and C dialects used in the codebase:
cppcheck: cppcheck-cxx11.xml cppcheck-c99.xml

# Let the analysis get regenerated due to any change in source;
# but note that with our different make implementations to support,
# we can not either $(shell find ...) nor blindly say e.g. *.cpp
# for each FS structure layer because e.g. there are no ./*.cpp
# in the root dir of the codebase (and so make complains there is
# `No rule to make target `*.cpp', needed by `cppcheck-cxx11.xml'`)
#
# Note that the actual `cppcheck` scan finds all files it likes
# (so if CPPCHECK_SRC_* misses something, it just won't trigger
# automagically a rebuild of the XML in developer working cycles).
CPPCHECK_SRC_H = $(top_srcdir)/*/*.h $(top_srcdir)/*/*/*.h
# CPPCHECK_SRC_H += $(top_srcdir)/*.h

CPPCHECK_SRC_C = $(top_srcdir)/*/*.c $(top_srcdir)/*/*/*.c
# CPPCHECK_SRC_C += $(top_srcdir)/*.c

CPPCHECK_SRC_CXX = $(top_srcdir)/*/*.cpp
# CPPCHECK_SRC_CXX += $(top_srcdir)/*.cpp $(top_srcdir)/*/*/*.cpp

cppcheck-cxx11.xml: $(CPPCHECK_SRC_CXX) $(CPPCHECK_SRC_H)
	$(CPPCHECK) --std=c++11 --enable=all --inconclusive --xml --xml-version=2 . 2>$@

cppcheck-c99.xml: $(CPPCHECK_SRC_C) $(CPPCHECK_SRC_H)
	$(CPPCHECK) --std=c99 --enable=all --inconclusive --xml --xml-version=2 . 2>$@
else !HAVE_CPPCHECK
# Stub when configure did not find the tool:
cppcheck:
	@echo "CPPCHECK analysis not available since 'cppcheck' was not found."
endif !HAVE_CPPCHECK

# Convenience target: build just the socket-debugging helper in server/:
sockdebug:
	+cd $(builddir)/server && $(MAKE) $(AM_MAKEFLAGS) sockdebug$(EXEEXT)

# ----------------------------------------------------------------------
# Automatically generate the ChangeLog from Git logs:
MAINTAINERCLEANFILES += ChangeLog

# CI builds can leave a log of selected features:
MAINTAINERCLEANFILES += config.nut_report_feature.log*

# Older boundary of the ChangeLog commits range
# It can be a tag ('v2.2.0'), a commit hash, a date, ...
# See gitrevisions for more information on specifying ranges
#GITLOG_START_POINT=v2.6.0
#GITLOG_END_POINT=HEAD
# Actual values are substituted by the configure script:
GITLOG_START_POINT=@GITLOG_START_POINT@
GITLOG_END_POINT=@GITLOG_END_POINT@

# Force ChangeLog regeneration upon make dist (due to nonexistent 'dummy-stamp'),
# in case it has already been generated previously
# Note that the script is hard-coded to inspect Git workspace which contains
# the current dir, and defaults to generate a "ChangeLog" in the current dir.
# The script itself is generated from a template, so resides in builddir.
dummy-stamp:
ChangeLog: dummy-stamp
	+@$(MAKE) $(AM_MAKEFLAGS) $(abs_top_builddir)/ChangeLog

# Translate the automake conditional into an environment assignment
# passed to the ChangeLog generator below:
if WITH_PDF_NONASCII_TITLES
WITH_PDF_NONASCII_TITLES_ENVVAR = WITH_PDF_NONASCII_TITLES=yes
else !WITH_PDF_NONASCII_TITLES
WITH_PDF_NONASCII_TITLES_ENVVAR = WITH_PDF_NONASCII_TITLES=no
endif !WITH_PDF_NONASCII_TITLES

# Be sure to not confuse with a DIST'ed file (and so try to overwrite it);
# do however avoid re-generating it if already made on a previous pass and
# the Git HEAD pointer (branch) or its actual "index" or "object" database
# did not change since then - meaning the local developer or CI did not
# modify the metadata (subsequent generation of the huge PDF/HTML files
# can cost dearly).
# Note there's a bit more fuss about Git internals which NUT should not
# really care about encapsulation-wise (detection of NUT_GITDIR location
# which may reside elsewhere, e.g. with local repo clones with reference
# repo configuration, or submodules). But this is a Git-crawling target
# anyway, and in the worst case (Git's design changes) we would spend a
# bit of time researching the FS in vain, and go on to re-generate the
# ChangeLog when maybe we should not have - oh well.
# WARNING: The CHANGELOG_REQUIRE_GROUP_BY_DATE_AUTHOR=true mode here is
# default to allow for prettier documentation, but it can require too much
# memory for weaker build systems. Set it to false when calling make there.
# Overall flow: if a Git workspace exists, (re-)generate via
# gitlog2changelog.py when the metadata is newer than the file;
# otherwise fall back to the ChangeLog distributed in srcdir, or
# finally to a placeholder text (all failures stay non-fatal).
CHANGELOG_REQUIRE_GROUP_BY_DATE_AUTHOR_ENVVAR = true
$(abs_top_builddir)/ChangeLog: tools/gitlog2changelog.py dummy-stamp
	@cd $(abs_top_srcdir) && \
	    if test -e .git ; then \
	        NUT_GITDIR=".git" ; if test -r "$${NUT_GITDIR}" -a ! -d "$${NUT_GITDIR}" ; then GD="`grep -E '^gitdir:' "$${NUT_GITDIR}" | sed 's/^gitdir: *//'`" && test -n "$$GD" -a -d "$$GD" && NUT_GITDIR="$$GD" ; fi ; \
	        if test -s "$@" -a -d "$${NUT_GITDIR}" && test -z "`find "$${NUT_GITDIR}" -newer "$@" 2>/dev/null`" ; then \
	            echo "  DOC-CHANGELOG-GENERATE	$@ : SKIP (keep existing)" ; \
	            echo "Using still-valid ChangeLog file generated earlier from same revision of Git source metadata in '$${NUT_GITDIR}'" >&2 ; \
	        else \
	            if test -s "$@" ; then \
	                echo "  DOC-CHANGELOG-GENERATE	$@ : RE-GENERATE (older than Git workspace metadata) ..." ; \
	            else \
	                echo "  DOC-CHANGELOG-GENERATE	$@ : GENERATE (currently absent) ..." ; \
	            fi ; \
	            CHANGELOG_FILE="$@" $(WITH_PDF_NONASCII_TITLES_ENVVAR) \
	            CHANGELOG_REQUIRE_GROUP_BY_DATE_AUTHOR="$(CHANGELOG_REQUIRE_GROUP_BY_DATE_AUTHOR_ENVVAR)" \
	              $(abs_top_builddir)/tools/gitlog2changelog.py $(GITLOG_START_POINT) $(GITLOG_END_POINT) \
	              && { echo "  DOC-CHANGELOG-GENERATE	$@ : SUCCESS"; } \
	              || { \
	                echo "  DOC-CHANGELOG-GENERATE	$@ : FAILED (non-fatal)" >&2 ; \
	                printf "gitlog2changelog.py failed to generate the ChangeLog.\n\nNOTE: See https://github.com/networkupstools/nut/commits/master for change history.\n\n" > "$@" ; \
	            } ; \
	        fi ; \
	    else \
	        if test x"$(abs_top_srcdir)" != x"$(abs_top_builddir)" -a -s ./ChangeLog ; then \
	            echo "  DOC-CHANGELOG-GENERATE	$@ : SKIP (keep existing)" ; \
	            if ! diff ./ChangeLog "$@" >/dev/null 2>/dev/null ; then \
	                echo "Using distributed ChangeLog file from sources (and builddir is not srcdir)" >&2 ; \
	                rm -f "$@" || true ; \
	                cp -pf ./ChangeLog "$@" || { cat ./ChangeLog > "$@" ; touch -r ./ChangeLog "$@" || true ; } ; \
	            else \
	                echo "Using distributed ChangeLog file from sources (and builddir already has content identical to one in srcdir)" >&2 ; \
	            fi ; \
	        else \
	            if test -s "$@" ; then \
	                echo "  DOC-CHANGELOG-GENERATE	$@ : SKIP (keep existing)" ; \
	                echo "Using distributed ChangeLog file from sources (and builddir is srcdir)" >&2 ; \
	            else \
	                echo "  DOC-CHANGELOG-GENERATE	$@ : FAILED (non-fatal)" >&2 ; \
	                printf "Failed to generate the ChangeLog.\n\nNOTE: See https://github.com/networkupstools/nut/commits/master for change history.\n\n" > "$@" ; \
	            fi ; \
	        fi ; \
	    fi

# Delegation rules for generated files owned by other subdirectories:
ChangeLog.adoc: ChangeLog
	+cd $(abs_top_builddir)/docs && $(MAKE) $(AM_MAKEFLAGS) ../ChangeLog.adoc

nut_version.h include/nut_version.h:
	+cd $(abs_top_builddir)/include && $(MAKE) $(AM_MAKEFLAGS) nut_version.h

# May involve (re-)build of libupsclient.la
libupsclient-version.h clients/libupsclient-version.h:
	+cd $(abs_top_builddir)/include && $(MAKE) $(AM_MAKEFLAGS) libupsclient-version.h

# The script is generated from a .in template by its own Makefile:
tools/gitlog2changelog.py: tools/gitlog2changelog.py.in
	+cd $(@D) && $(MAKE) $(AM_MAKEFLAGS) -s $(@F)

# ----------------------------------------------------------------------
# Maintainers targets: distribution signature and hashes
# Assume tools are available (and maintainer GPG keys)
dist-files: dist dist-sig dist-hash

# Map artifact file names onto the phony targets that produce them:
nut-@PACKAGE_VERSION@.tar.gz: dist
nut-@PACKAGE_VERSION@.tar.gz.sig: dist-sig
nut-@PACKAGE_VERSION@.tar.gz.md5 nut-@PACKAGE_VERSION@.tar.gz.sha256: dist-hash

# Remove a stale signature first, since gpg refuses to overwrite:
dist-sig: nut-@PACKAGE_VERSION@.tar.gz
	rm -f nut-@PACKAGE_VERSION@.tar.gz.sig
	gpg --detach-sign nut-@PACKAGE_VERSION@.tar.gz

dist-hash: nut-@PACKAGE_VERSION@.tar.gz
	md5sum nut-@PACKAGE_VERSION@.tar.gz > nut-@PACKAGE_VERSION@.tar.gz.md5
	sha256sum nut-@PACKAGE_VERSION@.tar.gz > nut-@PACKAGE_VERSION@.tar.gz.sha256

# ----------------------------------------------------------------------
# targets from old build system (pre-automake).
# supported for a period of time for backward "compatibility".

# Separator banner for the deprecation warnings below:
WARN="----------------------------------------------------------------------"

build:
	@echo $(WARN)
	@echo "Warning: 'make build' is deprecated. Use 'make all' instead."
	@echo $(WARN)
	+$(MAKE) $(AM_MAKEFLAGS) all
# Deprecated pre-automake target: install programs from the main subdirs.
# FIX: chain "cd DIR" and the sub-make with "&&" instead of ";" — with a
# bare ";" a failed cd (e.g. missing subdir) would run "make install" in
# the top-level directory instead, recursing into a full install.
install-bin:
	@echo $(WARN)
	@echo "Warning: 'make install-bin' is deprecated."
	@echo "Use 'make install-exec' instead for a similar effect."
	@echo $(WARN)
	+cd common  && $(MAKE) $(AM_MAKEFLAGS) install
	+cd drivers && $(MAKE) $(AM_MAKEFLAGS) install
	+cd server  && $(MAKE) $(AM_MAKEFLAGS) install
	+cd clients && $(MAKE) $(AM_MAKEFLAGS) install
# Deprecated pre-automake target: install just the man pages.
# FIX: "cd docs/man && make" (was ";") so a failed cd cannot trigger a
# recursive "make install" in the top-level directory.
install-man: install-data-recursive
	@echo $(WARN)
	@echo "Warning: 'make install-man' is deprecated."
	@echo "Use 'cd docs/man; make install' instead."
	@echo $(WARN)
	+cd docs/man && $(MAKE) $(AM_MAKEFLAGS) install
# Deprecated pre-automake target: install just the sample configs.
# FIX: "cd conf && make" (was ";") so a failed cd cannot trigger a
# recursive "make install" in the top-level directory.
install-conf:
	@echo $(WARN)
	@echo "Warning: 'make install-conf' is deprecated."
	@echo "Use 'cd conf; make install' instead."
	@echo $(WARN)
	+cd conf && $(MAKE) $(AM_MAKEFLAGS) install
# The target install-data already has a standardized meaning under automake
install-dirs:
	@echo $(WARN)
	@echo "Warning: 'make install-dirs' is deprecated."
	@echo "Use 'make installdirs' instead."
	@echo $(WARN)
	+$(MAKE) $(AM_MAKEFLAGS) installdirs
# Removed legacy targets: fail loudly with a pointer to the configure
# option that replaced each of them.
cgi build-cgi install-cgi install-cgi-dir install-cgi-bin \
install-cgi-man install-cgi-conf install-cgi-html: 
	@echo "Error: 'make $@' no longer exists."
	@echo "Use './configure --with-cgi' instead."
install-lib:
	@echo "Error: 'make $@' no longer exists."
	@echo "Use './configure --with-dev' instead."
usb build-usb install-usb:
	@echo "Error: 'make $@' no longer exists."
	@echo "Use './configure --with-usb' instead."
snmp build-snmp install-snmp install-snmp-mgr install-snmp-man: 
	@echo "Error: 'make $@' no longer exists."
	@echo "Use './configure --with-snmp' instead."
setver:
	@echo "Error: 'make setver' no longer exists."
	@echo "Edit configure.ac to set version number."

# Adjust permissions when installing as `root` into the actual system.
# We honour DESTDIR anyway, as someone can install into a chroot etc.
# NOTE: Might be an 'install-data-hook' (for dirs) and/or 'install-exec-hook'
# (for service restart) but better not force this on everyone?
# It is also up to the end-user making such an installation to remove (or not)
# dirs and files made below.
# To err on the safe side in cross builds, we ignore Windows builds and those
# not built for the same system as the build host.
# This hook only *advises* about install-as-root; it changes nothing itself.
install-data-hook:
	@case "@target_os@" in *mingw*) exit 0;; esac ; \
	 if [ x"@host_os@" != x"@build_os@" ]; then exit 0 ; fi ; \
	 if [ x"@target_os@" != x"@build_os@" ]; then exit 0 ; fi ; \
	 if (command -v id) && [ x"`id -u`" = x0 ] && [ x"$(DESTDIR)" = x -o x"$(DESTDIR)" = x/ ] ; then \
		echo "================================================================================" >&2 ; \
		echo "| NUT data files have been installed into the system, now consider running     |" >&2 ; \
		echo "| '(sudo) make install-as-root' to apply permissions and service state changes |" >&2 ; \
	 	echo "================================================================================" >&2 ; \
	 fi

# Translate automake conditionals into make variables holding the shell
# commands "true"/"false", so recipes below can test them at run-time
# (e.g. "if $(HAVE_SYSTEMD) ; then ... fi"):
if HAVE_SYSTEMD
HAVE_SYSTEMD = true
else !HAVE_SYSTEMD
HAVE_SYSTEMD = false
endif !HAVE_SYSTEMD

if WITH_SYSTEMD_TMPFILES
WITH_SYSTEMD_TMPFILES = true
else !WITH_SYSTEMD_TMPFILES
WITH_SYSTEMD_TMPFILES = false
endif !WITH_SYSTEMD_TMPFILES

if WITH_SYSTEMD_PRESET
WITH_SYSTEMD_PRESET = true
else !WITH_SYSTEMD_PRESET
WITH_SYSTEMD_PRESET = false
endif !WITH_SYSTEMD_PRESET

if WITH_CGI
WITH_CGI = true
else !WITH_CGI
WITH_CGI = false
endif !WITH_CGI

if WITH_SOLARIS_SMF
WITH_SOLARIS_SMF = true
else !WITH_SOLARIS_SMF
WITH_SOLARIS_SMF = false
endif !WITH_SOLARIS_SMF

if WITH_SOLARIS_INIT
WITH_SOLARIS_INIT = true
else !WITH_SOLARIS_INIT
WITH_SOLARIS_INIT = false
endif !WITH_SOLARIS_INIT

# TODO: Actually move this into scripts like Solaris/postinstall
# using OS-specific `useradd`/`groupadd`, etc.
# Note that as we stop services, we may be dealing with (older)
# distros that do not follow current naming in NUT code base.
# Flow: guard (native non-Windows build, running as root) -> stop services
# when installing into the live system -> "make install" -> create runtime
# dirs -> chmod/chown them and sample configs -> re-enable/restart services.
# FIX: the recursive install used the undefined $(AM_FLAGS); pass
# $(AM_MAKEFLAGS) like every other recursive make in this file.
install-as-root:
	@+echo "$@: starting (no-op if not root)" >&2 ; \
	 case "@target_os@" in *mingw*) echo "$@: SKIP: not supported for this target_os='@target_os@'" >&2 ; exit 0;; esac ; \
	 if [ x"@host_os@" != x"@build_os@" ]; then echo "$@: SKIP: build_os='@build_os@' is not host_os='@host_os@'" >&2 ; exit 0 ; fi ; \
	 if [ x"@target_os@" != x"@build_os@" ]; then echo "$@: SKIP: build_os='@build_os@' is not target_os='@target_os@'" >&2 ; exit 0 ; fi ; \
	 if (command -v id) && [ x"`id -u`" = x0 ] ; then \
		echo "$@: we seem to be root, PROCEEDING" >&2 ; \
	 else \
		echo "$@: SKIP: we seem to NOT be root" >&2 ; \
		exit 0 ; \
	 fi ; \
	 prefix="@prefix@"; \
	 if [ x"$(DESTDIR)" = x -o x"$(DESTDIR)" = x/ ] ; then \
		if $(HAVE_SYSTEMD) ; then \
			echo "$@: Stop NUT services, if any" >&2 ; \
			@SYSTEMD_SYSTEMCTL_PROGRAM@ stop nut-monitor.service nut-server.service || true ; \
			@SYSTEMD_SYSTEMCTL_PROGRAM@ stop nut-driver.service || true ; \
			@SYSTEMD_SYSTEMCTL_PROGRAM@ stop nut-driver.target || true ; \
			@SYSTEMD_SYSTEMCTL_PROGRAM@ stop nut-udev-settle.service || true ; \
			@SYSTEMD_SYSTEMCTL_PROGRAM@ stop nut.target || true ; \
		fi ; \
		if $(WITH_SOLARIS_SMF) || $(WITH_SOLARIS_INIT) ; then \
			if $(WITH_SOLARIS_SMF) ; then \
				echo "$@: Stop NUT services, if any" >&2 ; \
				SMF_ACTIVE="`/usr/bin/svcs -a -Hostate,fmri | grep svc:/system/power/ | grep -v disabled | awk '{print $$2}'`" ; \
				for S in $$SMF_ACTIVE ; do \
					/usr/sbin/svcadm disable -ts $$S || true ; \
				done ; \
			fi ; \
			$(top_builddir)/scripts/Solaris/preremove \
			|| exit ; \
		fi ; \
	 fi ; \
	 $(MAKE) $(AM_MAKEFLAGS) DESTDIR="$(DESTDIR)" install || exit ; \
	 if [ x"$(DESTDIR)" = x -o x"$(DESTDIR)" = x/ ] ; then \
		if $(WITH_SOLARIS_SMF) || $(WITH_SOLARIS_INIT) ; then \
			$(top_builddir)/scripts/Solaris/preinstall && \
			$(top_builddir)/scripts/Solaris/postinstall ; \
			exit ; \
		fi ; \
	 fi ; \
	 echo "  MKDIR	$(DESTDIR)/@STATEPATH@ $(DESTDIR)/@STATEPATH@/upssched" >&2 ; \
	 $(MKDIR_P) "$(DESTDIR)/@STATEPATH@/upssched" && \
	 for D in "@PIDPATH@" "@ALTPIDPATH@" "@ALTSTATEPATH@" "@CONFPATH@" ; do \
		case x"$$D" in \
		x|x@*) ;; \
		*)	echo "  MKDIR	$(DESTDIR)/$$D" >&2 ; \
			$(MKDIR_P) "$(DESTDIR)/$$D" \
			|| exit ;; \
		esac ; \
	 done ; \
	 if (command -v chmod) ; then \
		echo "  CHMOD(0770)	$(DESTDIR)/@STATEPATH@/upssched" >&2 ; \
		chmod 0770 "$(DESTDIR)/@STATEPATH@/upssched" \
		|| exit ; \
		for D in "@STATEPATH@" "@PIDPATH@" "@ALTPIDPATH@" "@ALTSTATEPATH@" ; do \
			case x"$$D" in \
			x|x@*|x/run|x/var/run|x/tmp|x/var/tmp|x/dev/shm|x/etc|x/var|x/usr|x/usr/local|x/usr/local/etc|x/usr/etc) ;; \
			*)	echo "  CHMOD(0770)	$(DESTDIR)/$$D" >&2 ; \
				chmod 0770 "$(DESTDIR)/$$D" \
				|| exit ;; \
			esac ; \
		done ; \
		case x"@CONFPATH@" in \
			x|x@*|x/run|x/var/run|x/tmp|x/var/tmp|x/dev/shm|x/etc|x/var|x/usr|x/usr/local|x/usr/local/etc|x/usr/etc) ;; \
			*)	echo "  CHMOD(0751)	$(DESTDIR)/@CONFPATH@" >&2 ; \
				chmod 0751 "$(DESTDIR)/@CONFPATH@" \
				|| exit ;; \
		esac ; \
		for F in hosts.conf.sample upsstats-single.html.sample upsstats.html.sample upsset.conf.sample ; do \
			echo "  CHMOD(0644)	CGI: $(DESTDIR)/@CONFPATH@/$$F" >&2 ; \
			chmod 0644 "$(DESTDIR)/@CONFPATH@/$$F" \
			|| { if $(WITH_CGI) ; then exit 1 ; else true ; fi ; } ; \
		done ; \
		for F in nut.conf.sample ups.conf.sample upsd.conf.sample upsd.users.sample upsmon.conf.sample upssched.conf.sample ; do \
			echo "  CHMOD(0640)	$(DESTDIR)/@CONFPATH@/$$F" >&2 ; \
			chmod 0640 "$(DESTDIR)/@CONFPATH@/$$F" \
			|| exit ; \
		done ; \
	 else \
		echo "$@: WARNING: Can not CHMOD created locations!" >&2 ; \
	 fi ; \
	 if (command -v chown) && test 0 -lt "`id -u '@RUN_AS_USER@'`" \
	 && ( test 0 -lt "`getent group '@RUN_AS_GROUP@' | awk -F: '{print $$3}'`" || test 0 -lt "`id -g '@RUN_AS_GROUP@'`" ) \
	 ; then \
		echo "  CHOWN(@RUN_AS_USER@:@RUN_AS_GROUP@)	$(DESTDIR)/@STATEPATH@/upssched" >&2 ; \
		chown "@RUN_AS_USER@:@RUN_AS_GROUP@" "$(DESTDIR)/@STATEPATH@/upssched" \
		|| exit ; \
		for D in "@STATEPATH@" "@PIDPATH@" "@ALTPIDPATH@" "@ALTSTATEPATH@" ; do \
			case x"$$D" in \
			x|x@*|x/run|x/var/run|x/tmp|x/var/tmp|x/dev/shm|x/etc|x/var|x/usr|x/usr/local|x/usr/local/etc|x/usr/etc) ;; \
			*)	echo "  CHOWN(@RUN_AS_USER@:@RUN_AS_GROUP@)	$(DESTDIR)/$$D" >&2 ; \
				chown "@RUN_AS_USER@:@RUN_AS_GROUP@" "$(DESTDIR)/$$D" \
				|| exit ;; \
			esac ; \
		done ; \
		case x"@CONFPATH@" in \
			x|x@*|x/run|x/var/run|x/tmp|x/var/tmp|x/dev/shm|x/etc|x/var|x/usr|x/usr/local|x/usr/local/etc|x/usr/etc) ;; \
			*)	echo "  CHOWN(root:@RUN_AS_GROUP@)	$(DESTDIR)/@CONFPATH@" >&2 ; \
				chown "root:@RUN_AS_GROUP@" "$(DESTDIR)/@CONFPATH@" \
				|| exit ;; \
		esac ; \
		for F in hosts.conf.sample upsstats-single.html.sample upsstats.html.sample upsset.conf.sample ; do \
			echo "  CHOWN(root:@RUN_AS_GROUP@)	CGI: $(DESTDIR)/@CONFPATH@/$$F" >&2 ; \
			chown "root:@RUN_AS_GROUP@" "$(DESTDIR)/@CONFPATH@/$$F" \
			|| { if $(WITH_CGI) ; then exit 1 ; else true ; fi ; } ; \
		done ; \
		for F in nut.conf.sample ups.conf.sample upsd.conf.sample upsd.users.sample upsmon.conf.sample upssched.conf.sample ; do \
			echo "  CHOWN(root:@RUN_AS_GROUP@)	$(DESTDIR)/@CONFPATH@/$$F" >&2 ; \
			chown "root:@RUN_AS_GROUP@" "$(DESTDIR)/@CONFPATH@/$$F" \
			|| exit ; \
		done ; \
	 else \
		echo "$@: WARNING: Can not CHOWN created locations!" >&2 ; \
	 fi ; \
	 if [ x"$(DESTDIR)" = x -o x"$(DESTDIR)" = x/ ] ; then \
		applied_udev=false ; \
		if $(HAVE_SYSTEMD) ; then \
			echo "$@: Activate default systemd layout, restart services:" >&2 ; \
			if $(WITH_SYSTEMD_TMPFILES) ; then \
				echo "$@: Apply systemd-tmpfiles presets" >&2 ; \
				@SYSTEMD_TMPFILES_PROGRAM@ --create || exit ; \
			fi ; \
			echo "$@: Learn systemd definition changes" >&2 ; \
			@SYSTEMD_SYSTEMCTL_PROGRAM@ daemon-reload || exit ; \
			APPLIED_SYSTEMD_PRESET=false ; \
			if $(WITH_SYSTEMD_PRESET) ; then \
				echo "$@: Apply systemd enabled/disabled service presets" >&2 ; \
				@SYSTEMD_SYSTEMCTL_PROGRAM@ preset-all && APPLIED_SYSTEMD_PRESET=true || APPLIED_SYSTEMD_PRESET=false ; \
			fi ; \
			if [ x"$${APPLIED_SYSTEMD_PRESET}" = x"false" ] ; then \
				echo "$@: Apply systemd enabled/disabled service defaults in a legacy manner" >&2 ; \
				@SYSTEMD_SYSTEMCTL_PROGRAM@ disable nut.target nut-driver.target nut-udev-settle.service nut-monitor nut-server nut-driver-enumerator.path nut-driver-enumerator.service || exit ; \
				@SYSTEMD_SYSTEMCTL_PROGRAM@ enable  nut.target nut-driver.target nut-udev-settle.service nut-monitor nut-server nut-driver-enumerator.path nut-driver-enumerator.service || exit ; \
			fi ; \
			@SYSTEMD_SYSTEMCTL_PROGRAM@ restart udev && applied_udev=true || true ; \
			if [ -s '@sysconfdir@/ups.conf' ] ; then \
				echo "$@: Reconfigure nut-driver-enumerator (service instance wrapping)" >&2 ; \
				$(top_builddir)/scripts/upsdrvsvcctl/nut-driver-enumerator.sh --reconfigure || { RES=$$?; if [ $$RES != 42 ] ; then exit $$RES ; fi ; } ; \
			fi; \
			echo "$@: Restart NUT services" >&2 ; \
			@SYSTEMD_SYSTEMCTL_PROGRAM@ restart nut-driver-enumerator.service nut-monitor.service nut-server.service \
			|| if [ -s '@sysconfdir@/ups.conf' -a -s '@sysconfdir@/upsd.conf' -a -s '@sysconfdir@/upsd.users' -a -s '@sysconfdir@/upsmon.conf' ] ; then exit 1 ; \
			else echo "$@: some configs are missing, assuming new NUT installation" >&2; fi; \
		fi ; \
		if ! $${applied_udev} && (command -v udevadm); then \
			udevadm control --reload-rules && udevadm trigger && applied_udev=true || true ; \
		fi ; \
	 fi ; \
	 echo "$@: Finished SUCCESSFULLY" >&2

# Patterns for distribution tarballs and OS-specific package artifacts that
# can pile up in the top build directory; "make maintainer-clean" wipes them.
MAINTAINERCLEANFILES_DISTBALL = nut-*.tar.gz
# HP-UX software depots (historic and current archive naming):
MAINTAINERCLEANFILES_PACKAGES = \
 NUT_HPUX_package@PACKAGE_VERSION@.depot \
 NUT_HPUX_package-@PACKAGE_VERSION@.depot
# AIX as below, and RedHat-compatible (covers binary and source packages):
MAINTAINERCLEANFILES_PACKAGES += nut*rpm
# Debian-compatible (covers binary and source packages):
MAINTAINERCLEANFILES_PACKAGES += nut*deb
# Solaris SVR4 package archives (historic and current naming):
MAINTAINERCLEANFILES_PACKAGES += \
 NUT_solaris_*_package@PACKAGE_VERSION@.local.gz \
 NUT_solaris_*_package-@PACKAGE_VERSION@.local.gz
# Newer Solaris IPS (aka "pkg(5)" format archives)
MAINTAINERCLEANFILES_PACKAGES += *.p5p

MAINTAINERCLEANFILES += $(MAINTAINERCLEANFILES_DISTBALL) $(MAINTAINERCLEANFILES_PACKAGES)

# Build a source tarball (via the "dist" prerequisite) and then an OS-specific
# binary package, dispatching on `uname -s`; resulting archives are moved to
# the top build directory.  A throw-away DESTDIR prototype area is used for
# staged installs where needed.
# * HP-UX: delegates to scripts/HP-UX which emits NUT_HPUX_package.depot,
#   renamed here to carry the package version.
# * SunOS: full build + staged install, scripts/Solaris wraps the staging
#   into packages, then the staging is uninstalled and removed.
# * AIX: requires writable /usr/src/packages/{SPECS,SOURCES}; copies the
#   bundled spec/init/tarball there and builds RPMs in place.
#   NOTE(review): `rpm -ba` is legacy syntax (modern rpm uses `rpmbuild -ba`)
#   -- presumably matches the AIX Toolbox rpm; confirm before changing.
# The leading "+" marks the recipe as a sub-make invocation (jobserver is
# shared; it also runs under `make -n`).  Unsupported platforms fail loudly.
package: dist
	+DESTDIR="$(abs_builddir)/_install_pkgprotodir" ; export DESTDIR; \
	rm -rf "$$DESTDIR"; \
	case "`uname -s`" in \
	"HP-UX") \
		( cd scripts/HP-UX && \
		  $(MAKE) $(AM_MAKEFLAGS) DESTDIR="$$DESTDIR" package && \
		  mv NUT_HPUX_package.depot $(abs_top_builddir)/NUT_HPUX_package-@PACKAGE_VERSION@.depot ) ;; \
	"SunOS") \
		$(MAKE) $(AM_MAKEFLAGS) && \
		$(MAKE) $(AM_MAKEFLAGS) DESTDIR="$$DESTDIR" install && \
		( cd scripts/Solaris && \
		  $(MAKE) $(AM_MAKEFLAGS) DESTDIR="$$DESTDIR" package ) && \
		$(MAKE) $(AM_MAKEFLAGS) DESTDIR="$$DESTDIR" uninstall && \
		rm -rf "$$DESTDIR" || \
		{ echo "FAILED to produce SunOS packages, inspect '$$DESTDIR' for clues" >&2 ; exit 1; } ;; \
	"AIX") \
		if test -d /usr/src/packages/SPECS -a -w /usr/src/packages/SPECS ; then : ; else echo "Can not write to /usr/src/packages/SPECS" >&2 ; exit 1; fi ; \
		if test -d /usr/src/packages/SOURCES -a -w /usr/src/packages/SOURCES ; then : ; else echo "Can not write to /usr/src/packages/SOURCES" >&2 ; exit 1; fi ; \
		$(MAKE) $(AM_MAKEFLAGS) dist && \
		cp scripts/Aix/nut-aix.spec /usr/src/packages/SPECS && \
		cp scripts/Aix/nut.init nut-@PACKAGE_VERSION@.tar.gz /usr/src/packages/SOURCES && \
		rpm -ba /usr/src/packages/SPECS/nut-aix.spec && \
		mv /usr/src/packages/RPMS/nut*rpm $(abs_top_builddir)/ ;; \
	*)	echo "Unsupported OS for 'make $@' (no recipe bound)" >&2; exit 1;; \
	esac

if HAVE_WINDOWS
# Steam-roll over all executables/libs we have placed in DESTDIR and copy over
# any resolved dependencies from the cross-build (or native MSYS2) environment.
# Then hardlink libraries for sbin... (alternative: all bins in one dir)
# TOTHINK: Are there more dirs to consider? So far we cover bindir, sbindir and
# driverexecdir (e.g. some Linux distros place drivers to /lib/nut while tools
# and daemons are in /usr/bin and /usr/sbin), and cgiexecdir, and occasional
# helpers like "sockdebug.exe" in libexecdir; anything else?..
# Note we hold existence of cgiexecdir as optional, but the name is expected to
# be defined. Other dirs are "just assumed" to exist (that we are not packaging
# some NUT build without drivers/tools/daemons). Subject to change if needed.
# Currently this is handled by a CHECKING... step that should fail if it hits
# anything.
#
# Stage a complete Windows bundle: a normal "install" into DESTDIR (mandatory
# here -- the guard below aborts otherwise, protecting a live system from
# being clobbered), followed by the third-party DLL harvesting recipe below.
# The "+" prefixes share the parent make's jobserver with the sub-makes.
install-win-bundle: all
	@if test -z "$(DESTDIR)" ; then echo "ERROR: '$@': Bundle may only be installed to some DESTDIR prototype area'" >&2 ; exit 1; fi
	+$(MAKE) $(AM_MAKEFLAGS) DESTDIR='$(DESTDIR)' install
	+$(MAKE) $(AM_MAKEFLAGS) DESTDIR='$(DESTDIR)' install-win-bundle-thirdparty

# Harvest the third-party DLLs needed by the NUT binaries installed in
# DESTDIR (which must be a prototype area; the guard below enforces that):
# 1. resolve dependencies with scripts/Windows/dllldd.sh and copy each DLL
#    into bindir (ARCH is exported for that script, derived from the
#    configured $(target)/$(target_triplet) when not preset by the caller);
# 2. hardlink those DLLs from bindir into each *distinct* sbindir,
#    driverexecdir, optional cgiexecdir and libexecdir -- every subshell
#    exits 0 early when its directory coincides with one already covered;
# 3. CHECK that no .exe/.dll was installed outside the covered directories:
#    the trailing while-loop prints any offender and converts its presence
#    into a non-zero exit status.
# Fix: the per-DLL progress echoes used "2>&1" (a no-op without an earlier
# redirection); surrounding messages go to stderr via ">&2", so these do now.
install-win-bundle-thirdparty:
	@if test -z "$(DESTDIR)" ; then echo "ERROR: '$@': Bundle may only be installed to some DESTDIR prototype area'" >&2 ; exit 1; fi
	@echo "Searching which DLLs need to be bundled with NUT for Windows..." >&2
	@if test -z "$$ARCH" ; then \
	    if test -n "$(target)" ; then \
	        ARCH='$(target)' \
	    ; else \
	        if test -n "$(target_triplet)" ; then ARCH='$(target_triplet)' ; fi ; \
	    fi ; \
	 fi ; \
	 if test -n "$$ARCH" ; then export ARCH ; fi ; \
	 DESTDIR='$(DESTDIR)' ; export DESTDIR ; \
	 (  cd '$(DESTDIR)' || exit ; \
	    DESTDIR="" '$(abs_top_srcdir)/scripts/Windows/dllldd.sh' dllldddir . \
	    | while read D ; do \
	        echo "   DLL->bin       $$D" >&2 ; \
	        cp -pf "$$D" './$(bindir)/' ; \
	    done ; \
	 ) || exit ; \
	 (  if test x"$(bindir)" = x"$(sbindir)" ; then exit 0 ; fi ; \
	    cd '$(DESTDIR)/$(sbindir)' || exit ; \
	    '$(abs_top_srcdir)/scripts/Windows/dllldd.sh' dllldddir . \
	    | while read D ; do \
	        echo "   DLL->sbin      $$D" >&2 ; \
	        ln -f '$(DESTDIR)/$(bindir)'/"`basename "$$D"`" ./ ; \
	    done ; \
	 ) || exit ; \
	 (  if test x"$(driverexecdir)" = x"$(bindir)" ; then exit 0 ; fi ; \
	    if test x"$(driverexecdir)" = x"$(sbindir)" ; then exit 0 ; fi ; \
	    cd '$(DESTDIR)/$(driverexecdir)' || exit ; \
	    '$(abs_top_srcdir)/scripts/Windows/dllldd.sh' dllldddir . \
	    | while read D ; do \
	        echo "   DLL->drv       $$D" >&2 ; \
	        ln -f '$(DESTDIR)/$(bindir)'/"`basename "$$D"`" ./ ; \
	    done ; \
	 ) || exit ; \
	 (  if test -z "$(cgiexecdir)" -o ! -d "$(DESTDIR)/$(cgiexecdir)" ; then exit 0 ; fi ; \
	    if test x"$(cgiexecdir)" = x"$(bindir)" ; then exit 0 ; fi ; \
	    if test x"$(cgiexecdir)" = x"$(sbindir)" ; then exit 0 ; fi ; \
	    if test x"$(driverexecdir)" = x"$(cgiexecdir)" ; then exit 0 ; fi ; \
	    cd '$(DESTDIR)/$(cgiexecdir)' || exit ; \
	    '$(abs_top_srcdir)/scripts/Windows/dllldd.sh' dllldddir . \
	    | while read D ; do \
	        echo "   DLL->cgi       $$D" >&2 ; \
	        ln -f '$(DESTDIR)/$(bindir)'/"`basename "$$D"`" ./ ; \
	    done ; \
	 ) || exit ; \
	 (  if test x"$(libexecdir)" = x"$(bindir)" ; then exit 0 ; fi ; \
	    if test x"$(libexecdir)" = x"$(sbindir)" ; then exit 0 ; fi ; \
	    if test x"$(libexecdir)" = x"$(driverexecdir)" ; then exit 0 ; fi ; \
	    if test x"$(libexecdir)" = x"$(cgiexecdir)" ; then exit 0 ; fi ; \
	    cd '$(DESTDIR)/$(libexecdir)' || exit ; \
	    '$(abs_top_srcdir)/scripts/Windows/dllldd.sh' dllldddir . \
	    | while read D ; do \
	        echo "   DLL->libexec   $$D" >&2 ; \
	        ln -f '$(DESTDIR)/$(bindir)'/"`basename "$$D"`" ./ ; \
	    done ; \
	 ) || exit
	@echo "CHECKING if any executable files were installed to locations other than those covered by this recipe, so might not have needed DLLs bundled near them" >&2 ; \
	 relbindir="`echo './$(bindir)/' | sed 's,//*,/,g'`" ; \
	 relsbindir="`echo './$(sbindir)/' | sed 's,//*,/,g'`" ; \
	 reldriverexecdir="`echo './$(driverexecdir)/' | sed 's,//*,/,g'`" ; \
	 relcgiexecdir="`echo './$(cgiexecdir)/' | sed 's,//*,/,g'`" ; \
	 rellibexecdir="`echo './$(libexecdir)/' | sed 's,//*,/,g'`" ; \
	 cd '$(DESTDIR)' || exit ; \
	 find . -type f | grep -Ei '\.(exe|dll)$$' \
	 | grep -vE "^($${relbindir}|$${relsbindir}|$${reldriverexecdir}|$${relcgiexecdir}|$${rellibexecdir})" \
	 | ( RES=0 ; while IFS= read LINE ; do echo "$$LINE" ; RES=1; done; exit $$RES )

else !HAVE_WINDOWS
# Keep the bundle targets callable on non-Windows builds as well, so wrapper
# scripts and CI recipes need not special-case the configuration; the stubs
# just report that the step was skipped.
install-win-bundle:
	@echo "  SKIP	'$@' : not enabled for current build configuration"

install-win-bundle-thirdparty:
	@echo "  SKIP	'$@' : not enabled for current build configuration"
endif !HAVE_WINDOWS

# Convenience targets to dump the clean-up file lists for inspection
# (e.g. by developers or helper scripts checking what would be removed):
print-MAINTAINERCLEANFILES print-REALCLEANFILES:
	@echo $(MAINTAINERCLEANFILES)

print-DISTCLEANFILES:
	@echo $(DISTCLEANFILES)

# TODO: Recursive mode to consider patterns defined in sub-dir makefiles
# Intended sanity check: in a git workspace, verify no files matching the
# MAINTAINERCLEANFILES patterns remain (i.e. that a "realclean" was complete).
# NOTE(review): `git status --ignored || while read F` only runs the loop
# (reading from *stdin*) when `git status` FAILS -- a pipe `|` into the loop
# looks intended. Also, long-format `git status` lines are not bare paths,
# so the `*/$$P` case match is questionable (a `--porcelain` listing would
# fit better). Confirm intent before relying on this check.
git-realclean-check:
	@if test -e .git && (command -v git); then \
		git status --ignored || while read F ; do \
			for P in $(MAINTAINERCLEANFILES) ; do \
				case "$$F" in \
				*/$$P) exit 1 ;; \
				esac ; \
			done; \
		done ; \
	 fi

# Simply group recipes which depend on a ton of source files but are generally
# quickly handled by external tools, to group (in life and log) various checks
# separately from the probably slower/louder stages for test programs like
# check-NIT:
CHECK_FILES_QUICK_TARGETS = check-man
if WITH_SPELLCHECK
# Spellcheck only participates when the build was configured with the tools:
CHECK_FILES_QUICK_TARGETS += spellcheck-quick
endif WITH_SPELLCHECK
# Umbrella target for the quick static checks selected above:
check-files-quick: $(CHECK_FILES_QUICK_TARGETS)

# Autotools hook: run the quick checks before recursing for defaults:
check-recursive: check-files-quick

# Caller can set this to "false" to not regenerate below
# (e.g. in CI vs. developer iterations):
CHECK_PARALLEL_BUILDS_REGEN = true

# Not pulled in directly so far, can be used by developers to verify that build
# rules (including dependencies pulled from other directories) make sense and
# do not cause conflict by writing into same file names. Builds driven from the
# root directory arrange this nicely, but it needs careful balancing on a
# tightrope if developers build pieces of code right in the directory of their
# interest.
# It is recommended to have "ccache" (or similar) setup working, to minimize
# the time cost and workload of compilations involved in these looped build
# retries.
# Oddly, the NetBSD 9.2 `bmake` faced with out-of-tree (subdir) builds does
# find itself work to do even if called in the root dir (with no `Makefile`
# generated there -- it detects a build to do under e.g. `obj/` in CI tests),
# but also tends to have that directory as current at the time we try to call
# the `./config.status` line -- and fails there. Hence the `cd top_builddir`.
# Auto-parallel recipe (if current 'make' implementation supports the "-j N"
# syntax; the optional MAXPARMAKES may be set in NUT CI farm style builds):
# Flow: optionally regenerate Makefiles; then for every source subdir found,
# "make clean" the whole tree and run a parallel make from inside that subdir,
# reporting detailed reproduction hints on failure.
check-parallel-builds:
	if [ x"$(CHECK_PARALLEL_BUILDS_REGEN)" = xtrue ] ; then cd '$(top_srcdir)' && $(AUTOMAKE) -f ; fi
	if [ x"$(CHECK_PARALLEL_BUILDS_REGEN)" = xtrue ] ; then cd '$(top_builddir)' && ./config.status ; fi
	+@$(SET_PARMAKES_OPT); \
	  DIRS=""; \
	  cd '$(abs_top_builddir)' || exit ; \
	  $(MAKE) $(AM_MAKEFLAGS) -k -s $${PARMAKES_OPT} clean || { RES=$$?; echo "$@: FAILED: make pre-clean before checking subdirs for sources" >&2; exit $$RES; } ; \
	  for D in `cd '$(top_srcdir)' && find . -name '*.c' -o -name '*.cpp' | sed 's,/[^/]*\.cp*$$,,' | uniq` ; do \
		[ -e "$(top_srcdir)/$$D/Makefile.am" ] && [ -s "$$D/Makefile" ] || continue ; \
		$(MAKE) $(AM_MAKEFLAGS) -k -s $${PARMAKES_OPT} clean || { RES=$$?; echo "$@: FAILED: make clean before going to $$D" >&2; exit $$RES; } ; \
		echo "  $@	in $${D}" ; \
		( cd "$$D" && $(MAKE) $(AM_MAKEFLAGS) -k -s $${PARMAKES_OPT} ) || { RES=$$?; echo "$@: FAILED: parallel make in $$D" >&2; \
		  echo "To investigate, try:  (MAKEFLAGS='$(MAKEFLAGS)' ; export MAKEFLAGS; $(MAKE) $(AM_MAKEFLAGS) $${PARMAKES_OPT} clean ; automake -f && ./config.status && clear && (cd $${D}/ && $(MAKE) $(AM_MAKEFLAGS) V=1 $${PARMAKES_OPT} 2>&1 ; echo $$?) | tee /tmp/make.log ; RES=$$? ; grep -E '(\] Error|No rule to)' /tmp/make.log && less /tmp/make.log ; exit $$RES )"; \
		  echo "If builds were interrupted before, you may also have to re-initialize the build area completely, e.g.:  git clean -fdX ; ./ci_build.sh && $(MAKE) $(MAKEFLAGS) $(AM_MAKEFLAGS) $@" ; \
		  exit $$RES; } ; \
		DIRS="$${DIRS} $${D}" ; \
	  done ; \
	  echo "$@: SUCCESS in all checked directories:$${DIRS}" >&2