# ~/public_html/Makefile by Julian H. Stacey
# JJLATER:
#	All .html have been converted to .lmth, Except in bim/
#		The following dirs may break, Makefiles need looking at:
#			src/bsd/jhs/bin/public/phone/
#			src/bsd/jhs/bin/public/satelite/
#	I could add public /etc files

.include "/site/Makefile.rdist6"

# See also:	README.htaccess

# NO_OBJ does not stop /usr/obj/`pwd` being created,
# as subsidiary directories create it.

SUBDIR += Inc
# SUBDIR += ../private/html/mac

# SUBDIR += fun
# SUBDIR += home
# SUBDIR += jpg
# SUBDIR += pics

#ifndef BERKLIX_SERVER	#{
SUBDIR += cv		# Build depends on source text not on server.
#endif			#}

#ifndef BERKLIX_SERVER	#{
SUBDIR += bin		# Loops on land.berklix.org 9.3-RELEASE
#endif			#}

SUBDIR += aachen_1944
SUBDIR += adverts
SUBDIR += android
SUBDIR += average_adjuster
SUBDIR += bash
SUBDIR += berklix
SUBDIR += bg
SUBDIR += bike
SUBDIR += bike-fast
SUBDIR += bim
SUBDIR += blog
SUBDIR += books
SUBDIR += brexit
SUBDIR += bsd
SUBDIR += bsdpie
SUBDIR += cars/narrow
SUBDIR += chilli
SUBDIR += chinchilla_giganta
SUBDIR += consultants
SUBDIR += contact
SUBDIR += contacts
SUBDIR += cool_box
SUBDIR += corona
SUBDIR += ctm
SUBDIR += deter
SUBDIR += dots
SUBDIR += epub
SUBDIR += eu
SUBDIR += ewinter
SUBDIR += fall
SUBDIR += faraday
SUBDIR += fax
SUBDIR += ferries
SUBDIR += flights
SUBDIR += fraud
SUBDIR += free
SUBDIR += freebsd
SUBDIR += ftp
SUBDIR += garda
SUBDIR += gea
SUBDIR += gifs
SUBDIR += github
SUBDIR += hardware
SUBDIR += harries
SUBDIR += help
SUBDIR += holz
SUBDIR += ibu
SUBDIR += idacas
SUBDIR += isar
SUBDIR += jlucas
SUBDIR += kindle
SUBDIR += laptops
SUBDIR += loom
SUBDIR += mail
SUBDIR += maps
SUBDIR += mecc
SUBDIR += media
SUBDIR += meet
SUBDIR += mobile
SUBDIR += monorail
SUBDIR += ms
SUBDIR += nim
SUBDIR += north
SUBDIR += pc532
SUBDIR += pcs
SUBDIR += petitions
SUBDIR += phone
SUBDIR += pi
SUBDIR += pics
SUBDIR += pirates
SUBDIR += population
SUBDIR += post
SUBDIR += pri
SUBDIR += project
SUBDIR += putin
SUBDIR += puzzle
SUBDIR += qr
SUBDIR += rates
SUBDIR += sale
SUBDIR += scanjet
SUBDIR += security
SUBDIR += seismik
SUBDIR += sfd
SUBDIR += ski
SUBDIR += smart-home
SUBDIR += sms
SUBDIR += src
SUBDIR += ssl
SUBDIR += stadtmuenchen
SUBDIR += standards
SUBDIR += starnberg
SUBDIR += std
SUBDIR += symmetric
SUBDIR += systems
SUBDIR += techtalk
SUBDIR += teeth
SUBDIR += test
SUBDIR += tolino
SUBDIR += toytown
SUBDIR += trans
SUBDIR += translate
SUBDIR += txt
SUBDIR += vcfe
SUBDIR += video
SUBDIR += volksentscheid
SUBDIR += walk
SUBDIR += water

# Only descend into pics/isar when its Makefile is present (partial checkouts).
# No space before '(' — bmake conditional functions are written exists(path).
.if exists(pics/isar/Makefile)
SUBDIR += pics/isar
.endif

.if (make(clean) || make(obj))		# {
# Reduce SUBDIR to only clean src/
#	cleandir is not here, as I want all
#	dependent files generated local ready for access by web server;
#	so I do not limit the removal of obj links.
.if !target(html)			# {
SUBDIR = src
.endif					# }
.endif					# }

# Next target not called automatically, till I work out how to
# do it along with SUBDIR & working on different versions of FreeBSD
# NOTE: "checks", "usb" and "cdrom" are deliberately left out of "all"
# (kept in the shell-comment recipe line below as a reminder).
all:	html index.html all.html modes rd asclist grepnowww warn_mirror
	# checks usb cdrom

# Incomplete experiment JJLATER
# REVTMP=
# REV !=  echo ${SUBDIR} | \
# 	( for d in `cat` ; do \
# 		REVTMP:= echo "aa $$d bb $${REVTMP} cc" ; \
# 		done ; \
# 	echo "$${REVTMP}" )
# rev:
# 	@echo ${REV}

# Hook: regenerate all.html (from all.lmth) in the source directory.
# ${MAKE} (not literal make) so flags/jobserver propagate; && so a failed
# cd does not let the sub-make run in the wrong directory.
all_lmth_hook:	${.CURDIR}/all.lmth Makefile
	@${STA} #{
	cd ${.CURDIR} && ${MAKE} all.html
	@${FIN} #}

# Backward-compatibility hook: forwards to all_lmth_hook with a warning.
all_html_hook:	all.html
	@${STA} #{
	@echo "You should have called all_lmth_hook, calling for you"
	cd ${.CURDIR} && ${MAKE} all_lmth_hook
	@${FIN} #}

# "clean" is deliberately a near no-op at this level: generated html must
# stay in place for the web server.  It only prints suggestions for the
# real cleaning targets.
clean:
	@${STA} #{
	@# echo "Not removing all.html index.html"
	@# -cd ${.CURDIR} ; ls -l all.html index.html
	@echo "Suggestion: make clean-c"
	@echo "Suggestion: make cleandepend"
	@${FIN} #}

# List then remove all .depend* files under the current directory.
# Flat (non-SUBDIR) variant: see trailing note.
cleandependX:
	find . -type f -name \.depend\* | xargs ls -l
	@# rm -f: do not abort when no .depend files remain (matches clean-c)
	find . -type f -name \.depend\* | xargs rm -f
	@# SUBDIR fails with make cleandepend

# Remove compiler droppings under src/bsd/jhs/bin: .depend files and
# per-object .depend.*.o files.  The final find only LISTS .o files
# (it does not remove them).  The "@#"-commented stanzas are the matching
# "ls -l" preview commands, kept for manual use.
clean-c:
	@# -cd ${.CURDIR}/src/bsd/jhs/bin && \
		find . -type f -name \\.depend\\* | xargs ls -l
	-cd ${.CURDIR}/src/bsd/jhs/bin && \
		find . -type f -name \\.depend\\* | xargs rm -f
	@# -cd ${.CURDIR}/src/bsd/jhs/bin && \
		find . -type f -name \\.depend\\.\*\\.o | xargs ls -l
	-cd ${.CURDIR}/src/bsd/jhs/bin && \
		find . -type f -name \\.depend\\.\*\\.o | xargs rm -f
	-cd ${.CURDIR}/src/bsd/jhs/bin && \
		find . -type f -name \*\.o | xargs ls -l

# Regenerate all.lmth: an alphabetic HTML index of every file in ~jhs/.
# "!" operator: always re-created when named; removed if make is interrupted.
# Added the @${STA} #{ opener: the @${FIN} #} at the end previously closed
# a fold that was never opened (every sibling target pairs the two).
${.CURDIR}/all.lmth!
	@${STA} #{
	echo -n '<!DOCTYPE html PUBLIC '			>  $@
	echo	'"-//W3C//DTD HTML 4.01 Transitional//EN">'	>> $@
	echo	"<HTML>"					>> $@
	echo	"<HEAD><TITLE>Index ~jhs/</TITLE>"		>> $@
	echo -n '<meta name="description" content="Julian H. '	>> $@
	echo	'Stacey ~jhs/all.html"/>'			>> $@
	echo	'<meta name="keywords" content="Julian H. '	>> $@
	echo -n 'Stacey, Berklix, Unix, '			>> $@
	echo -n 'C Programming Language, Internet Consultant, ' >> $@
	echo	'Computer Systems Engineer"/>'			>> $@
	echo	"</HEAD>"					>> $@
	echo -n "<BODY>"					>> $@
	echo -n "<!-- DO NOT EDIT - all.lmth is generated by "	>> $@
	echo	"~/public_html/Makefile"			>> $@
	echo	"Which also generates all.html -->"		>> $@
	echo -n "<H1><a href=../>Julian H. Stacey</a>'s "	>> $@
	echo	"Index Of Files. </H1>"				>> $@
	echo -n "<H2> Commented Indexes For Some categories"	>> $@
	echo	" Also Exist </H2>"				>> $@
	echo	"<UL>"						>> $@
	echo	"<LI><H3><a href=src/>Sources</A></H3></LI>"	>> $@
	echo	"<LI><H3><a href=txt/>Texts</A></H3></LI>"	>> $@
	echo -n "<LI><H3><a href=dots/>Dot Files (Unix "	>> $@
	echo	" Initialisers)</A></H3></LI>"			>> $@
	echo -n "<LI><H3><a href=http://www.berklix.net/Default/>Domain" >> $@
	echo -n " directories</A> in /site"			>> $@
	echo	" not listed in this web file. </H3></LI>"	>> $@
	echo	"</UL>"						>> $@
	echo -n "<H2>Uncommented Alphabetic List Of All Files"	>> $@
	echo	" in ~jhs/,<br>Generated `date` by "		>> $@
	echo	"<a href=Makefile>Makefile</a></H2><UL>"	>> $@
	@#OLD (cd ${.CURDIR} ; find . -type f \! -name \*.lmth -print )
	@#OLD	| sort > ${HOME}/tmp/`basename $@`.1.tmp
	@#OLD for i in `cat ${HOME}/tmp/$@.1.tmp` ; do
	@#OLD	echo " <LI><a href=$$i>$$i</a>"		>> $@ ;
	@#OLD	done
	@# One <LI> per file; sed strips the leading "./" from find output.
	for i in `cd ${.CURDIR} ; find -s . -type f \! -name \*.lmth \
			\! -name \.just_com -print			\
			| sed -e s/..// | sort` ; do			\
		echo " <LI><a href=$$i>$$i</a>"		>> $@ ; \
		done
	@# -rm ${HOME}/tmp/$@.1.tmp
	echo -n '</UL><HR><a href="txt/footer.html">'		>> $@
	echo -n '<img alt="footer" src="/gifs/footer.gif">'	>> $@
	echo	'</a>'						>> $@
	echo	'</BODY>'					>> $@
	echo	'</HTML>'					>> $@
	chmod 644 $@
	@${FIN} #}

# List .asc files that should be converted to .txt for browser friendliness.
asclist:
	@${STA} #{
	@echo "Convert these to .txt make it easy for others browsers"
	find . -name \*.asc
	@${FIN} #}

# List stale .depend.* files; the actual removal line is kept commented out
# so this stays a dry-run until uncommented deliberately.
clean_find_depends:
	@${STA} #{
	cd ${.CURDIR} ; find . -name .depend.\* | xargs ls -l
	@# cd ${.CURDIR} ; find . -name .depend.\* | xargs rm
	@${FIN} #}

# Scan src/bsd/fixes/FreeBSD for files/links still naming obsolete
# FreeBSD releases; results go to ~/tmp/warn_find_obsolete_links.lst.
warn_find_obsolete_links:
	@${STA} #{
	@# Truncate, not touch: with touch the >> appends below made the
	@# list file grow forever across repeated runs.
	:> ~/tmp/warn_find_obsolete_links.lst
	@echo "Patches for obsolete release I no longer use"
	@# Links need to be removed After pointing to new files I still need
	-cd ${.CURDIR}/src/bsd/fixes/FreeBSD ; pwd ; \
		 find . -name \*REL=10.0-RELEASE\*	\
		>> ~/tmp/warn_find_obsolete_links.lst
	-cd ${.CURDIR}/src/bsd/fixes/FreeBSD ; pwd ; \
		 find . -name \*REL=10.1-RELEASE\*	\
		>> ~/tmp/warn_find_obsolete_links.lst
	-cd ${.CURDIR}/src/bsd/fixes/FreeBSD ; pwd ; \
		 find . -type l | xargs ls -l | grep REL=10.2-RELEASE	\
		>> ~/tmp/warn_find_obsolete_links.lst
	-cd ${.CURDIR}/src/bsd/fixes/FreeBSD ; pwd ; \
		 find . -type l | xargs ls -l | grep REL=10.3-RELEASE	\
		>> ~/tmp/warn_find_obsolete_links.lst
	@echo "I should move 10.* files to 10.4-RELEASE" # released 2017-10-03
	-cd ${.CURDIR}/src/bsd/fixes/FreeBSD ; pwd ; \
		 find . -name \*REL=11.0-RELEASE\*	\
		>> ~/tmp/warn_find_obsolete_links.lst
	@echo "I should move 11.* files to 11.3-RELEASE" # released 2019-07-09
	-cd ${.CURDIR}/src/bsd/fixes/FreeBSD ; pwd ; \
		 find . -name \*REL=9.0-RELEASE\*	\
		>> ~/tmp/warn_find_obsolete_links.lst
	@# In future I'd be better having symbolic links all pointing forward,
	@# not backward to older releases, then each new release per item,
	@# I would just have to move 1 file & create 1 link.
	@${FIN} #}

# man make:
#	! Targets are always re-created, but not until all sources
#	  have been examined and re-created as necessary.  Sources for a
#	  target accumulate over dependency lines when this operator is
#	  used.  The target is removed if make is interrupted.
# NOTE(review): the "@${STA} #{" line used to sit ABOVE this comment block,
# before the "modes!" line; bmake appends a tab-indented line there to the
# PREVIOUS target's recipe, not to modes.  Moved below "modes!" and paired
# with a closing "@${FIN} #}" at the end.
modes!
	@${STA} #{
	echo $@
.if exists(/site)
	@echo "Next chown fails only if something is not owned by jhs"
	cd /tmp; cd ${.CURDIR} && find . -print0 | \
		xargs -0 ${XS} chown -h jhs
.else	# build for cdrom inside a chroot
	find . -print0 | xargs -0 chown -h www
.endif
	@# chown fixes things like src/bsd/jhs/bin/public/filesize/tst.20
	@# which if make runs as root are uid 0
	@# The print0 & xargs -0 are to cope with a nasty
	@#	".kde/share/apps/knotes/notes/knote/knote 1"
.if exists(/site)
	cd /tmp;cd ${.CURDIR} && find . -type f -print0 \
		| xargs -0 chmod a+r
	cd /tmp;cd ${.CURDIR} && find . -type d -print0 \
		| xargs -0 chmod a+rx
.else	# build for cdrom inside a chroot
.endif
	@# Symbolic links must be owned by me, (not root as I once discovered
	@# some to be), else I suspect rdist may not upload them to the web.
	@${FIN} #}

# rdist upload step, currently stubbed out to an echo ("!" = always runs).
rd!
	@echo "skipping rdist lap 2>&1 > /dev/null	&"
# Run tidy(1) in place (-m) over all .lmth sources, except 3 generated files.
tidy:
	@${STA} #{
	@# Exclusions use .lmth names: the find lists .lmth files, so the old
	@# .html patterns (bin/index.html etc.) could never match and the
	@# three grep -v lines excluded nothing.
	find . -type f -name \*.lmth | \
		grep -v ./bin/index.lmth	| \
		grep -v ./src/fixes.lmth	| \
		grep -v ./dots/index.lmth	| \
		xargs tidy -i -m
	@# I could add some bim m4 exceptions to the list above,
	@# but not important if I dont, equally unimportant if I
	@# werent to except the 3 above.
	@${FIN} #}

# Filter a typescript from "script webcp_local_one_off" down to only the
# errors worth fixing (known-noisy paths and expected HTTP statuses removed).
script_strip:
	@${STA} #{
	@# Run this after "script webcp_local_one_off"
	@# to show only errors im interested to fix.
	@# Fixed: the last grep used to end with a dangling "| \" continuing
	@# onto a blank line, leaving the pipeline ending in a bare "|" —
	@# a shell syntax error that broke this target.
	cat typescript | \
		grep -v jhs/ewinter	| \
		grep -v jhs/bim/talks/asterisk | \
		grep -v Inc/gifs | \
		grep -v JHS | \
		grep -v "Authorization Required" | \
		grep -v "Forbidden" | \
		grep -v "not parsed, looks like binary"
	@${FIN} #}
# JJLATER fix webcp_local_one_off to supply flags to tell httrack
#	to ignore files called inc & .sed

# Copy the web tree to USB media, both filesystem flavours.
# Added the @${FIN} #} closer: usb opened an STA fold it never closed.
usb:	usb.ufs usb.dos
	@${STA} #{
	@${FIN} #}
usb.ufs:
	@${STA} #{
	cd ${.CURDIR}/.. ; rdist -R -f public_html/Distfile usb.ufs
	@${FIN} #}
usb.dos:
	@${STA} #{
	cd ${.CURDIR}/.. ; rdist -R -f public_html/Distfile usb.dos
	@${FIN} #}

# Placeholder: intended to build a Rock Ridge CD image of the tree.
cdrom:
	@${STA} #{
	@echo "JJLATER also mkisofs a cdrom rock ridge image & put it on"
	@echo "king /dev/da0s1 so that it can be read on MS systems"
	@${FIN} #}

# Scan for phone numbers @ other strings I may not want public.
# SCAN_TEL: private phone/fax numbers, defined in ../private/Makefile_personal
# when available; a placeholder word otherwise so grep_tel still runs.
.if exists(../private/Makefile_personal)
.include "../private/Makefile_personal"
SCAN_TEL =
SCAN_TEL += $(TKOM_CARD_FAX)
SCAN_TEL += $(TKOM_CARD_TEL)
SCAN_TEL += $(TKOM_FAMILY_TEL)
SCAN_TEL += $(TKOM_MODEM)
SCAN_TEL += $(TKOM_PERMIN_TEL)
SCAN_TEL += $(TKOM_PERSONAL_FAX)
SCAN_TEL += $(TKOM_PERSONAL_TEL)
SCAN_TEL += $(TKOM_MEL)
SCAN_TEL += $(MOBILE_REAL)
# JJLATER MOBILE_REAL is just last digit string
# JJLATER MOBILE_DE is full +49 etc
# SCAN_TEL += $(TKOM_CDROM_TEL)		# Do not scan for this.
# SCAN_TEL += $(TKOM_WEB_TEL)		# Do not scan for this.
SCAN_TEL += $(AACHEN_1)
SCAN_TEL += $(AACHEN_2)
SCAN_TEL += $(AACHEN_3)
SCAN_TEL += $(MOBILE_UK)
.else
SCAN_TEL =	"no_Makefile_personal_list_of_numbers"
.endif

# Grep the whole tree for each private number in SCAN_TEL and list any
# file that would leak it.  Leading "-": keep going when grep finds nothing.
grep_tel:
	@${STA} #{
	-for i in ${SCAN_TEL} ; \
		do	\
		echo "Scanning for $$i" ; \
		find . -type f \
		| xargs grep -l $$i ; \
		done
	@echo done
	@${FIN} #}

# Scan for phone numbers @ other strings I may not want public
# SCAN_EML: email addresses to scan for.  The quoted forms below
# (jhs@'berklix'\.com) apparently keep this Makefile itself from matching
# its own grep — TODO confirm that is the intent.
.if exists(../private/Makefile_personal)
SCAN_EML =
SCAN_EML +=	$(EMAIL_BUSINESS)
SCAN_EML +=	$(EMAIL_PERSONAL)
SCAN_EML +=	$(LEAK_SCAN)
.else
SCAN_EML =	"no_Makefile_personal_list_of_emails"
.endif
SCAN_EML +=	jhs@'berklix'\.com
SCAN_EML +=	jhs@'berklix'\.net
SCAN_EML +=	jhs@'berklix'\.org
SCAN_EML +=	jhs\-list@'berklix'\.com
SCAN_EML +=	jhs\-list@'berklix'\.net
SCAN_EML +=	jhs\-list@'berklix'\.org
SCAN_EML +=	jhs@'bsdpie'\.com
SCAN_EML +=	jhs@'freebsd'\.org
SCAN_EML +=	jhs@'surfacevision'\.com
SCAN_EML +=	postmaster@'berklix'\.com
SCAN_EML +=	postmaster@'berklix'\.net
SCAN_EML +=	postmaster@'berklix'\.org
SCAN_EML +=	postmaster@'bsdpie'\.com
SCAN_EML +=	postmaster@'surfacevision'\.com
SCAN_EML +=	webmaster@'berklix'\.com
SCAN_EML +=	webmaster@'berklix'\.net
SCAN_EML +=	webmaster@'berklix'\.org
SCAN_EML +=	webmaster@'bsdpie'\.com

# Grep the tree for each address in SCAN_EML; lists files that mention one.
grep_private:
	@${STA} #{
	@echo Scanning for ${SCAN_EML}
	@# The echo is to break up components in LEAK_SCAN.
	-cd ${.CURDIR}; for i in `echo ${SCAN_EML}` ; \
		do	\
		echo " " ; \
		echo "Scanning for $$i" ; \
		find . -type f | xargs grep -l $$i ; \
		done
	@echo done
	@${FIN} #}

# Show creators of local PostScript files: extract "%%For:" header comments
# from .ps/.eps.  rev|sort|uniq|rev dedups on the reversed lines.
grep_for:
	@${STA} #{
	-cd ${.CURDIR}; find . -name \*.eps -o -name \*.ps | \
		xargs grep "^%%For: " | rev | sort | uniq | rev
	@${FIN} #}

# LINKCHECKER=	-t -1 -r20 --no-warnings --no-status
LINKCHECKER=
LINKCHECKER+=	-t-1
#		-t-1	disable threading
#		Cos otherwise going through the log
#		looking for errors, one keeps dancing around, chopping
#		between the same handful of files editing them, & it's
#		a waste of time to keep editing single errors in
#		different files, closing & opening others.
LINKCHECKER+=	-r20
#		-r20	restrict recursion to 20, not infinite
LINKCHECKER+=	--no-warnings
#		Suppress warnings until all errors cleared.

# LINKCHECKER+= -oblacklist
#	too cryptic: only lists failing URLs, but not where they come from,
#	so might be OK later if I only have a few errors left to
#	keep under control, but not now while I have loads to search for.

LINKCHECKER+=	--no-status
# LINKCHECKER+= --ignore-url=^https://
#		LinkChecker internal error, over and out
# LINKCHECKER+= --ignore-url=https://
#		LinkChecker internal error, over and out
# LINKCHECKER+= --ignore-url=^mailto:
#		I deliberately cripple all my mailto to upset spammers
#		so I used not to check,
#		but now am I am moving my cripplings to left of the @
#		so I can at least check domains exist.
#	-q

# Aggregate verification target: run every check sub-target in sequence.
checks: checkindex all_lmth_hook linkcheck linkchecker.jhs httrack_chk \
	symlinkcheck checksite checklmthhtml

# Compare the set of .lmth sources with the generated .html files:
# the diff should be empty when every source has been converted.
checklmthhtml:	html
	@${STA} #{
	@# sed anchored and escaped: the old unquoted s/.lmth// let "."
	@# match any character and could strip a match mid-path.
	@# $$ passes a literal $ through make to sed.
	find . -type f -name \*.lmth | sed -e 's/\.lmth$$//' | \
		sort > ~/tmp/$@.lmth.lst
	find . -type f -name \*.html | sed -e 's/\.html$$//' | \
		sort > ~/tmp/$@.html.lst
	-diff ~/tmp/$@.lmth.lst ~/tmp/$@.html.lst | more
	rm ~/tmp/$@.lmth.lst ~/tmp/$@.html.lst
	@${FIN} #}

# Sanity check: index.html must live in the real source directory,
# not only under /usr/obj, or the web server will not see it.
checkindex:
	@${STA} #{
	@echo "Starting `pwd`/Makefile $@"
	test -e ${.CURDIR}/index.html
	@# Above is to check index.html is under eg
	@# /home/jhs/public_html/ or /0s1/usr1/home/jhs/._dir/public_html
	@# and not under /usr/obj/`pwd`, else ~jhs/ Web will not be visible
	@# to browsers.
	@echo "Finished `pwd`/Makefile $@"
	@${FIN} #}

# Old linkcheck(1) run, currently skipped (the echo neuters the command);
# only shows any previous report file before and after.
linkcheck:
	@${STA} #{
	@echo "Starting `pwd`/Makefile $@"
	@# linkcheck -help
	-ls -l ${HOME}/tmp/linkcheck.report.localhost
	@# This recurses.
	@# It doesnt write ${HOME}/linkcheck.report.localhost till ^C
	echo Skipping # cd ${HOME}/tmp && linkcheck -r http://localhost/~jhs/
	@# the command above creates loads of useless "Bad return code!!!!"
	-ls -l ${HOME}/tmp/linkcheck.report.localhost
	@echo "Finished `pwd`/Makefile $@"
	@${FIN} #}

# Full linkchecker(1) run over the local ~jhs tree via the web server.
linkchecker.jhs:
	@${STA} #{
	@echo "Starting `pwd`/Makefile $@"
	@echo "Big test of all of ~jhs"
	@echo "Problem faraday & newer have a load of missing .txt in"
	@echo "free/talk/faraday/presentations/export/1_intro_julian/"
	@# - Run on remote servers fails, fails,
	@#   complaining it needs some python that clashes with Mailman.
	@# - Run on gate: Loads of spurious 'Connection reset by peer'
	@# - Run on internal host behind a firewall, using a proxy:"
	@#   all these things fail, making it hard to peruse the log
	@#	Error - Cause
	@#	403 Forbidden - https
	@#	error: [Errno 61] Connection refused - mailto:
	@#   These would still fail even if run direct on internet:
	@#	401 Unauthorized - passworded pages
	cd ${HOME}/tmp && linkchecker ${LINKCHECKER} http://localhost/~jhs/
	@echo "Finished `pwd`/Makefile $@"
	@${FIN} #}

# Spider (no copy) the local tree with httrack to report broken links;
# the log lands in ${HOME}/tmp/hts-log.txt.
httrack_chk:
	@${STA} #{
	@echo "Starting `pwd`/Makefile $@"
	@echo "This will not copy, just check"
	@# It is not much use as:
	@# - It produces a files with no CR,
	@#   so I use s/localhost/ControlVEnter/g
	@# - Theres then a load of Escape[ to swap to CR
	@# - Then use !}sort !}uniq
	@# - Then unresolved refs end in eg:
	@#	localhost/~jhs/cv/stacey_g.1.pcl (220 bytes) - 404
	@# - So use | rev | sort | rev
	@#	- 301	OK
	@#	- 401	Password locked
	@#	- 403	.htaccess
	cd ${HOME}/tmp && httrack -d --spider http://localhost/~jhs/
	@#	--spider <URLs>, to test links: reports Errors
	@#		& Warnings (-p0C0I0t)
	@#	-t test all URLs (even forbidden ones) (--test)
	@# See more httrack notes in bin/.sh/web_cp_remote
	cd ${HOME}/tmp && ls -l hts-log.txt
	@echo "Finished `pwd`/Makefile $@"
	@${FIN} #}

# Demonstrate that tar -H does NOT detect a broken symlink.
# NOTE(review): "dangling-link" is left behind in the tree after this runs —
# presumably deliberate (the "-" on ln tolerates reruns) — confirm.
symlinkcheck:
	@${STA} #{
	@echo "Starting `pwd`/Makefile $@"
	@echo "Creating a deliberately bad link"
	-ln -s dangling-end dangling-link
	ls -l dangling-link
	@echo "Next test[s] fail to detect this broken dangling link"
	cd ${.CURDIR} ; nice tar -c -f /dev/null -H .
	@echo "Finished `pwd`/Makefile $@"
	@${FIN} #}

# Reminder that /site/Makefile carries its own check* targets.
# Added the @${FIN} #} closer: the STA fold was never closed here,
# unlike every other target in this file.
checksite:
	@${STA} #{
	echo "Consider: " grep "^check" /site/Makefile
	@${FIN} #}

linkchecker.com:	# Called by nothing.
	@${STA} #{
	@echo "Starting `pwd`/Makefile $@"
	cd ${HOME}/tmp && linkchecker ${LINKCHECKER}		\
	 --no-follow-url=http://www.berklix.com/~jhs/		\
	 --no-follow-url=http://www.berklix.com/jhs/		\
	 --no-follow-url=http://berklix.com/~jhs/		\
	 --no-follow-url=http://berklix.com/jhs/		\
	http://berklix.com
	@#	--ignore-url= http://www.berklix.com/~jhs/
	@echo "Finished `pwd`/Makefile $@"
	@${FIN} #}

linkchecker.net:	# Called by nothing.
	@${STA} #{
	@echo "Starting `pwd`/Makefile $@"
	cd ${HOME}/tmp && linkchecker ${LINKCHECKER}		\
	 --no-follow-url=http://www.berklix.net/~jhs/		\
	 --no-follow-url=http://www.berklix.net/jhs/		\
	 --no-follow-url=http://berklix.net/~jhs/		\
	 --no-follow-url=http://berklix.net/jhs/		\
	http://berklix.net
	@echo "Finished `pwd`/Makefile $@"
	@${FIN} #}

linkchecker.org:	# Called by nothing.
	@${STA} #{
	@echo "Starting `pwd`/Makefile $@"
	cd ${HOME}/tmp && linkchecker ${LINKCHECKER}		\
	 --no-follow-url=http://www.berklix.org/~jhs/		\
	 --no-follow-url=http://www.berklix.org/jhs/		\
	 --no-follow-url=http://berklix.org/~jhs/		\
	 --no-follow-url=http://berklix.org/jhs/		\
	http://berklix.org
	@echo "Finished `pwd`/Makefile $@"
	@${FIN} #}

linkchecker.eu:		# Called by nothing.
	@${STA} #{
	@echo "Starting `pwd`/Makefile $@"
	cd ${HOME}/tmp && linkchecker ${LINKCHECKER}		\
	 --no-follow-url=http://www.berklix.eu/~jhs/		\
	 --no-follow-url=http://www.berklix.eu/jhs/		\
	 --no-follow-url=http://berklix.eu/~jhs/		\
	 --no-follow-url=http://berklix.eu/jhs/			\
	http://berklix.eu
	@echo "Finished `pwd`/Makefile $@"
	@${FIN} #}

linkchecker.de:		# Called by nothing.
	@${STA} #{
	@echo "Starting `pwd`/Makefile $@"
	cd ${HOME}/tmp && linkchecker ${LINKCHECKER}		\
	 --no-follow-url=http://www.berklix.de/~jhs/		\
	 --no-follow-url=http://www.berklix.de/jhs/		\
	 --no-follow-url=http://berklix.de/~jhs/		\
	 --no-follow-url=http://berklix.de/jhs/			\
	 http://berklix.de
	@echo "Finished `pwd`/Makefile $@"
	@${FIN} #}

linkchecker.uk:		# Called by nothing.
	@${STA} #{
	@echo "Starting `pwd`/Makefile $@"
	cd ${HOME}/tmp && linkchecker ${LINKCHECKER}		\
	 --no-follow-url=http://www.berklix.uk/~jhs/		\
	 --no-follow-url=http://www.berklix.uk/jhs/		\
	 --no-follow-url=http://berklix.uk/~jhs/		\
	 --no-follow-url=http://berklix.uk/jhs/			\
	 http://berklix.uk
	@echo "Finished `pwd`/Makefile $@"
	@${FIN} #}

linkchecker.bsdpie.eu:		# Called by nothing.
	@${STA} #{
	@echo "Starting `pwd`/Makefile $@"
	cd ${HOME}/tmp && linkchecker ${LINKCHECKER}		\
	 --no-follow-url=http://www.bsdpie.eu/~jhs/		\
	 --no-follow-url=http://www.bsdpie.eu/jhs/		\
	 --no-follow-url=http://bsdpie.eu/~jhs/		\
	 --no-follow-url=http://bsdpie.eu/jhs/			\
	 http://bsdpie.eu
	@echo "Finished `pwd`/Makefile $@"
	@${FIN} #}

# Mirror the local ~jhs web tree with httrack into ~/tmp/httrack.
httrack_cp:
	@${STA} #{
	@echo "Starting `pwd`/Makefile $@"
	@# mkdir -p: plain mkdir aborted the && chain on any rerun
	@# once the directory already existed.
	mkdir -p ${HOME}/tmp/httrack && cd ${HOME}/tmp/httrack && \
		httrack -a -d -w http://localhost/~jhs/
	@# -a = --stay-on-same-address
	@# -d	stay on the same principal domain (--stay-on-same-domain)
	@# -w	*mirror web sites (--mirror)
	@# Each recipe line runs in its own shell, so the cd above does not
	@# persist; the old bare "ls -l hts-log.txt" looked in ${.CURDIR}
	@# where httrack never writes its log.
	cd ${HOME}/tmp/httrack && ls -l hts-log.txt
	@echo "Finished `pwd`/Makefile $@"
	@${FIN} #}

# List leftover xv(1) thumbnail directories (.xvpics) for manual removal.
findxv:
	@${STA} #{
	find . -type d -name .xvpics
	@${FIN} #}

pwd:
#	${JHS_BASE} is defined in /site/Makefile.rdist6.common
#       For reasons beyond the scope of this Makefile
#               ( Common home on multiple hosts, multiple domains
#               on laptop, partial export, allow switching home base etc)
#       I have these symbolic links:
#               ~/public_html -> .HOME/public_html
#               ~/.HOME -> ._
#               ~/._ -> ._dir
#       so      ~/public_html indirects to directory:
#               ~/._dir/public_html
	@${STA} #{
	@# Show where we are. Consider:
	@# - May be /home/jhs/public_html
	@# - May be /usr/obj/home/jhs/public_html	- If /usr/obj exists.
	@# - May be /.amd_mnt/fire/home/jhs/public_html - If on a remote host.
	@# - Which Shell? :
	@#   - The cd that is called within Makefile is built in to a bourne shell for that line.
	@#   - Whereas I normally run an interactive cshell,
	@#   - With these symbolic links:
	@#		~/public_html -> .HOME/public_html
	@#		~/.HOME		-> ._
	@#		~/._		-> ._dir
	@#	Man builtin lists:
	@#		Command       External    csh(1)    sh(1)
	@#		pwd           Yes         No        Yes
	@#		Commands marked ``No**'' under External do exist externally, but are
	@#		implemented as scripts using a builtin command of the same name.
	@#	file /bin/pwd
	@#		/bin/pwd: ELF 64-bit LSB executable, x86-64, version 1 (FreeBSD),
	@#		 dynamically linked (uses shared libs), for FreeBSD 9.2, stripped
	@#	The 2 shells differently implement cd differently:
	@#	/bin/csh:	"cd ~/public_html ; cd .. ; /bin/pwd"   /home/jhs/._dir
	@#	/bin/csh:	"cd ~/public_html/.. ; /bin/pwd"	/home/jhs/._dir
	@#	/bin/sh:	"cd ~/public_html ; cd .. ; /bin/pwd" 	/home/jhs
	@#	/bin/sh:	"cd ~/public_html/.. ; /bin/pwd" 	/home/jhs
	@echo -n "01 "					;      pwd
	@# fire:	       /usr/obj/home/jhs/._dir/public_html
	@# lapr:	 /.amd_mnt/fire/home/jhs/._dir/public_html
	@# lapr: /usr/obj/.amd_mnt/fire/home/jhs/._dir/public_html
	@#
	@echo -n "02 "					; /bin/pwd
	@# fire:	       /usr/obj/home/jhs/._dir/public_html
	@# lapr:	 /.amd_mnt/fire/home/jhs/._dir/public_html
	@# lapr: /usr/obj/.amd_mnt/fire/home/jhs/._dir/public_html
	@#
	@echo -n "03 " ; cd ${.CURDIR}			;      pwd
	@# fire:	       /home/jhs/._dir/public_html
	@# lapr: /.amd_mnt/fire/home/jhs/._dir/public_html
	@#
	@echo -n "04 " ; cd ${.CURDIR}			; /bin/pwd
	@# fire:	       /home/jhs/._dir/public_html
	@# lapr: /.amd_mnt/fire/home/jhs/._dir/public_html
	@#
	@echo -n "05 " ; cd ${.CURDIR} ; cd ..		;      pwd
	@# fire:               /home/jhs/._dir
	@# lapr: /.amd_mnt/fire/home/jhs/._dir
	@#
	@echo -n "06 " ; cd ${.CURDIR} ; cd ..		; /bin/pwd
	@# fire:               /home/jhs/._dir
	@# lapr: /.amd_mnt/fire/home/jhs/._dir
	@#
	@echo -n "07 " ; cd ${.CURDIR}/..		;      pwd
	@# fire:               /home/jhs/._dir
	@# lapr: /.amd_mnt/fire/home/jhs/._dir
	@# NOTE different to interactive /bin/sh which gives /home/jhs
	@#
	@echo -n "08 " ; cd ${.CURDIR}/..		; /bin/pwd
	@# fire:               /home/jhs/._dir
	@# lapr: /.amd_mnt/fire/home/jhs/._dir
	@# NOTE different to interactive /bin/sh which gives /home/jhs
	@#
	@echo -n "09 " ; cd ${HOME}/public_html		;      pwd
	@#	 /home/jhs/public_html
	@#
	@echo -n "10 " ; cd ${HOME}/public_html 	; /bin/pwd
	@# fire:	       /home/jhs/._dir/public_html
	@# lapr: /.amd_mnt/fire/home/jhs/._dir/public_html
	@#
	@echo -n "11 " ; cd				;      pwd
	@#	 /home/jhs
	@#
	@echo -n "12 " ; cd				; /bin/pwd
	@#       /home/jhs
	@#
	@echo -n "13 " ; cd ${HOME}/public_html/..	;      pwd
	@#	 /home/jhs
	@#
	@echo -n "14 " ; cd ${HOME}/public_html/..	; /bin/pwd
	@#       /home/jhs
	@#
	@echo -n "15 " ; cd ${HOME}/public_html/..	;      pwd
	@#	 /home/jhs
	@#
	@echo -n "16 " ; cd ${HOME}/public_html/..	; /bin/pwd
	@#       /home/jhs
	@#
	@echo -n "17 " ; cd ${HOME}/._dir		;      pwd
	@#	 /home/jhs/._dir
	@#
	@echo -n "18 " ; cd ${HOME}/._dir		; /bin/pwd
	@# fire:                /home/jhs/._dir
	@# lapr: /crypt/fs/1700m/home/jhs/._dir
	@#
	@echo -n "19 " ; cd ${JHS_BASE}			;      pwd
	@#
	@echo -n "20 " ; cd ${JHS_BASE}			; /bin/pwd
	@#
	@${FIN} #}

# rdist6 upload targets: push public_html from the internal master host to
# the gate host(s).  The Distfile label equals the target name ($@).
int2gate:	# fire: cd ~/public_html ; make int2gate
	@${STA} #{
	@# cd ${.CURDIR} ; make clean
	cd ${.CURDIR}/.. && rdist6 -P /usr/bin/ssh \
		-f public_html/Distfile $@
	@${FIN} #}

int2gates:	# fire: cd ~/public_html ; make int2gates
	@${STA} #{
	@# cd ${.CURDIR} ; make clean
	cd ${.CURDIR}/.. && rdist6 -P /usr/bin/ssh \
		-f public_html/Distfile $@
	@${FIN} #}


bin2gate:	# cd public_html ; make bin2gate
	@${STA} #{
	cd ${.CURDIR}/.. && rdist6 -P /usr/bin/ssh \
		-f public_html/Distfile $@
	@${FIN} #}

# Delegate to ~/xtra's noroot target (pre-upload ownership check).
noroot:
	@${STA} #{
	cd ~/xtra ; make noroot
	@${FIN} #}

# On gate or dell.no.berklix.net: cd ~/public_html ; make ext2remo
# Push from the external host to remote web servers (2 in parallel, -M 2).
ext2remo: noroot int2remohook
	@${STA} #{
	-cd ${.CURDIR}/..; rdist6 -P /usr/bin/ssh \
		-f public_html/Distfile -M 2 ext2remo
	@# - is to ignore error if 1 remote server is down.
	@${FIN} #}

# Disabled: recipe is warnings only — see the history in the echoes below.
# The real (dangerous) recipe lives under JJLATERintremo.
int2remo:
	@echo "Bodged out till debug JJLATER"
	@echo "when laptop was connected direct to router,"
	@echo "and domain name was dell.no.berklix.net"
	@echo "this label went berserk & started deleting all of my master"
	@echo "~/public/html"
	@echo "till I killed it & repaired damage from backup."
	@echo "I must analyse why JJLATER"
	@echo "To re-enter hell & test, after backup, type make JJLATERintremo"

# Quarantined int2remo: uploads to gate, then from gate to remote servers.
# DANGEROUS — see the warnings under int2remo; run only after a backup.
JJLATERintremo:	noroot int2remohook # On fire.js.berklix.net: cd ~/pu*l ; make int2remo
	@${STA} #{
	echo "Starting $@ { `date`"
	@# ${MAKE} (not literal make) so flags propagate to the sub-make.
	cd ${.CURDIR} && ${MAKE} int2gate
	-ssh gate "cd ${HOME}/public_html/..; rdist6 -P /usr/bin/ssh \
		-f public_html/Distfile -M 2 ext2remo"
	@# - is to ignore error if 1 remote server is down.
	echo "Finished $@ } `date`"
	@${FIN} #}

# Pre-upload reminder hook about ~/httpuserfile (htpasswd data kept
# outside public_html); fails if the file is missing.
# Added "@" before ${STA}/${FIN}: every other target silences them, and
# without "@" the marker commands themselves were echoed.
int2remohook:
	@${STA} #{
	@echo "Please JJLATER Add an update to ~/public_html/Makefile"
	@echo "for ~/httpuserfile (held outside of ~public_html"
	@echo "which holds lines of plain names & encrypted passwords)."
	ls -l ${.CURDIR}/../httpuserfile
	@${FIN} #}

# Count (grep1) / list (grep2) .lmth files that still reference the bare
# http://berklix host names instead of www.-prefixed ones.
grepnowww:	grep1
grep1:
	@${STA} #{
	cd ${HOME}/public_html && find . -type f -name \*.lmth | \
		xargs grep -l http://berklix | wc -l
	@${FIN} #}
grep2:	# JJLATER
	@${STA} #{
	@# Search for non www. addresses
	@# (I have already purged /site).
	cd ${HOME}/public_html && find . -type f -name \*.lmth | \
		xargs grep -l http://berklix
	@# later extend the search to include .html then all then other domains
	@# bsdpie surface
	@${FIN} #}

# warn: umbrella for warning scans; warn_mirror only prints advice.
warn:	warn_find_obsolete_links

warn_mirror:
	@${STA} #{
	@echo "Warning a mirror of gate with web_cp_local_one_off produces > 1G"
	@echo "with such spurious duplicates as:"
	@echo "JHS/standards/gifs/berklix.net-vert.gif"
	@echo "& the mirror takes more than all night to run"
	@# This is cos I need to get rid of sym links such as gifs -> ../gifs
	@# & dups of _jhs & JHS
	@${FIN} #}

# Find files still quoting the old "36.000" package count.
# NOTE(review): "grep -v ./Makefile" only works because "." matches any
# character, so it matches ".../Makefile" in the absolute paths find
# prints — fragile, but effective; confirm before tightening.
package_number:
	find -s /home/jhs/public_html /site -type f | \
		xargs grep -l 36.000 | grep -v ./Makefile
	@#  /site/usr/local/www/Data/virtual/berklix.com/vsl/legal/index.lmth
	@#  ~/job/groff/handout/vector.l_rof	# /site/usr/local/www/Data/virtual/berklix.com/vsl/vector_[eg].txt
	@#  ~/job/groff/handout/frozen_original/vector.l_rof
	@#  ~/public_html/free/talk/faraday/2013-01-12/index.lmth
	@#  ~/public_html/free/talk/faraday/presentations/source/1_intro_julian/faraday/talk.mgp
	@#  ~/public_html/free/talk/faraday/presentations/export/1_intro_julian/faraday/mgp00020.html
	@#  ~/public_html/free/talk/faraday/presentations/source/1_intro_julian/newer/talk.mgp
	@#  ~/public_html/free/talk/faraday/presentations/export/1_intro_julian/newer/mgp00021.html
	@#  ~/public_html/freebsd/packages/index.lmth
	@#  ~/public_html/index.lmth
	@#  ~/public_html/sale/index.lmth
	@#  ~/public_html/txt/tar_tape_recovery.lmth

# One-shot host setup: install the jhs/berklix mk files and local extras.
# mkdir -p: plain mkdir aborted the target when run a second time and the
# directory already existed (L914 already used -p; now consistent).
# ${MAKE} so sub-makes inherit flags.
release:	# temporary label
	@echo "This is run once to raise a generic to a jhs berklix."
	cd ~/public_html/src/bsd/fixes/FreeBSD/src/jhs/share/mk; \
		 cp berklix.mk	/usr/share/mk/
	cd ~/public_html/src/bsd/fixes/FreeBSD/src/jhs/share/mk; \
		 cp berklix2.mk	/usr/share/mk/
	cd ~/public_html/src/bsd/fixes/FreeBSD/src/jhs/share/mk; \
		 cp berklix_no_man.mk	/usr/share/mk/
	cd ${.CURDIR}/standards ; ${MAKE} install
	mkdir -p /usr/local/share/xmodmap
	cd ~/public_html/dots/.xmodmap; ${MAKE} install
	mkdir -p /usr/local/www/data/virtual
	cd ~/public_html/src/bsd/jhs/bin/public/xearth4; ${MAKE} install
	@echo "Suggestion: cd ${.CURDIR} ; make sasl-test"

# Send test mails to confirm SASL mail delivery works after "release".
sasl-test:
	date | mail -s "tst from `hostname -s` to jhs-list@land.berklix.org" jhs-list@land.berklix.org
	date | mail -s "tst from `hostname -s` to jhs@land.berklix.org" jhs@land.berklix.org
	@# date | mail -s "tst from `hostname -s` to jhs-list@slim.berklix.org" jhs-list@slim.berklix.org
	@# date | mail -s "tst from `hostname -s` to jhs@slim.berklix.org" jhs@slim.berklix.org

# check: umbrella; gif_transparent_check is still a stub (recipe is all
# shell comments) pending a per-file transparency test.
check:	gif_transparent_check

# JJLATER
#	Add a find . -name \*.gif
#	check all gif are of type transparent
gif_transparent_check:
	@# cd ${.CURDIR} && find . -type f | grep .gif | sort | xargs file | grep "GIF image data" |  more
	@# file does not show what is transparent, so I need to process all with a for loop.

.include <berklix.mk>
