# Makefile to keep this microsite up to date.

# Default goal.
# Double-colon rule: sections throughout this file append further
# prerequisites and recipes to 'all' as they are defined.
all::
	@echo Building all...

# Current UTC date components, captured once at parse time (:=).
UTCYEAR := $(shell date -u +'%Y')
UTCMONTH := $(shell date -u +'%m')
UTCDAY := $(shell date -u +'%d')

# Suffix for GZip optimised files, without a dot.
# (Site convention is eg page.htmlgz, with no dot before the suffix.)
COMPSUFGZIP=gz
# Suffix for Brotli optimised files, without a dot.
COMPSUFBROTLI=br
# Most-pre-compressed suffix currently generally supported.
COMPSUFMAX=${COMPSUFBROTLI}

# If true, AMP is deprecated.
# Parallels the flag in wrap_art.
AMPDEPRECATED=true


# Use of lockfile with no (0) retries to fail-fast on contention.
LOCKFILENR=lockfile -r 0
# Standard lock-out time for tasks not expected to be very long.
# This is long enough to avoid prematurely letting a second instance run
# if for example the system is running very slow (~15m).
LOCKFILENRSTD=$(LOCKFILENR) -l 907
# Slow lock for somewhat expensive tasks (~2h).
LOCKFILENRSLOW=$(LOCKFILENR) -l 7211
# Conventional exclusive locker such as flock, invoked as:
#     $(FLOCK) "lockfile" -c "command"
FLOCK=flock
# Non-blocking flock.
FLOCKNB=$(FLOCK) -n
# EXAMPLE (9>> opens the lock file on shell fd 9 for the subshell):
#	@($(FLOCKNB) 9 || exit 1; \
#	    sh script/genPodcastHTMLinc.sh > $@.tmp && \
#            /bin/mv $@.tmp $@ ) 9>>$(@D)/.$(@F).flock
# Blocking flock with 'standard' timeout (~15m, matching LOCKFILENRSTD).
FLOCKBSTD=$(FLOCK) -w 907

# If present, and if a non-zero value, the Pi is throttling, eg thermally.
# The file either exists or not for an entire run,
# (RPITHROTTLEFILEEXISTS defined and true)
# but when the file exists its time between changes can be seconds.
RPITHROTTLEFILE=/sys/devices/platform/soc/soc:firmware/get_throttled
# This value is always 0 if throttle file does not exist,
# else is 0 if throttle file exists but CPU is not throttling,
# else is non-zero.
# Defaults (overridden below when the throttle file is present):
RPITHROTTLEVALCMD = echo 0
RPITHROTTLEVAL = 0
ifneq ($(wildcard $(RPITHROTTLEFILE)),)
RPITHROTTLEFILEEXISTS := true
# The value of RPITHROTTLEVAL is read from the system each time.
# NOTE: deliberately recursive '=' with $(shell ...) (NOT ':=' or '!='),
# so the throttle file is re-read on every expansion/use.
RPITHROTTLEVALCMD = cat $(RPITHROTTLEFILE)
RPITHROTTLEVAL = $(shell $(RPITHROTTLEVALCMD))
endif
# Allow some work to be avoided when the GB grid status is red (high intensity).
# This could be work that will induce network traffic for example.
# Presence of GRIDRED1DFLAG indicates grid intensity high cf last 24h.
GRIDRED1DFLAG=_gridCarbonIntensityGB.red.flag
# Presence of GRIDRED7DFLAG indicates grid intensity high cf last week.
GRIDRED7DFLAG=_gridCarbonIntensityGB.7d.red.flag
# Allow some work to be postponed until healthy/excess power available.
# Flag present on server system to indicate low power available.
PWRLOWFLAG=/run/EXTERNAL_BATTERY_LOW.flag
# Allow some work to be postponed until healthy/excess power available.
# Flag present on server system to indicate healthy power available.
PWRHIGHFLAG=/run/EXTERNAL_BATTERY_HIGH.flag
# Flag present on server system to indicate excess power available.
PWRVHIGHFLAG=/run/EXTERNAL_BATTERY_VHIGH.flag
# Flag present on server when it's dumping excess power.
# Not generally any of our business if we're propping up the grid, but...
# May not happen for very long so don't use to trigger many Wh of tasks.
PWRDUMPING=/run/DUMPING.flag
# Pair for flags one of which should always be present on battery system.
# If both are absent then this system should ignore the other PWR flags.
PWRPAIR=/run/EXTERNAL_BATTERY_NOTHIGH.flag $(PWRHIGHFLAG)
# If NOPWR is defined and non-empty then ignore other PWR flags.
# (Neither flag of the pair exists, so this is not the battery system.)
ifeq ($(wildcard $(PWRPAIR)),)
NOPWR := true
endif
# Diagnostic dump of power/throttle state.
# Double-colon so other sections can append further debug output.
.PHONY: debuginfo
debuginfo::
	@echo "NOPWR=$(NOPWR), flags present: $(wildcard $(PWRVHIGHFLAG) $(PWRHIGHFLAG) $(PWRDUMPING) $(PWRLOWFLAG))"
	@echo RPITHROTTLEVAL=$(RPITHROTTLEVAL)

# Location of optipng PNG optimiser, if any.
OPTIPNG=/usr/bin/optipng
# Settings for optimisation of moderately long-term images.
OPTIPNGMAX=-o7
OPTIPNGLOTS=-o4

# Find zopfli tools for maximal GZip compression.
# Evaluated once at parse time (:=) to avoid a shell fork per use.
ZOPFLIDIR := $(shell if [ -x /usr/bin/zopfli ]; then echo /usr/bin; else echo /usr/local/bin; fi)
# Location of zopfli compressor, if any.
ZOPFLI=$(ZOPFLIDIR)/zopfli
# Location of zopflipng PNG optimiser, if any.
ZOPFLIPNG=$(ZOPFLIDIR)/zopflipng
# If enough energy, compress extra hard, though savings usually zero/small.
ZOPFLIPNGEXTFLAGS= -m
ifneq ($(wildcard $(PWRHIGHFLAG))$(NOPWR),)
# Multiplies effort by 4.
ZOPFLIPNGEXTFLAGS= -m -m
endif


# Make all non-hidden files visible.
# Hide all top-level 'dot' files (avoiding . and ..):
# the .??* glob requires at least two characters after the dot,
# so it cannot match '.' or '..'.
all::
	@chmod a+r *
	@chmod go-rx .??*

.PHONY: all

# Non-public working directory for files that can be recreated at whim.
WORKTMP=.work/tmp
# Create the working directory on demand.
# FIX: previously 'all::' listed $(WORKTMP) as a normal prerequisite but
# no rule built it, so on a fresh tree make failed with
# "No rule to make target '.work/tmp'" before the mkdir recipe could run.
$(WORKTMP):
	mkdir -p $@
# Order-only prerequisite: the directory must exist, but its mtime
# (which changes as scratch files come and go) must not force rebuilds.
all:: | $(WORKTMP)

# Site log file (older one is .1).
SITELOG=/var/log/apache2/other_vhosts_access.log
SITELOGPREV=$(SITELOG).1

# Directory for offline pages: not visible for Web users.
OFFLINEDIR=.offline

# Files that change with approximately the stated frequency/interval.
# These can be used to force updates of targets dependent on them.
# Because these files may be absent entirely use of $(wildcard X) may be useful.
# These may well be artifacts of other system processes.
# The content of these files is not important for these purposes.
#
# DAILY is a file touched approximately daily (though may be absent entirely).
DAILY=/var/log/syslog.1
# WEEKLY is touched approximately weekly (though may be absent entirely).
WEEKLY=$(SITELOGPREV)
# MONTHLY is touched usually at the start of the month (though may be absent).
#MONTHLY=data/16WWMonthlyHDD12.dat
MONTHLY=data/.flags/MONTHLY.flag
# YEARLY is touched at around the start of the year.
YEARLY=.work/copyrightYearLatest.txt
ifneq ($(wildcard $(PWRLOWFLAG)),)
# When power is LOW then turn off these periodic rebuild triggers
# by emptying the variables: $(wildcard ...) on them then yields nothing.
DAILY=
WEEKLY=
MONTHLY=
YEARLY=
endif

# All desktop content pages to build, in (logical, canonical) order.
# All the 'note-on-*' then structural entries are grouped at the end.
# Indentation within the list is cosmetic only: deeper-indented entries
# are follow-on/series pages grouped under the entry above them.
# NOTE: comments must NOT be interleaved inside this continued definition.
ORDEREDPAGES := \
    16WW-2025-high-res-temp-and-RH-dataset.html \
    16WW-heat-pump-sound-1.html \
    16WW-hot-noon-garden-birds-and-bees.html \
    16WW-merit-order.html \
    about-16WW.html \
    aerogel-on-the-north-face.html \
    AI-promo-video.html \
    aircon-on-windows-open.html \
    airport-landside-ambient.html \
    air-source-heat-pump.html \
    ASHP-case-study-Kingston-UK.html \
    bats-at-16WW.html \
        bats-at-16WW-1.html \
        bats-at-16WW-2.html \
        bats-at-16WW-3.html \
        bats-at-16WW-4.html \
        bats-at-16WW-5.html \
        bats-at-16WW-6.html \
        bats-at-16WW-7.html \
    bats-at-Hogsmill-bridge-dataset.html \
    battery-bank-replacement.html \
    battery-sounds-electric-music.html \
    Beddington-ERF-visit.html \
    bibliography.html \
    BRE-Tiny-House-Trove.html \
    BRE-Low-Carbon-Innovation.html \
    can-we-live-on-local-renewables.html \
    catalogue-of-aerodynamic-characteristics-of-airfoils.html \
    church-green.html \
    commercial-rent-UK-solar-incentives-2022.html \
    community-energy-for-Kingston.html \
    community-energy-for-Kingston-2.html \
    composting-vs-solar-power.html \
    coronacast-lockdown.html \
    coronalog.html \
    crows.html \
    diarycast-20200413.html \
        diarycast-20200422.html \
        diarycast-20200510.html \
        diarycast-20200513.html \
        diarycast-20200523.html \
        diarycast-20200712.html \
        diarycast-20200726.html \
        diarycast-20200925.html \
        diarycast-20201221.html \
        diarycast-20210114.html \
        diarycast-20210321.html \
        diarycast-20210614.html \
        diarycast-20210828.html \
        diarycast-20211114.html \
        diarycast-20211228.html \
        diarycast-20230103.html \
        diarycast-20240128.html \
        diarycast-20250509.html \
        diarycast-20251230.html \
    domestic-dynamic-demand-ideas.html \
    eddi-diverter-dataset.html \
    eddi-diverter-export-margin-analysis.html \
    efficient-dishwashing.html \
    EGC-Energy-Game-Changer-intro.html \
        EGC-WP1-D13-Technical-Research.html \
        EGC-WP1-D15-Customer-Research.html \
	EGC-WP1-D17-Financial-Research.html \
	EGC-WP5-Summary.html \
        EGC-Energy-Game-Changer-HaaS-OSHUG-talk.html \
    electricity-profiles.html \
    electricity-storage-whole-household.html \
        electricity-storage-whole-household-2018.html \
    electricity-usage-snapshot-202208.html \
    Energy-Data-Best-Practice-Discoverable-Searchable-Understandable-meeting.html \
    energy-saving-advice-for-householders.html \
    energy-series-dataset.html \
    energy-systems-diagrams.html \
    Enphase-AC-Battery-REVIEW.html \
    Enphase-AC-Battery-2.html \
    Enphase-AC-Battery-3-4.html \
    Enphase-AC-Battery-5.html \
    EOU-is-10.html \
    EV-home-charge-point-case-study.html \
    expanding-off-grid-PV-system.html \
    food-and-CO2.html \
    from-the-inbox.html \
        from-the-inbox-1.html \
        from-the-inbox-2.html \
	from-the-inbox-3.html \
	from-the-inbox-4.html \
    Gaming-Heating-For-Profit-and-Laughs.html \
    GB-electricity-grid-CO2-intensity-live-dataset.html \
    going-postal-ambient.html \
    green-halloween-pumpkin.html \
    green-living-at-home-with-Adam-Hart-Davis.html \
    grid-tie-generation-stats-SunnyBeam.html \
    ground-source-heat-pump.html \
    GSHP-case-study-France.html \
    Hanwell-Hootie-ambient.html \
    Heat-and-Health-10-10-Conference.html \
    heat-battery-early-notes-for-EcoHome-Lab.html \
    heat-battery-topup-dataset.html \
    heat-pump-16WW-control.html \
    heat-pump-16WW-load-profile.html \
    heat-pump-2-day-install.html  \
    heat-pump-barriers.html \
    Herne-Bay-ambient.html \
    Hey-Siri-Help-Flatten-The-Duck.html \
    holiday-low-carbon.html \
    home-heat-carol.html \
    house-grid-flows.html \
    impulse-heating-test.html \
    installing-solar-PV-for-electricity-at-home.html \
    insulation-barriers.html \
    IoT-Launchpad-research.html \
    IoT-Launchpad-deployment.html \
    IoT-Launchpad-outcomes.html \
    KEHS-2024-talk-Draughts-Insulation-Ventilation.html \
    KEHS-2024-talk-Fresh-home-solar-PV-install-in-Kingston.html \
    KEHS-2024-talk-The-truth-about-heat-pumps.html \
    KEHS-2024-talk-Why-Do-Solar.html \
    KEHS-2025-talk-Home-Batteries.html \
    KEHS-2025-talk-Home-Heat-Pump-Installation.html \
    KEHS-2025-talk-Keep-Cool.html \
    KEHS-2025-talk-Solar-One-Year-On.html \
    Kingston-Efficient-Homes-Show-2023.html \
        Kingston-Efficient-Homes-Show-2024.html \
        Kingston-Efficient-Homes-Show-2025.html \
        Kingston-Efficient-Homes-Show-2025-Bitesize.html \
    LED-homebrew-nightlight.html \
    LED-lighting.html \
    LiFePO4-battery-testing-with-solar-PV-off-grid-system.html \
    low-carbon-investing.html \
    low-power-laptop.html \
    low-power-software.html \
    low-voltage-drop-out-circuit-design.html \
    manage-the-heat-Maltese-style.html \
    mass-loft-insulation.html \
    measuring-appliance-consumption.html \
    MEng-Eco-Puzzlers.html \
        MEng-Eco-Puzzlers-1.html \
        MEng-Eco-Puzzlers-2.html \
        MEng-Eco-Puzzlers-3.html \
        MEng-Eco-Puzzlers-4.html \
        MEng-Eco-Puzzlers-5.html \
        MEng-Eco-Puzzlers-6.html \
    metacast-1.html \
    metacast-2.html \
    MHRV-mechanical-heat-recovery-ventilation.html \
    MHRV-Vent-Axia-Lo-Carbon-Tempra-P-REVIEW.html \
    microbrewery-ambient.html \
    milk-tanker-thermal-store.html \
    MODBUS-and-Raspberry-Pi.html \
    National-Housing-Federation-Annual-Conference-2017.html \
    Octopus-heat-pump-journey.html \
    Octopus-Innovation-Centre-visit.html \
    off-grid-stats-historical.html \
        off-grid-stats-historical-k8055.html \
        off-grid-stats-historical-200909.html \
    off-grid-stats-powermng.html \
    open-source-programmable-thermostatic-radiator-valve.html \
    OpenTRV-archive.html \
	OpenTRV-protocol-discussions-201411-1.html \
	OpenTRV-protocol-discussions-201411-2.html \
	OpenTRV-protocol-discussions-201412-3.html \
        OpenTRV-demo.html \
    OpenTRV-video-mashup-1.html \
    Optimising-from-Banking-to-Energy.html \
    Paris-Gare-du-Nord-afternoon-piano.html \
    PhD-research.html \
    PhD-research-plan-sketch-2024.html \
    PhD-research-zoned-heat-pump-CS1.html \
    PV-sounds.html \
    PV-domestic-hybrid-system-south-east-England.html \
    rad-sounds-listening-to-home-heating.html \
    rad-sounds-2-listening-to-home-heating.html \
    Radbot-Origin-Myth.html \
        Radbot-Origin-Myth-1.html \
        Radbot-Origin-Myth-2.html \
	Radbot-Origin-Myth-3.html \
	Radbot-Origin-Myth-4.html \
	Radbot-Origin-Myth-5.html \
	Radbot-Origin-Myth-6.html \
	Radbot-Origin-Myth-7.html \
	Radbot-Origin-Myth-8.html \
    Reading-Hydro-TTK-visit.html \
    reconnecting-the-16WW-heat-battery.html \
    Repair-Cafe-Kingston-report-to-TTK-AGM-2023.html \
        Repair-Cafe-Kingston-report-to-council-and-TTK-AGM-2024.html \
    river-ambient.html \
    River-Thames-Scheme-and-renewable-generation.html \
    RSS-efficiency.html \
    RSS-efficiency-sonification-1.html \
    RSS-efficiency-sonification-2.html \
    saving-electricity.html \
        saving-electricity-2008.html \
        saving-electricity-2009.html \
        saving-electricity-2010.html \
        saving-electricity-2011.html \
        saving-electricity-2012.html \
        saving-electricity-2013.html \
        saving-electricity-2014.html \
        saving-electricity-2015.html \
        saving-electricity-2016.html \
        saving-electricity-2017.html \
        saving-electricity-2018.html \
        saving-electricity-2019.html \
        saving-electricity-2020.html \
        saving-electricity-2021.html \
        saving-electricity-2022.html \
        saving-electricity-2023.html \
        saving-electricity-2024.html \
        saving-electricity-2025.html \
        saving-electricity-2026.html \
        saving-electricity-sidestory.html \
    science-week-solar-power-and-eco-warriors.html \
    signoff-2019-brief.html \
    six-minutes-of-Med-ness.html \
    Smart-Meters-2019-Conference.html \
    Smart-Systems-and-Heat-Phase-2-learnings.html \
    smart-radiator-valves-talk-20201112.html \
    solar-panels-in-the-garden.html \
    Solar-Power-Finance-Without-the-Jargon-REVIEW.html \
    solar-powered-schools.html \
    solar-PV-pilot-summer-2007.html \
    solar-PV-pilot-summer-2007-more.html \
    solar-PV-roof-renting.html \
    sonification.html \
    sound-of-campsite.html \
    soundwalk-20200912.html \
    SPAM-and-CO2.html \
    sparrow-16WW-ambient.html \
    statscast-202004.html \
        statscast-202005.html \
        statscast-202006.html \
        statscast-202009.html \
        statscast-202012.html \
        statscast-202205.html \
    superinsulating-our-living-room.html \
    Sustainable-Heat-Workshop.html \
    testing-a-house-for-air-leaks.html \
    thermal-imaging-survey-of-house.html \
    To-Zone-Or-Not-To-Zone-with-TRVs-for-Retrofit-Heat-Pumps.html \
    towards-a-LZC-business.html \
    towards-a-LZC-home.html \
    towards-a-LZC-office.html \
    travel-green.html \
    triple-glazing-3G.html \
    TTK-AGM-2022.html \
    VAWT-experiment3-scanner-box-and-real-PM.html \
    walk-solstice-soundscape.html \
    wants-and-offers.html \
    water-music-1.html \
    water-music-2.html \
    Weymouth-ambient.html \
    What-to-do-with-150-bad-NiMH-AA-cells.html \
    whitepaper-OpenTRV-TRV1.5-North-London-trial-winter-2016.html \
    Why-Do-Startups.html \
    wind-power-pilot-autumn-2007.html \
    wind-power-pilot-autumn-2007-MotorWind.html \
    wren-bold-ambient.html \
    UK-Home-Decarbonisation-seminar-presentation-20230809.html \
    UK-homes-needing-retrofit.html \
    note-on-16WW-mains-voltage-monitoring.html \
    note-on-2022-CPO-consultation.html \
    note-on-2025-SSES-first-phase-ESA-regulations.html \
    note-on-Air-Quality-Egg-REVIEW.html \
    note-on-All-Ride-12V-kettle-and-cup-immersion-heater-REVIEW.html \
    note-on-APPGIE-Taking-the-Carbon-out-of-Heat.html \
    note-on-backup-energy-efficiency.html \
    note-on-being-a-hardware-developer.html \
    note-on-best-time-to-fix-your-heating.html \
    note-on-Better-Futures-meeting-at-London-City-Hall.html \
    note-on-building-a-new-eco-home-in-England.html \
    note-on-carbon-cost-of-CDN.html \
    note-on-Cleanweb-meetup-201506.html \
    note-on-clip-on-power-meters-for-the-UK-REVIEW.html \
    note-on-combi-replacement-dilemma.html \
    note-on-condensation-management.html \
    note-on-Conrad-FHT80BTF-programmable-TRV.html \
    note-on-data-centre-heat-recovery.html \
    note-on-data-for-16WW-mains-water-inlet-temperature.html \
    note-on-data-for-16WW-mains-water-inlet-temperature-sandbox.html \
    note-on-data-for-16WW-manual-RH-relative-humidity-measurements.html \
    note-on-data.html \
    note-on-desktop-humidity-RH-and-temperature-meter-HTC-1.html \
    note-on-distributed-grid-support-from-microgeneration.html \
    note-on-door-replacement.html \
    note-on-dynamic-demand-value.html \
    note-on-E2-Energy-Solutions-Expo-2007.html \
    note-on-Ecobuild-2012.html \
    note-on-edie-Live-Utility-Week-Live-2017.html \
    note-on-Electra-C1845W-freestanding-slimline-dishwasher-REVIEW.html \
    note-on-Energy-Harvesting-Dissemination-Event.html \
    note-on-energy-saving-TV-replacement.html \
    note-on-Energy-Solutions-Expo-2011.html \
    note-on-EU-US-supergrid-interconnector.html \
    note-on-Futurebuild-2019.html \
        note-on-Futurebuild-2020.html \
        note-on-Futurebuild-2024.html \
    note-on-G83-lite.html \
    note-on-going-green-in-Newcastle.html \
    note-on-going-solar-in-the-northwest.html \
    note-on-Green-Light-Signal-and-Beyond.html \
    note-on-green-hair-ties-REVIEW.html \
    note-on-greening-Christmas.html \
    note-on-Homebuilding-and-Renovating-Show-2011.html \
    note-on-heated-insoles-REVIEW.html \
    note-on-i30-iTemp-terrier-programmable-TRV.html \
    note-on-iButton-temperature-monitoring-of-aerogel-drylined-bedroom.html \
    note-on-IKEA-SMAKLIG-built-in-induction-hob.html \
    note-on-IoT-comms-backhaul.html \
    note-on-IoT-data-sets-and-processing.html \
    note-on-IoT-leaf-enclosure-sizing.html \
    note-on-IoT-live-interaction.html \
    note-on-IoT-security.html \
    note-on-IoT-sensor-set.html \
    note-on-JProfiler-tuning-app-for-smaller-systems.html \
    note-on-LIME-energy-saving-plug-REVIEW.html \
    note-on-Local-Bytes-Power-Monitoring-Smart-Plug-REVIEW.html \
    note-on-MachMetrics-site-speed-testing.html \
    note-on-MEGA-City-electric-car.html \
    note-on-Method-Laundry-Detergent-REVIEW.html \
    note-on-net-metering-and-CO2-vs-money.html \
    note-on-NiMH-rechargeable-batteries.html \
    note-on-office-comfort-and-energy-efficiency.html \
    note-on-Pasta-Perfect-REVIEW.html \
    note-on-PHEX-Chelsea-2018.html \
    note-on-plug-in-power-meters-for-the-UK-REVIEW.html \
    note-on-UK-policy-modelling-tool.html \
    note-on-Project-CHARM-conference-20130227.html \
    note-on-Raspberry-Pi-setup.html \
        note-on-Raspberry-Pi-2-setup.html \
        note-on-Raspberry-Pi-3-setup.html \
    note-on-RE-solar-PV-vs-wind.html \
    note-on-Salford-Energy-House.html \
    note-on-SheevaPlug-setup.html \
    note-on-Siemens-KG34NA10GB-upright-fridge-freezer-REVIEW.html \
    note-on-SilverCrest-Portable-Induction-Hob-SIKP-2000-A1-REVIEW.html \
    note-on-smart-heating.html \
    note-on-solar-DHW-for-16WW.html \
        note-on-solar-DHW-for-16WW-UniQ-and-PV-diversion.html \
    note-on-solar-PV-for-diffuse-light.html \
    note-on-solar-tools.html \
    note-on-Spacetherm-aerogel-thermal-insulation.html \
    note-on-superinsulating-bedroom.html \
    note-on-survey-results.html \
    note-on-survey-UK-central-heating-on-off-dates.html \
    note-on-survey-UK-central-heating-on-off-dates-2.html \
    note-on-TEDDINET-Ctech-Symposium-2017.html \
    note-on-TEG-thermo-electric-generator.html \
    note-on-the-cost-of-washing.html \
    note-on-two-meetings-IPPR-and-CIBSE.html \
    note-on-ubitricity-EV-mobile-MPAN.html \
    note-on-UK-grid-CO2-intensity-buttons.html \
    note-on-UK-grid-CO2-intensity-variations.html \
        note-on-UK-grid-CO2-intensity-variations-by-year.html \
    note-on-UK-grid-CO2-intensity-vs-WIND-records-Sep-2012.html \
    note-on-USB-DC-power-meters-REVIEW.html \
    note-on-Vigor2862ac-setup.html \
    note-on-viz-tools.html \
    note-on-WIRED-Energy-2017.html \
    note-on-WV1-window-vac.html \
    note-on-Xsorb-interseasonal-heat-storage.html \
    note-on-Zanussi-ZDS2010-freestanding-slimline-dishwasher-REVIEW.html \
    note-on-Zanussi-ZWD14581W-freestanding-washer-dryer-REVIEW.html \
    note-on-Zanussi-ZWF01483W-freestanding-washing-machine-REVIEW.html \
    note-on-Zoom-H1n-field-recorder.html \
    note-on-site-technicals.html \
        note-on-site-technicals-1.html \
        note-on-site-technicals-2.html \
        note-on-site-technicals-3.html \
        note-on-site-technicals-4.html \
        note-on-site-technicals-5.html \
        note-on-site-technicals-6.html \
        note-on-site-technicals-7.html \
        note-on-site-technicals-8.html \
        note-on-site-technicals-9.html \
        note-on-site-technicals-10.html \
        note-on-site-technicals-11.html \
        note-on-site-technicals-12.html \
        note-on-site-technicals-13.html \
        note-on-site-technicals-14.html \
        note-on-site-technicals-15.html \
        note-on-site-technicals-16.html \
        note-on-site-technicals-17.html \
        note-on-site-technicals-18.html \
        note-on-site-technicals-19.html \
        note-on-site-technicals-20.html \
        note-on-site-technicals-21.html \
        note-on-site-technicals-22.html \
        note-on-site-technicals-23.html \
        note-on-site-technicals-24.html \
        note-on-site-technicals-25.html \
        note-on-site-technicals-26.html \
        note-on-site-technicals-27.html \
        note-on-site-technicals-28.html \
        note-on-site-technicals-29.html \
        note-on-site-technicals-30.html \
        note-on-site-technicals-31.html \
        note-on-site-technicals-32.html \
        note-on-site-technicals-33.html \
        note-on-site-technicals-34.html \
        note-on-site-technicals-35.html \
        note-on-site-technicals-36.html \
        note-on-site-technicals-37.html \
        note-on-site-technicals-38.html \
        note-on-site-technicals-39.html \
        note-on-site-technicals-40.html \
        note-on-site-technicals-41.html \
        note-on-site-technicals-42.html \
        note-on-site-technicals-43.html \
        note-on-site-technicals-44.html \
        note-on-site-technicals-45.html \
        note-on-site-technicals-46.html \
        note-on-site-technicals-47.html \
        note-on-site-technicals-48.html \
        note-on-site-technicals-49.html \
        note-on-site-technicals-50.html \
        note-on-site-technicals-51.html \
        note-on-site-technicals-52.html \
        note-on-site-technicals-53.html \
        note-on-site-technicals-54.html \
        note-on-site-technicals-55.html \
        note-on-site-technicals-56.html \
        note-on-site-technicals-57.html \
        note-on-site-technicals-58.html \
        note-on-site-technicals-59.html \
        note-on-site-technicals-60.html \
        note-on-site-technicals-61.html \
        note-on-site-technicals-62.html \
        note-on-site-technicals-63.html \
        note-on-site-technicals-64.html \
        note-on-site-technicals-65.html \
        note-on-site-technicals-66.html \
        note-on-site-technicals-67.html \
        note-on-site-technicals-68.html \
        note-on-site-technicals-69.html \
        note-on-site-technicals-70.html \
        note-on-site-technicals-71.html \
        note-on-site-technicals-72.html \
        note-on-site-technicals-73.html \
        note-on-site-technicals-74.html \
        note-on-site-technicals-75.html \
        note-on-site-technicals-76.html \
        note-on-site-technicals-77.html \
        note-on-site-technicals-78.html \
        note-on-site-technicals-79.html \
        note-on-site-technicals-80.html \
        note-on-site-technicals-81.html \
        note-on-site-technicals-82.html \
        note-on-site-technicals-83.html \
        note-on-site-technicals-84.html \
        note-on-site-technicals-85.html \
        note-on-site-technicals-86.html \
        note-on-site-technicals-87.html \
        note-on-site-technicals-88.html \
        note-on-site-technicals-89.html \
        note-on-site-technicals-90.html \
        note-on-site-technicals-91.html \
        note-on-site-technicals-92.html \
        note-on-site-technicals-93.html \
        note-on-site-technicals-94.html \
        note-on-site-technicals-95.html \
        note-on-site-technicals-96.html \
        note-on-site-technicals-97.html \
        note-on-site-technicals-98.html \
        note-on-site-technicals-99.html \
        note-on-site-technicals-100.html \
        note-on-site-technicals-101.html \
        note-on-site-technicals-102.html \
        note-on-site-technicals-103.html \
        note-on-site-technicals-104.html \
        note-on-site-technicals-105.html \
        note-on-site-technicals-106.html \
        note-on-site-technicals-107.html \
        note-on-site-technicals-108.html \
    note-on-other-sites.html \
    glossary.html

# Working page list; may be reordered below (see the MAKELEVEL guard),
# so use ORDEREDPAGES where canonical order matters.
PAGES := $(ORDEREDPAGES)

# Source files of normal pages.
# (Each page's source is the same name with a leading dot.)
PAGESSRC=$(PAGES:%=.%)

ifeq (0,$(MAKELEVEL))
# Skip the shuffle under 'make -n' (dry run), when the $(shell ...)
# below would still fork processes even though nothing is to be built.
# FIX: the previous guard compared $(findstring n,...) against 'q';
# findstring here can only return 'n' or empty, never 'q', so the
# condition was always true and the dry-run exclusion never applied.
# The '-' prefix on the firstword protects against MAKEFLAGS starting
# with a long option or variable assignment rather than single letters.
ifeq (,$(findstring n,$(firstword -$(MAKEFLAGS))))
# Optional optimisation to improve parallelism in concurrent large rebuilds.
# Randomise page build order so that concurrent makes fight over locks less.
# Do this only when invoked at top level, eg as a "make -k all" from cron,
# to save a little CPU time on recursive queries, etc.
PAGES := $(shell echo $(PAGES) | xargs -n1 | sort -R | xargs)
endif
endif


# Force some Dataset pages to refresh periodically to update dateModified.
# Modification date precision should be no finer than YYYY-MM with weekly touch.
AUTOUPDATEDATASETPAGES= \
    eddi-diverter-dataset.html \
    energy-series-dataset.html \
    Enphase-AC-Battery-REVIEW.html \
    GB-electricity-grid-CO2-intensity-live-dataset.html \
    grid-tie-generation-stats-SunnyBeam.html \
    heat-battery-topup-dataset.html \
    note-on-data.html \
    off-grid-stats-powermng.html
# Only do periodic updates when some excess energy available.
# ($(wildcard ...) makes an absent trigger file a no-op rather than
# an error about a missing prerequisite.)
ifneq ($(wildcard $(PWRHIGHFLAG))$(NOPWR),)
$(AUTOUPDATEDATASETPAGES): $(wildcard $(WEEKLY))
$(AUTOUPDATEDATASETPAGES:%=m/%): $(wildcard $(MONTHLY))
$(AUTOUPDATEDATASETPAGES:%=$(OFFLINEDIR)/%): $(wildcard $(MONTHLY))
endif

# Core pages which have auto-updating content in them, eg stats driven.
# Not to include any OTHERPAGES; they are handled separately.
AUTOUPDATINGPAGES=note-on-site-technicals.html \
    $(AUTOUPDATEDATASETPAGES)

# Updates when data is added to.
# (The data directory's mtime acts as the rebuild trigger.)
RSS-efficiency.html: img/research/RSS-efficiency/data

# Pages which cannot render under AMP (at least for now).
# Normal vanilla mobile pages should be used in their place.
# This list is assumed to be only very slowly changing.
NONAMPPAGES := \
    OpenTRV-demo.html \
    Radbot-Origin-Myth-1.html \
    Radbot-Origin-Myth-2.html \
    Radbot-Origin-Myth-3.html \
    Radbot-Origin-Myth-4.html \
    Radbot-Origin-Myth-5.html \
    Radbot-Origin-Myth-6.html \
    Radbot-Origin-Myth-7.html \
    Radbot-Origin-Myth-8.html \
    can-we-live-on-local-renewables.html

# Clean up any non-compliant AMP page residue.
# Removes each listed page under amp/ plus its pre-compressed variants
# (suffixed htmlgz/htmlbr per the dot-less COMPSUF* convention above).
pages:: cleanNonAMPPages
.PHONY: cleanNonAMPPages
cleanNonAMPPages:
	@W="$(wildcard $(NONAMPPAGES:%=amp/%) $(NONAMPPAGES:%=amp/%$(COMPSUFGZIP)) $(NONAMPPAGES:%=amp/%$(COMPSUFBROTLI)))"; if [ "" != "$$W" ]; then echo "Cleaning up non-AMP-compliant pages: $$W."; rm -f $$W; fi

# Mobile/tablet pages, under m/, that are vanilla HTML5.
MPAGES=$(PAGES:%.html=m/%.html)
# Offline pages, under .offline/, that are vanilla HTML5.
FPAGES=$(PAGES:%.html=$(OFFLINEDIR)/%.html)
# Mobile/AMP pages, under amp/, that will actually work in AMP!
# (NONAMPPAGES are filtered out; cleanNonAMPPages removes any residue.)
APAGES=$(patsubst %,amp/%,$(filter-out $(NONAMPPAGES),$(PAGES)))
#debuginfo::
#	@echo APAGES=$(APAGES)

# Other supporting pages.
# Possibly not to be included in links or sitemaps.
OTHERPAGES=index.html sitemap.html about-us.html \
    offline.html content-calendar.html \
    404.html 406.html 429.html 503.html \
    SECTION_dataset.html SECTION_frugal.html SECTION_microgen.html \
    SECTION_podcast.html SECTION_research.html SECTION_review.html \
    blank.html test-page.html
OTHERMPAGES=$(OTHERPAGES:%.html=m/%.html)
OTHERFPAGES=$(OTHERPAGES:%.html=$(OFFLINEDIR)/%.html)
#OTHERAPAGES=$(OTHERPAGES:%.html=amp/%.html)
# AMP variants of supporting pages are currently not generated.
OTHERAPAGES=
opages: $(OTHERPAGES) $(OTHERMPAGES) $(OTHERFPAGES)
# Only require AMP supporting pages while AMP is not deprecated.
ifneq ($(AMPDEPRECATED),true)
opages: $(OTHERAPAGES)
endif
Opages: opages otherscpages
all:: opages Opages


# Plain text sitemap, one full URL per line.
# See https://www.sitemaps.org/index.html for detail and use in robots.txt.
# Needs a full canonical URL prefix.
# Main site has some useful extra spiderable URLs included in urllist
# (including regularly-updated stats/data pages)
# and separate data directories in a separate list.
# The implicit (redirected) index.html home page is omitted.
# DHD20210128: do not include non-canonical _gridCarbonIntensityGB.xhtml
GRIDINTENSITYHTMLPAGE=_gridCarbonIntensityGB.html
URLLISTEXT= \
    $(GRIDINTENSITYHTMLPAGE)
#
# Not included in URLLISTEXT are pages primarily of interest to DHD!
#    _live-grid-tie-stats.html _off-grid-stats.html
#
# These prefixes should match the preferred scheme for each site view.
URLLISTPREFIX=https://www.earth.org.uk/
MURLLISTPREFIX=https://m.earth.org.uk/
AURLLISTPREFIX=https://amp.earth.org.uk/
# Fail-fast lock, then atomic write via .tmp + mv so readers never see
# a truncated list.
# NOTE(review): if a middle command fails, $@.lock is left behind until
# its -l timeout expires — confirm that is acceptable.
urllist.txt: makefile
	@echo "Building $@"
	@$(LOCKFILENRSTD) $@.lock
	@/bin/rm -f $@.tmp
	@for f in $(PAGES:index.html=) $(URLLISTEXT);do echo $(URLLISTPREFIX)$$f;done | \
	    (export LC_ALL=C; sort) > $@.tmp
	@-chmod -f u+w $@
	@chmod -f 644 $@.tmp
	@/bin/mv $@.tmp $@
	@chmod a+r,a-wx $@
	@/bin/rm -f $@.lock $@.tmp
all:: urllist.txt
# Data URL list is only refreshed when power is not LOW and the grid is
# not red (to avoid inducing avoidable work/traffic at bad times).
ifeq ($(wildcard $(PWRLOWFLAG)),)
ifeq ($(wildcard $(GRIDRED1DFLAG)),)
feeds: rss/dataurllist.txt
rss/dataurllist.txt: $(wildcard $(WEEKLY)) makefile
	@echo "Building $@"
	@$(LOCKFILENRSTD) $@.lock
	@/bin/rm -f $@.tmp $@.presort
	@find data -not \( -name '.?*' -prune \) -a  -type d -print | \
	    awk '{print "'$(URLLISTPREFIX)'"$$1"/"}' >>$@.presort
	@(export LC_ALL=C; sort) < $@.presort > $@.tmp
	@-chmod -f u+w $@
	@chmod -f 644 $@.tmp
	@/bin/mv $@.tmp $@
	@chmod a+r,a-wx $@
	@/bin/rm -f $@.lock $@.tmp $@.presort
all:: rss/dataurllist.txt
endif
endif

# HTML page list (in ASCII-sorted order) to facilitate some other operations.
# This is the un-dotted version of the base filename.
# Can be done safely in one line so does not need a lock.
# The HTMLFILELISTBUILT flag is touched when the underlying file is up to date.
# The HTMLFILELIST file may only be updated when its content changes.
# ($$$$ in recipes expands to the shell PID, giving a unique temp name
# so concurrent makes do not clobber each other.)
HTMLFILELIST=$(WORKTMP)/htmllist.txt
HTMLFILELISTBUILT=$(HTMLFILELIST).built
ifneq ($(wildcard $(PWRHIGHFLAG))$(NOPWR),)
# If enough energy, force HTMLFILELIST build more eagerly.
$(HTMLFILELIST): ${HTMLFILELISTBUILT}
	touch $(HTMLFILELIST)
endif
${HTMLFILELISTBUILT}: makefile
	@echo "Checking $(HTMLFILELIST)..."
	@for f in $(PAGES);do echo $$f;done | (export LC_ALL=C; sort) > $@.$$$$.tmp; \
	    if cmp -s $@.$$$$.tmp $(HTMLFILELIST); then echo No change for $(HTMLFILELIST).; else mv -f $@.$$$$.tmp $(HTMLFILELIST); echo Rebuilt $(HTMLFILELIST).; fi; \
	    rm -f $@.$$$$.tmp
	@touch $(HTMLFILELISTBUILT)
all:: $(HTMLFILELIST) ${HTMLFILELISTBUILT}

# Non-AMP page list (in ASCII-sorted order) to facilitate some other operations.
# This is the un-dotted version of the base filename.
# Can be done safely in one line so does not need a lock.
# (Atomic: write to unique temp file then mv into place.)
NONAMPFILELIST=$(WORKTMP)/nonamplist.txt
${NONAMPFILELIST}: makefile
	@echo "Building $@"
	@for f in $(NONAMPPAGES);do echo $$f;done | \
	    (export LC_ALL=C; sort) > $@.$$$$.tmp && mv -f $@.$$$$.tmp $@
all:: ${NONAMPFILELIST}
# Get this rebuilt before anything that depends on it, if possible.
pages:: ${NONAMPFILELIST}


# Ensure that auto-generated image/video directories exist.
# FIX: use mkdir -p so the rule does not fail if a parent directory
# (eg img/a on a fresh tree) is missing, nor if a concurrent make
# instance creates the directory first.
img/a/b img/a/h img/a/v:
	mkdir -p $@
# Order-only prerequisites: presence is required but the directories'
# changing mtimes must not force rebuilds.
all:: | img/a/b img/a/h img/a/v

# Keep scripts up to date, and enforce important dependencies.
# Script dependencies for generic HTML (desktop, mobile) *generation*.
# These dependencies may be indirect, eg via wrap_art.
# These are scripts that ALL page versions may depend on.
# However, these may be treated as order dependencies,
# ie must be up to date but an updated timestamp will not force a rebuild.
# Touch HTMLREGENFLAG to force a rebuild of all HTML output.
HTMLGENDEPS= \
    .work/script/dateCreated \
    .work/script/datePublished \
    .work/script/pgdescription \
    .work/script/pgintro \
    .work/script/spatialCoverage \
    .work/script/wrap_art \
    script/altTextFromFilename \
    script/get_article_rank \
    script/get_article_title \
    script/get_FEATUREM_declared \
    script/get_hero_twplayer_declared \
    script/get_hero_video_declared \
    script/get_img_XxY \
    script/get_tags \
    script/linkImageCredit \
    script/renderTemporalCoverage \
    script/safecssmin
# Set of scripts which don't change the logical/semantic page content.
# These optimise or change only decorative, supplemental or (eg) ad content.
# Changes in these need not force an immediate page rebuild,
# ie the next page rebuild that happens anyway will be fine.
# The scripts themselves can be kept up to date on an 'all' run.
HTMLGENDEPSASIDE=\
    .work/script/pickPageToPromote \
    .work/script/safecssmin-in-place \
    script/build_alternate_format_compact_image \
    script/energyStatsInsertHTML5 \
    script/get_hero_img_declared \
    script/get_hero_img_inline \
    script/html-simplify \
    script/image_list_body \
    script/image_list_hero \
    script/lossless_JPEG_compress \
    script/productHTML \
    script/rawHTML-readability \
    script/get_minreadability \
    script/html5-minify
# Aggregate target bringing both script sets up to date.
.PHONY: allhtmlgendeps
allhtmlgendeps: $(HTMLGENDEPS) $(HTMLGENDEPSASIDE)
all:: allhtmlgendeps
#pages:: $(HTMLGENDEPS)
pages:: allhtmlgendeps
all:: pages


# Create dependencies on the audio scripts for appropriate pages.
# Builds atomically without the need for locks.
# DHD20210709: AMP pages excluded.
# DHD20230828: offline pages added.
AUDIOGENDEPS= \
    script/audio_inline \
    script/audioBuildLossy.sh
pages:: $(AUDIOGENDEPS)
all:: $(AUDIOGENDEPS)
# Generated makefile fragment mapping each page whose source embeds
# an <AUDIO ...> element (desktop, lite and offline versions)
# onto AUDIOGENDEPS; -include'd below so a missing fragment is not fatal.
AUDIODEPSMK=$(WORKTMP)/AUDIOdeps.mk
# Only do rebuild if HIGH.
ifneq ($(wildcard $(PWRHIGHFLAG))$(NOPWR),)
-include $(AUDIODEPSMK)
pages:: $(AUDIODEPSMK)
# Atomic update: write to a PID-unique ($$$$) temp file then mv into place.
# chmod og-rwx keeps the generated fragment private.
${AUDIODEPSMK}: ${HTMLFILELISTBUILT} $(wildcard $(WEEKLY))
	@echo "Building $@"
	@T=$@.$$$$.tmp; \
	    egrep -l '^ *<AUDIO ' /dev/null ${PAGESSRC} ${OTHERPAGES:%=.%} | \
		awk '{p=substr($$1,2); print p, "m/"p, "${OFFLINEDIR}/"p, ": $${AUDIOGENDEPS}"}' > $$T; \
	    chmod -f u+w $@; \
	    mv -f $$T $@; \
	    chmod og-rwx $@
endif

# Create dependencies on the video scripts for appropriate pages.
# Builds atomically without the need for locks.
# DHD20210709: AMP pages excluded.
# DHD20230828: offline pages added.
VIDEOGENDEPS= \
    script/video_inline \
    script/compress_video_mp4 script/shrink_video_mp4 \
    script/extract_video_poster_image \
    script/audioBuildLossy.sh
pages:: $(VIDEOGENDEPS)
all:: $(VIDEOGENDEPS)
# Generated makefile fragment mapping each page whose source embeds
# a <VIDEO ...> element onto VIDEOGENDEPS; mirrors the AUDIO rule above.
VIDEODEPSMK=$(WORKTMP)/VIDEOdeps.mk
# Only do rebuild if HIGH.
ifneq ($(wildcard $(PWRHIGHFLAG))$(NOPWR),)
-include $(VIDEODEPSMK)
pages:: $(VIDEODEPSMK)
${VIDEODEPSMK}: ${HTMLFILELISTBUILT} $(wildcard $(WEEKLY))
	@echo "Building $@"
	@T=$@.$$$$.tmp; \
	    egrep -l '^ *<VIDEO ' /dev/null ${PAGESSRC} ${OTHERPAGES:%=.%} | \
		awk '{p=substr($$1,2); print p, "m/"p, "${OFFLINEDIR}/"p, ": $${VIDEOGENDEPS}"}' > $$T; \
	    chmod -f u+w $@; \
	    mv -f $$T $@; \
	    chmod og-rwx $@
endif


# INSERTFROMDB event / product / etc dependencies.
# Dependencies of form db.event/KingstonGreenDrinks
# from db.TYPE/XXX.attr.txt to intermediate .work/inc/db.TYPE.XXX.html.inc
# All of these do atomic updates and so do not need locks.
# Render one DB record (attributes file) to an HTML include fragment.
# Atomic: writes to a PID-unique ($$$$) temp file then mv's into place.
.work/inc/db.event.%.html.inc: db.event/%.attr.txt
	@echo Building $@ from $^
	@T=$$$$; sh script/productHTML.sh $(@:.work/inc/db.event.%.html.inc=db.event/%.attr.txt) > $@.$$T && mv $@.$$T $@; rm -f $@.$$T
.work/inc/db.product.%.html.inc: db.product/%.attr.txt
	@echo Building $@ from $^
	@T=$$$$; sh script/productHTML.sh $(@:.work/inc/db.product.%.html.inc=db.product/%.attr.txt) > $@.$$T && mv $@.$$T $@; rm -f $@.$$T
#
# Create dependencies on the DB (Event, etc) for appropriate pages.
# Builds atomically without the need for locks.
# Since script does not logically change content, only made dep when HIGH.
ifneq ($(wildcard $(PWRHIGHFLAG))$(NOPWR),)
INSERTFROMDBGENDEPS=script/productHTML.sh
else
INSERTFROMDBGENDEPS=
endif
pages:: $(INSERTFROMDBGENDEPS)
all:: $(INSERTFROMDBGENDEPS)
INSERTFROMDBDEPSMK=$(WORKTMP)/INSERTFROMDBdeps.mk
-include $(INSERTFROMDBDEPSMK)
pages:: $(INSERTFROMDBDEPSMK)
# Source properties file in DB is named in the form:
#     db.event/KingstonGreenDrinks.attr.txt
# Expanded (dependency) HTML output of DB is in file named in the form:
#     .work/inc/db.event.KingstonGreenDrinks.html.inc
# Update dependencies when any of the relevant database dirs is touched.
# Only force periodic/pessimistic rebuild if HIGH.
ifneq ($(wildcard $(PWRHIGHFLAG))$(NOPWR),)
${INSERTFROMDBDEPSMK}: ${HTMLFILELISTBUILT} $(wildcard $(WEEKLY))
endif
# Scans page sources for '<!--INSERTFROMDB ' markers and, for each use,
# emits page->include and include->attr-file dependency lines,
# plus a self-dependency of this fragment on the page source.
${INSERTFROMDBDEPSMK}: db.event db.product
	@echo "Building $@"
	@T=$@.$$$$.tmp; \
	    egrep '^ *<!--INSERTFROMDB ' /dev/null ${PAGESSRC} ${OTHERPAGES:%=.%} | \
		awk -F: '{p=substr($$1,2); split($$2, a, "[ .-]"); inc=".work/inc/db."a[4]"."a[5]".html.inc"; attr="db."a[4]"/"a[5]".attr.txt"; print p, "m/"p, ": " inc; print inc " : " attr " $${INSERTFROMDBGENDEPS}"; print "${INSERTFROMDBDEPSMK} : ."p; }' > $$T; \
	    chmod -f u+w $@; \
	    mv -f $$T $@; \
	    chmod og-rwx $@
# Keep the embedded dataset description modification date current.
SECTION_review.html m/SECTION_review.html: db.event db.product


# Statically-compressed pages (main and mobile).
# These are compressed more than is easy to do on the fly with mod_deflate
# and should be served statically (using minimal CPU) with sendfile().
# Note: the compression suffix is appended with no dot,
# eg foo.html -> foo.htmlgz / foo.htmlbr.
SCWPAGES=$(PAGES:%=%$(COMPSUFGZIP))   $(PAGES:%=%$(COMPSUFBROTLI))
SCMPAGES=$(MPAGES:%=%$(COMPSUFGZIP))  $(MPAGES:%=%$(COMPSUFBROTLI))
SCAPAGES=$(APAGES:%=%$(COMPSUFGZIP))  $(APAGES:%=%$(COMPSUFBROTLI))
# AMP variants are only included while AMP is not deprecated.
ifneq ($(AMPDEPRECATED),true)
SCPAGES=$(SCWPAGES) $(SCMPAGES) $(SCAPAGES)
else
SCPAGES=$(SCWPAGES) $(SCMPAGES)
endif
ifneq ($(AMPDEPRECATED),true)
OTHERSCPAGES=\
    $(OTHERPAGES:%=%$(COMPSUFGZIP)) $(OTHERMPAGES:%=%$(COMPSUFGZIP)) $(OTHERAPAGES:%=%$(COMPSUFGZIP)) \
    $(OTHERPAGES:%=%$(COMPSUFBROTLI)) $(OTHERMPAGES:%=%$(COMPSUFBROTLI)) $(OTHERAPAGES:%=%$(COMPSUFBROTLI))
else
OTHERSCPAGES=\
    $(OTHERPAGES:%=%$(COMPSUFGZIP)) $(OTHERMPAGES:%=%$(COMPSUFGZIP)) \
    $(OTHERPAGES:%=%$(COMPSUFBROTLI)) $(OTHERMPAGES:%=%$(COMPSUFBROTLI))
endif
scpages: $(SCPAGES)
otherscpages: $(OTHERSCPAGES)
all:: $(SCPAGES) $(OTHERSCPAGES)

# Generated HTML pages should not be capriciously removed, causing 404s!
.PRECIOUS: $(PAGES) $(MPAGES) $(FPAGES) $(APAGES) $(SCPAGES)
# Rebuild all automatically-wrapped HTML pages...
all:: pages pages-ping

# Marker(s) for all HTML being valid.
PAGESOVALID=$(WORKTMP)/pages.o.valid
PAGESWVALID=$(WORKTMP)/pages.w.valid
PAGESMVALID=$(WORKTMP)/pages.m.valid
PAGESFVALID=$(WORKTMP)/pages.f.valid
# For AMP the pages themselves act as the validity markers.
PAGESAVALID=$(APAGES)
ifneq ($(AMPDEPRECATED),true)
PAGESVALID=$(PAGESOVALID) $(PAGESWVALID) $(PAGESMVALID) $(PAGESFVALID) $(PAGESAVALID)
else
PAGESVALID=$(PAGESOVALID) $(PAGESWVALID) $(PAGESMVALID) $(PAGESFVALID)
endif
# On-line pages valid.
PAGESVALIDONLINE=$(PAGESOVALID) $(PAGESWVALID) $(PAGESMVALID)
# Dummy targets to build enough of the pages to validate most things.
# All are phony group names, not files.
# Fix: pgaval (defined below) was missing from .PHONY, so a stray file
# named 'pgaval' could silently mask the target.
.PHONY: pgval pgoval pgwval pgmval pgfval pgaval
pgval: $(PAGESVALID)
ifneq ($(AMPDEPRECATED),true)
pgoval: amp/index.html
endif
pgoval: $(PAGESOVALID)
pgwval: $(PAGESWVALID)
pgmval: $(PAGESMVALID)
pgfval: $(PAGESFVALID)
# AMP validation: PAGESAVALID is the APAGES list itself.
pgaval: $(PAGESAVALID)
#
# Make (and validate) all (online) pages and derived objects such as sitemaps.
# Excludes archive pages which will be updated periodically as needed.
# Do not ping anything.
# Try to make all the pre-/super- compressed pages before sitemaps/feeds.
.PHONY: pages
pages:: $(PAGES) $(MPAGES) \
    $(OTHERPAGES) $(OTHERMPAGES) \
    $(SCPAGES) $(OTHERSCPAGES) \
    urllist.txt sitemap.xml \
    sitemap.atom
# AMP pages are only built while AMP is not deprecated.
ifneq ($(AMPDEPRECATED),true)
pages:: $(APAGES) $(OTHERAPAGES)
endif
# Make all core pages and then ping delta to feeds / search engines.
# Build lightweight sitemap.atom ready for polling by search engines.
# Doesn't force OTHERPAGES to be built first.
# Doesn't force mobile pages to be built if power is low.
.PHONY: pages-ping
pages-ping: $(SCWPAGES) \
    $(WORKTMP)/IndexNow.ping \
    sitemap.atom
# DHD20240111: ERROR 404: Sitemaps ping is deprecated. See https://developers.google.com/search/blog/2023/06/sitemaps-lastmod-ping.
# $(WORKTMP)/.sitemap.atom.ping
ifeq ($(wildcard $(PWRLOWFLAG)),)
# Require all compressed pages built before ping if power not low.
pages-ping: $(SCPAGES)
endif
# Per-variant page-group targets; all are phony command names.
# Fix: fpages and Fpages (defined below) were missing from .PHONY,
# so stray files of those names could silently mask the targets.
.PHONY: wpages Wpages mpages Mpages fpages Fpages apages Apages
# Make core (www.) pages only: no validation.
wpages: $(PAGES)
# Make pre-compressed core (www.) pages only: no validation.
Wpages: $(PAGES) $(PAGES:%=%$(COMPSUFGZIP)) $(PAGES:%=%$(COMPSUFBROTLI))
# Make mobile (m.) pages only for speed: no validation.
mpages: $(MPAGES)
# Make pre-compressed mobile (m.) pages only: no validation.
Mpages: $(MPAGES) $(MPAGES:%=%$(COMPSUFGZIP)) $(MPAGES:%=%$(COMPSUFBROTLI))
# Make offline pages only for speed: no validation.
fpages: $(FPAGES)
# NO pre-compressed offline pages: deliberately an empty no-op target.
Fpages:
# Make AMP (amp.) core pages only for speed: no validation.
apages: $(APAGES)
# Make pre-compressed AMP (amp.) pages only: no validation.
Apages: $(APAGES) $(APAGES:%=%$(COMPSUFGZIP)) $(APAGES:%=%$(COMPSUFBROTLI))

# Touch HTMLREGENFLAG to force a rebuild of all HTML output pages.
#HTMLREGENFLAG=.work/htmlregen.flag
HTMLREGENFLAG=.work/newsflash.html
# $(wildcard ...) makes a missing flag file simply no dependency at all.
$(PAGES) $(MPAGES) $(FPAGES) $(APAGES): $(wildcard $(HTMLREGENFLAG)) 
$(OTHERPAGES) $(OTHERMPAGES) $(OTHERFPAGES) $(OTHERAPAGES): $(wildcard $(HTMLREGENFLAG)) 
# Equivalent flag to force autogen images and videos to be rebuilt too.
AUTOGENIMGREBUILDFLAG=img/a/.rebuild.flag
$(PAGES) $(MPAGES) $(FPAGES) $(APAGES): $(wildcard $(AUTOGENIMGREBUILDFLAG)) 
$(OTHERPAGES) $(OTHERMPAGES) $(OTHERFPAGES) $(OTHERAPAGES): $(wildcard $(AUTOGENIMGREBUILDFLAG)) 

# VHIGH (and FULL) power levels force rebuild against strongest dependencies.
ifneq ($(wildcard $(PWRVHIGHFLAG))$(NOPWR),)
# Build key pages w/ stricter criteria if excess power available.
$(OTHERPAGES) $(OTHERMPAGES) $(OTHERFPAGES) $(OTHERAPAGES): $(HTMLGENDEPS)
# Build all main page versions w/ stricter criteria if excess power available.
$(PAGES) $(MPAGES) $(FPAGES) $(APAGES): $(HTMLGENDEPS)
## All page dependencies on build scripts for particular page types...
#$(PAGES) $(OTHERPAGES): script/rawHTML-readability
#$(PAGES) $(OTHERPAGES): script/get_minreadability
#$(PAGES) $(OTHERPAGES): script/html5-minify
#$(MPAGES) $(OTHERMPAGES): script/html5-minify
#$(APAGES) $(OTHERAPAGES): script/ampval
endif

# HIGH (and VHIGH/FULL) power levels force rebuild of key pages vs all deps.
# NOTE(review): the comment above says HIGH but the condition below tests
# PWRVHIGHFLAG (same as the VHIGH block above) -- confirm whether
# PWRHIGHFLAG was intended here, as used for $(PAGES) further below.
ifneq ($(wildcard $(PWRVHIGHFLAG))$(NOPWR),)
# Build supporting pages w/ stricter criteria if much excess power available.
$(OTHERPAGES): $(HTMLGENDEPS)
$(OTHERMPAGES): $(HTMLGENDEPS)
# Build lite main pages w/ stricter criteria if some excess available.
$(MPAGES): $(HTMLGENDEPS)
endif
ifneq ($(wildcard $(PWRHIGHFLAG))$(NOPWR),)
# Build main canonical pages w/ stricter criteria if some excess available.
$(PAGES): $(HTMLGENDEPS)
endif

# Unless LOW (or VLOW) force some rebuild of a handful of important pages.
ifeq ($(wildcard $(PWRLOWFLAG)),)
$(OTHERPAGES): $(HTMLGENDEPS)
endif


# Compute this year's cumulative electricity CO2 emissions to date.
THISYEARELECTCO2=out/yearly/16WW-elect-kgCO2-$(UTCYEAR).txt
# Should be atomic / concurrency safe.
# NOTE: the recipe relies on the script itself to create/update the
# target file (no redirection to $@ here).
$(THISYEARELECTCO2): $(wildcard $(MONTHLY)) \
	script/analytic/16WW-elect-CO2-Enphase-FUELINST.sh
	@sh script/analytic/16WW-elect-CO2-Enphase-FUELINST.sh
ifneq ($(wildcard $(PWRHIGHFLAG))$(NOPWR),)
# Force weekly re-calculation checks when HIGH or better.
$(THISYEARELECTCO2): $(wildcard $(WEEKLY))
endif


# Popular articles.
# Rebuild when Apache log rolls.
# Wildcard the actual log file to allow make to run on non-server machine,
# eg to allow off-line test builds on a laptop.
all:: .work/script/poparts .work/script/undeclaredSpiderIPs
POPARTS=$(WORKTMP)/populararts.txt
# Fail-fast lockfile guards the whole rebuild; tmp-then-mv is atomic.
${POPARTS}: .work/script/poparts \
		.work/script/undeclaredSpiderIPs \
		$(wildcard $(DAILY)) \
		$(wildcard $(SITELOGPREV))
	@echo "Building $@"
	@$(LOCKFILENRSTD) $@.lock
	@/bin/rm -f $@.tmp
	@.work/script/poparts >$@.tmp
	@-chmod -f u+w $@
	@chmod -f 644 $@.tmp
	@/bin/mv $@.tmp $@
	@chmod a+r,a-wx $@
	@/bin/rm -f $@.lock
all:: ${POPARTS}
# Find pages that have not been visited at all.
# comm -23 lists pages in HTMLFILELIST absent from the popular list;
# LC_ALL=C keeps the two sort orders consistent for comm.
UNVISITED=$(WORKTMP)/unvisited.txt
${UNVISITED}: ${POPARTS} ${HTMLFILELIST} | ${HTMLFILELISTBUILT}
	@echo "Building $@"
	@$(LOCKFILENRSTD) $@.lock
	@/bin/rm -f $@.tmp $@.pop
	@awk < ${POPARTS} '{print $$2}' | \
	    (export LC_ALL=C; sort) > $@.pop
	@export LC_ALL=C; comm -23 ${HTMLFILELIST} $@.pop | \
	    egrep -v 'index.html' > $@.tmp || echo Empty $@
	@/bin/mv $@.tmp $@
	@/bin/rm -f $@.lock $@.tmp $@.pop

# Gather all articles in creation order, most-recently-created first.
# This omits pages missing a datePublished.
# This is not the same as the most recent update.
# Names are in the non-dotted base form.
# This logically depends on all the page sources,
# but in practice because the original publication date is rarely adjusted
# this depends on page list changes.
# (.$$f is the dotted source file for page f.)
$(WORKTMP)/newestArticles.txt: $(wildcard $(WEEKLY)) \
    .work/script/datePublished \
    ${HTMLFILELIST} | ${HTMLFILELISTBUILT}
	@echo "Building $@"
	@$(LOCKFILENRSTD) $@.lock
	@/bin/rm -f $@.tmp
	@for f in $(PAGES); do \
	    DP="`.work/script/datePublished < .$$f`"; \
	    if [ "" != "$$DP" ]; then echo "$$DP $$f"; fi; \
	    done | sort -r > $@.tmp
	@/bin/mv $@.tmp $@
	@/bin/rm -f $@.lock $@.tmp

# The content calendar pages (full/lite) depend on source content.
# The desktop and mobile content is the same.
content-calendar.html m/content-calendar.html amp/content-calendar.html: \
    .work/inc/.content-calendar.html.inc
# NOTE: although the content calendar nominally depends on all source content
# (at least for main pages) in practice it changes slowly and is not critical.
# To reduce the rebuild burden on every page update,
# this is made to rebuild ~weekly and on page additions (in ${HTMLFILELIST})
# rather than depending on the HTML source PAGES directly.
ifneq ($(wildcard $(PWRHIGHFLAG))$(NOPWR),)
.work/inc/.content-calendar.html.inc: $(wildcard $(WEEKLY))
endif
# Non-blocking flock: fail fast if another instance is already building;
# tmp-then-mv keeps the update atomic.
.work/inc/.content-calendar.html.inc: \
    .work/script/gen-content-calendar-inc \
    script/get_FEATUREM_declared \
    ${HTMLFILELIST} | ${HTMLFILELISTBUILT}
	@echo "Building $@"
	@($(FLOCKNB) 9 || exit 1; \
            .work/script/gen-content-calendar-inc > $@.tmp && \
            /bin/mv $@.tmp $@ ) 9>>$(@D)/.$(@F).flock


# List of source pages canonicalised all and major tags.
# Format is:
#     file.html (tag )*
# Note that each tag is preceded and followed by space.
# Thus the line ends with space, to make matching easier.
# Any pages without tags are omitted.
TAGSALLBYPAGE=$(WORKTMP)/sourceByCanonAllTags.txt
TAGSMAJORBYPAGE=$(WORKTMP)/sourceByCanonMajorTags.txt
# Force rebuild weekly.
$(TAGSALLBYPAGE) $(TAGSMAJORBYPAGE): $(wildcard $(WEEKLY))
# Force rebuild whenever pages' source changes, if spare energy.
ifneq ($(wildcard $(PWRHIGHFLAG))$(NOPWR),)
$(TAGSALLBYPAGE) $(TAGSMAJORBYPAGE): $(PAGESSRC) ${HTMLFILELISTBUILT}
endif
# Assumes that the HTMLFILELIST is already sorted in a sensible way.
# Updates whenever page list changes.
$(TAGSALLBYPAGE): script/get_tags ${HTMLFILELIST} | ${HTMLFILELISTBUILT}
	@echo "Building $@"
	@$(LOCKFILENRSTD) $@.lock
	@/bin/rm -f $@.tmp
	@for f in `cat $(HTMLFILELIST)`; do \
	    TAGS="`script/get_tags < .$$f`"; \
	    if [ "" = "$$TAGS" ]; then continue; fi; \
	    echo $$f $$TAGS" "; \
	    done > $@.tmp
	@/bin/mv $@.tmp $@
	@/bin/rm -f $@.lock $@.tmp
# As above but restricted to major tags (get_tags -major).
$(TAGSMAJORBYPAGE): script/get_tags ${HTMLFILELIST} | ${HTMLFILELISTBUILT}
	@echo "Building $@"
	@$(LOCKFILENRSTD) $@.lock
	@/bin/rm -f $@.tmp
	@for f in `cat $(HTMLFILELIST)`; do \
	    TAGS="`script/get_tags -major < .$$f`"; \
	    if [ "" = "$$TAGS" ]; then continue; fi; \
	    echo $$f $$TAGS" "; \
	    done > $@.tmp
	@/bin/mv $@.tmp $@
	@/bin/rm -f $@.lock $@.tmp
all:: $(TAGSALLBYPAGE) $(TAGSMAJORBYPAGE)


# Build the RSS file (and lite version) for podcast episodes.
RSSPODCAST=rss/podcast.rss
RSSPODCASTLITE=rss/podcast-lite.rss
ifeq ($(wildcard $(PWRLOWFLAG)),)
ifeq ($(wildcard $(GRIDRED1DFLAG)),)
# When power is LOW or grid intensity is red, decline to rebuild the
# podcast feed: the dependencies below only apply when neither flag
# file is present.
# This should let more clients get 304s, and Internet traffic reduce.
feeds: $(RSSPODCAST).built $(RSSPODCASTLITE).built
all:: $(RSSPODCAST).built $(RSSPODCASTLITE).built
all:: script/svn_last_changed
$(RSSPODCAST).built $(RSSPODCASTLITE).built: $(TAGSALLBYPAGE)
$(RSSPODCAST).built $(RSSPODCASTLITE).built: script/generatePodcastRSS.sh
$(RSSPODCAST).built $(RSSPODCASTLITE).built: \
    script/get_hero_img_declared \
    script/get_article_title .work/script/pgdescription \
    script/svn_last_changed
endif
endif
# Stamp-file rules: the script writes the real .rss file;
# the .built stamp just records when it was last regenerated.
$(RSSPODCAST).built:
	@echo "Building $(@:%.built=%)"
	@sh script/generatePodcastRSS.sh
	@touch $@
$(RSSPODCASTLITE).built:
	@echo "Building $(@:%.built=%)"
	@sh script/generatePodcastRSS.sh -lite
	@touch $@
#
# Build/update defence against very heaviest/greediest podcast feed pullers.
# This is an Apache txt map from UA md5 hash to a non-empty token.
# This also maintains a set of flags MD5HASH.flag for each greedy bot UA.
# For the top-few bots with more than a hit per hour,
# these flags are non-zero size, to allow tougher action.
# This is visible to Web visitors so is partly anonymised (eg truncated UAs).
# WARNING: Apache server may fail to run if the .txt file is absent.
PODCASTGREEDYFLAGDIR=rss/greedybot
PODCASTGREEDYMAP=$(PODCASTGREEDYFLAGDIR)/greedy-podcast-bot-map.txt
.PRECIOUS: $(PODCASTGREEDYMAP)
# Regenerates the map atomically (tmp-then-mv) under a fail-fast lock,
# then rebuilds the per-UA flag files in PODCASTGREEDYFLAGDIR.
# Fix: the second progress message wrongly named the map file
# $(PODCASTGREEDYMAP) as the location of the flags; they are created in
# the flag directory $(PODCASTGREEDYFLAGDIR).
$(PODCASTGREEDYMAP): .work/script/gen-podcast-greedy-UA-map.sh
	@echo "Building $@"
	@$(LOCKFILENRSTD) $@.lock
	@/bin/rm -f $@.tmp
	@cat /dev/null $(wildcard $(SITELOGPREV) $(SITELOG)) | \
	    sh .work/script/gen-podcast-greedy-UA-map.sh > $@.tmp
	@chmod a+r $@.tmp
	@/bin/mv $@.tmp $@
	@chmod a+r $@
	@echo "Building flags in $(PODCASTGREEDYFLAGDIR)..."
	@rm -f $(wildcard $(PODCASTGREEDYFLAGDIR)/*.flag) $@.tmp
	@awk '/^[0-9a-f]/ {++count;print $$1".flag",(((count<=5)&&($$2>24))?count:0)}' < $@ | \
		while read hashflag count;\
		do \
                touch "$(PODCASTGREEDYFLAGDIR)/$${hashflag}"; \
                if [ "$$count" -ne 0 ]; then printf "%d" "$$count" > "$(PODCASTGREEDYFLAGDIR)/$${hashflag}"; fi; \
		chmod a+r "$(PODCASTGREEDYFLAGDIR)/$${hashflag}"; \
		done
	@/bin/rm -f $@.lock
feeds: $(PODCASTGREEDYMAP)
all:: $(PODCASTGREEDYMAP)
ifeq ($(wildcard $(PWRLOWFLAG)),)
# Prevent rebuild of the greedy map when the battery is LOW.
# Rebuild ~weekly from the previous log file, if it exists.
$(PODCASTGREEDYMAP): $(wildcard $(SITELOGPREV))
ifneq ($(wildcard $(PWRHIGHFLAG))$(NOPWR),)
# Allow more frequent (daily) updates when HIGH or better.
$(PODCASTGREEDYMAP): $(wildcard $(DAILY))
endif
endif



# Auto-generated content for (hand-selected) SECTION_xxx pages.
# Included content is the same across all versions (desktop/lite/AMP).
# Rebuild when the tags (or file set) change (which should be slowly).
#.work/inc/.SECTION_%.html.inc: $(TAGSALLBYPAGE)
#	@echo "Building $@"
#	@$(LOCKFILENRSTD) $@.lock
#	@/bin/rm -f $@.tmp
#	@echo '<p>IN PROGRESS</p>' > $@.tmp
#	@/bin/mv $@.tmp $@
#	@/bin/rm -f $@.lock $@.tmp
# TODO: do without explicit dependency list.
# Podcast specifics/specialisation.
# Picks the first (.mp3) AUDIO or (.mp4) VIDEO element
# from each podcast-tagged page.
SECTION_podcast.html m/SECTION_podcast.html amp/SECTION_podcast.html: \
    .SECTION_podcast.html \
    .work/insert/SECTION_podcast.html.inc \
    .work/inc/.SECTION_podcast.html.inc
#SECTION_%.html: .work/inc/.SECTION_%.html.inc
#m/SECTION_%.html: .work/inc/.SECTION_%.html.inc
#amp/SECTION_%.html: .work/inc/.SECTION_%.html.inc
# Rebuild the podcast home page when tags change.
# (Do not force the RSS feed file up to date when rebuilding the include.)
# Duration is reported in rounded-down minutes to match the Apple Podcast page.
# Non-blocking flock: fail fast if another instance is already building;
# tmp-then-mv keeps the update atomic.
.work/inc/.SECTION_podcast.html.inc: $(TAGSALLBYPAGE) \
		.work/insert/SECTION_podcast.html.inc \
		script/genPodcastHTMLinc.sh
	@echo "Building $@"
	@($(FLOCKNB) 9 || exit 1; \
	    sh script/genPodcastHTMLinc.sh > $@.tmp && \
            /bin/mv $@.tmp $@ ) 9>>$(@D)/.$(@F).flock

# Dataset section page depends on insert.
# NOTE(review): this rule and the frugal/microgen/research/review rules
# below differ only in the tag name (and the dataset entry count);
# candidates for the commented-out pattern rule above.
SECTION_dataset.html m/SECTION_dataset.html amp/SECTION_dataset.html: \
    .SECTION_dataset.html \
    .work/inc/.SECTION_dataset.html.inc
# Rebuild the dataset page insert when tags change.
# Emits an entry count then a sorted <ul> of links to all pages
# tagged 'dataset' (sorted by title, ie the text after the '>').
.work/inc/.SECTION_dataset.html.inc: $(TAGSALLBYPAGE)
	@echo "Building $@"
	@$(LOCKFILENRSTD) $@.lock
	@/bin/rm -f $@.tmp
	@F="`awk '/ dataset / {print $$1}' < $(TAGSALLBYPAGE)`"; \
         echo "<p>(`echo $$F | wc -w | awk '{print $$1}'` entries.)</p>" >> $@.tmp; \
	 echo '<ul class="cb conc">' >> $@.tmp; \
         for f in $$F; do \
	    printf '<li>%s' '<a href='$$f' itemprop=hasPart>'"`script/get_article_title <.$$f`"'</a>'; \
            echo ''; \
	    done | sort -d -f -t '>' -k 3 >> $@.tmp
	@echo '</ul>' >> $@.tmp
	@/bin/mv $@.tmp $@
	@/bin/rm -f $@.lock $@.tmp

# Frugal section page depends on insert.
# Taken to change only very slowly for now.
SECTION_frugal.html m/SECTION_frugal.html amp/SECTION_frugal.html: \
    .SECTION_frugal.html \
    .work/inc/.SECTION_frugal.html.inc
# Rebuild the frugal page insert when tags change.
# As the dataset rule but for pages tagged 'frugal' (no entry count).
.work/inc/.SECTION_frugal.html.inc: $(TAGSALLBYPAGE)
	@echo "Building $@"
	@$(LOCKFILENRSTD) $@.lock
	@/bin/rm -f $@.tmp
	@echo '<ul class="cb conc">' >> $@.tmp
	@for f in `awk '/ frugal / {print $$1}' < $(TAGSALLBYPAGE)`; do \
	    printf '<li>%s' '<a href='$$f' itemprop=hasPart>'"`script/get_article_title <.$$f`"'</a>'; \
            echo ''; \
	    done | sort -d -f -t '>' -k 3 >> $@.tmp
	@echo '</ul>' >> $@.tmp
	@/bin/mv $@.tmp $@
	@/bin/rm -f $@.lock $@.tmp

# Microgen section page depends on insert.
# Taken to change only very slowly for now.
SECTION_microgen.html m/SECTION_microgen.html amp/SECTION_microgen.html: \
    .SECTION_microgen.html \
    .work/inc/.SECTION_microgen.html.inc
# Rebuild the microgen page insert when tags change.
# As the frugal rule but for pages tagged 'microgen'.
.work/inc/.SECTION_microgen.html.inc: $(TAGSALLBYPAGE)
	@echo "Building $@"
	@$(LOCKFILENRSTD) $@.lock
	@/bin/rm -f $@.tmp
	@echo '<ul class="cb conc">' >> $@.tmp
	@for f in `awk '/ microgen / {print $$1}' < $(TAGSALLBYPAGE)`; do \
	    printf '<li>%s' '<a href='$$f' itemprop=hasPart>'"`script/get_article_title <.$$f`"'</a>'; \
            echo ''; \
	    done | sort -d -f -t '>' -k 3 >> $@.tmp
	@echo '</ul>' >> $@.tmp
	@/bin/mv $@.tmp $@
	@/bin/rm -f $@.lock $@.tmp

# Research section page depends on insert.
# Taken to change only very slowly for now.
SECTION_research.html m/SECTION_research.html amp/SECTION_research.html: \
    .SECTION_research.html \
    .work/inc/.SECTION_research.html.inc
# Rebuild the research page when tags change.
# As the frugal rule but for pages tagged 'research'.
.work/inc/.SECTION_research.html.inc: $(TAGSALLBYPAGE)
	@echo "Building $@"
	@$(LOCKFILENRSTD) $@.lock
	@/bin/rm -f $@.tmp
	@echo '<ul class="cb conc">' >> $@.tmp
	@for f in `awk '/ research / {print $$1}' < $(TAGSALLBYPAGE)`; do \
	    printf '<li>%s' '<a href='$$f' itemprop=hasPart>'"`script/get_article_title <.$$f`"'</a>'; \
            echo ''; \
	    done | sort -d -f -t '>' -k 3 >> $@.tmp
	@echo '</ul>' >> $@.tmp
	@/bin/mv $@.tmp $@
	@/bin/rm -f $@.lock $@.tmp

# Reviews section.
# Taken to change only very slowly for now.
SECTION_review.html m/SECTION_review.html amp/SECTION_review.html: \
    .SECTION_review.html \
    .work/inc/.SECTION_review.html.inc
# Rebuild the review page when tags change.
# As the frugal rule but for pages tagged 'review'.
.work/inc/.SECTION_review.html.inc: $(TAGSALLBYPAGE)
	@echo "Building $@"
	@$(LOCKFILENRSTD) $@.lock
	@/bin/rm -f $@.tmp
	@echo '<ul class="cb conc">' >> $@.tmp
	@for f in `awk '/ review / {print $$1}' < $(TAGSALLBYPAGE)`; do \
	    printf '<li>%s' '<a href='$$f' itemprop=hasPart>'"`script/get_article_title <.$$f`"'</a>'; \
            echo ''; \
	    done | sort -d -f -t '>' -k 3 >> $@.tmp
	@echo '</ul>' >> $@.tmp
	@/bin/mv $@.tmp $@
	@/bin/rm -f $@.lock $@.tmp


# Incrementally rebuild core pages (ultimately) at random.
# Aims to rebuild more recently-updated and canonical and popular pages sooner.
# Aims to rebuild sooner those pages that have not been rebuilt for a long time.
# Should eventually reach a state with all such core pages up to date.
# Builds the HTML pages and pre-compressed versions,
# but not necessarily all derived content nor doing all validation.
# This does not itself create state,
# so is safe to run multiple instances of concurrently,
# and this attempts to inject a little concurrency if the system is quiet.
# This aims to leave a CPU (out of 4) free for foreground tasks.
# Does most work 'nice'd; could ionice too?
# For simplicity this only explicitly builds key leaf targets.
# Minimises work when 'pages' target already up-to-date.
# Wrap a lock around to prevent multiple concurrent instances.
# Make lock short enough that recovery from a fail doesn't take too long.
# Once urgent page builds (recently updated, popular) done, be more gentle.
# This is intended to be run both from cron,
# and by hand to bring the site up to date quickly for small changes.
#
# When battery is HIGH (or higher) then be more aggressive.
# DHD20210707: no longer building AMP pages.
# Nominal per-pass time budget (seconds); doubled when power is HIGH.
INCRSMAX=53
# Sub-make flags: modest parallelism, capped by load average.
INCRFLAGS= -j2 -l3
# Lightweight dummy build target (desktop).
PILWDUMMYTARGET=404.html$(COMPSUFMAX)
# Strategy per pass: (1) if everything is up to date, do the full
# ping/pages/feeds pass and stop; (2) else if only lite pages are stale,
# rebuild a random sample of them; (3) else rebuild recently-updated
# and popular desktop pages first.  rndSample grows 5x each pass;
# a pass also stops early if the Pi reports CPU throttling.
.PHONY: pages-incr
pages-incr: debuginfo allhtmlgendeps ${POPARTS} ${HTMLFILELIST} | ${HTMLFILELISTBUILT}
	@echo "Incremental pages rebuild..."
	@$(LOCKFILENR) -l 86401 $@.lock
	@date
	@-startTime="`date +%s`"; endTime="`expr $$startTime + $(INCRSMAX)`"; \
	  rndSample=3; \
          if [ -f "$(PWRHIGHFLAG)" ]; then rndSample=5; endTime="`expr $$endTime + $(INCRSMAX)`"; fi; \
	  rpit=`$(RPITHROTTLEVALCMD)`; \
          if [ "0" != "$$rpit" ]; then rndSample=1; fi; \
          while [ "`date +%s`" -lt "$$endTime" ]; do \
            if [ "$$rndSample" -gt 9999 ]; then break; fi; \
	    echo "INFO: n(rnd) = $$rndSample."; \
	    if $(MAKE) -s -q Wpages Mpages; then \
		echo "INFO: incremental ping, build of remaining (O)pages and feeds..."; \
                nice $(MAKE) -s -k $(INCRFLAGS) pages-ping pages feeds; \
		echo "INFO: this incremental build finished."; \
                exit 0; \
	    elif $(MAKE) -s -q Wpages $(PILWDUMMYTARGET) ; then \
		echo "INFO: incremental build of Mpages..."; \
		P="`ls -rt1 $(wildcard $(SCMPAGES)) | awk 'BEGIN{srand()} {if(rand()<(1/NR)){print;if(++count>='$$rndSample'){exit}}}'` \
		    `echo $(AUTOUPDATINGPAGES:%=m/%$(COMPSUFGZIP)) $(AUTOUPDATINGPAGES:%=m/%$(COMPSUFBROTLI)) | xargs -n1 | sort -R | head -$$rndSample` \
                    `echo $(SCMPAGES) | xargs -n1 | sort -R | head -$$rndSample | xargs`"; \
		nice $(MAKE) -s -k $(INCRFLAGS) $$P m/$(PILWDUMMYTARGET); \
	    else \
                JUSTUPDATED="`ls -t1 .404.html $(wildcard $(PAGES:%=.%)) | awk 'BEGIN{srand()} {if(rand()<(1/NR)){print;if(++count>='$$rndSample'){exit}}}' | sed -e 's/^[.]\(.*\)$$/\1gz m\/\1gz \1br m\/\1br/'`"; \
                if $(MAKE) -s -q $$JUSTUPDATED; then \
                    echo "INFO: incremental build of mainly Wpages..."; \
		    P="`head -33 < ${POPARTS} | sed -e 's/^ *[0-9]* *\(.*\)$$/\1gz m\/\1gz \1br m\/\1br/' | sort -R | head -$$rndSample` \
                        `ls -rt1 $(PILWDUMMYTARGET) $(wildcard $(SCWPAGES)) | awk 'BEGIN{srand()} {if(rand()<(1/NR)){print;if(++count>='$$rndSample'){exit}}}'` \
		        `echo $(AUTOUPDATINGPAGES:%=%$(COMPSUFGZIP)) $(AUTOUPDATINGPAGES:%=%$(COMPSUFBROTLI)) | xargs -n1 | sort -R | head -$$rndSample` \
                        `echo $(SCWPAGES) | xargs -n1 | sort -R | head -$$rndSample | xargs`"; \
		        nice $(MAKE) -s -k $(INCRFLAGS) $$P $(PILWDUMMYTARGET); \
                else \
                    echo "INFO: incremental build of recently-updated pages..."; \
                    nice $(MAKE) -s -k $(INCRFLAGS) $$JUSTUPDATED $(PILWDUMMYTARGET); \
		    echo "INFO: this incremental build of recently-updated finished."; \
                    exit 0; \
                fi; \
	    fi; \
	  if [ "0" != "`$(RPITHROTTLEVALCMD)`" ]; then echo "INFO: CPU throttled, stopping..."; exit 0; fi; \
	  rndSample="`expr 5 \* $$rndSample`"; \
	  echo "INFO: incremental build pass done."; \
	  date; \
          sleep 1; \
          done;
	@date
	@rm -f $@.lock

# Recreate/expose the IndexNow key as necessary.
# It is not built in to the makefile since it is meant to be 'secret'.
# https://www.indexnow.org/documentation
IndexNowKeySrc=.work/IndexNow.key.txt
# NOTE(review): if $(IndexNowKeySrc) is missing or empty then
# IndexNowKeyFile expands to just ".txt" -- consider guarding.
IndexNowKey := $(shell cat $(IndexNowKeySrc))
IndexNowKeyFile := $(IndexNowKey).txt
all:: $(IndexNowKeyFile)
# Hard-link the secret key into the web root under its key-named file.
$(IndexNowKeyFile): $(IndexNowKeySrc)
	@echo "Building $@"
	@rm -f $@
	ln $(IndexNowKeySrc) $(IndexNowKeyFile)
	@chmod a+r $(IndexNowKeyFile)
# Pings main-page updates to IndexNow and remembers which have been done.
# Submits updates incrementally, 1 or a small number at once.
# https://www.indexnow.org/
# Errs on the side of under-reporting.
# Submits newest (latest-updated) pages first: max novelty, risk starvation.
# Only considers pages up to a few days old.
IndexNowMaxDaysOld=3
# Eliminates explicit 'NOINDEX' pages.
# Does not attempt to ping any one page more than once between updates.
# All pings can be sent to the central (URL0) or can be shared at random.
IndexNowSEURL0=https://api.indexnow.org/indexnow
# NOTE(review): the commented-out URL= line below ends in a backslash,
# so GNU make continues the comment onto the next line and the
# IndexNowSEURL1 assignment is swallowed -- confirm this is intended
# (only IndexNowSEURL0 is actually used in the recipe below).
#URL=`( echo ${IndexNowSEURL1} ; echo ${IndexNowSEURL2} ) | sort -R | head -1`; \
IndexNowSEURL1=https://yandex.com/indexnow
IndexNowSEURL2=https://www.bing.com/indexnow
IndexNowFlags=.work/IndexNow.flags
.PHONY: IndexNow.ping
IndexNow.ping: $(WORKTMP)/IndexNow.ping
IndexNowMaxPingsAtOnce=7
all:: $(WORKTMP)/IndexNow.ping
# Per-page .log flag files record the last successful ping; the target
# is only touched when nothing was left to ping, so any pages beyond
# the per-run cap (IndexNowMaxPingsAtOnce) are retried on the next run.
$(WORKTMP)/IndexNow.ping: makefile $(IndexNowKeyFile) $(SCWPAGES)
	@echo "Building $@"
	@$(LOCKFILENRSLOW) $@.lock
	@count=0; \
         for f in `find $(PAGES) -mtime -${IndexNowMaxDaysOld} -exec ls -1t '{}' +`; do \
	    if egrep -q '<!-- *NOINDEX *-->' .$$f; then continue; fi; \
	    n=$$f; \
            flag=${IndexNowFlags}/$$f.log; \
            if [ ! -f $$flag -o $$f -nt $$flag ]; then \
	        echo IndexNow: $$n; \
                URL=${IndexNowSEURL0}; \
	        wget -q -O $$flag "$$URL"'?url=$(URLLISTPREFIX)'"$$n"'&key=${IndexNowKey}'; \
                count="`expr $$count + 1`"; \
                echo "INFO: IndexNow: done $$count..."; \
                if [ "$$count" -ge ${IndexNowMaxPingsAtOnce} ]; then break; fi; \
                sleep 1; \
            fi; \
	    done; \
            if [ 0 = "$$count" ]; then echo "INFO: IndexNow up to date..."; touch $@; fi
	@/bin/rm -f $@.lock

# The site-technicals page may be updated along with stats
# since it contains some embedded dynamic results.
# Only force update of the desktop page with stats.
# The lite and AMP pages can update much more slowly.
# TODO: update page Cache-Control to match likely update frequency.
.PHONY: stats
# Only tie the pre-compressed outputs to 'stats' when power is HIGH
# (or power logic is disabled via NOPWR).
ifneq ($(wildcard $(PWRHIGHFLAG))$(NOPWR),)
stats:: note-on-site-technicals.html$(COMPSUFGZIP)
stats:: note-on-site-technicals.html$(COMPSUFBROTLI)
endif
# Site technicals index pages (www, m, offline) depend on the include.
note-on-site-technicals.html: .work/inc/.note-on-site-technicals.html.inc
m/note-on-site-technicals.html: .work/inc/.note-on-site-technicals.html.inc
$(OFFLINEDIR)/note-on-site-technicals.html: .work/inc/.note-on-site-technicals.html.inc
# The include page depends on the script and the sub-pages and data sources.
# Can take a while, so long lock timeout.
# Built atomically via $@.tmp; the lockfile -l timeout (see LOCKFILENRSLOW)
# lets the lock self-expire if the recipe dies without cleaning up.
.work/inc/.note-on-site-technicals.html.inc: \
    .work/script/gen-note-on-site-technicals-inc
	@echo "Building $@"
	@$(LOCKFILENRSLOW) $@.lock
	@/bin/rm -f $@.tmp
	@.work/script/gen-note-on-site-technicals-inc > $@.tmp
	@/bin/mv $@.tmp $@
	@/bin/rm -f $@.lock $@.tmp
# Postpone daily stats and other automated updates until HIGH.
# Postpone immediate update following any individual page update until HIGH.
ifneq ($(wildcard $(PWRHIGHFLAG))$(NOPWR),)
.work/inc/.note-on-site-technicals.html.inc: \
    $(wildcard $(DAILY)) \
    $(wildcard $(SITELOGPREV)) \
    $(filter .note-on-site-technicals-%.html,$(PAGES:%=.%)) \
    .work/script/pgdescription
endif

# Update series RSS feed: note-on-site-technicals
# Decline to rebuild when power not HIGH or grid intensity is high.
ifneq ($(wildcard $(PWRHIGHFLAG))$(NOPWR),)
ifeq ($(wildcard $(GRIDRED1DFLAG)),)
feeds: rss/note-on-site-technicals.rss.built
# Postpone 'all' update following any individual page update until HIGH.
all:: rss/note-on-site-technicals.rss.built
# Prerequisites (series page sources and the generator script) are only
# attached when rebuilding is allowed; the recipe below always exists.
rss/note-on-site-technicals.rss.built: \
    $(filter .note-on-site-technicals-%.html,$(PAGES:%=.%)) \
    script/generateRSS.sh
endif
endif
# Regenerate the feed and refresh the stamp file.
rss/note-on-site-technicals.rss.built:
	@echo "Building $(@:%.built=%)"
	@sh script/generateRSS.sh note-on-site-technicals.html
	@touch $@

# Update series RSS feed: saving-electricity
# Decline to rebuild when power not HIGH or grid intensity is high.
ifneq ($(wildcard $(PWRHIGHFLAG))$(NOPWR),)
ifeq ($(wildcard $(GRIDRED1DFLAG)),)
feeds: rss/saving-electricity.rss.built
# Postpone 'all' update following any individual page update until HIGH.
all:: rss/saving-electricity.rss.built
# This should mean that more clients see 304s and Internet traffic will be lower.
# NOTE: $(filter)'s wildcard is '%', not the shell glob '*'; the previous
# '.saving-electricity-*.html' pattern matched nothing, so series page
# updates never triggered a feed rebuild (cf the sibling rule above).
rss/saving-electricity.rss.built: \
    $(filter .saving-electricity-%.html,$(PAGES:%=.%)) \
    script/generateRSS.sh
endif
endif
# Regenerate the feed and refresh the stamp file.
rss/saving-electricity.rss.built:
	@echo "Building $(@:%.built=%)"
	@sh script/generateRSS.sh saving-electricity.html
	@touch $@


# The "easy read" social media article collection.
# Intended to be interesting to a UK/EU flat-renting core demographic.
# Such articles are tagged with EASYREAD.
# Depends on page sources only, to check the tags.
# These are top-level relative HTML filename, without leading '.'.
# Sorted just because we can (may make some operations more efficient).
# Builds atomically without a lock.
# Depends on page source to allow tags to come and go, if spare energy.
EASYREAD=$(WORKTMP)/easyread.txt
# Stamp file: always touched on a check, while EASYREAD itself is only
# replaced when its content actually changes (keeps downstream mtimes stable).
EASYREADBUILT=$(EASYREAD).built
$(EASYREADBUILT): $(PAGESSRC)
# Extract tagged pages ($$$$ = shell PID keeps the temp name unique),
# compare with the current list, and install only on change.
$(EASYREADBUILT): $(HTMLFILELISTBUILT)
	@echo "Checking $(EASYREAD)..."
	@egrep -l '^<!-- *TAGS .*EASYREAD.* *-->$$' $(PAGESSRC) | sed -e 's/^[.]//' | sort > $@.$$$$.tmp; \
	    if cmp -s $@.$$$$.tmp $(EASYREAD); then echo No change for $(EASYREAD).; else mv -f $@.$$$$.tmp $(EASYREAD); echo Rebuilt $(EASYREAD).; fi; \
            rm -f $@.$$$$.tmp
	@touch $(EASYREADBUILT)
# If enough energy, force EASYREAD build more eagerly.
ifneq ($(wildcard $(PWRHIGHFLAG))$(NOPWR),)
all:: $(EASYREAD) $(EASYREADBUILT)
$(EASYREAD): ${EASYREADBUILT}
	touch $(EASYREAD)
endif


# The main index has a dynamic include for new/popular/updated pages.
# It thus depends on at least all page sources.
# Can take a while, so long lock timeout.
# This can also include an optional featured page and a random page.
# FEATUREPAGE if present contains the name of an HTML page to use as feature.
# Rebuilt DAILY to allow at least the random article of the day to be refreshed.
# Ensure that at least all main pages exist (eg new ones) for home-page build.
FEATUREPAGE=.work/feature.txt
index.html: .work/inc/.index.html.inc
m/index.html: .work/inc/.index.html.inc.m
$(OFFLINEDIR)/index.html: .work/inc/.index.html.inc.m
amp/index.html: .work/inc/.index.html.inc.amp
# Each non-compressed page should exist before building the home page.
#.work/inc/.index.html.inc: | $(PAGES)
#.work/inc/.index.html.inc.m: | $(MPAGES)
#.work/inc/.index.html.inc.m: | $(FPAGES)
#.work/inc/.index.html.inc.amp: | $(APAGES)
# The home page(s), via the includes, depend(s) on many other things.
# Note: the file lists after '|' are order-only -- they must exist but
# their timestamps alone do not force an include rebuild.
.work/inc/.index.html.inc \
	.work/inc/.index.html.inc.m \
	.work/inc/.index.html.inc.amp: \
    $(PAGES:%=.%) ${HTMLFILELISTBUILT} \
    $(wildcard $(FEATUREPAGE)) \
    $(POPARTS) \
    $(WORKTMP)/newestArticles.txt \
    $(THISYEARELECTCO2) \
    $(HTMLGENDEPS) \
    .work/script/gen-index-inc \
    .work/script/recentEGUse.sh data/.private/WeeklyMeterReadings.csv \
    $(HTMLFILELIST) $(EASYREAD) | $(HTMLFILELISTBUILT) $(EASYREADBUILT)
#
# When power is HIGH or dumping (or power logic disabled), upgrade the
# built-list dependencies from order-only to full (timestamp-sensitive).
ifneq ($(wildcard $(PWRHIGHFLAG) $(PWRDUMPING))$(NOPWR),)
.work/inc/.index.html.inc \
        .work/inc/.index.html.inc.m \
        .work/inc/.index.html.inc.amp: \
    $(HTMLFILELISTBUILT) $(EASYREADBUILT)
endif
# Postpone periodic home page forced update until HIGH.
# Desktop daily, lite/AMP weekly.
ifneq ($(wildcard $(PWRHIGHFLAG))$(NOPWR),)
.work/inc/.index.html.inc: \
    $(wildcard $(DAILY))
.work/inc/.index.html.inc.m \
    .work/inc/.index.html.inc.amp: \
    $(wildcard $(WEEKLY))
endif
# Desktop home-page include: built atomically via $@.tmp under a
# standard-timeout lock (gen-index-inc first arg 'false' = desktop mode).
.work/inc/.index.html.inc:
	@echo "Building $@"
	@$(LOCKFILENRSTD) $@.lock
	@/bin/rm -f $@.tmp
	@.work/script/gen-index-inc false $(FEATUREPAGE) > $@.tmp
	@/bin/mv $@.tmp $@
	@/bin/rm -f $@.lock $@.tmp
# Mobile/lite home-page include (gen-index-inc first arg 'true' = mobile).
.work/inc/.index.html.inc.m:
	@echo "Building $@"
	@$(LOCKFILENRSTD) $@.lock
	@/bin/rm -f $@.tmp
	@.work/script/gen-index-inc true $(FEATUREPAGE) > $@.tmp
	@/bin/mv $@.tmp $@
	@/bin/rm -f $@.lock $@.tmp
# AMP home-page include (gen-index-inc first arg 'amp').
.work/inc/.index.html.inc.amp:
	@echo "Building $@"
	@$(LOCKFILENRSTD) $@.lock
	@/bin/rm -f $@.tmp
	@.work/script/gen-index-inc amp $(FEATUREPAGE) > $@.tmp
	@/bin/mv $@.tmp $@
	@/bin/rm -f $@.lock $@.tmp

# The site guide needs to reflect changes in which pages are to be built.
# It can also be updated with a new 'popular pages' list.
# Have the site guide incl depend on the popular articles list.
# All site guide versions depend on the page/article list.
# There are only weak (order-only, after '|') dependencies on 'dynamic'
# content such as popular and updated articles so as to avoid forcing a
# full rebuild on every page update.
.work/inc/.sitemap.html.inc: ${HTMLFILELIST} | ${HTMLFILELISTBUILT}
# Have the site guide incl strongly depend on the newest article list.
.work/inc/.sitemap.html.inc: $(WORKTMP)/newestArticles.txt
# Have the site guide incl weakly depend on the popular articles list.
# Mobile version may or may not depend on this to save effort.
.work/inc/.sitemap.html.inc: | ${POPARTS}
# Have the site guide incl weakly depend on the unvisited article list.
.work/inc/.sitemap.html.inc: | ${UNVISITED}
# Have the site guide incl weakly depend on the easy-read article list.
.work/inc/.sitemap.html.inc: | $(EASYREADBUILT)
# Have the site guide incl depend on a key script or two.
.work/inc/.sitemap.html.inc: script/get_tags
# Make the site guide pages depend on the include.
ifneq ($(wildcard $(PWRHIGHFLAG))$(NOPWR),)
# If system storage in a good state then...
# Force a daily update to catch any explicit dependencies omitted.
.work/inc/.sitemap.html.inc: $(wildcard $(DAILY))
endif
all:: .work/inc/.sitemap.html.inc
sitemap.html m/sitemap.html amp/sitemap.html: .work/inc/.sitemap.html.inc
# Build of include.
# Can take a while, so long lock timeout.
# Note: the ordered page list is a quoted positional argument, placed
# after the (position-independent) output redirection.
.work/inc/.sitemap.html.inc: .work/script/gen-sitemap-inc
	@echo "Building $@"
	@$(LOCKFILENRSTD) $@.lock
	@/bin/rm -f $@.tmp
	@.work/script/gen-sitemap-inc > $@.tmp "$(ORDEREDPAGES)"
	@/bin/mv $@.tmp $@
	@/bin/rm -f $@.lock $@.tmp


# Max permitted size for (compressed) head/lead-in text to <main>.
# Should be << 1460 to ensure that real content starts in the first packet,
# especially given HTTP header overhead.
# DHD20170716: after optimising HTTP overhead ~290 bytes leaving 1170.
MAXCOMPHEADSIZE=1260
# Be slightly keener for mobile to get significant first-packet content.
MAXCOMPHEADSIZEM=1100

# Maximum uncompressed HTML page length.
# Should preferrably be less than ~125kB to allow effective cacheing/spidering.
# Microsoft Bing suggested limit 128000.
PAGECMAX=128000
PAGEABSCMAX=999999

# Build info directory
BUILDINFO=.build
all:: $(BUILDINFO) m/$(BUILDINFO) $(OFFLINEDIR)/$(BUILDINFO)
	@mkdir -p $(BUILDINFO) m/$(BUILDINFO) $(OFFLINEDIR)/$(BUILDINFO)

# For automatic wrapping of bare content XHTML files...
# Desktop/full.
# Assumed more bandwidth than mobile, so less minification effort here.
# Though usually quick to run, rebuilding hero/body images can be very slow,
# so lock timeout has to be long enough to allow for that.
# Do readability scoring also: warn if text is unreadable.
# Any warnings are left in $(@D)/$(BUILDINFO)/$(@F).warn.
# If it holds only INFO lines it is renamed to the matching .info file.
# Validation is batched for efficiency with $(PAGESVALID).
# but can be done in this rule incrementally with:
#     @(script/vnu $(@D)/.$(@F).tmp >/dev/null) 3>&1 1>&2 2>&3 | awk '{print "ERROR: validation: "$$0}' | tee -a $(@D)/$(@F).warn
# FIXME: technically should depend on $(NONAMPFILELIST), but too painful.
all:: script/rawHTML-readability script/get_minreadability script/html5-minify
# Desktop page build from bare content source .%.html:
#  1. wrap and CSS-minify (retrying with a minimal head, -H, if the gzipped
#     lead-in to <main> exceeds MAXCOMPHEADSIZE);
#  2. gently minify the HTML;
#  3. check overall page size against PAGECMAX/PAGEABSCMAX;
#  4. score readability of the input (and of the output if input scores low);
#  5. spell-check;
#  6. atomically install the page read-only.
# Warnings accumulate in $(@D)/$(BUILDINFO)/$(@F).warn; if only INFO lines
# remain it is renamed to .info.  An ERROR line, or any warning on a
# top-ranked (popular) article, fails the build.
# Fixes: missing 'echo' before the minification-failure message (the bare
# string was executed as a command); readability messages used a leftover
# m/%.html substitution from the mobile rule; .info path and temp-file
# names normalized to the $(@D)/$(@F) forms used by the sibling rules.
%.html: .%.html | $(HTMLGENDEPS)
	@echo "Building $@"
	@$(LOCKFILENRSLOW) $@.lock
	@/bin/rm -f $(@D)/.$(@F).wa.tmp $(@D)/.$(@F).tmp $(@D)/.$(@F).premin
	@/bin/rm -f $(@D)/$(BUILDINFO)/$(@F).warn $(@D)/$(BUILDINFO)/$(@F).info
	@mkdir -p $(@D)/$(BUILDINFO)
	@(.work/script/wrap_art .$(@F) | .work/script/safecssmin-in-place > $(@D)/.$(@F).wa.tmp && mv $(@D)/.$(@F).wa.tmp $(@D)/.$(@F).premin) 3>&1 1>&2 2>&3 | tee -a $(@D)/$(BUILDINFO)/$(@F).warn
	@SZ=`awk '{print} /<main>$$/ {exit}' < $(@D)/.$(@F).premin | gzip -8 | wc -c`; \
	    if [ $$SZ -gt ${MAXCOMPHEADSIZE} ]; then \
	        echo "INFO: $@ initial compressed header (will try -H) too large ($$SZ; max ${MAXCOMPHEADSIZE})." | tee -a $(@D)/$(BUILDINFO)/$(@F).warn; \
	        awk '{print"INFO: "$$0} /<main>$$/ {exit}' < $(@D)/.$(@F).premin >> $(@D)/$(BUILDINFO)/$(@F).warn; \
	        /bin/rm -f $(@D)/.$(@F).premin; \
	        (.work/script/wrap_art -H .$(@F) | .work/script/safecssmin-in-place > $(@D)/.$(@F).wa.tmp && mv $(@D)/.$(@F).wa.tmp $(@D)/.$(@F).premin) 3>&1 1>&2 2>&3 | tee -a $(@D)/$(BUILDINFO)/$(@F).warn; \
	        SZ2=`awk '{print} /<main>$$/ {exit}' < $(@D)/.$(@F).premin | gzip -9 | wc -c`; \
	        if [ $$SZ2 -gt ${MAXCOMPHEADSIZE} ]; then \
	            echo "ERROR: $@ compressed header too large ($$SZ2; max ${MAXCOMPHEADSIZE})." | tee -a $(@D)/$(BUILDINFO)/$(@F).warn; \
	            exit 1; \
	        else \
	            echo "INFO: $@ compressed header now ($$SZ2; max ${MAXCOMPHEADSIZE})." | tee -a $(@D)/$(BUILDINFO)/$(@F).warn; \
	        fi; \
	    fi
	@test -s $(@D)/.$(@F).premin
	@script/html5-minify -gentle < $(@D)/.$(@F).premin > $(@D)/.$(@F).tmp
	@if [ ! -s $(@D)/.$(@F).tmp ]; then echo "ERROR: $@ HTML minification failed" | tee -a $(@D)/$(BUILDINFO)/$(@F).warn; exit 2; fi
	@FSZ=`wc -c < $(@D)/.$(@F).tmp`; \
	    if [ $$FSZ -gt ${PAGEABSCMAX} ]; then \
	    echo "WARNING: $@ uncompressed page too large ($$FSZ; recommended max ${PAGECMAX}, abs max ${PAGEABSCMAX})." | tee -a $(@D)/$(BUILDINFO)/$(@F).warn; \
	    elif [ $$FSZ -gt ${PAGECMAX} ]; then \
	    echo "INFO: $@ uncompressed page too large ($$FSZ; max ${PAGECMAX}, abs max ${PAGEABSCMAX})." | tee -a $(@D)/$(BUILDINFO)/$(@F).warn; \
	    fi
	@MRS="`script/get_minreadability $(@:%.html=.%.html)`"; \
	    RS="`script/rawHTML-readability < $(@:%.html=.%.html)`"; \
	    if [ "$$RS" = "" -o "$$RS" = "NaN" -o "$$RS" = "nan" ]; then \
	        echo "INFO: $(@:%.html=.%.html) readability score bad ($$RS; min $$MRS)." | tee -a $(@D)/$(BUILDINFO)/$(@F).warn; \
	    elif [ $$RS -lt $$MRS ]; then \
	        echo "INFO: $(@:%.html=.%.html) readability score on input text too low ($$RS; min $$MRS)." | tee -a $(@D)/$(BUILDINFO)/$(@F).warn; \
	        RSO="`script/rawHTML-readability < $(@D)/.$(@F).tmp`"; \
	        if [ $$RSO -lt $$MRS ]; then \
	            echo "WARNING: $(@:%.html=.%.html) readability score on output text too low also ($$RSO; min $$MRS)." | tee -a $(@D)/$(BUILDINFO)/$(@F).warn; \
	        else \
	            echo "INFO: $(@:%.html=.%.html) readability score on output text $$RSO." | tee -a $(@D)/$(BUILDINFO)/$(@F).warn; \
	        fi; \
	    else \
	        echo "INFO: $(@:%.html=.%.html) readability score $$RS; min $$MRS." | tee -a $(@D)/$(BUILDINFO)/$(@F).warn; \
	    fi
	@sh script/spellb.sh .$(@F) | tee -a $(@D)/$(BUILDINFO)/$(@F).warn
	@if [ "" = "`egrep -v '^INFO' < $(@D)/$(BUILDINFO)/$(@F).warn`" ]; then mv -f $(@D)/$(BUILDINFO)/$(@F).warn $(@D)/$(BUILDINFO)/$(@F).info; else echo SEE $(@D)/$(BUILDINFO)/$(@F).warn; chmod go-r $(@D)/$(BUILDINFO)/$(@F).warn; if egrep -qs '^ERROR' < $(@D)/$(BUILDINFO)/$(@F).warn; then exit 1; fi; fi
	@if [ -s "$@" ] && [ -s $(@D)/$(BUILDINFO)/$(@F).warn ]; then \
	    rank="`script/get_article_rank -autosource $@`"; \
	    if [ "" != "$${rank}" -a "$${rank}0" -le 100 ]; then \
	        echo "ERROR: popular article rank=$${rank} WARNING treated as ERROR for $@" | tee -a $(@D)/$(BUILDINFO)/$(@F).warn; \
	        exit 1; \
	        fi; \
	    fi
	@test -s $(@D)/.$(@F).tmp
	@-chmod -f u+w $@
	@chmod -f 644 $(@D)/.$(@F).tmp
	@/bin/mv $(@D)/.$(@F).tmp $@
	@chmod a+r,a-wx $@
	@/bin/rm -f $(@D)/.$(@F).wa.tmp $(@D)/.$(@F).tmp $(@D)/.$(@F).premin
	@/bin/rm -f $@.lock

# For automatic wrapping of bare content XHTML files...
# Mobile/lite.
# Mobile assumed relatively low bandwidth, so full minification effort here.
# Could move header size checking to after minifying.
# Though usually quick to run, rebuilding hero/body images can be very slow,
# so lock timeout has to be long enough to allow for that.
# Minifying is a bit slow so a longer lock timeout is justified.
# Note this depends on image optimisers/minifiers as hero usually inlined.
# Does not re-do W3C/VNU validation assumed performed for desktop version.
# Possibly (if link to AMP) should depend on $(NONAMPFILELIST), but too painful.
# Validation is batched for efficiency with $(PAGESVALID).
# but can be done in this rule incrementally with:
#     @(script/vnu $(@D)/.$(@F).tmp >/dev/null) 3>&1 1>&2 2>&3 | awk '{print "ERROR: validation: "$$0}' | tee -a $(@D)/$(BUILDINFO)/$(@F).warn
# Warnings accumulate in $(@D)/$(BUILDINFO)/$(@F).warn; if only INFO lines
# remain it is renamed to .info; an ERROR line fails the build.
# Fix: the minification-failure message lacked 'echo', so the bare string
# was executed as a (failing) command and the message was never recorded.
m/%.html: .%.html | $(HTMLGENDEPS)
	@echo "Building $@"
	@$(LOCKFILENRSLOW) $@.lock
	@/bin/rm -f $(@D)/.$(@F).wa.tmp $(@D)/.$(@F).tmp $(@D)/.$(@F).premin
	@/bin/rm -f $(@D)/$(BUILDINFO)/$(@F).warn $(@D)/$(BUILDINFO)/$(@F).info
	@mkdir -p $(@D)/$(BUILDINFO)
	@(.work/script/wrap_art -m .$(@F) | .work/script/safecssmin-in-place > $(@D)/.$(@F).wa.tmp && mv $(@D)/.$(@F).wa.tmp $(@D)/.$(@F).premin) 3>&1 1>&2 2>&3 | tee -a $(@D)/$(BUILDINFO)/$(@F).warn
	@test -s $(@D)/.$(@F).premin
	@SZ=`awk '{print} /<main>$$/ {exit}' < $(@D)/.$(@F).premin | gzip -8 | wc -c`; \
	    if [ $$SZ -gt ${MAXCOMPHEADSIZEM} ]; then \
	    echo "ERROR: $@ compressed header too large ($$SZ; max ${MAXCOMPHEADSIZEM})." | tee -a $(@D)/$(BUILDINFO)/$(@F).warn; \
	    exit 1; \
	    fi
	@script/html5-minify < $(@D)/.$(@F).premin > $(@D)/.$(@F).tmp
	@if [ ! -s $(@D)/.$(@F).tmp ]; then echo "ERROR: $@ HTML minification failed" | tee -a $(@D)/$(BUILDINFO)/$(@F).warn; exit 2; fi
	@FSZ=`wc -c < $(@D)/.$(@F).tmp`; \
	    if [ $$FSZ -gt ${PAGEABSCMAX} ]; then \
	    echo "WARNING: $@ uncompressed page too large ($$FSZ; abs max ${PAGEABSCMAX})." | tee -a $(@D)/$(BUILDINFO)/$(@F).warn; \
	    fi
	@if [ "" = "`egrep -v '^INFO' < $(@D)/$(BUILDINFO)/$(@F).warn`" ]; then mv -f $(@D)/$(BUILDINFO)/$(@F).warn $(@D)/$(BUILDINFO)/$(@F).info; else echo SEE $(@D)/$(BUILDINFO)/$(@F).warn; chmod go-r $(@D)/$(BUILDINFO)/$(@F).warn; if egrep -qs '^ERROR' < $(@D)/$(BUILDINFO)/$(@F).warn; then exit 1; fi; fi
	@test -s $(@D)/.$(@F).tmp
	@-chmod -f u+w $@
	@chmod -f 644 $(@D)/.$(@F).tmp
	@/bin/mv $(@D)/.$(@F).tmp $@
	@chmod a+r,a-wx $@
	@/bin/rm -f $(@D)/.$(@F).wa.tmp $(@D)/.$(@F).tmp $(@D)/.$(@F).premin
	@/bin/rm -f $@.lock

# For automatic wrapping of bare content XHTML files...
# Offline (stand-alone mobile/lite).
# Assumes important stuff such as size and readability checked elsewhere.
# Fix: the minification-failure message lacked 'echo', so the bare string
# was executed as a (failing) command and the message was never recorded.
$(OFFLINEDIR)/%.html: .%.html | $(HTMLGENDEPS)
	@echo "Building $@"
	@$(LOCKFILENRSLOW) $@.lock
	@/bin/rm -f $(@D)/.$(@F).wa.tmp $(@D)/.$(@F).tmp $(@D)/.$(@F).premin
	@/bin/rm -f $(@D)/$(BUILDINFO)/$(@F).warn $(@D)/$(BUILDINFO)/$(@F).info
	@mkdir -p $(@D)/$(BUILDINFO)
	@(.work/script/wrap_art -o .$(@F) | .work/script/safecssmin-in-place > $(@D)/.$(@F).wa.tmp && mv $(@D)/.$(@F).wa.tmp $(@D)/.$(@F).premin) 3>&1 1>&2 2>&3 | tee -a $(@D)/$(BUILDINFO)/$(@F).warn
	@test -s $(@D)/.$(@F).premin
	@script/html5-minify < $(@D)/.$(@F).premin > $(@D)/.$(@F).tmp
	@if [ ! -s $(@D)/.$(@F).tmp ]; then echo "ERROR: $@ HTML minification failed" | tee -a $(@D)/$(BUILDINFO)/$(@F).warn; exit 2; fi
	@if [ "" = "`egrep -v '^INFO' < $(@D)/$(BUILDINFO)/$(@F).warn`" ]; then mv -f $(@D)/$(BUILDINFO)/$(@F).warn $(@D)/$(BUILDINFO)/$(@F).info; else echo SEE $(@D)/$(BUILDINFO)/$(@F).warn; chmod go-r $(@D)/$(BUILDINFO)/$(@F).warn; if egrep -qs '^ERROR' < $(@D)/$(BUILDINFO)/$(@F).warn; then exit 1; fi; fi
	@test -s $(@D)/.$(@F).tmp
	@-chmod -f u+w $@
	@chmod -f 644 $(@D)/.$(@F).tmp
	@/bin/mv $(@D)/.$(@F).tmp $@
	@chmod a+r,a-wx $@
	@/bin/rm -f $(@D)/.$(@F).wa.tmp $(@D)/.$(@F).tmp $(@D)/.$(@F).premin
	@/bin/rm -f $@.lock

# For automatic wrapping of bare content XHTML files to AMP.
# AMP caches will do some minimising, and AMP doesn't allow all HTML5 features.
# Fails if AMP validation fails.
# Re-tries with a minimal head section if initially too large.
# Will fail quickly if not AMP-compatible.
# FIXME: faking HTML minification for now...
# FIXME: WILL NOT BE MOVED INTO PLACE IF VALIDATION FAILS.
# FIXME: MAY CREATE ZERO-SIZE PLACEHOLDER ON FAILURE.
# FIXME: can hang during AMP validation if network connectivity bad.
# TODO: WILL BE SLOW TO RUN VALIDATION.
# Though usually quick to run, rebuilding hero/body images can be very slow,
# so lock timeout has to be long enough to allow for that.
# Any warnings are left in $(@D)/$(BUILDINFO)/$(@F).warn; it is removed if empty.
# Fixes: mkdir used '(@D)' (missing '$') so the build-info directory path
# was wrong; the minification-failure message lacked 'echo', so the bare
# string was executed as a (failing) command and never recorded.
all:: script/ampval
amp/%.html: .%.html | $(HTMLGENDEPS)
	@if [ "" != "$(filter $(@F),$(NONAMPPAGES))" ]; then echo "Fast fail: NOT SUITABLE FOR AMP: $(@F)"; exit 2; fi
	@echo "Building $@"
	@$(LOCKFILENRSLOW) $@.lock
	@mkdir -p $(@D)/$(BUILDINFO)
	@/bin/rm -f $(@D)/.$(@F).wa.tmp $(@D)/.$(@F).tmp $(@D)/$(BUILDINFO)/$(@F).warn
	@(.work/script/wrap_art -a .$(@F) | .work/script/safecssmin-in-place > $(@D)/.$(@F).wa.tmp && mv $(@D)/.$(@F).wa.tmp $(@D)/.$(@F).premin) 3>&1 1>&2 2>&3 | tee -a $(@D)/$(BUILDINFO)/$(@F).warn
	@test -s $(@D)/.$(@F).premin
	@SZ=`awk '{print} /<main>$$/ {exit}' < $(@D)/.$(@F).premin | gzip -8 | wc -c`; \
	    if [ $$SZ -gt ${MAXCOMPHEADSIZE} ]; then \
	        echo "INFO: $@ initial compressed header (will try -H) too large ($$SZ; max ${MAXCOMPHEADSIZE}, uncompressed `awk '{print}/<main>$$/{exit}'<$(@D)/.$(@F).premin|wc -c`)." | tee -a $(@D)/$(BUILDINFO)/$(@F).warn; \
	        awk '{print"INFO: "$$0} /<main>$$/ {exit}' < $(@D)/.$(@F).premin >> $(@D)/$(BUILDINFO)/$(@F).warn; \
	        /bin/rm -f $(@D)/.$(@F).premin; \
	        (.work/script/wrap_art -H -a .$(@F) | .work/script/safecssmin-in-place > $(@D)/.$(@F).wa.tmp && mv $(@D)/.$(@F).wa.tmp $(@D)/.$(@F).premin) 3>&1 1>&2 2>&3 | tee -a $(@D)/$(BUILDINFO)/$(@F).warn; \
	        SZ2=`awk '{print} /<main>$$/ {exit}' < $(@D)/.$(@F).premin | gzip -9 | wc -c`; \
	        if [ $$SZ2 -gt ${MAXCOMPHEADSIZE} ]; then \
	            echo "ERROR: $@ compressed header too large ($$SZ2; max ${MAXCOMPHEADSIZE})." | tee -a $(@D)/$(BUILDINFO)/$(@F).warn; \
	            exit 1; \
	        else \
	            echo "INFO: $@ compressed header now ($$SZ2; max ${MAXCOMPHEADSIZE})." | tee -a $(@D)/$(BUILDINFO)/$(@F).warn; \
	        fi; \
	    fi
	@test -s $(@D)/.$(@F).premin
	@cat < $(@D)/.$(@F).premin > $(@D)/.$(@F).tmp
	@if [ ! -s $(@D)/.$(@F).tmp ]; then echo "ERROR: $@ HTML minification failed" | tee -a $(@D)/$(BUILDINFO)/$(@F).warn; exit 2; fi
	@FSZ=`wc -c < $(@D)/.$(@F).tmp`; \
	    if [ $$FSZ -gt ${PAGEABSCMAX} ]; then \
	    echo "WARNING: $@ uncompressed page too large ($$FSZ; abs max ${PAGEABSCMAX})." | tee -a $(@D)/$(BUILDINFO)/$(@F).warn; \
	    fi
	@egrep ' ((src)|(href))="//' < $(@D)/.$(@F).tmp | awk '{print "WARNING: protocol-relative URL may not work from https public AMP cache: "$$0}' | tee -a $(@D)/$(BUILDINFO)/$(@F).warn
	@(script/ampval $(@D)/.$(@F).tmp >/dev/null) 3>&1 1>&2 2>&3 | awk '{print "ERROR: validation: "$$0}' | tee -a $(@D)/$(BUILDINFO)/$(@F).warn
	@if [ "" = "`egrep -v '^INFO' < $(@D)/$(BUILDINFO)/$(@F).warn`" ]; then rm -f $(@D)/$(BUILDINFO)/$(@F).warn; else echo SEE $(@D)/$(BUILDINFO)/$(@F).warn; chmod go-r $(@D)/$(BUILDINFO)/$(@F).warn; if egrep -qs '^ERROR' < $(@D)/$(BUILDINFO)/$(@F).warn; then exit 1; fi; fi
	@test -s $(@D)/.$(@F).tmp
	@-chmod -f u+w $@
	@chmod -f 644 $(@D)/.$(@F).tmp
	@/bin/mv $(@D)/.$(@F).tmp $@
	@chmod a+r,a-wx $@
	@/bin/rm -f $(@D)/.$(@F).wa.tmp $(@D)/.$(@F).tmp $(@D)/.$(@F).premin
	@/bin/rm -f $@.lock

# Create super-compressed GZIP form of HTML page.
# Uses zopfli on reasonable settings.
# Effort is cranked up a little if system has lots of energy available
# AND the CPU is not throttling.
# Streamlined version that avoids the need for locking.
ZOPFLISCFLAGS=
ifneq ($(wildcard $(PWRHIGHFLAG))$(NOPWR),)
# If enough energy, compress extra hard, though savings usually zero/small.
# (Default iterations: 15.)
# This enhanced value will however not be used if the CPU is being throttled.
ZOPFLISCFLAGS= --i30
endif
# Compress $< (the .html prerequisite) to a PID-unique temp file, then
# install atomically; extra effort flags are dropped while throttled.
%.htmlgz: %.html
	@-chmod -f u+w $@
	@test -s $<
	@TMP=$(@D)/.$(@F).$$$$.tmp; rm -f $$TMP && \
	    FLAGS="$(ZOPFLISCFLAGS)"; \
	    if [ "0" != "$(RPITHROTTLEVAL)" ]; then FLAGS=""; fi; \
	    echo "Building GZip super-compressed ($$FLAGS) $@"; \
	    $(ZOPFLI) $$FLAGS -c $< > $$TMP && \
	    test -s $$TMP && \
	    chmod -f 644 $$TMP && \
	    mv -f $$TMP $@ && \
	    chmod a+r,a-wx $@

# Create super-compressed Brotli form of HTML page.
# Compress $< (the .html prerequisite) at maximum quality to a PID-unique
# temp file, then install atomically read-only.
%.htmlbr: %.html
	@-chmod -f u+w $@
	@test -s $<
	@TMP=$(@D)/.$(@F).$$$$.tmp; rm -f $$TMP && \
	    echo "Building Brotli super-compressed $@"; \
	    brotli -q 11 < $< > $$TMP && \
	    test -s $$TMP && \
	    chmod -f 644 $$TMP && \
	    mv -f $$TMP $@ && \
	    chmod a+r,a-wx $@


# Batch-validate (all www, m) updated pages to mitigate start-up time, etc.
# Validate in limited-size batches to avoid blowing up Vnu.
# In each rule: $? is only the prerequisites newer than the stamp;
# index.html is always (re)checked; $(filter %.html,...) drops the
# script/vnu prerequisite itself from the list passed to the validator.
${PAGESOVALID}: script/vnu $(OTHERPAGES) $(OTHERMPAGES) $(OTHERFPAGES)
	@rm -f $@
	@echo "Validating (OTHER, OTHERM, OTHERF) page HTML5..."
	@echo $(filter %.html,$(sort $? index.html)) | xargs -n512 -s32767 script/vnu
	@echo "Checked: $?" > $@
# Desktop (www) pages.
${PAGESWVALID}: script/vnu $(PAGES)
	@rm -f $@
	@echo "Validating (W) page HTML5..."
	@echo $(filter %.html,$(sort $? index.html)) | xargs -n512 -s32767 script/vnu
	@echo "Checked: $?" > $@
# Mobile/lite (M) pages.
${PAGESMVALID}: script/vnu $(MPAGES)
	@rm -f $@
	@echo "Validating (M) page HTML5..."
	@echo $(filter %.html,$(sort $? index.html)) | xargs -n512 -s32767 script/vnu
	@echo "Checked: $?" > $@
# Offline (F) pages.
${PAGESFVALID}: script/vnu $(FPAGES)
	@rm -f $@
	@echo "Validating (F) page HTML5..."
	@echo $(filter %.html,$(sort $? index.html)) | xargs -n512 -s32767 script/vnu
	@echo "Checked: $?" > $@


# Build all LED special includes.
all:: .work/inc/.LED-lighting.html.inc

LEDDB=db.LED-lighting
#LEDDBFILES:=$(shell ls $(LEDDB)/*.attr $(LEDDB)/*.html $(LEDDB)/fittings/*.txt)
# Script already does its own locking...
#.work/inc/.LED-lighting.html.inc: script/gen-LED-lighting-inc $(LEDDB) $(LEDDBFILES)
# NOTE(review): depending on the $(LEDDB) directory itself rebuilds on any
# directory mtime change (file add/remove) -- presumably intentional, in
# place of the commented-out per-file list above; confirm.
.work/inc/.LED-lighting.html.inc: $(LEDDB) \
	    script/gen-LED-lighting-inc
	@script/gen-LED-lighting-inc
# Make sure that the full generated HTML depends on the include depends on DB.
LED-lighting.html m/LED-lighting.html amp/LED-lighting.html: .work/inc/.LED-lighting.html.inc


# Generate seasonal content for note-on-office-comfort-and-energy-efficiency.
# Rebuild periodically, at least roughly monthly.
note-on-office-comfort-and-energy-efficiency.html: \
    .work/inc/.note-on-office-comfort-and-energy-efficiency.html.inc
m/note-on-office-comfort-and-energy-efficiency.html: \
    .work/inc/.note-on-office-comfort-and-energy-efficiency.html.inc
$(OFFLINEDIR)/note-on-office-comfort-and-energy-efficiency.html: \
    .work/inc/.note-on-office-comfort-and-energy-efficiency.html.inc
all:: script/gen-note-on-office-comfort-and-energy-efficiency-inc
# When power is plentiful (VHIGH flag file present, or NOPWR set),
# also force a rebuild when the monthly flag file updates.
ifneq ($(wildcard $(PWRVHIGHFLAG))$(NOPWR),)
.work/inc/.note-on-office-comfort-and-energy-efficiency.html.inc: \
    $(wildcard $(MONTHLY))
endif
# Fail-fast lock, then build via temp file and atomic mv.
.work/inc/.note-on-office-comfort-and-energy-efficiency.html.inc: \
    script/gen-note-on-office-comfort-and-energy-efficiency-inc
	@echo "Building $@"
	@$(LOCKFILENRSTD) $@.lock
	@/bin/rm -f $@.tmp
	@script/gen-note-on-office-comfort-and-energy-efficiency-inc > $@.tmp
	@/bin/mv $@.tmp $@
	@/bin/rm -f $@.lock $@.tmp


# Generate insert from CSV poll data from UKCHOnOff-poll-data.csv for
# note-on-survey-UK-central-heating-on-off-dates.html
# report on UK central heating on/off dates up to 2022.
UKCHONOFFPOLLDATA=data/UKCHOnOff/UKCHOnOff-poll-data.csv
all:: script/gen-note-on-survey-UK-central-heating-on-off-dates-inc
note-on-survey-UK-central-heating-on-off-dates.html: \
    .work/inc/.note-on-survey-UK-central-heating-on-off-dates.html.inc
m/note-on-survey-UK-central-heating-on-off-dates.html: \
    .work/inc/.note-on-survey-UK-central-heating-on-off-dates.html.inc
$(OFFLINEDIR)/note-on-survey-UK-central-heating-on-off-dates.html: \
    .work/inc/.note-on-survey-UK-central-heating-on-off-dates.html.inc
# Fail-fast lock, then build via temp file and atomic mv.
.work/inc/.note-on-survey-UK-central-heating-on-off-dates.html.inc: \
    $(UKCHONOFFPOLLDATA) \
    script/gen-note-on-survey-UK-central-heating-on-off-dates-inc.sh
	@echo "Building $@"
	@$(LOCKFILENRSTD) $@.lock
	@/bin/rm -f $@.tmp
	@sh script/gen-note-on-survey-UK-central-heating-on-off-dates-inc.sh > $@.tmp
	@/bin/mv $@.tmp $@
	@/bin/rm -f $@.lock $@.tmp

# Generate insert from 2nd CSV poll data from UKCHOnOff2-poll-data.csv for
# note-on-survey-UK-central-heating-on-off-dates-2.html
# report on UK central heating on/off dates for 2024.
UKCHONOFF2POLLDATA=data/UKCHOnOff2/UKCHOnOff2-poll-data.csv
all:: script/gen-note-on-survey-UK-central-heating-on-off-dates-2-inc
note-on-survey-UK-central-heating-on-off-dates-2.html: \
    .work/inc/.note-on-survey-UK-central-heating-on-off-dates-2.html.inc
m/note-on-survey-UK-central-heating-on-off-dates-2.html: \
    .work/inc/.note-on-survey-UK-central-heating-on-off-dates-2.html.inc
$(OFFLINEDIR)/note-on-survey-UK-central-heating-on-off-dates-2.html: \
    .work/inc/.note-on-survey-UK-central-heating-on-off-dates-2.html.inc
# Same generator script as the first survey; -2 selects the 2nd dataset.
.work/inc/.note-on-survey-UK-central-heating-on-off-dates-2.html.inc: \
    $(UKCHONOFF2POLLDATA) \
    script/gen-note-on-survey-UK-central-heating-on-off-dates-inc.sh
	@echo "Building $@"
	@$(LOCKFILENRSTD) $@.lock
	@/bin/rm -f $@.tmp
	@sh script/gen-note-on-survey-UK-central-heating-on-off-dates-inc.sh -2 > $@.tmp
	@/bin/mv $@.tmp $@
	@/bin/rm -f $@.lock $@.tmp

# This is a cheat!
# Generate note-on-survey-results.html general survey roundup.
# The data is updated manually about twice per year.
# (Reuses the on/off-dates generator in -summary mode over both datasets.)
note-on-survey-results.html: \
    .work/inc/.note-on-survey-results.html.inc
m/note-on-survey-results.html: \
    .work/inc/.note-on-survey-results.html.inc
$(OFFLINEDIR)/note-on-survey-results.html: \
    .work/inc/.note-on-survey-results.html.inc
.work/inc/.note-on-survey-results.html.inc: \
    $(UKCHONOFFPOLLDATA) \
    $(UKCHONOFF2POLLDATA) \
    script/gen-note-on-survey-UK-central-heating-on-off-dates-inc.sh
	@echo "Building $@"
	@$(LOCKFILENRSTD) $@.lock
	@/bin/rm -f $@.tmp
	@sh script/gen-note-on-survey-UK-central-heating-on-off-dates-inc.sh -summary > $@.tmp
	@/bin/mv $@.tmp $@
	@/bin/rm -f $@.lock $@.tmp


# Hogsmill bat data depends on its source file.
HBBATDAT=data/bat/Hogsmill/Hogsmill-River-at-Villers-Road-bridge-bat-observations-DHD.csv
# HTML5 include for Hogsmill bat data table.
HBBATINC=.work/inc/.bats-at-Hogsmill-bridge-dataset.html.inc
bats-at-Hogsmill-bridge-dataset.html: $(HBBATINC)
m/bats-at-Hogsmill-bridge-dataset.html: $(HBBATINC)
$(OFFLINEDIR)/bats-at-Hogsmill-bridge-dataset.html: $(HBBATINC)
# Non-blocking flock on fd 9 (fail-fast if another build holds the lock);
# temp-file-then-mv keeps the update atomic.
$(HBBATINC): $(HBBATDAT) script/gen-bats-at-Hogsmill-bridge-dataset-inc.sh
	@echo "Building $@"
	@($(FLOCKNB) 9 || exit 1; \
	    sh script/gen-bats-at-Hogsmill-bridge-dataset-inc.sh < $(HBBATDAT) > $@.tmp && \
            /bin/mv $@.tmp $@ ) 9>>$(@D)/.$(@F).flock


# Generate insert for eddi dataset/stats page.
all:: script/gen-eddi-dataset-summary-inc
eddi-diverter-dataset.html: .work/inc/.eddi-diverter-dataset.html.inc
ifneq ($(wildcard $(PWRVHIGHFLAG))$(NOPWR),)
# Force quicker updates when VHIGH or better.
#.work/inc/.eddi-diverter-dataset.html.inc: $(wildcard $(DAILY))
# Force at-least weekly updates when VHIGH or better.
.work/inc/.eddi-diverter-dataset.html.inc: $(wildcard $(WEEKLY))
stats:: .work/inc/.eddi-diverter-dataset.html.inc
endif
ifneq ($(wildcard $(PWRHIGHFLAG))$(NOPWR),)
# Also update near the start of the month.
.work/inc/.eddi-diverter-dataset.html.inc: $(wildcard $(MONTHLY))
endif
# Fail-fast lock, then build via temp file and atomic mv.
.work/inc/.eddi-diverter-dataset.html.inc: script/gen-eddi-dataset-summary-inc
	@echo "Building $@"
	@$(LOCKFILENRSTD) $@.lock
	@/bin/rm -f $@.tmp
	@script/gen-eddi-dataset-summary-inc > $@.tmp
	@/bin/mv $@.tmp $@
	@/bin/rm -f $@.lock $@.tmp

# Generate derived bibliography files.
# Directory for single-entry .bib files.
SINGLEBIBDIR=db.bibliography/single
# Dependencies on single-entry .bib files.
SINGLEBIBDEPS=$(WORKTMP)/singlebib.mk
# Atomically update dependencies as needed.
# (Emits one "db.bibliography/general.bib: <file>" line per single-entry
# .bib so that general.bib rebuilds when any entry changes.)
$(SINGLEBIBDEPS): $(SINGLEBIBDIR) $(SINGLEBIBDIR)/README.txt
	@echo "Building $@"
	@T=$(@D)/.$(@F).$$$$.tmp; rm -f $$T && \
	    ls -1 $(SINGLEBIBDIR)/*.bib | \
		awk '{print "db.bibliography/general.bib: "$$1}' > $$T && \
		test -s $$T && \
	    mv -f $$T $@
-include $(SINGLEBIBDEPS)
all:: $(SINGLEBIBDEPS)
db.bibliography/general.bib: $(SINGLEBIBDIR) $(SINGLEBIBDIR)/README.txt
db.bibliography/general.bib: $(SINGLEBIBDIR)/.validated
db.bibliography/general.bib: $(SINGLEBIBDEPS)
# Builds the consolidated general bibliography BibTeX file.
# Does a little extra normalisation on the fly for (~1%) compactness.
# (Strip leading and trailing space, and space around = for attributes.)
# Also attempts to make the source files globally readable
# since if they are new or updated in SVN they may not be.
# Sources are concatenated in case-insensitive C-locale sort order.
db.bibliography/general.bib:
	@echo "Building $@"
	-@chmod a+r $(SINGLEBIBDIR)/*.bib
	@T=$(@D)/.$(@F).$$$$.tmp; rm -f $$T && \
            cat $$(export LC_ALL=C; ls -1 $(SINGLEBIBDIR)/*.bib | sort -f) | \
                sed -e 's/^ *//' -e 's/ *$$//' \
                    -e 's/^\([A-Za-z]*\) *= */\1=/' \
                    -e 's/  */ /g' > $$T && \
            test -s $$T && \
	    chmod -f 644 $$T && \
	    mv -f $$T $@ && \
	    chmod a+r,a-wx $@
all:: db.bibliography/general.bib
# Attempt to validate source single-entry .bib files.
# In particular, check that name on first line "@xxx{NAME," matches filename.
all:: $(SINGLEBIBDIR)/.validated
# Create/update the manifest and create/update sub-pages as needed.
# The manifest is updated after all sub-pages to aid retry-ability...
# (The manifest is ASCII-sorted to maximise compressibility.)
# Sub-pages are built by a recursive $(MAKE) over names derived from the
# single-entry .bib filenames.
bibliography/MANIFEST.txt: db.bibliography/general.bib \
    .work/script/wrap_subpage_complete.sh .work/script/wrap_subpage.sh
	@echo Creating/updating sub-pages and $@ manifest.
	@mkdir -p $(@D) && chmod a+rx $(@D)
	@$(LOCKFILENRSLOW) $@.lock
	@ls -1 $(SINGLEBIBDIR)/*.bib | sed -e 's|^.*/\([^.]*\).bib$$|\1|' | (export LC_ALL=C; sort) > $@.tmp
	@chmod a+r $@.tmp
	@$(MAKE) -s 404.html `awk <$@.tmp '{print "bibliography/"$$1".html"}'`
	@mv -f $@.tmp $@
	@rm -f $@.lock $@.tmp
# Share the underlying manifest file (hard link) and create/update
# lite-site sub-pages as needed.
m/bibliography/MANIFEST.txt: bibliography/MANIFEST.txt
	@echo Creating/updating sub-pages and $@ manifest.
	@mkdir -p $(@D) && chmod a+rx $(@D)
	@$(LOCKFILENRSLOW) $@.lock
	@$(MAKE) -s 404.html `awk <bibliography/MANIFEST.txt '{print "m/bibliography/"$$1".html"}'`
	@rm -f $@
	@ln bibliography/MANIFEST.txt $@
	@rm -f $@.lock
# Share the underlying manifest file (hard link) and create/update
# offline-site sub-pages as needed.
$(OFFLINEDIR)/bibliography/MANIFEST.txt: bibliography/MANIFEST.txt
	@echo Creating/updating sub-pages and $@ manifest.
	@mkdir -p $(@D) && chmod a+rx $(@D)
	@$(LOCKFILENRSLOW) $@.lock
	@$(MAKE) -s 404.html `awk <bibliography/MANIFEST.txt '{print "$(OFFLINEDIR)/bibliography/"$$1".html"}'`
	@rm -f $@
	@ln bibliography/MANIFEST.txt $@
	@rm -f $@.lock
all:: bibliography/MANIFEST.txt
all:: m/bibliography/MANIFEST.txt
all:: $(OFFLINEDIR)/bibliography/MANIFEST.txt
# Build a www bibliography sub-page from its single-entry .bib file.
# The script derives everything from the entry basename; -d selects the
# desktop/www variant.
bibliography/%.html: $(SINGLEBIBDIR)/%.bib \
    script/BibTeX-to-subpage-HTML.sh script/BibTeX-to-HTML.sh \
    .work/script/wrap_subpage_complete.sh .work/script/wrap_subpage.sh
	@echo "Building $@"
	@sh script/BibTeX-to-subpage-HTML.sh -d $(shell basename $@ .html)
	@test -s $@ && touch $@
# As above for the lite (-m) variant.
m/bibliography/%.html: $(SINGLEBIBDIR)/%.bib \
    script/BibTeX-to-subpage-HTML.sh script/BibTeX-to-HTML.sh \
    .work/script/wrap_subpage_complete.sh .work/script/wrap_subpage.sh
	@echo "Building $@"
	@sh script/BibTeX-to-subpage-HTML.sh -m $(shell basename $@ .html)
	@test -s $@ && touch $@
# As above for the offline (-o) variant.
# FIX: silence (@) the script invocation to match the -d and -m sibling
# rules; the unsilenced line looked like leftover debugging and spammed
# the build log with one command echo per sub-page.
$(OFFLINEDIR)/bibliography/%.html: $(SINGLEBIBDIR)/%.bib \
    script/BibTeX-to-subpage-HTML.sh script/BibTeX-to-HTML.sh \
    .work/script/wrap_subpage_complete.sh .work/script/wrap_subpage.sh
	@echo "Building $@"
	@sh script/BibTeX-to-subpage-HTML.sh -o $(shell basename $@ .html)
	@test -s $@ && touch $@
ifneq ($(wildcard $(PWRVHIGHFLAG))$(NOPWR),)
# Validate only updated ($?) single-entry .bib files, then touch the stamp.
# Only run when power is plentiful (VHIGH flag present, or NOPWR set).
$(SINGLEBIBDIR)/.validated: script/BibTeX-validate-single-entry-file.sh
$(SINGLEBIBDIR)/.validated: $(wildcard $(SINGLEBIBDIR)/*.bib)
	@echo Validating updated source .bib files in $(SINGLEBIBDIR)...
	@sh script/BibTeX-validate-single-entry-file.sh $(filter %.bib,$?)
	@touch $@
endif

# Generate insert for bibliography.
# Also tries to ensure that the source DB is world-readable.
bibliography.html: .work/inc/.bibliography.html.inc
.work/inc/.bibliography.html.inc: \
    db.bibliography/general.bib \
    db.bibliography/copyright-licenceURL.csv \
    script/BibTeX-to-HTML.sh
	@echo "Building $@"
	@$(LOCKFILENRSTD) $@.lock
	@/bin/rm -f $@.tmp
	@chmod a+r db.bibliography/general.bib
	@sh script/BibTeX-to-HTML.sh -mainpage -d <db.bibliography/general.bib >$@.tmp
	@/bin/mv $@.tmp $@
	@/bin/rm -f $@.lock $@.tmp
# Generate lighter insert for lite site (same inputs, -m mode).
m/bibliography.html: .work/inc/.bibliography.html.inc.m
.work/inc/.bibliography.html.inc.m: \
    db.bibliography/general.bib \
    db.bibliography/copyright-licenceURL.csv \
    script/BibTeX-to-HTML.sh
	@echo "Building $@"
	@$(LOCKFILENRSTD) $@.lock
	@/bin/rm -f $@.tmp
	@chmod a+r db.bibliography/general.bib
	@sh script/BibTeX-to-HTML.sh -mainpage -m <db.bibliography/general.bib >$@.tmp
	@/bin/mv $@.tmp $@
	@/bin/rm -f $@.lock $@.tmp
# Offline site reuses the lite insert.
$(OFFLINEDIR)/bibliography.html: .work/inc/.bibliography.html.inc.m
# Get manifests (re)built.
bibliography.html: bibliography/MANIFEST.txt
m/bibliography.html: m/bibliography/MANIFEST.txt
$(OFFLINEDIR)/bibliography.html: $(OFFLINEDIR)/bibliography/MANIFEST.txt

# Config files for energy series data consolidation.
CONSOLIDATIONCONFIG= \
    data/consolidated/config_data_sources.csv \
    data/consolidated/config_granularity.csv \
    data/consolidated/config_synthetic.csv \
    data/consolidated/config_variables.csv \
    data/consolidated/config_het.csv

# Rebuild consolidated energy series datasets (at least) monthly near the start.
all:: data/.flags/consolidated.flag
data/.flags/consolidated.flag: $(wildcard $(MONTHLY)) \
                $(CONSOLIDATIONCONFIG) \
		script/updateConsolidatedEnergyOutputs.sh \
                script/sonifyConsolidatedEnergyOutputs.sh
	@echo "Building consolidated datasets..."
	@$(LOCKFILENRSTD) $@.lock
	@sh script/updateConsolidatedEnergyOutputs.sh
	@/bin/rm -f $@.lock
ifneq ($(wildcard $(PWRVHIGHFLAG))$(NOPWR),)
# Maintain dependencies and force rebuild as needed when VHIGH or better.
CONSOLIDATIONDEPSMK=$(WORKTMP)/consolidationdeps.mk
-include $(CONSOLIDATIONDEPSMK)
pages:: $(CONSOLIDATIONDEPSMK)
# Dependencies should not change more often than monthly.
# Only include inputs whose name contains the current year.
# The generated fragment makes consolidated.flag depend on each std output
# CSV, plus on each '#input' (current-year only) and '#script' file named
# in those CSVs' header comment lines; built atomically via temp file.
${CONSOLIDATIONDEPSMK}: $(wildcard $(MONTHLY))
	@echo "Building $@"
	@T=$@.$$$$.tmp; D="$$(date '+%Y')"; (find data/consolidated/energy/std -name '*.csv' -print | awk '{print "data/.flags/consolidated.flag:",$$1}'; find data/consolidated/energy/std -name '*.csv' -exec cat {} + | egrep '^#' | sort -u | awk -F, '$$0 ~ /^#((input,.*'"$$D"')|(script,))/ {print "data/.flags/consolidated.flag:",substr($$2,2,length($$2)-2)}') > $$T && mv -f $$T $@
stats:: data/.flags/consolidated.flag
endif
ifneq ($(wildcard $(PWRHIGHFLAG))$(NOPWR),)
# Force at-least weekly updates when HIGH or better.
data/.flags/consolidated.flag: $(wildcard $(WEEKLY))
stats:: data/.flags/consolidated.flag
# Force a (relatively quick) deps rebuild if HIGH and the data has been rebuilt.
${CONSOLIDATIONDEPSMK}: data/.flags/consolidated.flag
endif


# Page dependencies.
energy-series-dataset.html: .work/inc/.energy-series-dataset.html.inc
m/energy-series-dataset.html: .work/inc/.energy-series-dataset.html.inc.m
$(OFFLINEDIR)/energy-series-dataset.html: .work/inc/.energy-series-dataset.html.inc.o
# Make need for includes global/explicit.
all:: script/gen-energy-series-dataset-inc
# Inherit include dependencies transitively.
# (The .m/.o variants are rebuilt whenever the main include is.)
.work/inc/.energy-series-dataset.html.inc.m: \
	.work/inc/.energy-series-dataset.html.inc
.work/inc/.energy-series-dataset.html.inc.o: \
	.work/inc/.energy-series-dataset.html.inc
# Consolidation script dependencies.
# Update when events list or extraction script change.
.work/inc/.energy-series-dataset.html.inc: data/consolidated/events.csv
.work/inc/.energy-series-dataset.html.inc: script/energyEventsTableHTML5.sh
# Update when the consolidated data has been.
.work/inc/.energy-series-dataset.html.inc: data/.flags/consolidated.flag
# Update based on a few of the key scripts.
.work/inc/.energy-series-dataset.html.inc: script/updateConsolidatedEnergyOutputs.sh
.work/inc/.energy-series-dataset.html.inc: script/sonifyConsolidatedEnergyOutputs.sh
.work/inc/.energy-series-dataset.html.inc: script/energyTableHTML.sh
.work/inc/.energy-series-dataset.html.inc: .work/script/recentEGUse.sh
.work/inc/.energy-series-dataset.html.inc: data/.private/WeeklyMeterReadings.csv
# System setting dependencies.
.work/inc/.energy-series-dataset.html.inc: script/energySystemsSettingsSummary.sh
.work/inc/.energy-series-dataset.html.inc: $(THISYEARELECTCO2)
.work/inc/.energy-series-dataset.html.inc: \
    data/consolidated/softparams.txt \
    data/heatBattery/16WWDHW/storage-charge-pref-by-hour-local-time.csv
# Rebuild!
# Main (www) include; fail-fast lock, temp file, atomic mv.
.work/inc/.energy-series-dataset.html.inc: \
		script/gen-energy-series-dataset-inc.sh
	@echo "Building $@"
	@$(LOCKFILENRSTD) $@.lock
	@/bin/rm -f $@.tmp
	@sh script/gen-energy-series-dataset-inc.sh -subpages > $@.tmp
	@/bin/mv $@.tmp $@
	@/bin/rm -f $@.lock $@.tmp
# Lite (-lite) include variant.
.work/inc/.energy-series-dataset.html.inc.m: \
		script/gen-energy-series-dataset-inc.sh
	@echo "Building $@"
	@$(LOCKFILENRSTD) $@.lock
	@/bin/rm -f $@.tmp
	@sh script/gen-energy-series-dataset-inc.sh -lite -subpages > $@.tmp
	@/bin/mv $@.tmp $@
	@/bin/rm -f $@.lock $@.tmp
# Offline (-offline) include variant.
.work/inc/.energy-series-dataset.html.inc.o: \
		script/gen-energy-series-dataset-inc.sh
	@echo "Building $@"
	@$(LOCKFILENRSTD) $@.lock
	@/bin/rm -f $@.tmp
	@sh script/gen-energy-series-dataset-inc.sh -offline -subpages > $@.tmp
	@/bin/mv $@.tmp $@
	@/bin/rm -f $@.lock $@.tmp

# Ensure that pages with auto-embedded energy stats are occasionally rebuilt.
# This allows such content to stay fresh.
# Includes all pages containing the (header) ID "Auto-Energy-Stats".
# This does not apply to 'lite' pages, which generally do not have this.
# Only do this rebuild, monthly or if critical files change, if VHIGH.
# Builds atomically without the need for locks.
AUTOENERGYSTATSMK=$(WORKTMP)/AutoEnergyStats.mk
.PHONY: AutoEnergyStats
AutoEnergyStats::
ifneq ($(wildcard $(PWRVHIGHFLAG))$(NOPWR),)
AutoEnergyStats:: $(AUTOENERGYSTATSMK)
pages:: $(AUTOENERGYSTATSMK)
# Regenerate the fragment: for each page grep finds marked, emit a deps
# line and an AutoEnergyStats:: trigger.  $$(...) in the awk output is
# escaped so expansion happens when the fragment is parsed, not here.
$(AUTOENERGYSTATSMK): makefile $(wildcard $(MONTHLY))
	@echo "Building $@"
	@T=$@.$$$$.tmp; \
	    egrep -l Auto-Energy-Stats $(PAGES) | awk '{print $$1": $$(wildcard $$(MONTHLY)) data/consolidated/events.csv script/energyStatsInsertHTML5.sh"; print "AutoEnergyStats:: "$$1;}' > $$T; \
	    chmod -f u+w $@; \
	    mv -f $$T $@; \
	    chmod og-rwx $@
-include $(AUTOENERGYSTATSMK)
endif


# Build all glossary special includes.
all:: .work/inc/.glossary.html.inc
GLOSSARYDB=db.glossary
# One include per site variant: -d desktop/www, -m lite, -o offline.
# The generator script handles temp files/locking itself.
.work/inc/.glossary.html.inc: .work/script/gen-glossary-inc $(GLOSSARYDB) \
     .work/script/wrap_subpage_complete.sh .work/script/wrap_subpage.sh
	@.work/script/gen-glossary-inc -d
.work/inc/.glossary.html.inc.m: .work/script/gen-glossary-inc $(GLOSSARYDB) \
     .work/script/wrap_subpage_complete.sh .work/script/wrap_subpage.sh
	@.work/script/gen-glossary-inc -m
.work/inc/.glossary.html.inc.o: .work/script/gen-glossary-inc $(GLOSSARYDB) \
     .work/script/wrap_subpage_complete.sh .work/script/wrap_subpage.sh
	@.work/script/gen-glossary-inc -o
# Make sure that the full generated HTML depends on the include,
# which in turn depends on the DB.
glossary.html: .work/inc/.glossary.html.inc
m/glossary.html: .work/inc/.glossary.html.inc.m
$(OFFLINEDIR)/glossary.html: .work/inc/.glossary.html.inc.o


#---- SITEMAPS AND PINGS
# Try to ensure that in a 'make all' sitemaps and pings
# happen after pages (and compressed variants) are built
# so that a crawler coming along in response to (say) a ping
# doesn't find the old page still in place.
# Do this by having their all:: triggers after those for (eg) scpages.
#---- SITEMAPS AND PINGS

# XML www-site sitemap with update times (for generated HTML files).
# Main site: core pages (at all static compression levels) + auto-updated.
# Used to use source file timestamp to ignore pure style changes.
# Replaces /index.html with / to avoid the SE dealing with the indirection.
# Eliminates explicit 'NOINDEX' pages.
# This takes time in page edit cycle to update; do for canonical pages only.
# Dependent on all versions of pages (ie SC) to avoid races.
# Note: not clear if indexable utility pages (eg about-us) should be included.
# Each page entry carries a lastmod date and an xhtml:link alternate
# pointing at the mobile (M) variant; entries are C-locale sorted.
sitemap.xml: makefile $(SCPAGES) $(OTHERSCPAGES) $(NONAMPFILELIST) \
		script/get_article_title
	@echo "Building $@"
	@$(LOCKFILENRSTD) $@.lock
	@/bin/rm -f $@.tmp
	@echo>$@.tmp '<?xml version="1.0" encoding="utf-8"?>'
	@echo>>$@.tmp '<urlset xmlns="http://www.sitemaps.org/schemas/sitemap/0.9" xmlns:xhtml="http://www.w3.org/1999/xhtml" xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance" xsi:schemaLocation="http://www.sitemaps.org/schemas/sitemap/0.9 http://www.sitemaps.org/schemas/sitemap/0.9/sitemap.xsd">'
	@for f in $(URLLISTEXT); do \
	    echo '<url><loc>'$(URLLISTPREFIX)$$f'</loc><changefreq>hourly</changefreq></url>'; \
	    done >>$@.tmp
	@for f in $(PAGES) $(OTHERPAGES); do \
	    if egrep -q '<!-- *NOINDEX *-->' .$$f; then continue; fi; \
	    n=$$f; if [ "index.html" = "$$n" ]; then n=""; fi; \
	    printf '<url>'; \
            timestampf=$$f; \
            updated="`date -r$$timestampf -u +'%Y-%m-%d'`"; \
            printf '<loc>%s</loc><lastmod>%s</lastmod>' "$(URLLISTPREFIX)$$n" "$$updated"; \
            printf '<xhtml:link rel="alternate" media="only screen and (max-width: 640px)" href="%s"/>' "$(MURLLISTPREFIX)$$n"; \
            echo '</url>'; \
	    done | (export LC_ALL=C; sort) >>$@.tmp
	@echo>>$@.tmp '</urlset>'
	@-chmod -f u+w $@
	@chmod -f 644 $@.tmp
	@/bin/mv $@.tmp $@
	@chmod a+r,a-wx $@
	@/bin/rm -f $@.lock $@.tmp
all:: sitemap.xml

# Atom 1.0 sitemap with most recent new/updated main pages.
# Should be fast to rebuild.
# Should be small and fast to download, especially when fetched gzipped.
# Should be few enough to comfortably 'open all in tabs'.
# Should be large enough not to miss updates, eg if crawled daily.
# Eliminates explicit 'NOINDEX' pages.
# Format: https://webmasters.googleblog.com/2014/10/best-practices-for-xml-sitemaps-rssatom.html
#     <?xml version="1.0" encoding="utf-8"?>
#     <feed xmlns="http://www.w3.org/2005/Atom">
#     <entry><link href="http://example.com/mypage"/><updated>2011-06-27T19:34:00+01:00</updated></entry>
#     </feed>
# Used to use source file timestamp to ignore pure style changes.
# Only for the main canonical pages.
# The most recent entry should be first.
# A tiny bit of (very compressable) semi-human-readable sugar is added.
# Does not cover index.html or any OTHERPAGES.
# Dependent on canonical core pages only (incl pre-compressed) for speed.
# DHD20181007: experimentally adding pgdescription as entry summary.
# Entries lead with <updated> so that 'sort -r | head -11' keeps the 11
# most recently updated pages, newest first.
feeds: sitemap.atom
sitemap.atom: makefile $(SCWPAGES) \
		script/get_article_title \
		.work/script/pgdescription
	@echo "Building $@"
	@$(LOCKFILENRSTD) $@.lock
	@/bin/rm -f $@.tmp
	@echo>$@.tmp '<?xml version="1.0" encoding="utf-8"?>'
	@echo>>$@.tmp '<feed xmlns="http://www.w3.org/2005/Atom">'
	@echo>>$@.tmp '<title>Earth Notes Basic Feed</title>'
	@echo>>$@.tmp '<updated>'`date -u +'%Y-%m-%dT%H:%M:%SZ'`'</updated>'
	@echo>>$@.tmp '<author><name>Damon Hart-Davis</name></author>'
	@echo>>$@.tmp '<id>$(URLLISTPREFIX)$@</id>'
	@echo>>$@.tmp '<link href="$(URLLISTPREFIX)$@" rel="self" type="application/atom+xml"/>'
	@for f in $(PAGES); do \
	    if egrep -q '<!-- *NOINDEX *-->' .$$f; then continue; fi; \
	    n=$$f; \
            if [ "index.html" = "$$n" ]; then continue; fi; \
	    title="`sh script/get_article_title.sh -noent < .$$f`"; \
	    desc="`.work/script/pgdescription <.$$f`"; \
            timestampf=$$f; \
            updated="`date -r$$timestampf -u +'%Y-%m-%dT%H:%M:%SZ'`"; \
	    printf '<entry><updated>%s</updated><title>%s</title><summary>%s</summary><link href="%s"/><id>%s</id></entry>' "$$updated" "$$title" "$$desc" "$(URLLISTPREFIX)$$n" "$(URLLISTPREFIX)$$n"; \
	    echo ''; \
	    done | sort -r | head -11 >>$@.tmp
	@echo>>$@.tmp '</feed>'
	@-chmod -f u+w $@
	@chmod -f 644 $@.tmp
	@/bin/mv $@.tmp $@
	@chmod a+r,a-wx $@
	@/bin/rm -f $@.lock $@.tmp
all:: sitemap.atom

# Atom 1.0 feed of most-recently-updated social-media-friendly pages.
# Such articles are tagged with EASYREAD.
# Sorted so that the most-recently-updated items are first.
# Should be few enough to comfortably 'open all in tabs'.
# Uses HTML source files' timestamp to ignore pure style changes.
# Only for the canonical pages.
# Attempts to generate a human-friendly feed with titles, etc, but quickly.
# Depends on the makefile-defined list of pages, and page timestamps/content.
# Dependent on canonical core pages only (incl pre-compressed) for speed.
# Easyread feed can usefully be updated ~daily.
# Only built when power is HIGH-or-better and the grid is not 'red'.
ifneq ($(wildcard $(PWRHIGHFLAG))$(NOPWR),)
ifeq ($(wildcard $(GRIDRED1DFLAG)),)
feeds: rss/easyread.atom
rss/easyread.atom: $(EASYREAD) \
   $(PAGES) $(SCWPAGES) | ${PAGESVALID} $(EASYREADBUILT)
	@echo "Building $@"
	@$(LOCKFILENRSTD) $@.lock
	@/bin/rm -f $@.tmp
	@echo>$@.tmp '<?xml version="1.0" encoding="utf-8"?>'
	@echo>>$@.tmp '<feed xmlns="http://www.w3.org/2005/Atom">'
	@echo>>$@.tmp '<title>Easyread Update Feed</title>'
	@echo>>$@.tmp '<updated>'`date -u +'%Y-%m-%dT%H:%M:%SZ'`'</updated>'
	@echo>>$@.tmp '<author><name>Damon Hart-Davis</name></author>'
	@echo>>$@.tmp '<id>$(URLLISTPREFIX)$@</id>'
	@echo>>$@.tmp '<link href="$(URLLISTPREFIX)$@" rel="self" type="application/atom+xml"/>'
	@for f in `sed < $(EASYREAD) -e 's/^[.]//'`; do \
	    if egrep -q '<!-- *NOINDEX *-->' .$$f; then continue; fi; \
	    title="`sh script/get_article_title.sh -noent < .$$f`"; \
	    n=$$f; if [ "index.html" = "$$n" ]; then n=""; fi; \
	    echo '<entry><updated>'`date -r.$$f -u +'%Y-%m-%dT%H:%M:%SZ'`'</updated><title>'$$title'</title><link href="'$(URLLISTPREFIX)$$n'"/><id>'$(URLLISTPREFIX)$$n'</id></entry>'; \
	    done | sort -r | head -25 >>$@.tmp
	@echo>>$@.tmp '</feed>'
	@-chmod -f u+w $@
	@chmod -f 644 $@.tmp
	@/bin/mv $@.tmp $@
	@chmod a+r,a-wx $@
	@/bin/rm -f $@.lock $@.tmp
all:: rss/easyread.atom
endif
endif

# Atom 1.0 feed of most-recently updated data files.
# Could use time window >1M to catch all files updated/summarised each month.
# However, Googlebot seems to visit ~daily unprompted, so shorter window OK.
# Good to have (gzip -6) result fit in a single (1460-MTU) frame.
# Rebuild needs to be force-driven by something that changes approx daily.
# Excludes any files/directories starting with a dot (private).
# Excludes any files with interesting chars (eg whitespace).
# Sorted so that the most-recently-updated items are first.
# Only built when power is HIGH-or-better and the grid is not 'red'.
ifneq ($(wildcard $(PWRHIGHFLAG))$(NOPWR),)
ifeq ($(wildcard $(GRIDRED1DFLAG)),)
feeds: rss/datafeed.atom
rss/datafeed.atom: $(wildcard $(DAILY)) makefile data
	@echo "Building $@"
	@$(LOCKFILENRSTD) $@.lock
	@/bin/rm -f $@.tmp
	@echo>$@.tmp '<?xml version="1.0" encoding="utf-8"?>'
	@echo>>$@.tmp '<feed xmlns="http://www.w3.org/2005/Atom">'
	@echo>>$@.tmp '<title>Earth Notes Data Feed</title>'
	@echo>>$@.tmp '<updated>'`date -u +'%Y-%m-%dT%H:%M:%SZ'`'</updated>'
	@echo>>$@.tmp '<author><name>Damon Hart-Davis</name></author>'
	@echo>>$@.tmp '<id>$(URLLISTPREFIX)$@</id>'
	@echo>>$@.tmp '<link href="$(URLLISTPREFIX)$@" rel="self" type="application/atom+xml"/>'
	@for f in `find data -not \( -name '.?*' -prune \) -a -type f -mtime -8 -print | egrep '^[-_/.a-zA-Z0-9]*$$'`; do \
	    echo '<entry><updated>'`date -r$$f -u +'%Y-%m-%dT%H:%M:%SZ'`'</updated><title>'$$f'</title><link href="'$(URLLISTPREFIX)$$f'"/><id>'$(URLLISTPREFIX)$$f'</id></entry>'; \
	    done | sort -r >>$@.tmp
	@echo>>$@.tmp '</feed>'
	@-chmod -f u+w $@
	@chmod -f 644 $@.tmp
	@/bin/mv $@.tmp $@
	@chmod a+r,a-wx $@
	@/bin/rm -f $@.lock $@.tmp
all:: rss/datafeed.atom
endif
endif

# Ping key search engines with sitemaps.
# eg see:
#     https://www.bing.com/webmaster/help/how-to-submit-sitemaps-82a15bd4
SEARCHENGINEPINGS=\
    https://www.google.com/webmasters/sitemaps/ping?sitemap=

# DHD20220301: Yandex removed temporarily due to world events.
#    https://webmaster.yandex.com/ping?sitemap=
# DHD202212: Bing's connection seems dead as of 2021-12 with 410 Gone:
#    https://www.bing.com/ping?sitemap=

# Ping feedburner with Atom feed.
FEEDBURNERPINGS=\
    https://feedburner.google.com/fb/a/pingSubmit?bloglink=http%3A%2F%2Ffeeds.feedburner.com%2Fearthnotesbasicfeed

# DHD20170804: updated Bing ping URL from:
#     http://www.bing.com/webmaster/ping.aspx?siteMap=
# to:
#     http://www.bing.com/ping?sitemap=
# because the latter is specified in the Bing docs.

# DHD20170731: updated yandex ping URL from:
#     http://blogs.yandex.ru/pings/?status=success&url=
# to:
#     https://yandex.ru/blogs/pings?status=success&url=
# because of observed 301 redirect from former to latter.


# Selectively rebuild just stats pages with live data...
stats:: graphs \
    _off-grid-stats.html \
    _live-grid-tie-stats.html
all:: stats


# Files for 16WW OpenTRV device IDs and names.
# Each *.ID.txt file pairs with a *.name.txt short-name file; the lists
# are used below for graph column headers and filterJSON ID filters.
OTDeviceIDs= .work/16Devices.ID.txt
OTDeviceShortnames= .work/16Devices.name.txt
OTSensorIDs= .work/16Sensors.ID.txt
OTSensorShortnames= .work/16Sensors.name.txt
OTValveIDs= .work/16Valves.ID.txt
OTValveShortnames= .work/16Valves.name.txt
OTIDs= \
    $(OTDeviceIDs) $(OTDeviceShortnames) \
    $(OTSensorIDs) $(OTSensorShortnames) \
    $(OTValveIDs) $(OTValveShortnames)


# Source live grid-tie PV log directory.
LIVEGTPVDIR=/var/log/SunnyBeam
# File touched for each update.
LIVEGTLASTDATA=${LIVEGTPVDIR}/LASTDATA.flag
 
# Live grid-tie stats.
# Don't force the calendar-month graph to update here.
# ($(wildcard ...) so a missing flag file does not break the build.)
_live-grid-tie-stats.html: script/gridtie_PV_power \
			  out/hourly/gridTiePower.png \
			  $(wildcard ${LIVEGTLASTDATA})
	@echo "Building live grid-tie stats page $@  ...  `cat $(LIVEGTLASTDATA)`"
	@$(LOCKFILENRSTD) $@.lock
	@/bin/rm -f $@.tmp
	@script/gridtie_PV_power > $@.tmp
	@-chmod -f u+w $@
	@chmod -f 644 $@.tmp
	@/bin/mv $@.tmp $@
	@chmod a+r,a-wx $@
	@/bin/rm -f $@.lock

# Only redraw the grid-tie PV graph if there is new sample data.
# NOTE(review): assumes graphing/gnuplotgridtiegen.txt writes its PNG
# output to $@.tmp (out/hourly/gridTiePower.png.tmp), which the optipng
# and mv steps then operate on — confirm in the gnuplot script.
out/hourly/gridTiePower.png: graphing/gridtiegen \
               graphing/gnuplotgridtiegen.txt \
	       $(wildcard ${LIVEGTLASTDATA})
	@echo "Building grid-tie PV generation graph $@"
	@$(LOCKFILENRSTD) $@.lock
	@graphing/gridtiegen > .work/gridtiegen.dat
	@gnuplot graphing/gnuplotgridtiegen.txt
	@if [ -x $(OPTIPNG) ]; then $(OPTIPNG) -q -o1 $@.tmp; fi
	@-chmod -f u+w $@
	@chmod -f 644 $@.tmp
	@/bin/mv $@.tmp $@
	@chmod a+r,a-wx $@
	@/bin/rm -f .work/gridtiegen.dat
	@script/build_alternate_format_compact_image $@ $@.webp
	@/bin/rm -f $@.lock

# Calendar-month grid-tie PV graph.
# Relatively expensive.
# NOTE(review): assumes graphing/gnuplotgridtiegencm.txt writes its PNG
# output to $@.tmp, which optipng and mv then operate on — confirm there.
out/daily/gridTiePowercm.png: \
	       .work/gatherLastMonth \
               graphing/gnuplotgridtiegencm.txt \
	       $(wildcard ${LIVEGTLASTDATA})
	@echo "Building grid-tie monthly PV generation graph $@"
	@$(LOCKFILENRSLOW) $@.lock
	@.work/gatherLastMonth ${LIVEGTPVDIR} > .work/gridTiePower-cm.dat
	@gnuplot graphing/gnuplotgridtiegencm.txt
	@if [ -x $(OPTIPNG) ]; then $(OPTIPNG) -q $@.tmp; fi
	@-chmod -f u+w $@
	@chmod -f 644 $@.tmp
	@/bin/mv $@.tmp $@
	@chmod a+r,a-wx $@
	@/bin/rm -f .work/gridTiePower-cm.dat
	@/bin/rm -f $@.lock
	
# Ugly bit of awk to propagate prev values down from filterJSON when "-".
# For each whitespace-separated column, a "-" (missing sample) is replaced
# with that column's most recent non-"-" value ("-" if none seen yet),
# so plots can draw continuous traces across missing samples.
PROPDOWN=awk '{res="";for(i=1; i<=NF; ++i) { if($$i=="-"){if(prev[i]==""){prev[i]="-";} res=res prev[i];}else{res=res $$i;prev[i]=$$i;} res=res " ";} print res;}'

# Prepare a list of the most recent JSON 'remote' stats files.
# Update when the containing directory does.
STATSREMOTEDIR=data/OpenTRV/.private/stats/remote
# Last 8-ish JSON remote stats files, listed in date order.
STATSREMOTELAST8=${STATSREMOTEDIR}/last.8.txt
# Update quietly, atomically, without a lock.
# FIX: search ${STATSREMOTEDIR} rather than a hand-duplicated literal
# path, so the rule cannot silently drift from the variable definition.
# FIX: glob widened from '202?????.json' to '20??????.json' (matching the
# sibling sensor-power rule) so this does not stop matching after 2029;
# -mtime -8 already restricts the set to recent files.
${STATSREMOTELAST8}: ${STATSREMOTEDIR}
	@-chmod -f u+w $@
	@find ${STATSREMOTEDIR}/ -name '20??????.json' -mtime -8 | sort | tail -8 > $@.$$$$.tmp && \
		chmod a+r $@.$$$$.tmp && mv $@.$$$$.tmp $@


# Graph recent OpenTRV sensor battery voltages.
# Pipeline: last ~30 days of raw JSON -> filterJSON (battery cV per sensor
# ID) -> PROPDOWN (fill missing samples) -> gnuplot multi-sensor plot.
# The gnuplot script writes $@.dat.tmp.png, which is then optimised and
# moved into place atomically, plus a .webp alternate.
out/monthly/16WWSensorPower.png: $(OTSensorIDs) $(OTSensorShortnames) \
		OpenTRV/scripts/filterJSON \
		graphing/gnuplot16WWGenericMultisensorPlot.txt \
    		$(wildcard data/OpenTRV/.private/stats/remote/updated.JSON.flag)
	@echo "Building sensor power stats graph $@"
	@$(LOCKFILENRSLOW) $@.lock
	@/bin/rm -f $@.dat.tmp $@.dat.tmp.png $@.tmp
	@echo "- `cat $(OTSensorShortnames)`" > $@.dat.tmp
	@cat `find data/OpenTRV/.private/stats/remote/ -name '20??????.json' -mtime -30 | sort | tail -30` | \
	    OpenTRV/scripts/filterJSON -multiID 'B|cV' `cat $(OTSensorIDs)` | \
	    $(PROPDOWN) >> $@.dat.tmp
	@wc -l $@.dat.tmp; head -1 $@.dat.tmp; tail -1 $@.dat.tmp
	@gnuplot -e "filename='$@.dat.tmp'" -e "title='Battery'" \
	    -e "unit='V'" -e scale=0.01 -e "loff='0.25'" \
	    graphing/gnuplot16WWGenericMultisensorPlot.txt
	@mv $@.dat.tmp.png $@.tmp
	@if [ -x $(OPTIPNG) ]; then $(OPTIPNG) -q $@.tmp; fi
	@-chmod -f u+w $@
	@chmod -f 644 $@.tmp
	@/bin/mv $@.tmp $@
	@chmod a+r,a-wx $@
	@/bin/rm -f $@.dat.tmp $@.tmp
	@script/build_alternate_format_compact_image $@ $@.webp
	@/bin/rm -f $@.lock

# List recent OpenTRV sensor battery voltages.
# Daily should be enough to catch sensors about to fail.
# Energy harvesting may be a different kettle of fish.
out/daily/OpenTRV/16WWSensorPower.html: \
		OpenTRV/scripts/filterJSON \
		OpenTRV/scripts/capture16WWSensorPower \
		out/monthly/16WWSensorPower.png \
    		$(wildcard data/OpenTRV/.private/stats/remote/updated.JSON.flag)
	@echo "Building sensor power stats summary $@"
	@mkdir -p $(@D) && chmod a+rx $(@D)
	@$(LOCKFILENRSLOW) $@.lock
	@OpenTRV/scripts/capture16WWSensorPower data/OpenTRV/.private/stats/remote > $@.tmp
	@-chmod -f u+w $@
	@chmod -f 644 $@.tmp
	@/bin/mv $@.tmp $@
	@chmod a+r,a-wx $@
	@/bin/rm -f $@.lock

# Graph recent RH% from the last-8-days stats file list.
# 'H|%' selects the relative-humidity stat; PROPDOWN fills "-" gaps.
# FIX: the recipe reads the *sensor* ID/shortname lists, so those are now
# the prerequisites (previously $(OTValveIDs)/$(OTValveShortnames), which
# meant edits to the sensor lists did not trigger a rebuild).
out/weekly/16WWRH.png: $(OTSensorIDs) $(OTSensorShortnames) \
		OpenTRV/scripts/filterJSON \
		graphing/gnuplot16WWGenericMultisensorPlot.txt \
                ${STATSREMOTELAST8}
	@echo "Building 16WW RH% graph $@"
	@$(LOCKFILENRSLOW) $@.lock
	@/bin/rm -f $@.dat.tmp $@.dat.tmp.png $@.tmp
	@echo "- $$(cat $(OTSensorShortnames))" > $@.dat.tmp
	@cat /dev/null $$(cat ${STATSREMOTELAST8}) | \
	    OpenTRV/scripts/filterJSON -multiID 'H|%' $$(cat $(OTSensorIDs)) | \
	    $(PROPDOWN) >> $@.dat.tmp
	@wc -l $@.dat.tmp; head -1 $@.dat.tmp; tail -1 $@.dat.tmp
	@gnuplot -e "filename='$@.dat.tmp'" -e "title='Rel Humidity'" \
	    -e "unit='%%'" -e "loff='0.1'" \
	    -e "ncol=$$(cat $(OTSensorShortnames) | wc -w)" \
	    graphing/gnuplot16WWGenericMultisensorPlot.txt
	@mv $@.dat.tmp.png $@.tmp
	@if [ -x $(OPTIPNG) ]; then $(OPTIPNG) -q $@.tmp; fi
	@-chmod -f u+w $@
	@chmod -f 644 $@.tmp
	@/bin/mv $@.tmp $@
	@chmod a+r,a-wx $@
	@/bin/rm -f $@.dat.tmp $@.tmp
	@/bin/rm -f $@.lock

# Hook the boiler-control graphs into 'stats' only when battery is not LOW
# (PWRLOWFLAG absent); evaluated once at Makefile parse time.
ifeq ($(wildcard $(PWRLOWFLAG)),)
# Do not update the boiler control graph if battery state is LOW.
stats:: out/hourly/16WWbc.png
stats:: out/weekly/16WWbc.png
endif
# Graph recent boiler controller output/state.
# Update frequently driven by updated.JSON.flag
# Uses only the two most recent stats files (tail -2 of the last-8 list)
# to keep this frequent rebuild cheap; 'b' is the boiler-call stat.
# Pattern: slow lock, build $@.dat.tmp, plot, optional optipng,
# atomic install via mv, permission fix, unlock.
out/hourly/16WWbc.png: $(OTDeviceIDs) $(OTDeviceShortnames) \
		OpenTRV/scripts/filterJSON \
		graphing/gnuplot16WWGenericMultisensorPlot.txt \
                ${STATSREMOTELAST8} \
    		$(wildcard data/OpenTRV/.private/stats/remote/updated.JSON.flag)
	@echo "Building 16WW boiler control graph $@"
	@$(LOCKFILENRSLOW) $@.lock
	@/bin/rm -f $@.dat.tmp $@.dat.tmp.png $@.tmp
	@echo "- $$(cat $(OTDeviceShortnames))" > $@.dat.tmp
	@cat /dev/null $$(tail -2 ${STATSREMOTELAST8}) | \
	    OpenTRV/scripts/filterJSON -multiID 'b' $$(cat $(OTDeviceIDs)) | \
	    $(PROPDOWN) >> $@.dat.tmp
	@wc -l $@.dat.tmp; head -1 $@.dat.tmp; tail -1 $@.dat.tmp
	@gnuplot -e "filename='$@.dat.tmp'" -e "title='Boiler Call'" \
	    -e "unit=''" -e "loff='0.1'" \
	    -e "ncol=$$(cat $(OTDeviceShortnames) | wc -w)" \
	    graphing/gnuplot16WWGenericMultisensorPlot.txt
	@mv $@.dat.tmp.png $@.tmp
	@if [ -x $(OPTIPNG) ]; then $(OPTIPNG) -q $@.tmp; fi
	@-chmod -f u+w $@
	@chmod -f 644 $@.tmp
	@/bin/mv $@.tmp $@
	@chmod a+r,a-wx $@
	@/bin/rm -f $@.dat.tmp $@.tmp
	@/bin/rm -f $@.lock
# Update about daily driven by ${STATSREMOTELAST8}.
# Same as the hourly boiler-control rule but plots all ~8 days of data
# (cat of the whole last-8 list rather than just the newest two files).
out/weekly/16WWbc.png: $(OTDeviceIDs) $(OTDeviceShortnames) \
		OpenTRV/scripts/filterJSON \
		graphing/gnuplot16WWGenericMultisensorPlot.txt \
                ${STATSREMOTELAST8}
	@echo "Building 16WW boiler control graph $@"
	@$(LOCKFILENRSLOW) $@.lock
	@/bin/rm -f $@.dat.tmp $@.dat.tmp.png $@.tmp
	@echo "- $$(cat $(OTDeviceShortnames))" > $@.dat.tmp
	@cat /dev/null $$(cat ${STATSREMOTELAST8}) | \
	    OpenTRV/scripts/filterJSON -multiID 'b' $$(cat $(OTDeviceIDs)) | \
	    $(PROPDOWN) >> $@.dat.tmp
	@wc -l $@.dat.tmp; head -1 $@.dat.tmp; tail -1 $@.dat.tmp
	@gnuplot -e "filename='$@.dat.tmp'" -e "title='Boiler Call'" \
	    -e "unit=''" -e "loff='0.1'" \
	    -e "ncol=$$(cat $(OTDeviceShortnames) | wc -w)" \
	    graphing/gnuplot16WWGenericMultisensorPlot.txt
	@mv $@.dat.tmp.png $@.tmp
	@if [ -x $(OPTIPNG) ]; then $(OPTIPNG) -q $@.tmp; fi
	@-chmod -f u+w $@
	@chmod -f 644 $@.tmp
	@/bin/mv $@.tmp $@
	@chmod a+r,a-wx $@
	@/bin/rm -f $@.dat.tmp $@.tmp
	@/bin/rm -f $@.lock

# Graph some more sensitive data manually: originally for valve monitoring 2018Q4!
# out/monthly/16WWal.png
# out/weekly/16WWtT.png

# Valve-monitoring graph bundle, plus a convenience driver that rebuilds
# it in parallel (keep-going so one failed graph does not stop the rest).
.PHONY: graphs-vm Pgvm
graphs-vm:: out/hourly/16WWvpc.png out/hourly/16WWmultisensortempL.png out/hourly/16WWbc.png
Pgvm:
	$(MAKE) -j4 -k graphs-vm

# Graph some more sensitive data manually.
# Only run infrequently / manually for privacy!
.PHONY: graphs-manual

# Plot the most recent Local Bytes monitoring plug data.
# Picks the newest LBplug-YYYY-MM-DD.log, extracts power samples with the
# powerExtractZ helper, and plots to SVG.  The test -s guard skips plotting
# (and fails the && chain quietly) if the newest log is empty.
# NOTE(review): output is written directly to $@ by gnuplot, not via a
# temp+mv, so a failed plot can leave a partial file.
graphs-manual:: out/tmp/LocalBytes-monitoring-plug.svg
out/tmp/LocalBytes-monitoring-plug.svg: data/.private/tmp \
		graphing/gnuplot-LB-power-plot-v2p1.txt \
		graphing/gnuplot-LB-power-plot-v2p1-powerExtractZ.sh
	@echo "Building Local Bytes recent data as $@"
	@$(LOCKFILENRSTD) $@.lock
	@/bin/rm -f $@.dat $@
	@F="`ls -1 data/.private/tmp/LBplug-20??-??-??.log | tail -1`" && \
	    test -s "$$F" && echo Input data $$F && \
	    sh graphing/gnuplot-LB-power-plot-v2p1-powerExtractZ.sh < $$F > $@.dat && \
		gnuplot -e "infilename='$@.dat'" -e "outfilename='$@'" graphing/gnuplot-LB-power-plot-v2p1.txt
	@chmod a+r $@.dat $@
	@/bin/rm -f $@.lock


# Graph recent valve positions.
# Frequent/cheap variant: only the two most recent stats files are read
# ('v|%' = valve open percentage).  Standard lock/temp/atomic-mv pattern.
graphs-manual:: out/hourly/16WWvpc.png
out/hourly/16WWvpc.png: $(OTValveIDs) $(OTValveShortnames) \
		OpenTRV/scripts/filterJSON \
		graphing/gnuplot16WWGenericMultisensorPlot.txt \
                ${STATSREMOTELAST8} \
    		$(wildcard data/OpenTRV/.private/stats/remote/updated.JSON.flag)
	@echo "Building 16WW valve positions $@"
	@$(LOCKFILENRSLOW) $@.lock
	@/bin/rm -f $@.dat.tmp $@.dat.tmp.png $@.tmp
	@echo "- $$(cat $(OTValveShortnames))" > $@.dat.tmp
	@cat /dev/null $$(tail -2 ${STATSREMOTELAST8}) | \
	    OpenTRV/scripts/filterJSON -multiID 'v|%' $$(cat $(OTValveIDs)) | \
	    $(PROPDOWN) >> $@.dat.tmp
	@wc -l $@.dat.tmp; head -1 $@.dat.tmp; tail -1 $@.dat.tmp
	@gnuplot -e "filename='$@.dat.tmp'" -e "title='Valve Open'" \
	    -e "unit='%%'" -e "loff='0.5'" \
	    -e "ncol=`cat $(OTValveShortnames) | wc -w`" \
	    graphing/gnuplot16WWGenericMultisensorPlot.txt
	@mv $@.dat.tmp.png $@.tmp
	@if [ -x $(OPTIPNG) ]; then $(OPTIPNG) -q $@.tmp; fi
	@-chmod -f u+w $@
	@chmod -f 644 $@.tmp
	@/bin/mv $@.tmp $@
	@chmod a+r,a-wx $@
	@/bin/rm -f $@.dat.tmp $@.tmp
	@/bin/rm -f $@.lock

# Monthly valve-position graph; the same recipe also installs the plotted
# data as out/monthly/16WWvpc.dat (hence the .dat: .png dependency with no
# recipe of its own).
# NOTE(review): producing two outputs from one recipe is not safe under
# `make -j` in general; here the .dat is a best-effort side product.
graphs-manual:: out/monthly/16WWvpc.png
out/monthly/16WWvpc.dat: out/monthly/16WWvpc.png
out/monthly/16WWvpc.png: $(OTValveIDs) $(OTValveShortnames) \
		OpenTRV/scripts/filterJSON \
		graphing/gnuplot16WWGenericMultisensorPlot.txt \
    		$(wildcard data/OpenTRV/.private/stats/remote/updated.JSON.flag)
	@echo "Building 16WW valve positions $@"
	@$(LOCKFILENRSLOW) $@.lock
	@/bin/rm -f $@.dat.tmp $@.dat.tmp.png $@.tmp
	@echo "- `cat $(OTValveShortnames)`" > $@.dat.tmp
	@cat `find data/OpenTRV/.private/stats/remote/ -name '20??????.json' -mtime -32 | sort | tail -31` | \
	    OpenTRV/scripts/filterJSON -multiID 'v|%' `cat $(OTValveIDs)` | \
	    $(PROPDOWN) >> $@.dat.tmp
	@wc -l $@.dat.tmp; head -1 $@.dat.tmp; tail -1 $@.dat.tmp
	@gnuplot -e "filename='$@.dat.tmp'" -e "title='Valve Open'" \
	    -e "unit='%%'" -e "loff='0.5'" \
	    -e "ncol=`cat $(OTValveShortnames) | wc -w`" \
	    graphing/gnuplot16WWGenericMultisensorPlot.txt
	@mv $@.dat.tmp.png $@.tmp
	@if [ -x $(OPTIPNG) ]; then $(OPTIPNG) -q $@.tmp; fi
	@-chmod -f u+w $@ out/monthly/16WWvpc.dat
	@-chmod -f 644 $@.tmp out/monthly/16WWvpc.dat
	@/bin/mv $@.tmp $@
	@/bin/mv $@.dat.tmp out/monthly/16WWvpc.dat
	@chmod a+r,a-wx $@ out/monthly/16WWvpc.dat
	@/bin/rm -f $@.dat.tmp $@.tmp
	@script/build_alternate_format_compact_image $@ $@.webp
	@/bin/rm -f $@.lock


# Graph recent valve positions.
# Only run infrequently / manually for privacy!
# Weekly variant: last ~8 days of stats files via find/-mtime -8.
out/weekly/16WWvpc.png: $(OTValveIDs) $(OTValveShortnames) \
		OpenTRV/scripts/filterJSON \
		graphing/gnuplot16WWGenericMultisensorPlot.txt \
    		$(wildcard data/OpenTRV/.private/stats/remote/updated.JSON.flag)
	@echo "Building 16WW valve positions $@"
	@$(LOCKFILENRSLOW) $@.lock
	@/bin/rm -f $@.dat.tmp $@.dat.tmp.png $@.tmp
	@echo "- `cat $(OTValveShortnames)`" > $@.dat.tmp
	@cat `find data/OpenTRV/.private/stats/remote/ -name '20??????.json' -mtime -8 | sort | tail -8` | \
	    OpenTRV/scripts/filterJSON -multiID 'v|%' `cat $(OTValveIDs)` | \
	    $(PROPDOWN) >> $@.dat.tmp
	@wc -l $@.dat.tmp; head -1 $@.dat.tmp; tail -1 $@.dat.tmp
	@gnuplot -e "filename='$@.dat.tmp'" -e "title='Valve Open'" \
	    -e "unit='%%'" -e "loff='0.5'" \
	    -e "ncol=`cat $(OTValveShortnames) | wc -w`" \
	    graphing/gnuplot16WWGenericMultisensorPlot.txt
	@mv $@.dat.tmp.png $@.tmp
	@if [ -x $(OPTIPNG) ]; then $(OPTIPNG) -q $@.tmp; fi
	@-chmod -f u+w $@
	@chmod -f 644 $@.tmp
	@/bin/mv $@.tmp $@
	@chmod a+r,a-wx $@
	@/bin/rm -f $@.dat.tmp $@.tmp
	@/bin/rm -f $@.lock

# Graph recent valve cumulative movement ('vC|%' stat).
# Only run infrequently / manually for privacy!
# NOTE(review): the plot title is 'Valve Open' though this graphs
# cumulative movement — looks like a copy-paste from the vpc rule; confirm
# before changing the rendered title.
graphs-manual:: out/weekly/16WWvpcC.png
out/weekly/16WWvpcC.png: $(OTValveIDs) $(OTValveShortnames) \
		OpenTRV/scripts/filterJSON \
		graphing/gnuplot16WWGenericMultisensorPlot.txt \
    		$(wildcard data/OpenTRV/.private/stats/remote/updated.JSON.flag)
	@echo "Building 16WW valve cumulative movement $@"
	@$(LOCKFILENRSLOW) $@.lock
	@/bin/rm -f $@.dat.tmp $@.dat.tmp.png $@.tmp
	@echo "- `cat $(OTValveShortnames)`" > $@.dat.tmp
	@cat `find data/OpenTRV/.private/stats/remote/ -name '20??????.json' -mtime -8 | sort | tail -8` | \
	    OpenTRV/scripts/filterJSON -multiID 'vC|%' `cat $(OTValveIDs)` | \
	    $(PROPDOWN) >> $@.dat.tmp
	@wc -l $@.dat.tmp; head -1 $@.dat.tmp; tail -1 $@.dat.tmp
	@gnuplot -e "filename='$@.dat.tmp'" -e "title='Valve Open'" \
	    -e "unit='%%'" \
	    -e "ncol=`cat $(OTValveShortnames) | wc -w`" \
	    graphing/gnuplot16WWGenericMultisensorPlot.txt
	@mv $@.dat.tmp.png $@.tmp
	@if [ -x $(OPTIPNG) ]; then $(OPTIPNG) -q $@.tmp; fi
	@-chmod -f u+w $@
	@chmod -f 644 $@.tmp
	@/bin/mv $@.tmp $@
	@chmod a+r,a-wx $@
	@/bin/rm -f $@.dat.tmp $@.tmp
	@/bin/rm -f $@.lock

# Graph recent target temperatures ('tT|C' stat, in Celsius).
# Only run infrequently / manually for privacy!
graphs-manual:: out/weekly/16WWtT.png
out/weekly/16WWtT.png: $(OTValveIDs) $(OTValveShortnames) \
		OpenTRV/scripts/filterJSON \
		graphing/gnuplot16WWGenericMultisensorPlot.txt \
    		$(wildcard data/OpenTRV/.private/stats/remote/updated.JSON.flag)
	@echo "Building 16WW valve target temperature graph $@"
	@$(LOCKFILENRSLOW) $@.lock
	@/bin/rm -f $@.dat.tmp $@.dat.tmp.png $@.tmp
	@echo "- `cat $(OTValveShortnames)`" > $@.dat.tmp
	@cat `find data/OpenTRV/.private/stats/remote/ -name '20??????.json' -mtime -8 | sort | tail -8` | \
	    OpenTRV/scripts/filterJSON -multiID 'tT|C' `cat $(OTValveIDs)` | \
	    $(PROPDOWN) >> $@.dat.tmp
	@wc -l $@.dat.tmp; head -1 $@.dat.tmp; tail -1 $@.dat.tmp
	@gnuplot -e "filename='$@.dat.tmp'" -e "title='Temp Target'" \
	    -e "unit='C'" -e "loff='0.05'" \
	    -e "ncol=`cat $(OTValveShortnames) | wc -w`" \
	    graphing/gnuplot16WWGenericMultisensorPlot.txt
	@mv $@.dat.tmp.png $@.tmp
	@if [ -x $(OPTIPNG) ]; then $(OPTIPNG) -q $@.tmp; fi
	@-chmod -f u+w $@
	@chmod -f 644 $@.tmp
	@/bin/mv $@.tmp $@
	@chmod a+r,a-wx $@
	@/bin/rm -f $@.dat.tmp $@.tmp
	@/bin/rm -f $@.lock

# Graph recent setback temperatures ('tS|C' stat, in Celsius).
# Only run infrequently / manually for privacy!
graphs-manual:: out/weekly/16WWtS.png
out/weekly/16WWtS.png: $(OTValveIDs) $(OTValveShortnames) \
		OpenTRV/scripts/filterJSON \
		graphing/gnuplot16WWGenericMultisensorPlot.txt \
    		$(wildcard data/OpenTRV/.private/stats/remote/updated.JSON.flag)
	@echo "Building 16WW valve setback temperature graph $@"
	@$(LOCKFILENRSLOW) $@.lock
	@/bin/rm -f $@.dat.tmp $@.dat.tmp.png $@.tmp
	@echo "- `cat $(OTValveShortnames)`" > $@.dat.tmp
	@cat `find data/OpenTRV/.private/stats/remote/ -name '20??????.json' -mtime -8 | sort | tail -8` | \
	    OpenTRV/scripts/filterJSON -multiID 'tS|C' `cat $(OTValveIDs)` | \
	    $(PROPDOWN) >> $@.dat.tmp
	@wc -l $@.dat.tmp; head -1 $@.dat.tmp; tail -1 $@.dat.tmp
	@gnuplot -e "filename='$@.dat.tmp'" -e "title='Temp Setback'" \
	    -e "unit='C'" -e "loff='0.02'" \
	    -e "ncol=`cat $(OTValveShortnames) | wc -w`" \
	    graphing/gnuplot16WWGenericMultisensorPlot.txt
	@mv $@.dat.tmp.png $@.tmp
	@if [ -x $(OPTIPNG) ]; then $(OPTIPNG) -q $@.tmp; fi
	@-chmod -f u+w $@
	@chmod -f 644 $@.tmp
	@/bin/mv $@.tmp $@
	@chmod a+r,a-wx $@
	@/bin/rm -f $@.dat.tmp $@.tmp
	@/bin/rm -f $@.lock

# Graph recent vacancy ('vac|h' stat, hours vacant).
# Only run infrequently / manually for privacy!
# Also emits a WebP alternate via build_alternate_format_compact_image.
graphs-manual:: out/monthly/16WWvac.png
out/monthly/16WWvac.png: $(OTSensorIDs) $(OTSensorShortnames) \
		OpenTRV/scripts/filterJSON \
		graphing/gnuplot16WWGenericMultisensorPlot.txt \
    		$(wildcard data/OpenTRV/.private/stats/remote/updated.JSON.flag)
	@echo "Building 16WW vacancy graph $@"
	@$(LOCKFILENRSLOW) $@.lock
	@/bin/rm -f $@.dat.tmp $@.dat.tmp.png $@.tmp
	@echo "- `cat $(OTSensorShortnames)`" > $@.dat.tmp
	@cat `find data/OpenTRV/.private/stats/remote/ -name '20??????.json' -mtime -8 | sort | tail -8` | \
	    OpenTRV/scripts/filterJSON -multiID 'vac|h' `cat $(OTSensorIDs)` | \
	    $(PROPDOWN) >> $@.dat.tmp
	@wc -l $@.dat.tmp; head -1 $@.dat.tmp; tail -1 $@.dat.tmp
	@gnuplot -e "filename='$@.dat.tmp'" -e "title='Vacancy'" \
	    -e "unit='h'" \
	    graphing/gnuplot16WWGenericMultisensorPlot.txt
	@mv $@.dat.tmp.png $@.tmp
	@if [ -x $(OPTIPNG) ]; then $(OPTIPNG) -q $@.tmp; fi
	@-chmod -f u+w $@
	@chmod -f 644 $@.tmp
	@/bin/mv $@.tmp $@
	@chmod a+r,a-wx $@
	@/bin/rm -f $@.dat.tmp $@.tmp
	@script/build_alternate_format_compact_image $@ $@.webp
	@/bin/rm -f $@.lock

# Graph recent occupancy ('O' stat).
# Only run infrequently / manually for privacy!
# Driven by the last-8 stats list rather than a fresh find.
graphs-manual:: out/weekly/16WWocc.png
out/weekly/16WWocc.png: $(OTSensorIDs) $(OTSensorShortnames) \
		OpenTRV/scripts/filterJSON \
		graphing/gnuplot16WWGenericMultisensorPlot.txt \
                ${STATSREMOTELAST8}
	@echo "Building 16WW occupancy graph $@"
	@$(LOCKFILENRSLOW) $@.lock
	@/bin/rm -f $@.dat.tmp $@.dat.tmp.png $@.tmp
	@echo "- $$(cat $(OTSensorShortnames))" > $@.dat.tmp
	@cat /dev/null $$(cat ${STATSREMOTELAST8}) | \
	    OpenTRV/scripts/filterJSON -multiID 'O' $$(cat $(OTSensorIDs)) | \
	    $(PROPDOWN) >> $@.dat.tmp
	@wc -l $@.dat.tmp; head -1 $@.dat.tmp; tail -1 $@.dat.tmp
	@gnuplot -e "filename='$@.dat.tmp'" -e "title='Occupancy'" \
	    -e "loff='0.02'" \
	    graphing/gnuplot16WWGenericMultisensorPlot.txt
	@mv $@.dat.tmp.png $@.tmp
	@if [ -x $(OPTIPNG) ]; then $(OPTIPNG) -q $@.tmp; fi
	@-chmod -f u+w $@
	@chmod -f 644 $@.tmp
	@/bin/mv $@.tmp $@
	@chmod a+r,a-wx $@
	@/bin/rm -f $@.dat.tmp $@.tmp
	@/bin/rm -f $@.lock

# Graph recent ambient light levels ('L' stat).
# Only run infrequently / manually for privacy!
graphs-manual:: out/weekly/16WWal.png
out/weekly/16WWal.png: $(OTSensorIDs) $(OTSensorShortnames) \
		OpenTRV/scripts/filterJSON \
		graphing/gnuplot16WWGenericMultisensorPlot.txt \
    		$(wildcard data/OpenTRV/.private/stats/remote/updated.JSON.flag)
	@echo "Building 16WW ambient light graph $@"
	@$(LOCKFILENRSLOW) $@.lock
	@/bin/rm -f $@.dat.tmp $@.dat.tmp.png $@.tmp
	@echo "- `cat $(OTSensorShortnames)`" > $@.dat.tmp
	@cat `find data/OpenTRV/.private/stats/remote/ -name '20??????.json' -mtime -8 | sort | tail -8` | \
	    OpenTRV/scripts/filterJSON -multiID 'L' `cat $(OTSensorIDs)` | \
	    $(PROPDOWN) >> $@.dat.tmp
	@wc -l $@.dat.tmp; head -1 $@.dat.tmp; tail -1 $@.dat.tmp
	@gnuplot -e "filename='$@.dat.tmp'" -e "title='Light'" \
	    -e "loff='1'" \
	    graphing/gnuplot16WWGenericMultisensorPlot.txt
	@mv $@.dat.tmp.png $@.tmp
	@if [ -x $(OPTIPNG) ]; then $(OPTIPNG) -q $@.tmp; fi
	@-chmod -f u+w $@
	@chmod -f 644 $@.tmp
	@/bin/mv $@.tmp $@
	@chmod a+r,a-wx $@
	@/bin/rm -f $@.dat.tmp $@.tmp
	@/bin/rm -f $@.lock

# Graph recent ambient light levels deltas.
# Only run infrequently / manually for privacy!
# The extra awk stage differences each light column against its previous
# row value (p[i]), keeping column 1 (the timestamp) unchanged.
graphs-manual:: out/weekly/16WWald.png
out/weekly/16WWald.png: $(OTSensorIDs) $(OTSensorShortnames) \
		OpenTRV/scripts/filterJSON \
		graphing/gnuplot16WWGenericMultisensorPlot.txt \
    		$(wildcard data/OpenTRV/.private/stats/remote/updated.JSON.flag)
	@echo "Building 16WW ambient light delta graph $@"
	@$(LOCKFILENRSLOW) $@.lock
	@/bin/rm -f $@.dat.tmp $@.dat.tmp.png $@.tmp
	@echo "- `cat $(OTSensorShortnames)`" > $@.dat.tmp
	@cat `find data/OpenTRV/.private/stats/remote/ -name '20??????.json' -mtime -8 | sort | tail -8` | \
	    OpenTRV/scripts/filterJSON -multiID 'L' `cat $(OTSensorIDs)` | \
	    $(PROPDOWN) | \
	    awk '{printf("%s ",$$1);for(i=2;i<=NF;++i){printf("%d ",$$i-p[i]);p[i]=$$i;}print""}' >> $@.dat.tmp
	@wc -l $@.dat.tmp; tail -10 $@.dat.tmp
	@gnuplot -e "filename='$@.dat.tmp'" -e "title='Light delta'" \
	    graphing/gnuplot16WWGenericMultisensorPlot.txt
	@mv $@.dat.tmp.png $@.tmp
	@if [ -x $(OPTIPNG) ]; then $(OPTIPNG) -q $@.tmp; fi
	@-chmod -f u+w $@
	@chmod -f 644 $@.tmp
	@/bin/mv $@.tmp $@
	@chmod a+r,a-wx $@
	@/bin/rm -f $@.dat.tmp $@.tmp
	@/bin/rm -f $@.lock

# Logger data directory
#LOGDIR=/local/k8055/data
# File for latest sample data (ie changes when there is new data).
# Used (via $(wildcard ...)) as an optional trigger prerequisite below.
LASTDAT=/run/LASTDATA.flag
# Directory containing historical summary data...
#HISTDIR=/local/k8055/summary


# Construct the live stats page (and replace atomically).
# This has to be quick and efficient as it may be invoked frequently.
# Only redraw the battery graph if there is new sample data.
# Uses locking to avoid corruption if two makes get started concurrently.
# Don't force the calendar-month or all-time graphs to update here.
# Don't force the derived data (PVEOalldailykWh+smoothed.dat) to update here.
_off-grid-stats.html: script/est_power \
		     out/hourly/battV.png \
                     $(wildcard $(LASTDAT))
	@echo "Building live stats page $@  ...  `cat $(LASTDAT)`"
	@$(LOCKFILENRSTD) $@.lock
	@/bin/rm -f $@.tmp
	@script/est_power > $@.tmp
	@-chmod -f u+w $@
	@chmod -f 644 $@.tmp
	@/bin/mv $@.tmp $@
	@chmod a+r,a-wx $@
	@/bin/rm -f $@.lock
ifeq ($(wildcard $(PWRLOWFLAG)),)
# When power is not LOW then update the data file.
_off-grid-stats.html: out/daily/PVEOalldailykWh+smoothed.dat
endif
ifneq ($(wildcard $(PWRHIGHFLAG))$(NOPWR),)
# Encourage some timely underlying updates when HIGH.
_off-grid-stats.html: out/daily/battV-cm.png
_off-grid-stats.html: out/monthly/EOall-tn.png out/monthly/EOall.png
_off-grid-stats.html: out/daily/PVEOalldailykWh+smoothed.dat
endif

# Only redraw the battery graph if there is new sample data.
# Wildcard the LASTDAT file to allow make to run on non-server machine,
# eg to allow off-line test builds on a laptop.
# The gnuplot script reads the fixed path .work/battV.dat and writes
# $@.tmp; output is then optimised and installed atomically.
out/hourly/battV.png: graphing/battVoltsForDateRPi \
               graphing/gnuplotBattVRPi.txt \
	       $(wildcard $(LASTDAT))
	@echo "Building battery voltage graph $@"
	@$(LOCKFILENRSTD) $@.lock
	@/bin/rm -f .work/battV.dat $@.tmp $@.tmp.bak
	@graphing/battVoltsForDateRPi > .work/battV.dat
	@gnuplot graphing/gnuplotBattVRPi.txt
	@if [ -x $(OPTIPNG) ]; then $(OPTIPNG) -q -o1 $@.tmp; fi
	@-chmod -f u+w $@
	@chmod -f 644 $@.tmp
	@/bin/mv $@.tmp $@
	@chmod a+r,a-wx $@
	@/bin/rm -f .work/battV.dat $@.tmp
	@/bin/rm -f $@.lock

# Only redraw the calendar-month battery graph with new data.
# Relatively expensive.
# Depends on live data so could be driven by $(LASTDAT),
# but $(DAILY) once-per-day update more reasonable.
# Wildcard the actual log file to allow make to run on non-server machine,
# eg to allow off-line test builds on a laptop.
# gatherLastMonth writes .work/battV-cm.dat, which the gnuplot script
# reads, plotting to $@.tmp for atomic install.
BATTVLOGDIR=/var/log/powermng
out/daily/battV-cm.png: \
		.work/gatherLastMonth \
		graphing/gnuplotBattVcm.txt \
		$(wildcard $(DAILY))
	@echo "Building calendar-month battery voltage graph $@"
	@$(LOCKFILENRSLOW) $@.lock
	@.work/gatherLastMonth ${BATTVLOGDIR} > .work/battV-cm.dat
	@gnuplot graphing/gnuplotBattVcm.txt
	@if [ -x $(OPTIPNG) ]; then $(OPTIPNG) -q $@.tmp; fi
	@-chmod -f u+w $@
	@chmod -f 644 $@.tmp
	@/bin/mv $@.tmp $@
	@chmod a+r,a-wx $@
	@/bin/rm -f .work/battV-cm.dat
	@/bin/rm -f $@.lock


# Cached per-calendar-month filtered battV data location.
BATTVCMCACHEDIR=/tmp/battVcmCache
# Build a partial -cm data based on the data in the 'month' subdir.
# Rely on the month directory being touched on each new data point added within.
$(BATTVCMCACHEDIR)/20%/battV-cm.dat: $(LOGDIR)/20%/
	@echo "Building calendar-month battery voltage sample $@"
	@$(LOCKFILENRSLOW) $@.lock
	@mkdir -p $(@D)
	@ls -1 $(@D:$(BATTVCMCACHEDIR)%=$(LOGDIR)%)/??/????.dat 2>/dev/null | \
	    awk -F/ '{ print $$(NF-3)"/"$$(NF-2)"/"$$(NF-1), $$NF }' | \
	    sort -u | \
	    awk -f graphing/battVoltsCondensed.awk \
	        LOGDIR=$(LOGDIR) \
	        PERIOD=`echo $(@D) | awk -F/ '{print $$(NF-1)"/"$$NF}'` \
		> $@.tmp
	@mv $@.tmp $@
	@/bin/rm -f $@.lock

# Only redraw the heat battery target graph if there is new sample data.
# Wildcard the LASTDAT file to allow make to run on non-server machine,
# eg to allow off-line test builds on a laptop.
# Concatenates yesterday's and today's logs (yesterday's via an optional
# glob-free cat of a date-derived name) and plots both full-size and
# thumbnail images in one gnuplot run; both installed atomically.
# NOTE(review): the plain `cat $(HEATBATTARGETLOGTODAY)` will fail the rule
# if today's log does not yet exist (the prerequisite is wildcarded).
HEATBATTARGETLOGDIR=data/heatBattery/log/live/
HEATBATTARGETLOGTODAY=$(HEATBATTARGETLOGDIR)/$(UTCYEAR)$(UTCMONTH)$(UTCDAY).log
out/hourly/heatBatTarget-tn.png: out/hourly/heatBatTarget.png
out/hourly/heatBatTarget.png: graphing/gnuplotHeatBatTarget.txt \
	       $(wildcard $(HEATBATTARGETLOGTODAY))
	@echo "Building heat battery target graph $@ and $(@:%.png=%-tn.png)"
	@$(LOCKFILENRSTD) $@.lock
	@/bin/rm -f .work/_heatbatterytarget.log \
		out/tmp/_heatbatterytarget.tmp.png \
		out/tmp/_heatbatterytarget-tn.tmp.png
	@cat /dev/null `date -u --date yesterday +${HEATBATTARGETLOGDIR}/%Y%m%d.log` >> .work/_heatbatterytarget.log
	@cat $(HEATBATTARGETLOGTODAY) >> .work/_heatbatterytarget.log
	@tail -3 .work/_heatbatterytarget.log
	@gnuplot graphing/gnuplotHeatBatTarget.txt
	@if [ -x $(OPTIPNG) ]; then $(OPTIPNG) -q -o1 out/tmp/_heatbatterytarget.tmp.png; fi
	@if [ -x $(OPTIPNG) ]; then $(OPTIPNG) -q -o1 out/tmp/_heatbatterytarget-tn.tmp.png; fi
	@-chmod -f u+w $@
	@chmod -f 644 out/tmp/_heatbatterytarget.tmp.png
	@/bin/mv out/tmp/_heatbatterytarget.tmp.png $@
	@chmod a+r,a-wx $@
	@-chmod -f u+w $(@:%.png=%-tn.png)
	@chmod -f 644 out/tmp/_heatbatterytarget-tn.tmp.png
	@/bin/mv out/tmp/_heatbatterytarget-tn.tmp.png $(@:%.png=%-tn.png)
	@chmod a+r,a-wx $(@:%.png=%-tn.png)
	@/bin/rm -f .work/_heatbatterytarget.log \
		out/tmp/_heatbatterytarget.tmp.png \
		out/tmp/_heatbatterytarget-tn.tmp.png
	@/bin/rm -f $@.lock
#ifneq ($(wildcard $(PWRHIGHFLAG))$(NOPWR),)
# Do not update the thumbnail unless battery state is HIGH.
#stats:: out/hourly/heatBatTarget-tn.png
#endif
stats:: out/hourly/heatBatTarget.png

# Redraw detailed eheat (heat pump, diversion, boost) at most weekly.
# Does not show anything very recent for privacy reasons...
# Copies the newest monthly daily.csv to a fixed .work name the gnuplot
# script expects; rebuild cadence is tightened below by battery state.
EDDIEHEATDAILYDIR=data/eddi/log
out/monthly/eheatDailyRecentDetailed.png: \
		graphing/gnuplotEHeatDetailed.txt \
		$(wildcard $(MONTHLY))
	@echo "Building detailed eheat input graph $@"
	@$(LOCKFILENRSTD) $@.lock
	@cp `ls -1 $(EDDIEHEATDAILYDIR)/20????.daily.csv | tail -1` .work/_eheatDetailed.tmp.csv
	@mkdir -p out/tmp
	@gnuplot graphing/gnuplotEHeatDetailed.txt
	@test -s out/tmp/_eheatDetailed.tmp.png
	@if [ -x $(OPTIPNG) ]; then $(OPTIPNG) -q -o1 out/tmp/_eheatDetailed.tmp.png; fi
	@-chmod -f u+w $@
	@chmod -f 644 out/tmp/_eheatDetailed.tmp.png
	@/bin/mv out/tmp/_eheatDetailed.tmp.png $@
	@chmod a+r,a-wx $@
	@/bin/rm -f .work/_eheatDetailed.tmp.csv
	@/bin/rm -f $@.lock
	@script/build_alternate_format_compact_image $@ $@.webp
ifneq ($(wildcard $(PWRHIGHFLAG))$(NOPWR),)
# Do not make/update the graph unless battery state is HIGH.
out/monthly/eheatDailyRecentDetailed.png: $(wildcard $(WEEKLY))
out/monthly/eheatDailyRecentDetailed.png: $(EDDIEHEATDAILYDIR)
endif
ifneq ($(wildcard $(PWRVHIGHFLAG))$(NOPWR),)
# Force daily check/update if VHIGH.
stats:: out/monthly/eheatDailyRecentDetailed.png
out/monthly/eheatDailyRecentDetailed.png: $(wildcard $(DAILY))
endif
# Redraw all eheat (heat pump, diversion, boost) at most weekly.
# Does not show anything very recent for privacy reasons...
# Same structure as the 'Detailed' rule above but with the plain
# gnuplotEHeat.txt script and _eheat.* working files.
out/monthly/eheatDailyRecent.png: \
		graphing/gnuplotEHeat.txt \
		$(wildcard $(MONTHLY))
	@echo "Building eheat input graph $@"
	@$(LOCKFILENRSTD) $@.lock
	@cp `ls -1 $(EDDIEHEATDAILYDIR)/20????.daily.csv | tail -1` .work/_eheat.tmp.csv
	@mkdir -p out/tmp
	@gnuplot graphing/gnuplotEHeat.txt
	@test -s out/tmp/_eheat.tmp.png
	@if [ -x $(OPTIPNG) ]; then $(OPTIPNG) -q -o1 out/tmp/_eheat.tmp.png; fi
	@-chmod -f u+w $@
	@chmod -f 644 out/tmp/_eheat.tmp.png
	@/bin/mv out/tmp/_eheat.tmp.png $@
	@chmod a+r,a-wx $@
	@/bin/rm -f .work/_eheat.tmp.csv
	@/bin/rm -f $@.lock
	@script/build_alternate_format_compact_image $@ $@.webp
ifneq ($(wildcard $(PWRHIGHFLAG))$(NOPWR),)
# Do not make/update the graph unless battery state is HIGH.
out/monthly/eheatDailyRecent.png: $(wildcard $(WEEKLY))
out/monthly/eheatDailyRecent.png: $(EDDIEHEATDAILYDIR)
endif
ifneq ($(wildcard $(PWRVHIGHFLAG))$(NOPWR),)
# Force daily check/update if VHIGH.
stats:: out/monthly/eheatDailyRecent.png
out/monthly/eheatDailyRecent.png: $(wildcard $(DAILY))
endif

# Redraw the recent heat battery daily input at most weekly.
# Does not show anything very recent for privacy reasons...
# Same structure as the eheat rules: copy newest daily.csv to the fixed
# .work name the gnuplot script expects, plot, optimise, install.
EDDIDIVERTDAILYDIR=data/eddi/log
out/monthly/heatBatteryInputDailyRecent.png: \
		graphing/gnuplotHeatBatDailyInput.txt \
		$(wildcard $(MONTHLY))
	@echo "Building heat battery input graph $@"
	@$(LOCKFILENRSTD) $@.lock
	@cp `ls -1 $(EDDIDIVERTDAILYDIR)/20????.daily.csv | tail -1` .work/_heatbatteryinput.tmp.csv
	@mkdir -p out/tmp
	@gnuplot graphing/gnuplotHeatBatDailyInput.txt
	@test -s out/tmp/_heatbatteryinput.tmp.png
	@if [ -x $(OPTIPNG) ]; then $(OPTIPNG) -q -o1 out/tmp/_heatbatteryinput.tmp.png; fi
	@-chmod -f u+w $@
	@chmod -f 644 out/tmp/_heatbatteryinput.tmp.png
	@/bin/mv out/tmp/_heatbatteryinput.tmp.png $@
	@chmod a+r,a-wx $@
	@/bin/rm -f .work/_heatbatteryinput.tmp.csv
	@/bin/rm -f $@.lock
	@script/build_alternate_format_compact_image $@ $@.webp
ifneq ($(wildcard $(PWRHIGHFLAG))$(NOPWR),)
# Do not make/update the graph unless battery state is HIGH.
out/monthly/heatBatteryInputDailyRecent.png: $(wildcard $(WEEKLY))
out/monthly/heatBatteryInputDailyRecent.png: $(EDDIDIVERTDAILYDIR)
endif
ifneq ($(wildcard $(PWRVHIGHFLAG))$(NOPWR),)
# Force daily check/update if VHIGH.
stats:: out/monthly/heatBatteryInputDailyRecent.png
out/monthly/heatBatteryInputDailyRecent.png: $(wildcard $(DAILY))
endif

# Selectively rebuild just the stats pages with live data...
# This is not part of the 'all' task as it is too expensive.
# (Despite the comment above, `all:: stats-all` below does pull it in;
# NOTE(review): confirm which of the two is the intended behavior.)
.PHONY: stats-all
stats-all:: stats \
    out/daily/OpenTRV/16WWSensorPower.html
all:: stats-all

# Yearly grid-tie-PV kWh/d CSV data files, one per calendar year
# 2008..2026 inclusive (data/WW-PV-roof/E<year>.csv).
GTkWhCSV := $(patsubst %,data/WW-PV-roof/E%.csv,\
    2008 2009 2010 2011 2012 2013 2014 2015 2016 2017 \
    2018 2019 2020 2021 2022 2023 2024 2025 2026)

# Yearly off-grid-PV cumulative kWh/d CSV data files, one per calendar
# year 2016..2026 inclusive (data/WW-PV-offgrid/EC<year>.csv).
OGkWhCSV := $(patsubst %,data/WW-PV-offgrid/EC%.csv,\
    2016 2017 2018 2019 2020 2021 2022 2023 2024 2025 2026)

# Yearly HDD12 vs (gas) kWh data files.
gasHDD12kWh= \
    out/monthly/16WW-date-HDD12-kWh-2016.csv \
    out/monthly/16WW-date-HDD12-kWh-2017.csv \
    out/monthly/16WW-date-HDD12-kWh-2018.csv \
    out/monthly/16WW-date-HDD12-kWh-2019.csv \
    out/monthly/16WW-date-HDD12-kWh-2020.csv
# Derived kWh-vs-HDD12 file names, one per entry in $(gasHDD12kWh).
# FIX: this was `$(out/...-%.csv=out/...-%.csv)`, which GNU make reads as
# a reference to a variable literally named by that whole string (undefined),
# so it always expanded empty; a substitution reference must name the
# variable before the colon.
gaskWhvsHDD12= $(gasHDD12kWh:out/monthly/16WW-date-HDD12-kWh-%.csv=out/monthly/16WW-date-kWh-vs-HDD12-%.csv)

# Rebuild various graphs...
# The order-only (|) prerequisites just ensure the output directories
# exist without forcing rebuilds when directory mtimes change.
graphs graphs-infreq:: | out out/hourly out/daily out/weekly out/monthly out/yearly
all:: graphs
# Graphs to be updated typically hourly or more often.
# Also included graphs that are cheap to check and do not rebuild often.
.PHONY: graphs
graphs:: out/hourly/battV.png out/hourly/gridTiePower.png \
    out/hourly/16WWmultisensortemp.png out/hourly/16WWmultisensortempL.png \
    out/monthly/16WWMonthlyElectricityConsumption.png
# Graphs on a daily-or-less-frequent update.
# Auto-rebuild only when HIGH.
ifneq ($(wildcard $(PWRHIGHFLAG))$(NOPWR),)
all:: graphs-infreq
endif
# Per-year PV graphs/thumbnails are derived from the CSV list by
# pattern substitution (E<year>.csv -> E<year>[-tn].png).
.PHONY: graphs-infreq
graphs-infreq:: \
    out/hourly/16WWSampleInternalTemp.png \
    out/daily/battV-cm.png \
    out/daily/B1T.png \
    out/daily/gridTiePowercm.png \
    out/weekly/16WWRH.png \
    out/monthly/16WWSensorPower.png \
    out/monthly/16WWMonthlyGasConsumption.png \
    out/monthly/16WWMonthlyElectricityConsumption.png \
    out/yearly/16WWYearlyGasConsumption.png \
    out/yearly/16WWYearlyElectricityConsumption.png \
    out/yearly/16WWYearlyEnergyCarbonFootprint.png \
    out/monthly/EOall.png out/monthly/EOall-tn.png \
    $(GTkWhCSV:data/WW-PV-roof/E%.csv=out/yearly/E%.png) \
    $(GTkWhCSV:data/WW-PV-roof/E%.csv=out/yearly/E%-tn.png) \
    out/monthly/Eall.png out/monthly/Eall-tn.png
# Some handy graphs for valve debugging, but not all appropriate to automate.
.PHONY: graphs-16WWcore
graphs-16WWcore: out/hourly/16WWmultisensortempL.png \
    out/weekly/16WWtS.png \
    out/weekly/16WWtT.png \
    out/weekly/16WWocc.png \
    out/weekly/16WWRH.png \
    out/weekly/16WWvpc.png \
    out/weekly/16WWvpcC.png \
    out/weekly/16WWbc.png

# Graphs to be run manually, not as part of 'all' or automatically,
# because they are slow/expensive or a possible security hazard if not ad hoc.
graphs-manual::

# Ensure that the output directories are all present and HTTP-accessible.
# Generic output directory for generated items, especially graphs.
# Create if needed.
# (These targets are also used as order-only prerequisites elsewhere.)
out out/hourly out/daily out/weekly out/monthly out/yearly:
	mkdir -p $@
	chmod a+rx $@
all:: out out/hourly out/daily out/weekly out/monthly out/yearly

# 16WW PV-generation for one year.
# The $(@:pat=pat) substitution recovers the source CSV for the matched
# year; the gnuplot script reads/writes fixed names .work/E.dat/.work/E.png.
# NOTE(review): those fixed .work names are shared by both pattern rules
# and there is no lock here, so building several years with `make -j` can
# race; fixing requires per-target names in the gnuplot scripts too.
# Prefers zopflipng, falls back to optipng, else installs unoptimised.
out/yearly/E%.png: data/WW-PV-roof/E%.csv \
		graphing/smoothPV2.awk \
		graphing/gnuplotPV.txt
	@echo "Updating one-year PV generation graph $@"
	@/bin/rm -f $@.tmp .work/E.dat .work/E.png
	@awk < $(@:out/yearly/E%.png=data/WW-PV-roof/E%.csv) > .work/E.dat -f graphing/smoothPV2.awk
	@gnuplot graphing/gnuplotPV.txt
	@if [ -x $(ZOPFLIPNG) ]; then \
	  $(ZOPFLIPNG) $(ZOPFLIPNGEXTFLAGS) .work/E.png $@.tmp2 && /bin/mv $@.tmp2 $@.tmp; \
	 elif [ -x $(OPTIPNG) ]; then \
	  $(OPTIPNG) -q -out $@.tmp .work/E.png; \
	 else \
	  /bin/mv .work/E.png $@.tmp; \
	 fi
	@-chmod -f u+w $@
	@chmod -f 644 $@.tmp
	@/bin/mv $@.tmp $@
	@chmod a+r,a-wx $@
	@/bin/rm -f .work/E.dat .work/E.png
	@script/build_alternate_format_compact_image $@ $@.webp
# Thumbnail variant of the above; same data flow, -tn gnuplot script.
out/yearly/E%-tn.png: data/WW-PV-roof/E%.csv \
		graphing/smoothPV2.awk \
		graphing/gnuplotPV-tn.txt
	@echo "Updating one-year PV generation tn $@"
	@/bin/rm -f $@.tmp .work/E.dat .work/E.png
	@awk < $(@:out/yearly/E%-tn.png=data/WW-PV-roof/E%.csv) > .work/E.dat -f graphing/smoothPV2.awk
	@gnuplot graphing/gnuplotPV-tn.txt
	@if [ -x $(ZOPFLIPNG) ]; then \
	  $(ZOPFLIPNG) $(ZOPFLIPNGEXTFLAGS) .work/E.png $@.tmp2 && /bin/mv $@.tmp2 $@.tmp; \
	 elif [ -x $(OPTIPNG) ]; then \
	  $(OPTIPNG) -q -out $@.tmp .work/E.png; \
	 else \
	  /bin/mv .work/E.png $@.tmp; \
	 fi
	@-chmod -f u+w $@
	@chmod -f 644 $@.tmp
	@/bin/mv $@.tmp $@
	@chmod a+r,a-wx $@
	@/bin/rm -f .work/E.dat .work/E.png
	@script/build_alternate_format_compact_image $@ $@.webp

# 16WW PV generation collected data (+smoothing) for all time.
# Concatenates all yearly CSVs through the smoothing awk script and
# installs atomically; trailing `tail -5` echoes the newest rows as a
# quick sanity check in the build log.
out/daily/PVEalldailykWh+smoothed.dat: $(GTkWhCSV) \
		graphing/smoothPV2.awk
	@echo "Updating collated PV generation data $@"
	@mkdir -p $(@D)
	@/bin/rm -f $@.tmp
	@cat $(GTkWhCSV) | awk -F, > $@.tmp -f graphing/smoothPV2.awk
	@-chmod -f u+w $@
	@chmod -f 644 $@.tmp
	@/bin/mv $@.tmp $@
	@chmod a+r,a-wx $@
	@tail -5 $@
# All-time grid-tie PV generation graph from the collated data file.
# The gnuplot script writes $@.tmp; prefer zopflipng, else optipng.
out/monthly/Eall.png: \
                out/daily/PVEalldailykWh+smoothed.dat \
		graphing/gnuplotAllPV.txt
	@echo "Updating all-time PV generation graph $@"
	@/bin/rm -f $@.tmp
	@gnuplot graphing/gnuplotAllPV.txt
	@if [ -x $(ZOPFLIPNG) ]; then \
	  $(ZOPFLIPNG) $@.tmp $@.tmp2 && /bin/mv $@.tmp2 $@.tmp; \
	elif [ -x $(OPTIPNG) ]; then \
	  $(OPTIPNG) -q $(OPTIPNGLOTS) $@.tmp; \
	fi
	@-chmod -f u+w $@
	@chmod -f 644 $@.tmp
	@/bin/mv $@.tmp $@
	@chmod a+r,a-wx $@
	@script/build_alternate_format_compact_image $@ $@.webp
# Since the thumbnail may be used as a live avatar on other sites, compress max.
# The gnuplot script writes its plot to $@.tmp (fixed name in the script).
out/monthly/Eall-tn.png: \
		out/daily/PVEalldailykWh+smoothed.dat \
		graphing/smoothPV2.awk \
		graphing/gnuplotAllPV-tn.txt
	@echo "Updating all-time PV generation tn $@"
	@mkdir -p $(@D)
	@/bin/rm -f $@.tmp
	@gnuplot graphing/gnuplotAllPV-tn.txt
# Best-available PNG optimiser: zopflipng preferred, else optipng (max effort).
	@if [ -x $(ZOPFLIPNG) ]; then \
	  $(ZOPFLIPNG) $(ZOPFLIPNGEXTFLAGS) $@.tmp $@.tmp2 && /bin/mv $@.tmp2 $@.tmp; \
	elif [ -x $(OPTIPNG) ]; then \
	  $(OPTIPNG) -q $(OPTIPNGMAX) $@.tmp; \
	fi
# Install atomically, read-only; also build a WebP alternate alongside.
	@-chmod -f u+w $@
	@chmod -f 644 $@.tmp
	@/bin/mv $@.tmp $@
	@chmod a+r,a-wx $@
	@script/build_alternate_format_compact_image $@ $@.webp

# 16WW off-grid PV generation for all time.
# Concatenates all off-grid kWh CSVs and smooths via smoothPVOG.awk;
# written atomically via $@.tmp then installed read-only.
out/daily/PVEOalldailykWh+smoothed.dat: $(OGkWhCSV) \
		graphing/smoothPVOG.awk
	@echo "Updating collated off-grid PV generation data $@"
	@mkdir -p $(@D)
	@/bin/rm -f $@.tmp
	@cat $(OGkWhCSV) | awk -F, > $@.tmp -f graphing/smoothPVOG.awk
	@-chmod -f u+w $@
	@chmod -f 644 $@.tmp
	@/bin/mv $@.tmp $@
	@chmod a+r,a-wx $@
# Show the most recent records as a quick sanity check in the build log.
	@tail -5 $@
# All-time off-grid PV generation graph.
# NOTE(review): no "mkdir -p $(@D)" here — assumes out/monthly/ exists.
out/monthly/EOall.png: \
                out/daily/PVEOalldailykWh+smoothed.dat \
		graphing/gnuplotAllOGPV.txt
	@echo "Updating all-time off-grid PV generation graph $@"
	@/bin/rm -f $@.tmp
# Pass first/last data dates (YYYYMMDD, extracted from column 1 of the
# smoothed data file) into the gnuplot script as startdate/enddate.
	@gnuplot \
		-e startdate="`awk '{print substr($$1,1,4)substr($$1,6,2)substr($$1,9,2);exit}' < out/daily/PVEOalldailykWh+smoothed.dat`" \
		-e enddate="`awk '{last=substr($$1,1,4)substr($$1,6,2)substr($$1,9,2)}END{print last}' < out/daily/PVEOalldailykWh+smoothed.dat`" \
		graphing/gnuplotAllOGPV.txt
	@if [ -x $(OPTIPNG) ]; then $(OPTIPNG) -q $(OPTIPNGLOTS) $@.tmp; fi
# Install atomically, read-only; also build a WebP alternate alongside.
	@-chmod -f u+w $@
	@chmod -f 644 $@.tmp
	@/bin/mv $@.tmp $@
	@chmod a+r,a-wx $@
	@script/build_alternate_format_compact_image $@ $@.webp
# Since the thumbnail may be used as a live avatar on other sites, compress max.
# FIX(review): was $(OPTIPNGLOTS), contradicting the comment above and the
# equivalent grid-tie thumbnail rule (Eall-tn) which uses $(OPTIPNGMAX).
out/monthly/EOall-tn.png: \
		out/daily/PVEOalldailykWh+smoothed.dat \
		graphing/gnuplotAllPVOG-tn.txt
	@echo "Updating all-time off-grid PV generation tn $@"
	@mkdir -p $(@D)
	@/bin/rm -f $@.tmp
	@gnuplot graphing/gnuplotAllPVOG-tn.txt
	@if [ -x $(OPTIPNG) ]; then $(OPTIPNG) -q $(OPTIPNGMAX) $@.tmp; fi
# Install atomically, read-only; also build a WebP alternate alongside.
	@-chmod -f u+w $@
	@chmod -f 644 $@.tmp
	@/bin/mv $@.tmp $@
	@chmod a+r,a-wx $@
	@script/build_alternate_format_compact_image $@ $@.webp


# Monthly gas consumption.
# The gnuplot script writes its plot to $@.tmp (fixed name in the script).
out/monthly/16WWMonthlyGasConsumption.png: \
                        data/16WWMonthlyGasConsumption.dat \
                        graphing/gnuplotG.txt
	@echo "Updating consumption graph $@"
	@/bin/rm -f $@.tmp $@.tmp2
	@gnuplot graphing/gnuplotG.txt
# Best-available PNG optimiser: zopflipng preferred, else optipng (max effort).
	@if [ -x $(ZOPFLIPNG) ]; then \
	  $(ZOPFLIPNG) $(ZOPFLIPNGEXTFLAGS) $@.tmp $@.tmp2 && /bin/mv $@.tmp2 $@.tmp; \
	elif [ -x $(OPTIPNG) ]; then \
	  $(OPTIPNG) -q $(OPTIPNGMAX) $@.tmp; \
	fi
# Install atomically, read-only; also build a WebP alternate alongside.
	@-chmod -f u+w $@
	@chmod -f 644 $@.tmp
	@/bin/mv $@.tmp $@
	@chmod a+r,a-wx $@
	@script/build_alternate_format_compact_image $@ $@.webp

# Monthly electricity consumption.
# The gnuplot script writes its plot to $@.tmp (fixed name in the script).
out/monthly/16WWMonthlyElectricityConsumption.png: \
                        data/16WWMonthlyElectricityConsumption.dat \
                        graphing/gnuplotE.txt
	@echo "Updating consumption graph $@"
	@/bin/rm -f $@.tmp $@.tmp2
	@gnuplot graphing/gnuplotE.txt
# Best-available PNG optimiser: zopflipng preferred, else optipng (max effort).
	@if [ -x $(ZOPFLIPNG) ]; then \
	  $(ZOPFLIPNG) $(ZOPFLIPNGEXTFLAGS) $@.tmp $@.tmp2 && /bin/mv $@.tmp2 $@.tmp; \
	elif [ -x $(OPTIPNG) ]; then \
	  $(OPTIPNG) -q $(OPTIPNGMAX) $@.tmp; \
	fi
# Install atomically, read-only; also build a WebP alternate alongside.
	@-chmod -f u+w $@
	@chmod -f 644 $@.tmp
	@/bin/mv $@.tmp $@
	@chmod a+r,a-wx $@
	@script/build_alternate_format_compact_image $@ $@.webp

# Yearly electricity consumption.
# The gnuplot script writes its plot to $@.tmp (fixed name in the script).
out/yearly/16WWYearlyElectricityConsumption.png: \
                        data/16WWYearlyElectricityConsumption.dat \
                        graphing/gnuplotEY.txt
	@echo "Updating consumption graph $@"
	@/bin/rm -f $@.tmp $@.tmp2
	@gnuplot graphing/gnuplotEY.txt
# Best-available PNG optimiser: zopflipng preferred, else optipng (max effort).
	@if [ -x $(ZOPFLIPNG) ]; then \
	  $(ZOPFLIPNG) $(ZOPFLIPNGEXTFLAGS) $@.tmp $@.tmp2 && /bin/mv $@.tmp2 $@.tmp; \
	elif [ -x $(OPTIPNG) ]; then \
	  $(OPTIPNG) -q $(OPTIPNGMAX) $@.tmp; \
	fi
# Install atomically, read-only; also build a WebP alternate alongside.
	@-chmod -f u+w $@
	@chmod -f 644 $@.tmp
	@/bin/mv $@.tmp $@
	@chmod a+r,a-wx $@
	@script/build_alternate_format_compact_image $@ $@.webp

# Yearly gas consumption.
# The gnuplot script writes its plot to $@.tmp (fixed name in the script).
out/yearly/16WWYearlyGasConsumption.png: \
                        data/16WWYearlyGasConsumption.dat \
                        graphing/gnuplotGY.txt
	@echo "Updating consumption graph $@"
	@/bin/rm -f $@.tmp $@.tmp2
	@gnuplot graphing/gnuplotGY.txt
# Best-available PNG optimiser: zopflipng preferred, else optipng (max effort).
	@if [ -x $(ZOPFLIPNG) ]; then \
	  $(ZOPFLIPNG) $(ZOPFLIPNGEXTFLAGS) $@.tmp $@.tmp2 && /bin/mv $@.tmp2 $@.tmp; \
	elif [ -x $(OPTIPNG) ]; then \
	  $(OPTIPNG) -q $(OPTIPNGMAX) $@.tmp; \
	fi
# Install atomically, read-only; also build a WebP alternate alongside.
	@-chmod -f u+w $@
	@chmod -f 644 $@.tmp
	@/bin/mv $@.tmp $@
	@chmod a+r,a-wx $@
	@script/build_alternate_format_compact_image $@ $@.webp

# Yearly footprint.
# Nominally updated yearly, viewed frequently eg on front page,
# so worth putting in a lot of effort to optimise.
# zopflipng takes tens of seconds (RPi2) but saves a significant percentage.
# Generate a .png.webp alongside for content-negotiating newer browsers.
# DHD20251030: add SVG as secondary output, +.svgbr precompression.
# TODO: use svgo on server.
YECFBASE=out/yearly/16WWYearlyEnergyCarbonFootprint
# The SVG is produced as a side effect of the PNG recipe below.
$(YECFBASE).svg: $(YECFBASE).png
$(YECFBASE).png: \
                        data/16WWYearlyEnergyCarbonFootprint.dat \
                        graphing/gnuplotCY.txt
# FIX(review): message previously said "(and legacy $(YECFBASE).png)", but $@
# here IS $(YECFBASE).png; the secondary output of this rule is the .svg.
	@echo "Updating emissions graph $@ (and secondary $(YECFBASE).svg)"
	@$(LOCKFILENRSTD) $@.lock
	@/bin/rm -f $(YECFBASE).svg.tmp $(YECFBASE).svgbr.tmp
	@/bin/rm -f $(YECFBASE).png.tmp $(YECFBASE).png.tmp2
# gnuplotCY.txt writes both $(YECFBASE).svg.tmp and $(YECFBASE).png.tmp.
	@gnuplot graphing/gnuplotCY.txt
	@echo INFO: TODO +++ svgo $(YECFBASE).svg.tmp
# Install the SVG atomically, read-only, then its Brotli-precompressed form.
	@-chmod -f u+w $(YECFBASE).svg
	@chmod -f 644 $(YECFBASE).svg.tmp
	@/bin/mv $(YECFBASE).svg.tmp $(YECFBASE).svg
	@chmod a+r,a-wx $(YECFBASE).svg
	@brotli < $(YECFBASE).svg > $(YECFBASE).svgbr.tmp
	@-chmod -f u+w $(YECFBASE).svgbr
	@chmod -f 644 $(YECFBASE).svgbr.tmp
	@/bin/mv $(YECFBASE).svgbr.tmp $(YECFBASE).svgbr
	@chmod a+r,a-wx $(YECFBASE).svgbr
# Best-available PNG optimiser: zopflipng preferred, else optipng (max effort).
	@if [ -x $(ZOPFLIPNG) ]; then \
	  $(ZOPFLIPNG) $(ZOPFLIPNGEXTFLAGS) $(YECFBASE).png.tmp $(YECFBASE).png.tmp2 && /bin/mv $(YECFBASE).png.tmp2 $(YECFBASE).png.tmp; \
	elif [ -x $(OPTIPNG) ]; then \
	  $(OPTIPNG) -q $(OPTIPNGMAX) $(YECFBASE).png.tmp; \
	fi
	@-chmod -f u+w $(YECFBASE).png
	@chmod -f 644 $(YECFBASE).png.tmp
	@/bin/mv $(YECFBASE).png.tmp $(YECFBASE).png
	@chmod a+r,a-wx $(YECFBASE).png
	@script/build_alternate_format_compact_image $(YECFBASE).png $(YECFBASE).png.webp
# NOTE(review): lock is not removed if an earlier step fails (stale lock
# clears only via lockfile's -l timeout).
	@/bin/rm -f $@.lock

# Show live/recent internal 16WW temperature at sample point.
# Based on the last few daily log files.
# Do a lock based on the gnuplot script since it uses a fixed output file.
out/hourly/16WWSampleInternalTemp.png: \
			OpenTRV/scripts/gnuplotLocalTemp.txt \
			data/OpenTRV/.private/stats/localtemp/updated.flag
	@$(LOCKFILENRSTD) OpenTRV/scripts/gnuplotLocalTemp.txt.lock
	@echo "Updating local temperature chart $@"
	@/bin/rm -f $@.tmp $@.tmp.bak samplelocaltemp.png
# Feed the last ~8 days of logs (capped at the last 2000 lines) to gnuplot,
# which writes the fixed file samplelocaltemp.png.
	@cat `find data/OpenTRV/.private/stats/localtemp/ -name '????????.log' -mtime -8 | sort` | tail -2000 | gnuplot OpenTRV/scripts/gnuplotLocalTemp.txt
	@mv samplelocaltemp.png $@.tmp
	@if [ -x $(OPTIPNG) ]; then $(OPTIPNG) -q -o1 $@.tmp; fi
	@-chmod -f u+w $@
	@chmod -f 644 $@.tmp
	@/bin/mv $@.tmp $@
# NOTE(review): lock is not removed if an earlier step fails.
	@/bin/rm -f OpenTRV/scripts/gnuplotLocalTemp.txt.lock

# Graph battery temperature vs outside temperature over a week or so.
# Wildcard the actual log file to allow make to run on non-server machine,
# eg to allow off-line test builds on a laptop.
out/daily/B1T.png: \
            ${STATSREMOTELAST8} \
	    $(wildcard data/OpenTRV/.private/stats/remote/updated.JSON.flag) \
	    $(wildcard $(LASTDAT))
	@$(LOCKFILENRSLOW) $@.lock
	@echo "Generating B1 vs outside temperature graph $@ ..."
	@/bin/rm -f $@.tmp $@.tmp.bak $@.dat.tmp $@.dat.tmp2
# Battery (B1) temperature: T|C16 records for sensor 819C99B4B9BD84BB,
# converted from 1/16ths; '-' fills the unused third column.
	@cat /dev/null $$(cat ${STATSREMOTELAST8}) | \
	    OpenTRV/scripts/filterJSON 'T|C16' 819C99B4B9BD84BB | \
	    awk '{print $$1, $$3/16, "-"}' \
	        >> $@.dat.tmp2
# Outside temperature from powermng logs, field 19; values of -99 or below
# are skipped (presumably a missing-value sentinel — verify against logger).
	@cat `find /var/log/powermng/ -name '????????.log' -mtime -8 | sort | tail -8` | \
	    awk '$$19 > -99 {ts=$$1; gsub("[/]", "-", ts); print ts, "-", $$19}' \
	        >> $@.dat.tmp2
# Prepend the column-header line, then merge both series in time order.
	@echo "- 4o B1T" > $@.dat.tmp
	@sort < $@.dat.tmp2 | ${PROPDOWN} >> $@.dat.tmp
# Echo first/last records as a sanity check in the build log.
	@head -1 $@.dat.tmp
	@tail -1 $@.dat.tmp
	@gnuplot -e "filename='$@.dat.tmp'" -e "title='B1 temp vs out'" \
	    -e "unit='C'" -e "ncol='2'" \
	    graphing/gnuplot16WWGenericMultisensorPlot.txt
	@mv $@.dat.tmp.png $@.tmp
	@if [ -x $(OPTIPNG) ]; then $(OPTIPNG) -q $@.tmp; fi
	@-chmod -f u+w $@
	@chmod -f 644 $@.tmp
	@/bin/mv $@.tmp $@
	@chmod a+r,a-wx $@
	@/bin/rm -f $@.tmp $@.tmp.bak $@.dat.tmp $@.dat.tmp2
	@/bin/rm -f $@.lock


# Graph recent multisensor temperature data.
# (Convert to gnuplot-friendly multi-column form first.)
# Lock access to the gnuplotmultisensortemp.txt script and its I/O files.
# One recipe produces both the normal and large ('L') images.
.PRECIOUS: out/hourly/16WWmultisensortempL.png out/hourly/16WWmultisensortemp.png
out/hourly/16WWmultisensortempL.png: out/hourly/16WWmultisensortemp.png
out/hourly/16WWmultisensortemp.png: \
            ${STATSREMOTELAST8} \
	    $(wildcard data/OpenTRV/.private/stats/remote/updated.JSON.flag) \
	    OpenTRV/scripts/gnuplotmultisensortemp.txt
	@$(LOCKFILENRSLOW) OpenTRV/scripts/gnuplotmultisensortemp.txt.lock
	@echo "Generating temperature graph $@ ..."
# Clear stale temporaries for both outputs.
# FIX(review): the argument "$@.tmp.bak" was listed twice here; the second
# occurrence was almost certainly meant to be "$@.L.tmp".
	@/bin/rm -f $@.tmp $@.tmp.bak $@.L.tmp $@.L.tmp.bak
# Header line of sensor short names, then T|C16 data for all known sensor IDs.
	@echo "- $$(cat $(OTSensorShortnames))" > .work/multisensortemp.dat
	@cat /dev/null $$(cat ${STATSREMOTELAST8}) | \
	    OpenTRV/scripts/filterJSON -multiID 'T|C16' $$(cat $(OTSensorIDs))|\
	    $(PROPDOWN) >> .work/multisensortemp.dat
# Echo first/last records as a sanity check in the build log.
	@head -1 .work/multisensortemp.dat
	@tail -1 .work/multisensortemp.dat
# The gnuplot script writes fixed files .work/multisensortemp.png and
# .work/multisensortempL.png; install each atomically.
	@gnuplot OpenTRV/scripts/gnuplotmultisensortemp.txt
	@/bin/mv .work/multisensortemp.png $@.tmp
	@-chmod -f u+w $@.tmp
	@if [ -x $(OPTIPNG) ]; then $(OPTIPNG) -q -o1 $@.tmp; fi
	@-chmod -f u+w $@
	@chmod -f 644 $@.tmp
	@/bin/mv -f $@.tmp $@
	@/bin/mv .work/multisensortempL.png $@.L.tmp
	@-chmod -f u+w $@.L.tmp
	@if [ -x $(OPTIPNG) ]; then $(OPTIPNG) -q -o1 $@.L.tmp; fi
	@-chmod -f u+w out/hourly/16WWmultisensortempL.png
	@chmod -f 644 $@.L.tmp
	@/bin/mv -f $@.L.tmp out/hourly/16WWmultisensortempL.png
	@/bin/rm -f /tmp/remoteTemp8d.dat .work/multisensortemp.dat
	@/bin/rm -f OpenTRV/scripts/gnuplotmultisensortemp.txt.lock


# Some manually-generated graphs (automating may be slow and security hazard).

# Boiler/occupancy/valve% over a week.
# Don't generate automatically for cost/security.
#graphs-manual:: out/daily/occBV.png
out/daily/occBV.png: \
	    $(wildcard data/OpenTRV/.private/stats/remote/updated.JSON.flag) \
	    OpenTRV/scripts/occBV.sh \
	    OpenTRV/scripts/gnuplotOccBV.txt
	@$(LOCKFILENRSLOW) $@.lock
	@/bin/rm -f $@.bak $@.tmp $@.dat.tmp
	@echo "Generating boiler/occupancy/valve graph $@ ..."
# Extract data from the last ~8 days of JSON stats, then plot to stdout.
	@cat `find data/OpenTRV/.private/stats/remote/ -name '20??????.json' -mtime -8 | sort | tail -8` | \
	        sh OpenTRV/scripts/occBV.sh > $@.dat.tmp
	@gnuplot < $@.dat.tmp OpenTRV/scripts/gnuplotOccBV.txt >$@.tmp
	@-chmod -f u+w $@.tmp
	@if [ -x $(OPTIPNG) ]; then $(OPTIPNG) -q $@.tmp; fi
# Install atomically; clear lock and temporaries.
	@-chmod -f u+w $@
	@chmod -f 644 $@.tmp
	@/bin/mv -f $@.tmp $@
	@/bin/rm -f $@.lock $@.tmp $@.dat.tmp

# Room vacancy hours over a day or two.
# Don't generate automatically for cost/security.
#graphs-manual:: out/daily/vac.png
out/daily/vac.png: \
	    $(wildcard data/OpenTRV/.private/stats/remote/updated.JSON.flag) \
	    OpenTRV/scripts/vac.sh \
	    OpenTRV/scripts/gnuplotVac.txt
	@$(LOCKFILENRSLOW) $@.lock
	@/bin/rm -f $@.bak $@.tmp $@.dat.tmp
	@echo "Generating room vacancy graph $@ ..."
# Extract data from the last ~2 days of JSON stats, then plot to stdout.
	@cat `find data/OpenTRV/.private/stats/remote/ -name '20??????.json' -mtime -2 | sort | tail -2` | \
	        sh OpenTRV/scripts/vac.sh > $@.dat.tmp
	@gnuplot < $@.dat.tmp OpenTRV/scripts/gnuplotVac.txt >$@.tmp
	@-chmod -f u+w $@.tmp
	@if [ -x $(OPTIPNG) ]; then $(OPTIPNG) -q $@.tmp; fi
# Install atomically; clear lock and temporaries.
	@-chmod -f u+w $@
	@chmod -f 644 $@.tmp
	@/bin/mv -f $@.tmp $@
	@/bin/rm -f $@.lock $@.tmp $@.dat.tmp

# Valve parameters for one key valve under examination for a day or two.
#graphs-manual:: out/hourly/vt.png
# ID (hex) of the valve to examine.
VTVALVE=0a45
out/hourly/vt.png: \
	    $(wildcard data/OpenTRV/.private/stats/remote/updated.JSON.flag) \
	    OpenTRV/scripts/valveAndTemps.sh \
	    OpenTRV/scripts/gnuplotValveAndTemps.txt
	@$(LOCKFILENRSLOW) $@.lock
	@/bin/rm -f $@.bak $@.tmp $@.dat.tmp
	@echo "Generating room valve-and-temp graph $@ ..."
# Extract data for $(VTVALVE) from the last ~2 days of JSON stats, then plot.
	@cat `find data/OpenTRV/.private/stats/remote/ -name '20??????.json' -mtime -2 | sort | tail -2` | \
	        sh OpenTRV/scripts/valveAndTemps.sh $(VTVALVE) > $@.dat.tmp
	@gnuplot < $@.dat.tmp OpenTRV/scripts/gnuplotValveAndTemps.txt >$@.tmp
	@-chmod -f u+w $@.tmp
	@if [ -x $(OPTIPNG) ]; then $(OPTIPNG) -q $@.tmp; fi
# Install atomically; clear lock and temporaries.
	@-chmod -f u+w $@
	@chmod -f 644 $@.tmp
	@/bin/mv -f $@.tmp $@
	@/bin/rm -f $@.lock $@.tmp $@.dat.tmp


# Alternative data formats from the tap-water temperature HTML source.
MAINSWATERDERIVEDDATA= \
    data/16WW-mains-water-inlet-temperature.csv \
    data/16WW-mains-water-inlet-temperature-month-cadence.csv \
    data/16WW-mains-water-inlet-temperature-month-cadence.mid \
    data/16WW-mains-water-inlet-temperature-month-cadence.house.mid
data:: $(MAINSWATERDERIVEDDATA)
# Force update of extracted data before canonical page goes live.
note-on-data-for-16WW-mains-water-inlet-temperature.html: \
    $(MAINSWATERDERIVEDDATA)
# Extract CSV data file from HTML
# Rebuild is atomic and does not require a lock.
# ($$$$ expands to the shell PID to make the temp filename unique.)
data/16WW-mains-water-inlet-temperature.csv: \
    .note-on-data-for-16WW-mains-water-inlet-temperature.html \
    script/16WW-mains-water-inlet-temperature-toCSV
	@echo "Building $@"
	@t=$@.$$$$.tmp; script/16WW-mains-water-inlet-temperature-toCSV > $$t; chmod a+r $$t; mv $$t $@
	@chmod a+r $@
# Extract regular-cadence (one record per month) data (nominally CSV).
data/16WW-mains-water-inlet-temperature-month-cadence.csv: \
    data/16WW-mains-water-inlet-temperature.csv \
    script/mkaudio/fill-in-months-CSV
	@echo "Building $@"
	@t=$@.$$$$.tmp; script/mkaudio/fill-in-months-CSV < data/16WW-mains-water-inlet-temperature.csv > $$t; chmod a+r $$t; mv $$t $@
# Convert regular-cadence data into simple MIDI file.
data/16WW-mains-water-inlet-temperature-month-cadence.mid: \
    data/16WW-mains-water-inlet-temperature-month-cadence.csv \
    script/mkaudio/house/textToMIDIv2p1-house.sh
	@echo "Building $@"
	@t=$@.$$$$.tmp; sh script/mkaudio/house/textToMIDIv2p1-house.sh data/16WW-mains-water-inlet-temperature-month-cadence.csv > $$t; chmod a+r $$t; mv $$t $@
# Convert regular-cadence data into 'house' MIDI file (with percussion).
data/16WW-mains-water-inlet-temperature-month-cadence.house.mid: \
    data/16WW-mains-water-inlet-temperature-month-cadence.csv \
    script/mkaudio/house/textToMIDIv2p1-house.sh
	@echo "Building $@"
	@t=$@.$$$$.tmp; sh script/mkaudio/house/textToMIDIv2p1-house.sh -perc house data/16WW-mains-water-inlet-temperature-month-cadence.csv > $$t; chmod a+r $$t; mv $$t $@


# Rebuild some data displays as required.
all:: data
data:: data/WW-PV-roof/raw/index.html
#RAWFILES:=$(shell ls data/WW-PV-roof/W*.csv data/WW-PV-roof/raw/*.txt)
#data/WW-PV-roof/raw/index.html: $(RAWFILES) data/WW-PV-roof/raw/makefile
# Delegate to the raw-data directory's own makefile (recursive $(MAKE)).
# NOTE(review): depending on the directory data/WW-PV-roof/raw means its
# mtime (any file add/remove) triggers a rebuild — presumably intentional.
data/WW-PV-roof/raw/index.html: \
		data/WW-PV-roof/raw/makefile \
		data/WW-PV-roof/raw/makeHTML.sh \
		data/WW-PV-roof/raw
	cd data/WW-PV-roof/raw && $(MAKE)

# 16WW gas vs HDD.
all:: ${gasHDD12kWh}
all:: ${gaskWhvsHDD12}
# Full-size and thumbnail PNG names derived from the CSV names.
gasHDD12kWhPNG = $(gasHDD12kWh:.csv=.png) $(gasHDD12kWh:.csv=-tn.png)
gaskWhvsHDD12PNG = $(gaskWhvsHDD12:.csv=.png) $(gaskWhvsHDD12:.csv=-tn.png)
graphs:: ${gasHDD12kWhPNG}
${gasHDD12kWh} ${gasHDD12kWhPNG} ${gaskWhvsHDD12PNG}: \
    graphing/gaskWhPerHDD.sh \
    graphing/dailygaskWh.txt graphing/kWhHDDregression.txt
# Build rebuild rule dependency against one of the key input files.
# Doesn't need a lock.
# Pattern rule with several % targets: for pattern rules GNU make treats
# one recipe invocation as building all of the listed outputs together.
# The sed extracts the 4-digit year from the target filename.
out/monthly/16WW-date-HDD12-kWh-%.png \
out/monthly/16WW-date-HDD12-kWh-%-tn.png \
out/monthly/16WW-date-kWh-vs-HDD12-%.png \
out/monthly/16WW-date-kWh-vs-HDD12-%-tn.png \
out/monthly/16WW-date-HDD12-kWh-%.csv: data/HDD/EGLL_HDD_12.0C-%.csv
	@echo "Building $@"
	@sh graphing/gaskWhPerHDD.sh `echo $(@F) | sed -e 's/^.*-\(20[0-9][0-9]\)[-.][^0-9]*$$/\1/'`


# To keep mean page performance good, only top few new/popular carry ads.
# Any/most that carry ads are periodically rebuilt in case no longer eligible.
# This can be entirely asynchronous and heuristic without harm!
# Forces weekly update of these pages: $(POPARTS) should update more frequently.
# An import to create a suitable dependency is itself rebuilt periodically.
# Only do this rebuild if VHIGH or FULL.
# Builds atomically without the need for locks.
# DHD20210709: AMP pages now excluded.
# DHD20251008: added MPAGES as now potentially ad-laden c/o soft param.
PAGESWITHADSDEPS=$(WORKTMP)/pages-with-ads.mk
ifneq ($(wildcard $(PWRVHIGHFLAG))$(NOPWR),)
# Only do any of this if storage VHIGH or better.
pages:: $(PAGESWITHADSDEPS)
# Only refresh the dependency map when the Pi is not throttling.
ifeq (0,$(RPITHROTTLEVAL))
$(PAGESWITHADSDEPS): makefile
ifneq ($(wildcard $(PWRVHIGHFLAG))$(NOPWR),)
# Try to ensure this view of hi-ad pages is up-to-date when abundant energy,
# so that race conditions don't let pages linger an extra week either way.
$(PAGESWITHADSDEPS): $(wildcard $(DAILY))
endif
endif
-include $(PAGESWITHADSDEPS)
$(PAGESWITHADSDEPS): $(wildcard $(WEEKLY))
endif
# Generated makefile fragment: one "page: $(wildcard $(WEEKLY))" line per page
# containing the marker 466711416338192 (presumably the ad client ID — verify).
# /dev/null keeps egrep -l well-behaved when the page list is empty.
$(PAGESWITHADSDEPS):
	@echo "Building $@"
	@T=$@.$$$$.tmp; \
	    egrep -l 466711416338192 /dev/null $(sort $(wildcard $(PAGES) $(MPAGES) $(OTHERPAGES))) | \
		awk '{print $$1, ": $$(wildcard $$(WEEKLY))"}' > $$T; \
	    chmod -f u+w $@; \
	    mv -f $$T $@; \
	    chmod og-rwx $@


# Ensure that any pages that are reasonably popular are periodically rebuilt.
# This allows auto-inserted side-content to stay fresh, for example.
# Includes all pages with same number of visits at bottom of the selection.
# The number of pages in scope should be higher than SOFTPADHIGHESTRANK.
# This does not apply to 'lite' pages, which have little such side-content.
# This is somewhat more than the visible top-10.
# This freshening need only happen when there is plenty of energy available.
# Only do this rebuild if HIGH.
# (The list to freshen can be built when HIGH, as should be low-effort.)
# Builds atomically without the need for locks.
# DHD20210709: AMP pages now excluded.
POPPAGESTOREBUILD=$(WORKTMP)/popfresh.mk
.PHONY: pages-pop
pages-pop::
ifneq ($(wildcard $(PWRHIGHFLAG))$(NOPWR),)
pages-pop:: $(POPPAGESTOREBUILD)
pages:: $(POPPAGESTOREBUILD)
# Generated makefile fragment: takes the top ~17 entries of $(POPARTS)
# (plus any ties on the count in column 1 at the cut-off), emitting for each
# page a weekly-rebuild dependency and a pages-pop:: hook.
$(POPPAGESTOREBUILD): $(POPARTS) $(wildcard $(WEEKLY))
	@echo "Building $@"
	@T=$@.$$$$.tmp; \
	    awk <$(POPARTS) 'NR>17 && ($$1!=prev) {exit} {prev=$$1; print $$2": $$(wildcard $$(WEEKLY))"; print "pages-pop:: "$$2;}' > $$T; \
	    chmod -f u+w $@; \
	    mv -f $$T $@; \
	    chmod og-rwx $@
-include $(POPPAGESTOREBUILD)
endif

# Ensure that any pages with embargoed text are periodically rebuilt.
# Embargo dates are probably not miles in the future, so weekly is fine.
# Only do this rebuild if HIGH.
# Relies on INFO: EMBARGO log messages from desktop (non-WARNING) builds.
# Creates dependencies for all page builds (desktop, lite, offline).
# TODO: filter to retain only embargoes this month or already due to have passed.
EMBARGOED=$(WORKTMP)/embargoed.mk
.PHONY: embargoed
embargoed::
ifneq ($(wildcard $(PWRHIGHFLAG))$(NOPWR),)
embargoed:: $(EMBARGOED)
pages:: $(EMBARGOED)
# Generated makefile fragment: for each page whose build .info log contains
# an "INFO: EMBARGO" line, emit weekly-rebuild dependencies for the desktop,
# mobile (m/) and offline variants of that page.
$(EMBARGOED): $(wildcard $(WEEKLY))
	@echo "Building $@"
	@T=$@.$$$$.tmp; \
	    egrep -l '^INFO: EMBARGO ' /dev/null $(wildcard $(PAGES:%=$(BUILDINFO)/%.info) $(OTHERPAGES:%=$(BUILDINFO)/%.info)) | sort -u | \
		awk '{basename=$$1; gsub("^.*/", "", basename); gsub("[.]info$$", "", basename); print basename " m/" basename " $(OFFLINEDIR)/" basename " : $$(wildcard $$(WEEKLY))"}' > $$T; \
	    chmod -f u+w $@; \
	    mv -f $$T $@; \
	    chmod og-rwx $@
-include $(EMBARGOED)
endif



# Snapshot offline-readable reasonable-efforts archive for main site pages.
# DHD20230827: EXPERIMENTAL - WIP
# DHD20250209: moved under out/monthly/archive and blocked in robots.txt
# Updates monthly; forces sources up to date and validated as far as possible.
# Note that this may depend on mobile includes: ensure dependencies are correct!
# The 'tar' command must be GNU-tar compatible.
# TODO: capture sub-pages in the archives.
MAINPAGESARCHIVE=out/monthly/archive/main-pages-archive.tar.xz
# Secondary less compressed but easier to use ZIP version.
MAINPAGESARCHIVEZIP=out/monthly/archive/main-pages-archive.zip
# Basic (ordered) path-free pages set to use for the offline archive.
MAINPAGESHTML=$(OTHERPAGES) $(PAGES)
# HTML dependencies from offline pages.
MAINPAGESARCHIVEHTMLMANIFEST=out/monthly/archive/main-pages-archive.manifest.html.txt
# Image dependencies from offline pages, whether present or not.
MAINPAGESARCHIVEIMGMANIFEST=out/monthly/archive/main-pages-archive.manifest.img.txt
# No dependencies until battery full enough!
# TODO: avoid picking up stale HTML pages.
# Sorted (LC_ALL=C for stable order) list of all *.html under $(OFFLINEDIR),
# excluding dotfiles and names with spaces; built under a non-blocking flock
# and installed atomically, refusing to replace a good list with an empty one.
$(MAINPAGESARCHIVEHTMLMANIFEST):
	@echo "Building $@"
	@($(FLOCKNB) 9 || exit 1; \
	    mkdir -p $(@D) && \
	    chmod a+rx $(@D) && \
	    (cd $(OFFLINEDIR) && find . -not \( -name '.?*' -prune \) \
		    -a -type f -a -name '*.html' \
		    -a -not -name '* *' \
		    -a -print) | \
		    sed -e 's|^[.]/||' | \
		(export LC_ALL=C; sort) > $(@D)/.$(@F).tmp && \
	    test -s $(@D)/.$(@F).tmp && \
	    if [ -f $@ ]; then chmod -f u+w $@; fi && \
	    chmod -f 644 $(@D)/.$(@F).tmp && \
	    /bin/mv -f $(@D)/.$(@F).tmp $@ \
	) 9>>$(@D)/.$(@F).flock
# No dependencies until battery full enough!
# Image manifest: unique "INFO: IMGSRC:" paths from the offline pages'
# build .info/.warn logs; same flock + atomic-install discipline as above.
$(MAINPAGESARCHIVEIMGMANIFEST):
	@echo "Building $@"
	@($(FLOCKNB) 9 || exit 1; \
	    mkdir -p $(@D) && \
	    chmod a+rx $(@D) && \
	    cat /dev/null $(wildcard $(MAINPAGESHTML:%=$(OFFLINEDIR)/$(BUILDINFO)/%.info) $(MAINPAGESHTML:%=$(OFFLINEDIR)/$(BUILDINFO)/%.warn)) | \
	    awk '/^INFO: IMGSRC:/ {print $$3}' | sort -u > $(@D)/.$(@F).tmp && \
	    test -s $(@D)/.$(@F).tmp && \
	    if [ -f $@ ]; then chmod -f u+w $@; fi && \
	    chmod -f 644 $(@D)/.$(@F).tmp && \
	    /bin/mv -f $(@D)/.$(@F).tmp $@ \
	) 9>>$(@D)/.$(@F).flock

# Ensure all build scripts are up to date for most compact/accurate result.
#$(MAINPAGESARCHIVEHTMLMANIFEST): allhtmlgendeps
#$(MAINPAGESARCHIVEIMGMANIFEST): allhtmlgendeps
# Do not destroy any existing archive if we cannot make a new one!
.PRECIOUS: $(MAINPAGESARCHIVEHTMLMANIFEST)
.PRECIOUS: $(MAINPAGESARCHIVEIMGMANIFEST)
.PRECIOUS: $(MAINPAGESARCHIVE) $(MAINPAGESARCHIVEZIP)
# Has no direct dependency until battery VHIGH.
# Builds pages and manifests via recursive $(MAKE) first (opages/fpages
# best-effort with -k), verifies both manifests are non-empty, then tars
# the HTML (relative to $(OFFLINEDIR)) and images (relative to its parent).
$(MAINPAGESARCHIVE): 
	@echo "Building $@"
	@mkdir -p $(@D)
	@chmod a+rx $(@D)
	@-$(MAKE) -k opages fpages
	@$(MAKE) $(MAINPAGESARCHIVEHTMLMANIFEST) $(MAINPAGESARCHIVEIMGMANIFEST) $(MAINPAGESHTML:%=$(OFFLINEDIR)/%)
	@test -s "$(MAINPAGESARCHIVEHTMLMANIFEST)"
	@test -s "$(MAINPAGESARCHIVEIMGMANIFEST)"
	@$(LOCKFILENRSLOW) $@.lock
	@/bin/rm -f $@.tmp
	@tar cfJ $@.tmp --numeric-owner \
            -C $(OFFLINEDIR) \
		$(sort $(wildcard $(shell cat $(MAINPAGESARCHIVEHTMLMANIFEST)))) \
            -C .. \
		$(wildcard $(GRIDINTENSITYHTMLPAGE)) \
		$(sort $(wildcard $(shell cat $(MAINPAGESARCHIVEIMGMANIFEST))))
	@test -s "$@.tmp"
	@-chmod -f u+w $@
	@chmod -f 644 $@.tmp
	@/bin/mv -f $@.tmp $@
	@/bin/rm -f $@.lock $@.tmp
# Depends on MAINPAGESARCHIVE for consistency.
# Same content as the tar but as a (less compressed) ZIP for easier use.
$(MAINPAGESARCHIVEZIP): $(MAINPAGESARCHIVE)
	@echo "Building $@"
	@$(LOCKFILENRSLOW) $@.lock
	@mkdir -p $(@D)
	@chmod a+rx $(@D)
	@/bin/rm -f $@.tmp.zip
	@cd $(OFFLINEDIR) && zip -q -9 ../$@.tmp.zip \
		$(sort $(wildcard $(shell cat $(MAINPAGESARCHIVEHTMLMANIFEST))))
	@zip -q -r9 $@.tmp.zip \
		$(wildcard $(GRIDINTENSITYHTMLPAGE)) \
		$(sort $(wildcard $(shell cat $(MAINPAGESARCHIVEIMGMANIFEST))))
	@test -s $@.tmp.zip
	@-chmod -f u+w $@
	@chmod -f 644 $@.tmp.zip
	@/bin/mv -f $@.tmp.zip $@
	@/bin/rm -f $@.lock $@.tmp.zip
# Attach real (monthly) dependencies to the archive targets only when
# battery is VHIGH/FULL (or power management disabled via NOPWR), so the
# expensive rebuilds only happen with abundant energy.
ifneq ($(wildcard $(PWRVHIGHFLAG))$(NOPWR),)
# Only attempt automatic archive rebuild when VHIGH/FULL.
#$(MAINPAGESARCHIVEIMGMANIFEST): $(MAINPAGESHTML:%=$(OFFLINEDIR)/%)
#$(MAINPAGESARCHIVEHTMLMANIFEST): $(MAINPAGESHTML:%=$(OFFLINEDIR)/%)
$(MAINPAGESARCHIVEIMGMANIFEST): $(wildcard $(MONTHLY))
$(MAINPAGESARCHIVEHTMLMANIFEST): $(wildcard $(MONTHLY))
$(MAINPAGESARCHIVE): $(wildcard $(MONTHLY))
$(MAINPAGESARCHIVE): $(MAINPAGESARCHIVEIMGMANIFEST)
$(MAINPAGESARCHIVE): $(MAINPAGESARCHIVEHTMLMANIFEST)
all:: $(MAINPAGESHTML:%=$(OFFLINEDIR)/%)
all:: $(MAINPAGESARCHIVEIMGMANIFEST)
all:: $(MAINPAGESARCHIVEHTMLMANIFEST)
all:: $(OTHERFPAGES)
all:: $(FPAGES) $(PAGESFVALID)
all:: $(MAINPAGESARCHIVE)
all:: $(MAINPAGESARCHIVEZIP)
endif
mainpagesarchive: $(MAINPAGESARCHIVE) $(MAINPAGESARCHIVEZIP)


# Archive of all (public) data files.
# Updated monthly.
# List of such public data file names (sorted and with some safety exclusions).
PUBLICDATAFILELIST=out/monthly/archive/public-data-files.manifest.txt
# Compressed archive of public data files.
PUBLICDATAFILEARCH=out/monthly/archive/public-data-files.tar.xz
# Has no direct dependency until battery VHIGH.
$(PUBLICDATAFILELIST):
	@echo "Building $@"
	@$(LOCKFILENRSTD) $@.lock
	@mkdir -p $(@D)
	@chmod a+rx $(@D)
	@/bin/rm -f $@.tmp
# List files under data/, excluding dotfiles (pruned), names starting '-',
# names containing spaces, and *.tmp; sorted with LC_ALL=C for stable order.
# FIX(review): @-silenced for consistency with every other line of this recipe.
	@find data -not \( -name '.?*' -prune \) \
		-a -not -name '-*' \
		-a -not -name '* *' \
		-a -not -name '*.tmp' \
		-a -type f -print | \
	    (export LC_ALL=C; sort) > $@.tmp
	@-chmod -f u+w $@
	@chmod -f 644 $@.tmp
	@/bin/mv -f $@.tmp $@
	@/bin/rm -f $@.lock
# xz-compressed tar of all files named in the manifest (-T reads the list).
$(PUBLICDATAFILEARCH): $(PUBLICDATAFILELIST)
	@echo "Building $@"
	@$(LOCKFILENRSLOW) $@.lock
	@mkdir -p $(@D)
	@chmod a+rx $(@D)
	@tar cfJ $@.tmp --numeric-owner \
            -T $(PUBLICDATAFILELIST)
	@test -s $@.tmp
	@-chmod -f u+w $@
	@chmod -f 644 $@.tmp
	@/bin/mv -f $@.tmp $@
	@/bin/rm -f $@.lock
#
# Only attach real (monthly) dependencies when battery VHIGH/FULL.
ifneq ($(wildcard $(PWRVHIGHFLAG))$(NOPWR),)
# Only attempt automatic archive rebuild when VHIGH/FULL.
$(PUBLICDATAFILELIST): $(wildcard $(MONTHLY))
all:: $(PUBLICDATAFILELIST)
all:: $(PUBLICDATAFILEARCH)
endif
# Convenience targets; FIX(review): declared .PHONY since no files of these
# names are ever created, so stray same-named files cannot break them.
.PHONY: publicdatafilelist publicdatafilearch
publicdatafilelist: $(PUBLICDATAFILELIST)
publicdatafilearch: $(PUBLICDATAFILEARCH)



# Update list of EOU site bandwidth hogs weekly (when enough energy).
# Built atomically via a PID-unique temp file; no lock needed.
BANDWIDTHHOGS=$(WORKTMP)/bandwidthHogs.txt
$(BANDWIDTHHOGS): .work/script/bandwidthhogs.sh
	@echo "Building $@"
	@T=$@.$$$$.tmp; \
	    sh .work/script/bandwidthhogs.sh < $(SITELOGPREV) > $$T; \
	    mv -f $$T $@;
# Only rebuild against the (previous) site log when battery VHIGH/FULL.
ifneq ($(wildcard $(PWRVHIGHFLAG))$(NOPWR),) 
$(BANDWIDTHHOGS): $(SITELOGPREV)
all:: $(BANDWIDTHHOGS)
endif


# Run unit tests.
# Builds some of the items to be tested before recursing.
# NOTE: 'tests' is deliberately NOT .PHONY — the trailing touch makes it a
# stamp file so tests rerun only when the listed prerequisites change.
all:: tests
tests: test-page.html m/test-page.html \
		testcases/makefile \
		$(HTMLGENDEPS) makefile
	cd testcases && $(MAKE) all
	touch $@


# Force build all hero images.
# This does not need the pages that use them to be rebuilt.
# TODO: fix hard-wired 1200 recommended image width here.
# 'allheroimgs' is a stamp file (touched on success), guarded by a slow lock;
# per-image output accumulates in $(WORKTMP)/allheroimgs.log.
allheroimgs: script/image_list_hero script/get_hero_img_inline
	@echo "Eagerly building hero image caches..."
	@echo "Building $@"
	@$(LOCKFILENRSLOW) $@.lock
	@rm -f $(WORKTMP)/$@.log
# Randomised order (sort -R) so repeated interrupted runs make even progress.
	@for h in `script/image_list_hero | sort -R`; \
	  do \
	  script/get_hero_img_inline insTop $$h .X.html "./" false \
		>> $(WORKTMP)/$@.log; \
          script/get_hero_img_inline autogenHiresHeroImgs $$h .X.html "./" false 1200 \
		>> $(WORKTMP)/$@.log; \
          done
# Second pass over the -SQTN variant list, hi-res generation only.
	@for h in `script/image_list_hero -SQTN | sort -R`; \
	  do \
          script/get_hero_img_inline autogenHiresHeroImgs $$h .X.html "./" false 1200 \
		>> $(WORKTMP)/$@.log; \
          done
	@touch $@
	@/bin/rm -f $@.lock
ifneq ($(wildcard $(PWRVHIGHFLAG))$(NOPWR),)
# Only attempt (weekly) hero image rebuild when VHIGH/FULL.
allheroimgs: $(wildcard $(WEEKLY))
all:: allheroimgs
endif

# Force build (nearly) all body images.
# This does not need the pages that use them to be rebuilt.
# 'allbodyimgs' is a stamp file (touched on success), guarded by a slow lock.
allbodyimgs: script/image_list_body script/get_hero_img_inline
	@echo "Eagerly building body image caches..."
	@echo "Building $@"
	@$(LOCKFILENRSLOW) $@.lock
	@rm -f $(WORKTMP)/$@.log
# Generate one get_hero_img_inline command per image (randomised order)
# and pipe the whole batch through a single shell.
	@script/image_list_body | sort -R | \
            awk '{print "script/get_hero_img_inline floatImg "$$1" .X.html ./ false \"\" \"\" "$$2}' | \
            sh >> $(WORKTMP)/$@.log
	@touch $@
	@/bin/rm -f $@.lock
ifneq ($(wildcard $(PWRVHIGHFLAG))$(NOPWR),)
# Only attempt (weekly) body image rebuild when VHIGH/FULL.
allbodyimgs: $(wildcard $(WEEKLY))
all:: allbodyimgs
endif


# Special (static) pages not in normal framing/wrapper, nor on mobile.
SPECIALPAGES= \
    Design-Brief-Generator.html \
    _dashboard.html
# Precompressed forms, eg Design-Brief-Generator.htmlgz / .htmlbr
# (the COMPSUF* suffixes deliberately contain no dot — site convention;
# NOTE(review): confirm no ".html.gz" form was intended here).
SPCWPAGES=$(SPECIALPAGES:%=%$(COMPSUFGZIP)) $(SPECIALPAGES:%=%$(COMPSUFBROTLI))
.PHONY: specialpages
specialpages:: $(SPCWPAGES)
	@chmod a+r $(SPCWPAGES)
pages:: specialpages


# Clean up some junk periodically: *.tmp files older than a day under WORKTMP.
.PHONY: tempclean
all:: tempclean
tempclean:
# FIX(review): guard against an empty $(WORKTMP), which would previously have
# expanded to "find /" and deleted *.tmp files across the whole filesystem.
	@test -n "$(WORKTMP)" || { echo "tempclean: WORKTMP is empty" >&2; exit 1; }
# -delete avoids forking one rm per file (equivalent here: -type f matches).
	@find $(WORKTMP)/ -name '*.tmp' -type f -mtime +1 -delete


# Dummy target that is never up-to-date.
# .PHONY: FORCE
# FORCE:
