Address MinGW-Issue #39902; correct minizip dependencies.
[mingw/mingw-dist.git] / Makefile.comm.in
index 0d8ba73..201c1db 100644
@@ -3,7 +3,7 @@
 # $Id$
 #
 # Written by Keith Marshall <keithmarshall@users.sourceforge.net>
-# Copyright (C) 2010, MinGW Project
+# Copyright (C) 2010, 2011, 2013, MinGW.org Project
 #
 #
 # Makefile template for generating mingw-get distribution manifests.
 # MinGW Project, accept liability for any damages, however caused,
 # arising from the use of this software.
 #
-VPATH = ${srcdir}
+VPATH = ${srcdir}:${top_srcdir}:${top_builddir}
 
 # Define hooks for invoking system tools.
 #
 LN_S = @LN_S@
 
-all: all-sync all-distfiles
+all: all-distfiles
+
+# Capture repository infrastructure changes.  The following rules ensure
+# that configure, config.status, and all working makefiles will be updated,
+# when any of their respective sources are modified.
+#
+configure: configure.ac makeopts.m4
+       cd ${top_srcdir}; autoconf
+
+config.status: configure
+       cd ${top_builddir}; ./config.status --recheck
+
+Makefile: config.status Makefile.in Makefile.stub.in Makefile.comm.in
+       cd ${top_builddir}; ./config.status
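
These hooks follow the familiar autoconf self-rebuild convention: whenever
configure.ac, makeopts.m4, or one of the makefile templates changes, make
first brings the build system itself up to date, before pursuing any other
goal.  A rough sketch of the expected sequence, assuming a build directory
configured from ${top_srcdir}:

    $ touch ${top_srcdir}/makeopts.m4
    $ make
    cd ${top_srcdir}; autoconf
    cd ${top_builddir}; ./config.status --recheck
    cd ${top_builddir}; ./config.status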
 
 # To accommodate a top-level make on just a single subdirectory, we list
 # each of the managed subdirectories as an independent goal, invoking the
@@ -49,8 +62,9 @@ Makefile.sub: ${srcdir}/*.xml
        for file in $^; do echo "$$file.lzma \\" | sed 's,.*/,  ,' >> $@; done
        echo '  $$(EXTRA_DISTFILES)' >> $@
 
-include Makefile.sub
-all-distfiles: unpublished $(DISTFILES) $(auto-distfiles)
+sinclude Makefile.sub
+all-distfiles: unpublished issue.sed $(DISTFILES) $(auto-distfiles)
+@SET_MAKE@
 
 # Distributed manifests are serialised by incorporating a date-stamped
 # issue number, of the form YYYYMMDDNN; we track issue numbers using the
@@ -58,8 +72,25 @@ all-distfiles: unpublished $(DISTFILES) $(auto-distfiles)
 # can share a common issue number registry).
 #
 issue_number = YYYYMMDDNN
+issue_key = awk '$$3 == "$*.xml" { print $$1 }' issue.new
 issue_log = ${srcdir}/issue.log
 
+update_issue_number = \
+  test -n "$$issue" || issue=0; \
+  test $$issue -lt $${mark="`date -u +%Y%m%d`00"} && \
+    issue=$$mark || issue=`expr $$issue + 1`
+generate_catalogue = -f ${top_builddir}/issue.sed -e $(store_issue_number)
+store_issue_number = "s/@$(issue_number)@/$(issue)/"
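
Together, these macros implement the YYYYMMDDNN serialisation policy noted
above: the first issue published on any given UTC day becomes <date>00, and
each further issue on the same day simply increments that value.  A
stand-alone shell rendition of $(update_issue_number), for illustration only:

    issue=${issue:-0}                # no prior issue recorded
    mark=`date -u +%Y%m%d`00         # e.g. 2013070900 for 9th July 2013
    if test $issue -lt $mark
      then issue=$mark               # first issue of the day
      else issue=`expr $issue + 1`   # a subsequent issue, on the same day
    fi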
+
+# We use SHA1 hashes to determine when source files have been changed
+# from the last published version, as recorded in `issue.log'; the hash
+# is computed by openssl, after filtering the source through awk; (this
+# ensures that the computed hash is not influenced by any unintentional
+# pollution due to accidental insertion of CRLF line endings).
+#
+sha1hash = awk '{ sub( "\r$$", "" ); print }' $$catalogue | $(sha1sum)
+sha1sum = openssl sha1 | awk '{print $$NF}'
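
For illustration, the same digest may be reproduced by hand; openssl reports
its result in a form such as "(stdin)= <hash>", so the trailing awk stage
keeps only the final field, i.e. the bare 40-digit hexadecimal hash which is
recorded in issue.log; (here "some-catalogue.xml" stands for any one of the
XML manifests in ${srcdir}):

    $ awk '{ sub( "\r$", "" ); print }' some-catalogue.xml \
        | openssl sha1 | awk '{print $NF}'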
+
 # Formatting within issue.log is controlled by PAD and TAB settings.
 #
 TAB = [         ]
@@ -77,20 +108,22 @@ PAD = "  "
 # any updated manifest is uploaded to the repository server.
 #
 %.xml.lzma: %.xml
+       @echo
        >> $(issue_log)
-       sed '/^$(TAB)*$$/d;/^$(TAB)*#/d' $(issue_log) > issue.tmp
-       sed -n '/^$(TAB)*$$/q;/^$(TAB)*[^#]/q;p' $(issue_log) > issue.new
-       test x$${issue="`sed -n 's/^$(TAB)*$*.xml://p' $(issue_log)`"} = x && \
-         issue=0 && echo $(PAD)"$*.xml:0" >> issue.tmp; \
-       test $$issue -lt $${mark="`date -u +%Y%m%d`00"} && \
-         issue=$$mark || issue=`expr $$issue + 1`; \
-       sed "s/^\($(TAB)*$*.xml:\).*/\1$$issue/" issue.tmp | sort >> issue.new; \
-       sed "s/@$(issue_number)@/$$issue/" $< | lzma -c > $@
-       sed -n 's/^$(TAB)*$$/break/;1,/^$(TAB)*[^#]/d;/^$(TAB)*#/p' \
-         $(issue_log) >> issue.new
-       rm -f $(issue_log) issue.tmp
-       mv issue.new $(issue_log)
-       cd unpublished && $(LN_S) ../$@ .
+       rm -f issue.new issue.tmp
+       test -f ${top_builddir}/issue.sed || $(MAKE) issue.sed
+       sed '/^$(TAB)*$$/d;/^$(TAB)*#/d' $(issue_log) > issue.new
+       @catalogue="$<" issue=`awk '$$3 == "$*.xml" { print $$2 }' issue.new`; \
+         if test x$${hash="`$(sha1hash)`"} != x"`$(issue_key)`"; then \
+           $(update_issue_number); \
+           $(RMAKE) refname=$*.xml issue=$$issue hash=$$hash issue.tmp; \
+           fi; \
+         $(RMAKE) catalogue=$@ source=$< issue=$$issue generate-catalogue
+       @if test -f issue.tmp; then \
+         $(RMAKE) refname=$@ select-for-publication; \
+         $(RMAKE) update-issue-log; \
+         fi
+       rm -f issue.new issue.tmp
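
In outline, then, an explicit request to rebuild a single catalogue, say a
hypothetical foo.xml, proceeds through the recursive hooks defined later in
this file; (issue and hash values are placeholders):

    $ make foo.xml.lzma
      $(RMAKE) refname=foo.xml issue=<issue> hash=<sha1> issue.tmp
                                       # only when foo.xml's hash has changed
      $(RMAKE) catalogue=foo.xml.lzma source=foo.xml issue=<issue> generate-catalogue
      $(RMAKE) refname=foo.xml.lzma select-for-publication
                                       # only when issue.tmp was created
      $(RMAKE) update-issue-log        # likewise

leaving the recompressed catalogue linked into unpublished/, and issue.log
rewritten with the new hash and issue number.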
 
 .PHONY: FORCE
 # An internal target, to specify a dependency which must always be updated.
@@ -103,40 +136,103 @@ FORCE:
 # processed, even on explicit file-by-file request).
 #
 ${srcdir}/*.xml: unpublished
-unpublished: FORCE
-       test -d unpublished || mkdir unpublished
-
-# Before creating any set of files for publication, the local working copy
-# of the entire publication data set needs to be synchronised with the state
-# of any externally published copies.  This is accomplished with reference
-# to the publication state as recorded in the `issue.log' files which may
-# be found in each publication subdirectory; the `all-sync-offline' and
-# `all-sync-to-cvs' targets fulfil this requirement, allowing the user to
-# select between synchronising to the locally recorded publication state as
-# is, or (preferably) after first synchronising the local state with the
-# state as recorded in the master CVS repository, respectively.
-#
-cvsroot = `cat ${srcdir}/CVS/Root`
-repository = `cat ${srcdir}/CVS/Repository`
-
-all-sync-offline: all-sync-offline-begin all-sync-to-cvs-or-offline
-all-sync all-sync-to-cvs: all-sync-to-cvs-begin all-sync-to-cvs-or-offline
-
-all-sync-begin: FORCE
+unpublished:
+       test -d $@ || mkdir $@
+
+# Package lists are dynamically updated, to record the latest issues of each
+# referenced package catalogue; the "sed" script used to accomplish this must
+# be generated, and driven from, within the top build directory.  Similarly,
+# the procedure for publishing updated catalogues to the file release system
+# must be directed to the top build directory, to process the entire tree.
+#
+published update-references issue.sed: FORCE
+       cd ${top_builddir}; $(MAKE) $@
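
Thus, when any of these goals is requested from within a package
sub-directory, the request is simply handed off to the top-level makefile,
which owns the issue.sed script and drives the whole tree; for example,
(with "foo" standing for any managed sub-directory):

    $ cd ${top_builddir}/foo
    $ make issue.sed
    cd ${top_builddir}; make issue.sed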
+
+# Requests to update references will be redirected back to each sub-directory
+# in turn, to be processed by the following recursive make rule.
+#
+update-local-references: unpublished issue.sed
+       rm -rf tmp; mkdir tmp; rm -f issue.tmp
+       sed '/^$(TAB)*$$/d;/^$(TAB)*#/d' $(issue_log) > issue.new
+       @for catalogue in `grep -l 'catalogue=' ${srcdir}/*.xml`; \
+         do refname=`echo $$catalogue | sed 's,^${srcdir}/,,'` \
+            issue=`awk '$$3 == "'$$refname'" { print $$2 }' issue.new`; \
+            $(RMAKE) refname=$$refname issue=$$issue generate-reference; \
+            lzma -dc $$refname.lzma | cmp -s - tmp/$$refname || \
+              { $(update_issue_number); \
+                $(RMAKE) dir=${top_builddir} issue.chk; \
+                $(RMAKE) refname=$$refname issue=$$issue \
+                  hash=`$(sha1hash)` issue.tmp; \
+                $(RMAKE) catalogue=$$refname.lzma source=$$catalogue \
+                  issue=$$issue generate-catalogue; \
+                $(RMAKE) refname=$$refname.lzma select-for-publication; \
+              }; \
+         done
+       @test -f issue.tmp && $(RMAKE) update-issue-log || true
+       rm -f ${top_builddir}/issue.sed issue.new issue.tmp
        rm -rf tmp
 
-all-sync-offline-begin: all-sync-begin
-       mkdir tmp
+# The preceding rules for compiling catalogues for publication, and resolving
+# "package-list" references, use several recursive make hooks to perform common
+# sub-tasks.  GNU make tends to be very verbose about entering and leaving the
+# directories in which these sub-tasks are performed, even when there is no
+# change of directory involved; to make it quieter, we prefer this form of
+# recursive invocation for local sub-tasks.
+#
+RMAKE = $(MAKE)@NO_PRINT_DIRECTORY@
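
It is assumed here that configure substitutes @NO_PRINT_DIRECTORY@ with GNU
make's " --no-print-directory" option, (or with nothing at all, for any make
which does not support it); each local hook is then effectively invoked as,
for example:

    make --no-print-directory refname=foo.xml issue=<issue> hash=<sha1> issue.tmp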
 
-all-sync-to-cvs-begin: all-sync-begin
-       cvs -z3 -Q -d ${cvsroot} checkout -A -d tmp ${repository}/issue.log
-       rm -f $(issue_log); mv tmp/issue.log $(issue_log)
+# The following sub-task rule creates a tentative "publication-ready" version
+# of each catalogue.
+#
+generate-catalogue: FORCE
+       sed $(generate_catalogue) $(source) \
+         | lzma -c > $(catalogue)
 
-all-sync-to-cvs-or-offline: FORCE
-       $(LN_S) ../$(issue_log) tmp
-       for file in ${srcdir}/*.xml *.xml.lzma; do \
-         if test -f $$file; then $(LN_S) ../$$file tmp; fi; done
-       cd tmp && $(MAKE) -f ../../Makefile.sync $@
-       rm -rf tmp
+# The following sub-task rule creates a reference copy of each catalogue, for
+# comparison with the tentative "publication-ready" version, when checking for
+# changes introduced by resolution of "package-list" references.
+#
+generate-reference: FORCE
+       sed $(generate_catalogue) ${srcdir}/$(refname) \
+         > tmp/$(refname)
+
+# When any generated catalogue has been found to differ from its previously
+# published version, (if any), the following rule adds it to the schedule of
+# catalogues which should be republished, (or published for the first time).
+#
+select-for-publication: FORCE
+       cd unpublished && $(LN_S) -f ../$(refname) .
+
+# The remaining macros and rules define the sub-tasks for management of the
+# temporary files used to control the iterative resolution of "package-list"
+# references, and to capture modifications to be recorded in the issue log...
+#
+extract_log_file_header = '/^$(TAB)*$$/q;/^$(TAB)*[^\#]/q;p'
+extract_log_file_footer = 's/^$(TAB)*$$/break/;1,/^$(TAB)*[^\#]/d;/^$(TAB)*\#/p'
+
+issue.chk: FORCE
+       test -f issue.tmp && mv -f issue.tmp issue.new || true
+       test -f ${dir}/$@ && echo check > ${dir}/$@ || true
+
+issue.new: FORCE
+issue.tmp: issue.new
+       awk '$$3 != "$(refname)"' $^ > $@
+       echo $(PAD)$(hash) $(issue) $(refname) >> $@
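
Each record appended here, and subsequently merged back into issue.log, thus
comprises three whitespace separated fields, in the order assumed by the awk
extractions defined earlier: the catalogue's SHA1 hash, its issue number, and
its file name; schematically:

      <40-hex-digit-sha1-hash> <YYYYMMDDNN> <catalogue-name>.xml

with each entry indented by the two-space $(PAD) prefix.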
+
+# ...with this final rule completing the recording of the current state of
+# publication, in the permanent issue log files.
+#
+update-issue-log: FORCE
+       sed -n $(extract_log_file_header) $(issue_log) > issue.new
+       LC_COLLATE=POSIX sort -k3 issue.tmp >> issue.new;
+       sed -n $(extract_log_file_footer) $(issue_log) >> issue.new;
+       rm -f $(issue_log) && mv issue.new $(issue_log)
+
+# The following goals may be specified, when building as a sub-project
+# of mingw-get-setup; make each a no-op here.
+#
+NO_OP_GOALS = build.tag dist clean distclean maintainer-clean
+.PHONY: $(NO_OP_GOALS)
+$(NO_OP_GOALS):
 
 # $RCSfile$: end of file