From 6bde5bd7302d9eeca63d55c8b57a6c544e56fe8c Mon Sep 17 00:00:00 2001 From: Waldemar Brodkorb Date: Sat, 8 Oct 2011 11:42:06 +0200 Subject: push pacemaker stuff in a usable state. --- package/Pacemaker-Python-GUI/Makefile | 47 +++++++++++++++++++ .../patches/patch-mgmt_daemon_mgmt_crm_c | 32 +++++++++++++ package/base-files/Makefile | 2 +- package/base-files/src/init | 1 + package/busybox/config/coreutils/Config.in | 2 +- package/cluster-glue/Makefile | 5 +- package/corosync/Makefile | 7 +-- package/corosync/patches/patch-Makefile_in | 11 ----- .../corosync/patches/patch-tools_corosync-keygen_c | 4 +- package/libxslt/Makefile | 5 +- package/openais/files/openais.init | 7 ++- package/pacemaker/Makefile | 23 ++++++--- package/pacemaker/files/pacemaker.postinst | 8 ++++ package/pacemaker/patches/patch-configure | 4 +- package/pacemaker/patches/patch-configure.orig | 54 ---------------------- package/pam/Makefile | 32 +++++++++++++ package/pam/patches/patch-Makefile_in | 24 ++++++++++ package/pam/patches/patch-conf_Makefile_in | 11 +++++ package/resource-agents/Makefile | 8 +++- package/resource-agents/patches/patch-configure | 11 +++++ 20 files changed, 210 insertions(+), 88 deletions(-) create mode 100644 package/Pacemaker-Python-GUI/Makefile create mode 100644 package/Pacemaker-Python-GUI/patches/patch-mgmt_daemon_mgmt_crm_c delete mode 100644 package/corosync/patches/patch-Makefile_in create mode 100644 package/pacemaker/files/pacemaker.postinst delete mode 100644 package/pacemaker/patches/patch-configure.orig create mode 100644 package/pam/Makefile create mode 100644 package/pam/patches/patch-Makefile_in create mode 100644 package/pam/patches/patch-conf_Makefile_in create mode 100644 package/resource-agents/patches/patch-configure diff --git a/package/Pacemaker-Python-GUI/Makefile b/package/Pacemaker-Python-GUI/Makefile new file mode 100644 index 000000000..616fa9360 --- /dev/null +++ b/package/Pacemaker-Python-GUI/Makefile @@ -0,0 +1,47 @@ +# This file is part of the OpenADK project. OpenADK is copyrighted +# material, please see the LICENCE file in the top-level directory. 
+ +include $(TOPDIR)/rules.mk + +PKG_NAME:= Pacemaker-Python-GUI +PKG_VERSION:= 2.1.0 +PKG_RELEASE:= 1 +PKG_MD5SUM:= 22379f78409cec9df10c25983783ef6c +PKG_DESCR:= Pacemaker GUI +PKG_SECTION:= ha +PKG_BUILDDEP:= pacemaker gnutls pam +PKG_URL:= http://hg.clusterlabs.org/pacemaker/pygui/ +PKG_SITES:= http://openadk.org/distfiles/ + +PKG_SUBPKGS:= PACEMAKER_PYTHON_GUI PACEMAKER_MGMTD +PKGSD_PACEMAKER_MGMTD:= Management daemon for Pacemaker GUI + +include $(TOPDIR)/mk/package.mk + +$(eval $(call PKG_template,PACEMAKER_PYTHON_GUI,pacemaker-python-gui,$(PKG_VERSION)-${PKG_RELEASE},${PKG_DEPENDS},${PKG_DESCR},${PKG_SECTION})) +$(eval $(call PKG_template,PACEMAKER_MGMTD,pacemaker-mgmtd,$(PKG_VERSION)-${PKG_RELEASE},${PKG_DEPENDS},${PKG_DESCR},${PKG_SECTION})) + +CONFIGURE_ARGS+= --with-initdir=/etc/init.d \ + --with-ocf-root=/usr/lib/ocf \ + --disable-fatal-warnings +TARGET_CFLAGS+= -I$(STAGING_DIR)/usr/include/python2.7 \ + -I$(STAGING_DIR)/usr/include/pacemaker \ + -I$(STAGING_DIR)/usr/include/heartbeat + +pacemaker-python-gui-install: + $(INSTALL_DIR) $(IDIR_PACEMAKER_PYTHON_GUI)/usr/bin + $(INSTALL_DIR) $(IDIR_PACEMAKER_PYTHON_GUI)/usr/lib/heartbeat-gui + $(CP) $(WRKINST)/usr/lib/heartbeat-gui/* \ + $(IDIR_PACEMAKER_PYTHON_GUI)/usr/lib/heartbeat-gui + $(INSTALL_BIN) $(WRKINST)/usr/bin/hb_gui \ + $(IDIR_PACEMAKER_PYTHON_GUI)/usr/bin + +pacemaker-mgmtd-install: + $(INSTALL_DIR) $(IDIR_PACEMAKER_MGMTD)/usr/lib + $(CP) $(WRKINST)/usr/lib/libhbmgmt*so* \ + $(IDIR_PACEMAKER_MGMTD)/usr/lib + $(INSTALL_DIR) $(IDIR_PACEMAKER_MGMTD)/usr/lib/heartbeat + $(INSTALL_BIN) $(WRKINST)/usr/lib/heartbeat/mgmtd \ + $(IDIR_PACEMAKER_MGMTD)/usr/lib/heartbeat + +include ${TOPDIR}/mk/pkg-bottom.mk diff --git a/package/Pacemaker-Python-GUI/patches/patch-mgmt_daemon_mgmt_crm_c b/package/Pacemaker-Python-GUI/patches/patch-mgmt_daemon_mgmt_crm_c new file mode 100644 index 000000000..1882093d5 --- /dev/null +++ b/package/Pacemaker-Python-GUI/patches/patch-mgmt_daemon_mgmt_crm_c @@ -0,0 +1,32 @@ +--- Pacemaker-Python-GUI-2.1.0.orig/mgmt/daemon/mgmt_crm.c 2011-08-03 13:54:23.000000000 +0200 ++++ Pacemaker-Python-GUI-2.1.0/mgmt/daemon/mgmt_crm.c 2011-10-07 16:15:59.000000000 +0200 +@@ -1393,7 +1393,7 @@ on_cleanup_rsc(char* argv[], int argc) + argv[1], cib_error2string(rc)); + } else { + buffer = crm_concat("fail-count", argv[2], '-'); +- delete_attr(cib_conn, cib_sync_call, XML_CIB_TAG_STATUS, dest_node, NULL, NULL, ++ delete_attr(cib_conn, cib_sync_call, XML_CIB_TAG_STATUS, dest_node, NULL, + NULL, buffer, NULL, FALSE); + crm_free(dest_node); + crm_free(buffer); +@@ -1403,7 +1403,7 @@ on_cleanup_rsc(char* argv[], int argc) + sleep(2); /* wait for the refresh */ + now_s = crm_itoa(now); + update_attr(cib_conn, cib_sync_call, +- XML_CIB_TAG_CRMCONFIG, NULL, NULL, NULL, NULL, "last-lrm-refresh", now_s, FALSE); ++ XML_CIB_TAG_CRMCONFIG, NULL, NULL, NULL, "last-lrm-refresh", now_s, FALSE); + crm_free(now_s); + + crmd_channel->ops->destroy(crmd_channel); +@@ -1509,9 +1509,11 @@ on_get_rsc_status(char* argv[], int argc + strncat(buf, " (orphaned)", sizeof(buf)-strlen(buf)-1); + } + ++ /* + if(is_set(rsc->flags, pe_rsc_failure_ignored)) { + strncat(buf, " (failure ignored)", sizeof(buf)-strlen(buf)-1); + } ++ */ + + ret = mgmt_msg_append(ret, buf); + break; diff --git a/package/base-files/Makefile b/package/base-files/Makefile index 27ecdaa7c..fa1fecd86 100644 --- a/package/base-files/Makefile +++ b/package/base-files/Makefile @@ -6,7 +6,7 @@ include $(TOPDIR)/mk/rootfs.mk PKG_NAME:= base-files PKG_VERSION:= 1.0 -PKG_RELEASE:= 47 
+PKG_RELEASE:= 48 PKG_SECTION:= base PKG_DESCR:= basic files and scripts diff --git a/package/base-files/src/init b/package/base-files/src/init index a9564011d..c57149fe3 100755 --- a/package/base-files/src/init +++ b/package/base-files/src/init @@ -12,6 +12,7 @@ mount -o nosuid,nodev,noexec -t sysfs sysfs /sys mount -o remount,nosuid,size=128k,mode=0755 -t tmpfs mdev /dev [ -d /dev/pts ] || mkdir /dev/pts [ -d /dev/shm ] || mkdir /dev/shm +mount -o nosuid,noexec -t tmpfs tmpfs /dev/shm mount -o nosuid,noexec -t devpts devpts /dev/pts exec 0<>/dev/console >&0 2>&0 echo >/dev/mdev.seq diff --git a/package/busybox/config/coreutils/Config.in b/package/busybox/config/coreutils/Config.in index 9c55d74aa..1b2d9d5fd 100644 --- a/package/busybox/config/coreutils/Config.in +++ b/package/busybox/config/coreutils/Config.in @@ -776,7 +776,7 @@ config BUSYBOX_TRUE config BUSYBOX_TTY bool "tty" - default n + default y help tty is used to print the name of the current terminal to standard output. diff --git a/package/cluster-glue/Makefile b/package/cluster-glue/Makefile index 140526ebc..170ae3a08 100644 --- a/package/cluster-glue/Makefile +++ b/package/cluster-glue/Makefile @@ -14,7 +14,6 @@ PKG_URL:= http://www.linux-ha.org/wiki/Cluster_Glue PKG_SITES:= http://openadk.org/distfiles/ PKG_CFLINE_CLUSTER_GLUE:= depends on ADK_TARGET_LIB_GLIBC || ADK_TARGET_LIB_EGLIBC -PKG_HOST_DEPENDS:= !darwin PKG_ARCH_DEPENDS:= x86 x86_64 include $(TOPDIR)/mk/package.mk @@ -28,8 +27,10 @@ CONFIGURE_ARGS+= --disable-bundled-ltdl \ --enable-fatal-warnings=no cluster-glue-install: - $(INSTALL_DIR) $(IDIR_CLUSTER_GLUE)/usr/lib + $(INSTALL_DIR) $(IDIR_CLUSTER_GLUE)/usr/lib/heartbeat $(CP) $(WRKINST)/usr/lib/*.so* \ $(IDIR_CLUSTER_GLUE)/usr/lib + $(CP) $(WRKINST)/usr/lib/heartbeat/* \ + $(IDIR_CLUSTER_GLUE)/usr/lib/heartbeat include ${TOPDIR}/mk/pkg-bottom.mk diff --git a/package/corosync/Makefile b/package/corosync/Makefile index 378fd618a..eb5ae8197 100644 --- a/package/corosync/Makefile +++ b/package/corosync/Makefile @@ -4,13 +4,14 @@ include $(TOPDIR)/rules.mk PKG_NAME:= corosync -PKG_VERSION:= 1.3.1 +PKG_VERSION:= 1.4.1 PKG_RELEASE:= 1 -PKG_MD5SUM:= c58459a009a3a9d0b9c00e276a190d90 +PKG_MD5SUM:= 66231146af210637393748add091021d PKG_DESCR:= Cluster Engine PKG_SECTION:= ha +PKG_DEPENDS:= cluster-glue PKG_URL:= http://www.corosync.org/ -PKG_SITES:= http://openadk.org/distfiles/ +PKG_SITES:= ftp://ftp:downloads@ftp.corosync.org/downloads/corosync-1.4.1/ PKG_CFLINE_COROSYNC:= select ADK_KERNEL_IP_MULTICAST diff --git a/package/corosync/patches/patch-Makefile_in b/package/corosync/patches/patch-Makefile_in deleted file mode 100644 index 4fc1ef74a..000000000 --- a/package/corosync/patches/patch-Makefile_in +++ /dev/null @@ -1,11 +0,0 @@ ---- corosync-1.3.1.orig/Makefile.in 2011-04-25 04:39:52.000000000 +0200 -+++ corosync-1.3.1/Makefile.in 2011-06-25 21:58:51.012672595 +0200 -@@ -320,7 +320,7 @@ corosysconf_DATA = conf/corosync.conf.ex - conf/corosync.conf.example.udpu - - SUBDIRS = include lcr lib exec services tools test pkgconfig \ -- man init -+ init - - RPMBUILDOPTS = --define "_sourcedir $(abs_builddir)" \ - --define "_specdir $(abs_builddir)" \ diff --git a/package/corosync/patches/patch-tools_corosync-keygen_c b/package/corosync/patches/patch-tools_corosync-keygen_c index 6e147930c..eecf92026 100644 --- a/package/corosync/patches/patch-tools_corosync-keygen_c +++ b/package/corosync/patches/patch-tools_corosync-keygen_c @@ -1,5 +1,5 @@ ---- corosync-1.3.1.orig/tools/corosync-keygen.c 2011-04-25 04:37:50.000000000 +0200 
-+++ corosync-1.3.1/tools/corosync-keygen.c 2011-05-26 07:27:22.000000000 +0200 +--- corosync-1.4.1.orig/tools/corosync-keygen.c 2011-07-26 10:08:43.000000000 +0200 ++++ corosync-1.4.1/tools/corosync-keygen.c 2011-10-04 21:03:05.000000000 +0200 @@ -65,11 +65,11 @@ int main (void) { } } diff --git a/package/libxslt/Makefile b/package/libxslt/Makefile index b82a80281..b4306c66d 100644 --- a/package/libxslt/Makefile +++ b/package/libxslt/Makefile @@ -5,7 +5,7 @@ include ${TOPDIR}/rules.mk PKG_NAME:= libxslt PKG_VERSION:= 1.1.24 -PKG_RELEASE:= 1 +PKG_RELEASE:= 2 PKG_MD5SUM:= e83ec5d27fc4c10c6f612879bea9a153 PKG_DESCR:= XSLT Library PKG_SECTION:= libs @@ -42,7 +42,8 @@ xsltproc-install: libxslt-install: ${INSTALL_DIR} ${IDIR_LIBXSLT}/usr/lib - ${CP} ${WRKINST}/usr/lib/lib?xslt.so* ${IDIR_LIBXSLT}/usr/lib + ${CP} ${WRKINST}/usr/lib/libxslt.so* ${IDIR_LIBXSLT}/usr/lib + ${CP} ${WRKINST}/usr/lib/libexslt.so* ${IDIR_LIBXSLT}/usr/lib libxslt-dev-install: ${INSTALL_DIR} ${IDIR_LIBXSLT_DEV}/usr/include diff --git a/package/openais/files/openais.init b/package/openais/files/openais.init index f36121c7a..25b2d8974 100644 --- a/package/openais/files/openais.init +++ b/package/openais/files/openais.init @@ -10,13 +10,16 @@ autostart) exec sh $0 start ;; start) - mount -t tmpfs -o size=4M tmpfs /dev/shm [ -d /var/lib/corosync ] || mkdir -p /var/lib/corosync + mkdir -p /var/lib/heartbeat/crm + chown hacluster:haclient /var/lib/heartbeat/crm + chmod 775 /var/lib/heartbeat/crm + mkdir -p /var/lib/pengine + chown hacluster /var/lib/pengine /usr/sbin/aisexec ;; stop) kill $(pgrep -f corosync) - umount /dev/shm ;; restart) sh $0 stop diff --git a/package/pacemaker/Makefile b/package/pacemaker/Makefile index 82b217f17..25dc896ff 100644 --- a/package/pacemaker/Makefile +++ b/package/pacemaker/Makefile @@ -9,12 +9,13 @@ PKG_RELEASE:= 1 PKG_MD5SUM:= bfe2127d31e5244d0106a5c290fd0dc2 PKG_DESCR:= scalable High-Availability cluster resource manager PKG_SECTION:= ha -PKG_DEPENDS:= python2 +PKG_BUILDDEP:= resource-agents cluster-glue pam gnutls +PKG_DEPENDS:= python2 python2-readline python2-bzip2 cluster-glue +PKG_DEPENDS+= bash resource-agents pam libgnutls glib libxslt libxml2 PKG_URL:= http://www.clusterlabs.org/ PKG_SITES:= http://openadk.org/distfiles/ PKG_CFLINE_PACEMAKER:= depends on ADK_TARGET_LIB_GLIBC || ADK_TARGET_LIB_EGLIBC -PKG_HOST_DEPENDS:= !darwin PKG_ARCH_DEPENDS:= x86 include $(TOPDIR)/mk/package.mk @@ -24,15 +25,25 @@ $(eval $(call PKG_template,PACEMAKER,pacemaker,$(PKG_VERSION)-${PKG_RELEASE},${P # disable honour cflags stuff XAKE_FLAGS+= GCC_HONOUR_COPTS=s -CONFIGURE_ARGS+= --includedir=$(STAGING_TARGET_DIR)/usr/include \ - --disable-fatal-warnings +CONFIGURE_ARGS+= --disable-fatal-warnings CONFIGURE_ENV+= ac_cv_path_HELP2MAN='' -CFLAGS_FOR_BUILD+= -I$(STAGING_TARGET_DIR)/usr/include/heartbeat +CFLAGS_FOR_BUILD+= -I$(STAGING_DIR)/usr/include/heartbeat + +pre-configure: + $(SED) "s#@@STAGING_DIR@@#$(STAGING_DIR)#" $(WRKBUILD)/configure pacemaker-install: - $(INSTALL_DIR) $(IDIR_PACEMAKER)/usr/lib + $(INSTALL_DIR) $(IDIR_PACEMAKER)/usr/lib/heartbeat + $(INSTALL_DIR) $(IDIR_PACEMAKER)/usr/lib/ocf $(CP) $(WRKINST)/usr/lib/lib*.so* \ $(IDIR_PACEMAKER)/usr/lib + $(CP) $(WRKINST)/usr/lib/heartbeat/* \ + $(IDIR_PACEMAKER)/usr/lib/heartbeat + $(CP) $(WRKINST)/usr/lib/ocf/* \ + $(IDIR_PACEMAKER)/usr/lib/ocf + $(INSTALL_DIR) $(IDIR_PACEMAKER)/usr/share/pacemaker + $(CP) $(WRKINST)/usr/share/pacemaker/* \ + $(IDIR_PACEMAKER)/usr/share/pacemaker $(INSTALL_DIR) $(IDIR_PACEMAKER)/usr/sbin $(CP) 
$(WRKINST)/usr/sbin/* \ $(IDIR_PACEMAKER)/usr/sbin diff --git a/package/pacemaker/files/pacemaker.postinst b/package/pacemaker/files/pacemaker.postinst new file mode 100644 index 000000000..acda216de --- /dev/null +++ b/package/pacemaker/files/pacemaker.postinst @@ -0,0 +1,8 @@ +#!/bin/sh +. $IPKG_INSTROOT/etc/functions.sh + +gid=$(get_next_gid) +gid2=$(get_next_gid) +add_user hacluster $(get_next_uid) $gid /tmp +add_group hacluster $gid +add_group haclient $gid2 diff --git a/package/pacemaker/patches/patch-configure b/package/pacemaker/patches/patch-configure index 5206c0b3b..320b06924 100644 --- a/package/pacemaker/patches/patch-configure +++ b/package/pacemaker/patches/patch-configure @@ -1,5 +1,5 @@ --- pacemaker-1.0.11.orig/configure 2011-05-24 17:00:37.000000000 +0200 -+++ pacemaker-1.0.11/configure 2011-06-25 22:21:57.912681957 +0200 ++++ pacemaker-1.0.11/configure 2011-10-07 12:41:11.000000000 +0200 @@ -12484,7 +12484,7 @@ $as_echo_n "checking for $2 in $1... " > printf "#include \n" > ${Cfile}.c printf "#include <%s>\n" $1 >> ${Cfile}.c @@ -49,7 +49,7 @@ -esac - -CFLAGS="$CFLAGS -I${prefix}/include/heartbeat" -+CFLAGS="$CFLAGS -I${includedir}/heartbeat" ++CFLAGS="$CFLAGS -I@@STAGING_DIR@@/usr/include/heartbeat" diff --git a/package/pacemaker/patches/patch-configure.orig b/package/pacemaker/patches/patch-configure.orig deleted file mode 100644 index a3eca9a9d..000000000 --- a/package/pacemaker/patches/patch-configure.orig +++ /dev/null @@ -1,54 +0,0 @@ ---- pacemaker-1.0.11.orig/configure 2011-05-24 17:00:37.000000000 +0200 -+++ pacemaker-1.0.11/configure 2011-05-24 17:39:43.000000000 +0200 -@@ -12484,7 +12484,7 @@ $as_echo_n "checking for $2 in $1... " > - printf "#include \n" > ${Cfile}.c - printf "#include <%s>\n" $1 >> ${Cfile}.c - printf "int main(int argc, char **argv) { printf(\"%%s\", %s); return 0; }\n" $2 >> ${Cfile}.c -- $CC $CFLAGS ${Cfile}.c -o ${Cfile} -+ $CC_FOR_BUILD $CFLAGS_FOR_BUILD ${Cfile}.c -o ${Cfile} - value=`${Cfile}` - { $as_echo "$as_me:${as_lineno-$LINENO}: result: $value" >&5 - $as_echo "$value" >&6; } -@@ -12713,33 +12713,11 @@ $as_echo "$as_me: WARNING: $j directory - done - - --case "$host_os" in --*bsd*) LIBS="-L/usr/local/lib" -- CPPFLAGS="$CPPFLAGS -I/usr/local/include" -- INIT_EXT=".sh" -- ;; --*solaris*) -- ;; --*linux*) -- - cat >>confdefs.h <<_ACEOF - #define ON_LINUX 1 - _ACEOF - -- CFLAGS="$CFLAGS -I${prefix}/include" -- ;; --darwin*) -- --cat >>confdefs.h <<_ACEOF --#define ON_DARWIN 1 --_ACEOF -- -- LIBS="$LIBS -L${prefix}/lib" -- CFLAGS="$CFLAGS -I${prefix}/include" -- ;; --esac -- --CFLAGS="$CFLAGS -I${prefix}/include/heartbeat" -+CFLAGS="$CFLAGS -I${includedir}/heartbeat" - - - -@@ -17159,7 +17137,6 @@ else - - # We had to eliminate -Wnested-externs because of libtool changes - EXTRA_FLAGS="-fgnu89-inline -- -fstack-protector-all - -Wall - -Waggregate-return - -Wbad-function-cast diff --git a/package/pam/Makefile b/package/pam/Makefile new file mode 100644 index 000000000..a9381495e --- /dev/null +++ b/package/pam/Makefile @@ -0,0 +1,32 @@ +# This file is part of the OpenADK project. OpenADK is copyrighted +# material, please see the LICENCE file in the top-level directory. 
+ +include $(TOPDIR)/rules.mk + +PKG_NAME:= pam +PKG_VERSION:= 1.1.4 +PKG_RELEASE:= 1 +PKG_MD5SUM:= ff8f3c4382b78ac211e11bcd56ab17bf +PKG_DESCR:= Pluggable Authentication Modules +PKG_BUILDDEP:= flex +PKG_SECTION:= misc +PKG_SITES:= http://openadk.org/distfiles/ + +DISTFILES:= Linux-PAM-$(PKG_VERSION).tar.gz +WRKDIST= ${WRKDIR}/Linux-PAM-${PKG_VERSION} + +include $(TOPDIR)/mk/package.mk + +$(eval $(call PKG_template,PAM,pam,$(PKG_VERSION)-${PKG_RELEASE},${PKG_DEPENDS},${PKG_DESCR},${PKG_SECTION})) + +pam-install: + $(INSTALL_DIR) $(IDIR_PAM)/etc/security + $(CP) $(WRKINST)/etc/security/* $(IDIR_PAM)/etc/security + $(INSTALL_DIR) $(IDIR_PAM)/lib/security + $(CP) $(WRKINST)/lib/libpam*.so* $(IDIR_PAM)/lib + $(CP) $(WRKINST)/lib/security/*.so* $(IDIR_PAM)/lib/security + $(CP) $(WRKBUILD)/conf/pam.conf $(IDIR_PAM)/etc + # /lib is not automatically installed to staging area + $(CP) $(WRKINST)/lib/libpam*.so* $(STAGING_DIR)/usr/lib + +include ${TOPDIR}/mk/pkg-bottom.mk diff --git a/package/pam/patches/patch-Makefile_in b/package/pam/patches/patch-Makefile_in new file mode 100644 index 000000000..12574da06 --- /dev/null +++ b/package/pam/patches/patch-Makefile_in @@ -0,0 +1,24 @@ +--- Linux-PAM-1.1.4.orig/Makefile.in 2011-06-24 12:48:16.000000000 +0200 ++++ Linux-PAM-1.1.4/Makefile.in 2011-10-07 11:42:21.000000000 +0200 +@@ -75,8 +75,8 @@ RECURSIVE_CLEAN_TARGETS = mostlyclean-re + distclean-recursive maintainer-clean-recursive + ETAGS = etags + CTAGS = ctags +-DIST_SUBDIRS = libpam tests libpamc libpam_misc modules po conf doc \ +- examples xtests ++DIST_SUBDIRS = libpam tests libpamc libpam_misc modules po conf \ ++ examples + DISTFILES = $(DIST_COMMON) $(DIST_SOURCES) $(TEXINFOS) $(EXTRA_DIST) + distdir = $(PACKAGE)-$(VERSION) + top_distdir = $(distdir) +@@ -258,8 +258,8 @@ top_build_prefix = @top_build_prefix@ + top_builddir = @top_builddir@ + top_srcdir = @top_srcdir@ + AUTOMAKE_OPTIONS = 1.9 gnu dist-bzip2 check-news +-@STATIC_MODULES_FALSE@SUBDIRS = libpam tests libpamc libpam_misc modules po conf doc examples xtests +-@STATIC_MODULES_TRUE@SUBDIRS = modules libpam libpamc libpam_misc tests po conf doc examples xtests ++@STATIC_MODULES_FALSE@SUBDIRS = libpam tests libpamc libpam_misc modules po conf examples ++@STATIC_MODULES_TRUE@SUBDIRS = modules libpam libpamc libpam_misc tests po conf examples + CLEANFILES = *~ + M4_FILES = m4/gettext.m4 m4/iconv.m4 m4/intlmacosx.m4 \ + m4/japhar_grep_cflags.m4 m4/jh_path_xml_catalog.m4 \ diff --git a/package/pam/patches/patch-conf_Makefile_in b/package/pam/patches/patch-conf_Makefile_in new file mode 100644 index 000000000..b4a0d7afb --- /dev/null +++ b/package/pam/patches/patch-conf_Makefile_in @@ -0,0 +1,11 @@ +--- Linux-PAM-1.1.4.orig/conf/Makefile.in 2011-06-24 12:48:02.000000000 +0200 ++++ Linux-PAM-1.1.4/conf/Makefile.in 2011-10-07 11:32:37.000000000 +0200 +@@ -236,7 +236,7 @@ target_alias = @target_alias@ + top_build_prefix = @top_build_prefix@ + top_builddir = @top_builddir@ + top_srcdir = @top_srcdir@ +-SUBDIRS = pam_conv1 ++SUBDIRS = + CLEANFILES = *~ + EXTRA_DIST = install_conf md5itall pam.conf + all: all-recursive diff --git a/package/resource-agents/Makefile b/package/resource-agents/Makefile index 6611a579c..657529326 100644 --- a/package/resource-agents/Makefile +++ b/package/resource-agents/Makefile @@ -5,24 +5,28 @@ include $(TOPDIR)/rules.mk PKG_NAME:= resource-agents PKG_VERSION:= 3.9.0 -PKG_RELEASE:= 1 +PKG_RELEASE:= 2 PKG_MD5SUM:= b5d0c178082ff186c36ab145b5bbf5df PKG_DESCR:= Resource agents for clusters PKG_SECTION:= ha 
+PKG_BUILDDEP:= glib cluster-glue PKG_URL:= https://github.com/ClusterLabs/resource-agents PKG_SITES:= http://openadk.org/distfiles/ PKG_CFLINE_RESOURCE_AGENTS:= depends on ADK_TARGET_LIB_GLIBC || ADK_TARGET_LIB_EGLIBC -PKG_HOST_DEPENDS:= !darwin PKG_ARCH_DEPENDS:= x86 include $(TOPDIR)/mk/package.mk $(eval $(call PKG_template,RESOURCE_AGENTS,resource-agents,$(PKG_VERSION)-${PKG_RELEASE},${PKG_DEPENDS},${PKG_DESCR},${PKG_SECTION})) +CONFIGURE_ARGS+= --with-ocf-root=/usr/lib/ocf + resource-agents-install: $(INSTALL_DIR) $(IDIR_RESOURCE_AGENTS)/usr/share/cluster $(CP) $(WRKINST)/usr/share/cluster/*.sh \ $(IDIR_RESOURCE_AGENTS)/usr/share/cluster + $(INSTALL_DIR) $(IDIR_RESOURCE_AGENTS)/usr/lib + $(CP) $(WRKINST)/usr/lib/* $(IDIR_RESOURCE_AGENTS)/usr/lib include ${TOPDIR}/mk/pkg-bottom.mk diff --git a/package/resource-agents/patches/patch-configure b/package/resource-agents/patches/patch-configure new file mode 100644 index 000000000..62d1beba5 --- /dev/null +++ b/package/resource-agents/patches/patch-configure @@ -0,0 +1,11 @@ +--- resource-agents-3.9.0.orig/configure 2011-05-24 15:37:07.000000000 +0200 ++++ resource-agents-3.9.0/configure 2011-10-07 20:11:45.000000000 +0200 +@@ -6171,7 +6171,7 @@ $as_echo_n "checking for $2 in $1... " > + printf "#include \n" > ${Cfile}.c + printf "#include <%s>\n" $1 >> ${Cfile}.c + printf "int main(int argc, char **argv) { printf(\"%%s\", %s); return 0; }\n" $2 >> ${Cfile}.c +- $CC $CFLAGS ${Cfile}.c -o ${Cfile} ++ $CC_FOR_BUILD $CFLAGS_FOR_BUILD ${Cfile}.c -o ${Cfile} + value=`${Cfile}` + { $as_echo "$as_me:${as_lineno-$LINENO}: result: $value" >&5 + $as_echo "$value" >&6; } -- cgit v1.2.3 From 7f6d8cbed9b750aaae116f1d444d1f77469957e2 Mon Sep 17 00:00:00 2001 From: Waldemar Brodkorb Date: Sat, 8 Oct 2011 22:15:26 +0200 Subject: fix cross-compile on Darwin --- package/gnutls/Makefile | 1 + 1 file changed, 1 insertion(+) diff --git a/package/gnutls/Makefile b/package/gnutls/Makefile index c4c5e0cb1..40e93d15e 100644 --- a/package/gnutls/Makefile +++ b/package/gnutls/Makefile @@ -40,6 +40,7 @@ CONFIGURE_ARGS+= --without-libopencdk-prefix \ --without-libz-prefix \ --without-p11-kit \ --without-nettle-prefix +CONFIGURE_ENV+= ac_cv_prog_AR='$(TARGET_CROSS)ar' post-install: ${INSTALL_DIR} ${IDIR_LIBGNUTLS}/usr/lib -- cgit v1.2.3 From bbdeb66b02904db8f592b5de5164898054dc8a92 Mon Sep 17 00:00:00 2001 From: Waldemar Brodkorb Date: Mon, 10 Oct 2011 13:08:03 +0200 Subject: update to latest upstream. 
my statvfs changes seems to be not compatible to normal behaviour --- package/xfsprogs/Makefile | 10 +- package/xfsprogs/patches/patch-configure | 380 +-------------------- package/xfsprogs/patches/patch-include_buildmacros | 11 + package/xfsprogs/patches/patch-libxfs_linux_c | 35 -- package/xfsprogs/patches/patch-ltmain_sh | 11 - 5 files changed, 33 insertions(+), 414 deletions(-) create mode 100644 package/xfsprogs/patches/patch-include_buildmacros delete mode 100644 package/xfsprogs/patches/patch-libxfs_linux_c delete mode 100644 package/xfsprogs/patches/patch-ltmain_sh diff --git a/package/xfsprogs/Makefile b/package/xfsprogs/Makefile index f22b8aec2..5ebc65d32 100644 --- a/package/xfsprogs/Makefile +++ b/package/xfsprogs/Makefile @@ -4,13 +4,13 @@ include ${TOPDIR}/rules.mk PKG_NAME:= xfsprogs -PKG_VERSION:= 3.1.4 +PKG_VERSION:= 3.1.5 PKG_RELEASE:= 1 -PKG_MD5SUM:= 74081975f148bcabcab26c4c3496ede9 +PKG_MD5SUM:= b1db37749e2b4149a0dd178abff956be PKG_DESCR:= Utilities for XFS filesystems PKG_SECTION:= fs -PKG_DEPENDS:= libuuid libpthread PKG_BUILDDEP:= e2fsprogs +PKG_DEPENDS:= libuuid libpthread PKG_URL:= http://oss.sgi.com/projects/xfs PKG_SITES:= ftp://oss.sgi.com/projects/xfs/cmd_tars/ \ ftp://oss.sgi.com/projects/xfs/previous/cmd_tars/ @@ -28,7 +28,7 @@ ALL_TARGET= do-install: ${INSTALL_DIR} ${IDIR_XFSPROGS}/usr/sbin - ${INSTALL_BIN} ${WRKBUILD}/mkfs/mkfs.xfs ${IDIR_XFSPROGS}/usr/sbin/ - ${INSTALL_BIN} ${WRKBUILD}/repair/xfs_repair ${IDIR_XFSPROGS}/usr/sbin/ + ${INSTALL_BIN} ${WRKBUILD}/mkfs/mkfs.xfs ${IDIR_XFSPROGS}/usr/sbin + ${INSTALL_BIN} ${WRKBUILD}/repair/xfs_repair ${IDIR_XFSPROGS}/usr/sbin include ${TOPDIR}/mk/pkg-bottom.mk diff --git a/package/xfsprogs/patches/patch-configure b/package/xfsprogs/patches/patch-configure index ea30c4cdb..0eaf41253 100644 --- a/package/xfsprogs/patches/patch-configure +++ b/package/xfsprogs/patches/patch-configure @@ -1,366 +1,20 @@ ---- xfsprogs-3.1.4.orig/configure 2010-11-11 18:38:31.000000000 +0100 -+++ xfsprogs-3.1.4/configure 2011-02-14 20:26:03.000000000 +0100 -@@ -11214,11 +11214,11 @@ ac_compiler_gnu=$ac_cv_c_compiler_gnu - test -z "$INSTALL_GROUP" || pkg_group="$INSTALL_GROUP" +--- xfsprogs-3.1.5.orig/configure 2011-03-31 05:11:43.000000000 +0200 ++++ xfsprogs-3.1.5/configure 2011-10-09 18:15:57.858626068 +0200 +@@ -245,7 +245,7 @@ $0: the script under such a shell if you + fi + fi + fi +-SHELL=${CONFIG_SHELL-/bin/sh} ++SHELL=${CONFIG_SHELL-/bin/bash} + export SHELL + # Unset more variables known to interfere with behavior of common tools. + CLICOLOR_FORCE= GREP_OPTIONS= +@@ -6414,7 +6414,7 @@ fi + LIBTOOL_DEPS="$ltmain" + + # Always use our own libtool. +-LIBTOOL='$(SHELL) $(top_builddir)/libtool' ++LIBTOOL='$(CONFIG_SHELL) $(top_builddir)/libtool' -- pkg_distribution=`uname -s` -+ pkg_distribution=Linux - test -z "$DISTRIBUTION" || pkg_distribution="$DISTRIBUTION" - -- pkg_platform=`uname -s | tr 'A-Z' 'a-z' | tr -d / | sed -e 's/irix64/irix/'` -+ pkg_platform=linux - test -z "$PLATFORM" || pkg_platform="$PLATFORM" - - -@@ -12604,349 +12604,6 @@ fi - - - --for ac_header in aio.h --do --as_ac_Header=`$as_echo "ac_cv_header_$ac_header" | $as_tr_sh` --if { as_var=$as_ac_Header; eval "test \"\${$as_var+set}\" = set"; }; then -- { $as_echo "$as_me:$LINENO: checking for $ac_header" >&5 --$as_echo_n "checking for $ac_header... 
" >&6; } --if { as_var=$as_ac_Header; eval "test \"\${$as_var+set}\" = set"; }; then -- $as_echo_n "(cached) " >&6 --fi --ac_res=`eval 'as_val=${'$as_ac_Header'} -- $as_echo "$as_val"'` -- { $as_echo "$as_me:$LINENO: result: $ac_res" >&5 --$as_echo "$ac_res" >&6; } --else -- # Is the header compilable? --{ $as_echo "$as_me:$LINENO: checking $ac_header usability" >&5 --$as_echo_n "checking $ac_header usability... " >&6; } --cat >conftest.$ac_ext <<_ACEOF --/* confdefs.h. */ --_ACEOF --cat confdefs.h >>conftest.$ac_ext --cat >>conftest.$ac_ext <<_ACEOF --/* end confdefs.h. */ --$ac_includes_default --#include <$ac_header> --_ACEOF --rm -f conftest.$ac_objext --if { (ac_try="$ac_compile" --case "(($ac_try" in -- *\"* | *\`* | *\\*) ac_try_echo=\$ac_try;; -- *) ac_try_echo=$ac_try;; --esac --eval ac_try_echo="\"\$as_me:$LINENO: $ac_try_echo\"" --$as_echo "$ac_try_echo") >&5 -- (eval "$ac_compile") 2>conftest.er1 -- ac_status=$? -- grep -v '^ *+' conftest.er1 >conftest.err -- rm -f conftest.er1 -- cat conftest.err >&5 -- $as_echo "$as_me:$LINENO: \$? = $ac_status" >&5 -- (exit $ac_status); } && { -- test -z "$ac_c_werror_flag" || -- test ! -s conftest.err -- } && test -s conftest.$ac_objext; then -- ac_header_compiler=yes --else -- $as_echo "$as_me: failed program was:" >&5 --sed 's/^/| /' conftest.$ac_ext >&5 -- -- ac_header_compiler=no --fi -- --rm -f core conftest.err conftest.$ac_objext conftest.$ac_ext --{ $as_echo "$as_me:$LINENO: result: $ac_header_compiler" >&5 --$as_echo "$ac_header_compiler" >&6; } -- --# Is the header present? --{ $as_echo "$as_me:$LINENO: checking $ac_header presence" >&5 --$as_echo_n "checking $ac_header presence... " >&6; } --cat >conftest.$ac_ext <<_ACEOF --/* confdefs.h. */ --_ACEOF --cat confdefs.h >>conftest.$ac_ext --cat >>conftest.$ac_ext <<_ACEOF --/* end confdefs.h. */ --#include <$ac_header> --_ACEOF --if { (ac_try="$ac_cpp conftest.$ac_ext" --case "(($ac_try" in -- *\"* | *\`* | *\\*) ac_try_echo=\$ac_try;; -- *) ac_try_echo=$ac_try;; --esac --eval ac_try_echo="\"\$as_me:$LINENO: $ac_try_echo\"" --$as_echo "$ac_try_echo") >&5 -- (eval "$ac_cpp conftest.$ac_ext") 2>conftest.er1 -- ac_status=$? -- grep -v '^ *+' conftest.er1 >conftest.err -- rm -f conftest.er1 -- cat conftest.err >&5 -- $as_echo "$as_me:$LINENO: \$? = $ac_status" >&5 -- (exit $ac_status); } >/dev/null && { -- test -z "$ac_c_preproc_warn_flag$ac_c_werror_flag" || -- test ! -s conftest.err -- }; then -- ac_header_preproc=yes --else -- $as_echo "$as_me: failed program was:" >&5 --sed 's/^/| /' conftest.$ac_ext >&5 -- -- ac_header_preproc=no --fi -- --rm -f conftest.err conftest.$ac_ext --{ $as_echo "$as_me:$LINENO: result: $ac_header_preproc" >&5 --$as_echo "$ac_header_preproc" >&6; } -- --# So? What about this header? --case $ac_header_compiler:$ac_header_preproc:$ac_c_preproc_warn_flag in -- yes:no: ) -- { $as_echo "$as_me:$LINENO: WARNING: $ac_header: accepted by the compiler, rejected by the preprocessor!" >&5 --$as_echo "$as_me: WARNING: $ac_header: accepted by the compiler, rejected by the preprocessor!" >&2;} -- { $as_echo "$as_me:$LINENO: WARNING: $ac_header: proceeding with the compiler's result" >&5 --$as_echo "$as_me: WARNING: $ac_header: proceeding with the compiler's result" >&2;} -- ac_header_preproc=yes -- ;; -- no:yes:* ) -- { $as_echo "$as_me:$LINENO: WARNING: $ac_header: present but cannot be compiled" >&5 --$as_echo "$as_me: WARNING: $ac_header: present but cannot be compiled" >&2;} -- { $as_echo "$as_me:$LINENO: WARNING: $ac_header: check for missing prerequisite headers?" 
>&5 --$as_echo "$as_me: WARNING: $ac_header: check for missing prerequisite headers?" >&2;} -- { $as_echo "$as_me:$LINENO: WARNING: $ac_header: see the Autoconf documentation" >&5 --$as_echo "$as_me: WARNING: $ac_header: see the Autoconf documentation" >&2;} -- { $as_echo "$as_me:$LINENO: WARNING: $ac_header: section \"Present But Cannot Be Compiled\"" >&5 --$as_echo "$as_me: WARNING: $ac_header: section \"Present But Cannot Be Compiled\"" >&2;} -- { $as_echo "$as_me:$LINENO: WARNING: $ac_header: proceeding with the preprocessor's result" >&5 --$as_echo "$as_me: WARNING: $ac_header: proceeding with the preprocessor's result" >&2;} -- { $as_echo "$as_me:$LINENO: WARNING: $ac_header: in the future, the compiler will take precedence" >&5 --$as_echo "$as_me: WARNING: $ac_header: in the future, the compiler will take precedence" >&2;} -- -- ;; --esac --{ $as_echo "$as_me:$LINENO: checking for $ac_header" >&5 --$as_echo_n "checking for $ac_header... " >&6; } --if { as_var=$as_ac_Header; eval "test \"\${$as_var+set}\" = set"; }; then -- $as_echo_n "(cached) " >&6 --else -- eval "$as_ac_Header=\$ac_header_preproc" --fi --ac_res=`eval 'as_val=${'$as_ac_Header'} -- $as_echo "$as_val"'` -- { $as_echo "$as_me:$LINENO: result: $ac_res" >&5 --$as_echo "$ac_res" >&6; } -- --fi --as_val=`eval 'as_val=${'$as_ac_Header'} -- $as_echo "$as_val"'` -- if test "x$as_val" = x""yes; then -- cat >>confdefs.h <<_ACEOF --#define `$as_echo "HAVE_$ac_header" | $as_tr_cpp` 1 --_ACEOF -- --fi -- --done -- -- if test $ac_cv_header_aio_h = no; then -- echo -- echo 'FATAL ERROR: could not find a valid header.' -- exit 1 -- fi -- -- --for ac_func in lio_listio --do --as_ac_var=`$as_echo "ac_cv_func_$ac_func" | $as_tr_sh` --{ $as_echo "$as_me:$LINENO: checking for $ac_func" >&5 --$as_echo_n "checking for $ac_func... " >&6; } --if { as_var=$as_ac_var; eval "test \"\${$as_var+set}\" = set"; }; then -- $as_echo_n "(cached) " >&6 --else -- cat >conftest.$ac_ext <<_ACEOF --/* confdefs.h. */ --_ACEOF --cat confdefs.h >>conftest.$ac_ext --cat >>conftest.$ac_ext <<_ACEOF --/* end confdefs.h. */ --/* Define $ac_func to an innocuous variant, in case declares $ac_func. -- For example, HP-UX 11i declares gettimeofday. */ --#define $ac_func innocuous_$ac_func -- --/* System header to define __stub macros and hopefully few prototypes, -- which can conflict with char $ac_func (); below. -- Prefer to if __STDC__ is defined, since -- exists even on freestanding compilers. */ -- --#ifdef __STDC__ --# include --#else --# include --#endif -- --#undef $ac_func -- --/* Override any GCC internal prototype to avoid an error. -- Use char because int might match the return type of a GCC -- builtin and then its argument prototype would still apply. */ --#ifdef __cplusplus --extern "C" --#endif --char $ac_func (); --/* The GNU C library defines this for functions which it implements -- to always fail with ENOSYS. Some functions are actually named -- something starting with __ and the normal name is an alias. */ --#if defined __stub_$ac_func || defined __stub___$ac_func --choke me --#endif -- --int --main () --{ --return $ac_func (); -- ; -- return 0; --} --_ACEOF --rm -f conftest.$ac_objext conftest$ac_exeext --if { (ac_try="$ac_link" --case "(($ac_try" in -- *\"* | *\`* | *\\*) ac_try_echo=\$ac_try;; -- *) ac_try_echo=$ac_try;; --esac --eval ac_try_echo="\"\$as_me:$LINENO: $ac_try_echo\"" --$as_echo "$ac_try_echo") >&5 -- (eval "$ac_link") 2>conftest.er1 -- ac_status=$? 
-- grep -v '^ *+' conftest.er1 >conftest.err -- rm -f conftest.er1 -- cat conftest.err >&5 -- $as_echo "$as_me:$LINENO: \$? = $ac_status" >&5 -- (exit $ac_status); } && { -- test -z "$ac_c_werror_flag" || -- test ! -s conftest.err -- } && test -s conftest$ac_exeext && { -- test "$cross_compiling" = yes || -- $as_test_x conftest$ac_exeext -- }; then -- eval "$as_ac_var=yes" --else -- $as_echo "$as_me: failed program was:" >&5 --sed 's/^/| /' conftest.$ac_ext >&5 -- -- eval "$as_ac_var=no" --fi -- --rm -rf conftest.dSYM --rm -f core conftest.err conftest.$ac_objext conftest_ipa8_conftest.oo \ -- conftest$ac_exeext conftest.$ac_ext --fi --ac_res=`eval 'as_val=${'$as_ac_var'} -- $as_echo "$as_val"'` -- { $as_echo "$as_me:$LINENO: result: $ac_res" >&5 --$as_echo "$ac_res" >&6; } --as_val=`eval 'as_val=${'$as_ac_var'} -- $as_echo "$as_val"'` -- if test "x$as_val" = x""yes; then -- cat >>confdefs.h <<_ACEOF --#define `$as_echo "HAVE_$ac_func" | $as_tr_cpp` 1 --_ACEOF -- --fi --done -- -- if test $ac_cv_func_lio_listio = yes; then -- librt="" -- else -- --{ $as_echo "$as_me:$LINENO: checking for lio_listio in -lrt" >&5 --$as_echo_n "checking for lio_listio in -lrt... " >&6; } --if test "${ac_cv_lib_rt_lio_listio+set}" = set; then -- $as_echo_n "(cached) " >&6 --else -- ac_check_lib_save_LIBS=$LIBS --LIBS="-lrt -lpthread $LIBS" --cat >conftest.$ac_ext <<_ACEOF --/* confdefs.h. */ --_ACEOF --cat confdefs.h >>conftest.$ac_ext --cat >>conftest.$ac_ext <<_ACEOF --/* end confdefs.h. */ -- --/* Override any GCC internal prototype to avoid an error. -- Use char because int might match the return type of a GCC -- builtin and then its argument prototype would still apply. */ --#ifdef __cplusplus --extern "C" --#endif --char lio_listio (); --int --main () --{ --return lio_listio (); -- ; -- return 0; --} --_ACEOF --rm -f conftest.$ac_objext conftest$ac_exeext --if { (ac_try="$ac_link" --case "(($ac_try" in -- *\"* | *\`* | *\\*) ac_try_echo=\$ac_try;; -- *) ac_try_echo=$ac_try;; --esac --eval ac_try_echo="\"\$as_me:$LINENO: $ac_try_echo\"" --$as_echo "$ac_try_echo") >&5 -- (eval "$ac_link") 2>conftest.er1 -- ac_status=$? -- grep -v '^ *+' conftest.er1 >conftest.err -- rm -f conftest.er1 -- cat conftest.err >&5 -- $as_echo "$as_me:$LINENO: \$? = $ac_status" >&5 -- (exit $ac_status); } && { -- test -z "$ac_c_werror_flag" || -- test ! -s conftest.err -- } && test -s conftest$ac_exeext && { -- test "$cross_compiling" = yes || -- $as_test_x conftest$ac_exeext -- }; then -- ac_cv_lib_rt_lio_listio=yes --else -- $as_echo "$as_me: failed program was:" >&5 --sed 's/^/| /' conftest.$ac_ext >&5 -- -- ac_cv_lib_rt_lio_listio=no --fi -- --rm -rf conftest.dSYM --rm -f core conftest.err conftest.$ac_objext conftest_ipa8_conftest.oo \ -- conftest$ac_exeext conftest.$ac_ext --LIBS=$ac_check_lib_save_LIBS --fi --{ $as_echo "$as_me:$LINENO: result: $ac_cv_lib_rt_lio_listio" >&5 --$as_echo "$ac_cv_lib_rt_lio_listio" >&6; } --if test "x$ac_cv_lib_rt_lio_listio" = x""yes; then -- cat >>confdefs.h <<_ACEOF --#define HAVE_LIBRT 1 --_ACEOF -- -- LIBS="-lrt $LIBS" -- --else -- -- echo -- echo 'FATAL ERROR: could not find a library with lio_listio.' 
-- exit 1 --fi -- -- librt="-lrt" -- fi -- -- -- -- -- -- - for ac_header in uuid.h sys/uuid.h uuid/uuid.h - do - as_ac_Header=`$as_echo "ac_cv_header_$ac_header" | $as_tr_sh` diff --git a/package/xfsprogs/patches/patch-include_buildmacros b/package/xfsprogs/patches/patch-include_buildmacros new file mode 100644 index 000000000..e6b9ae21f --- /dev/null +++ b/package/xfsprogs/patches/patch-include_buildmacros @@ -0,0 +1,11 @@ +--- xfsprogs-3.1.5.orig/include/buildmacros 2011-03-31 00:25:47.000000000 +0200 ++++ xfsprogs-3.1.5/include/buildmacros 2011-10-09 18:12:27.668626203 +0200 +@@ -32,7 +32,7 @@ OBJECTS = $(ASFILES:.s=.o) \ + + INSTALL = $(TOPDIR)/install-sh -o $(PKG_USER) -g $(PKG_GROUP) + +-SHELL = /bin/sh ++SHELL ?= /bin/bash + IMAGES_DIR = $(TOPDIR)/all-images + DIST_DIR = $(TOPDIR)/dist + diff --git a/package/xfsprogs/patches/patch-libxfs_linux_c b/package/xfsprogs/patches/patch-libxfs_linux_c deleted file mode 100644 index 586dab521..000000000 --- a/package/xfsprogs/patches/patch-libxfs_linux_c +++ /dev/null @@ -1,35 +0,0 @@ ---- xfsprogs-3.1.4.orig/libxfs/linux.c 2010-01-29 20:46:13.000000000 +0100 -+++ xfsprogs-3.1.4/libxfs/linux.c 2011-01-22 20:27:29.458658270 +0100 -@@ -16,12 +16,10 @@ - * Inc., 51 Franklin St, Fifth Floor, Boston, MA 02110-1301 USA - */ - --#define ustat __kernel_ustat - #include - #include - #include --#undef ustat --#include -+#include - #include - #include - #include -@@ -49,8 +47,7 @@ static int max_block_alignment; - int - platform_check_ismounted(char *name, char *block, struct stat64 *s, int verbose) - { -- /* Pad ust; pre-2.6.28 linux copies out too much in 32bit compat mode */ -- struct ustat ust[2]; -+ struct statvfs info; - struct stat64 st; - - if (!s) { -@@ -61,7 +58,7 @@ platform_check_ismounted(char *name, cha - s = &st; - } - -- if (ustat(s->st_rdev, ust) >= 0) { -+ if (statvfs(name, &info) >= 0) { - if (verbose) - fprintf(stderr, - _("%s: %s contains a mounted filesystem\n"), diff --git a/package/xfsprogs/patches/patch-ltmain_sh b/package/xfsprogs/patches/patch-ltmain_sh deleted file mode 100644 index 6c5c11607..000000000 --- a/package/xfsprogs/patches/patch-ltmain_sh +++ /dev/null @@ -1,11 +0,0 @@ ---- xfsprogs-3.1.4.orig/ltmain.sh 2010-11-11 18:38:26.000000000 +0100 -+++ xfsprogs-3.1.4/ltmain.sh 2011-04-01 20:20:00.951819025 +0200 -@@ -4765,7 +4765,7 @@ func_mode_link () - # -p, -pg, --coverage, -fprofile-* pass through profiling flag for GCC - # @file GCC response files - -64|-mips[0-9]|-r[0-9][0-9]*|-xarch=*|-xtarget=*|+DA*|+DD*|-q*|-m*| \ -- -t[45]*|-txscale*|-p|-pg|--coverage|-fprofile-*|-F*|@*) -+ -t[45]*|-txscale*|-p|-pg|--coverage|-fprofile-*|-F*|@*|-fstack-protector*|-flto*) - func_quote_for_eval "$arg" - arg="$func_quote_for_eval_result" - func_append compile_command " $arg" -- cgit v1.2.3 From 7d7d0ad0d61e22e7fd5c938f2aaf90b00416e6b7 Mon Sep 17 00:00:00 2001 From: Waldemar Brodkorb Date: Mon, 10 Oct 2011 13:08:33 +0200 Subject: misc finetuning --- package/font-util/Makefile | 6 +++--- package/section.lst | 2 +- package/sox/Makefile | 2 ++ target/linux/config/Config.in.fs | 5 +++-- target/packages/pkg-available/development | 11 ++++++++++- target/packages/pkg-available/laptop | 10 +++++++++- 6 files changed, 28 insertions(+), 8 deletions(-) diff --git a/package/font-util/Makefile b/package/font-util/Makefile index af4f4d5e0..150a5561c 100644 --- a/package/font-util/Makefile +++ b/package/font-util/Makefile @@ -26,12 +26,12 @@ $(eval $(call PKG_template,FONT_UTIL_DEV,font-util-dev,${PKG_VERSION}-${PKG_RELE # especially as all font 
packages still need patching to avoid # calling pkg-config (which we don't provide) in order to # automatically find the mappings. -fonts-hack: +post-build: ${INSTALL_DIR} ${STAGING_DIR}/usr/share/fonts/X11/util - ${CP} ${WRKINST}/usr/share/fonts/X11/util/* \ + ${CP} ${WRKBUILD}/map-* \ ${STAGING_DIR}/usr/share/fonts/X11/util -font-util-install: fonts-hack +font-util-install: ${INSTALL_DIR} ${IDIR_FONT_UTIL}/usr/bin ${INSTALL_BIN} ${WRKINST}/usr/bin/ucs2any \ ${IDIR_FONT_UTIL}/usr/bin diff --git a/package/section.lst b/package/section.lst index d1fa5ebf3..b29d9dcb5 100644 --- a/package/section.lst +++ b/package/section.lst @@ -45,6 +45,6 @@ x11/apps X applications x11/drivers X drivers x11/server X server x11/libs X libraries -x11/misc X Misc +x11/misc X misc x11/fonts X fonts x11/devel X headers diff --git a/package/sox/Makefile b/package/sox/Makefile index 4f633b2a6..2f5436146 100644 --- a/package/sox/Makefile +++ b/package/sox/Makefile @@ -18,6 +18,8 @@ include $(TOPDIR)/mk/package.mk $(eval $(call PKG_template,SOX,sox,$(PKG_VERSION)-${PKG_RELEASE},${PKG_DEPENDS},${PKG_DESCR},${PKG_SECTION})) +CONFIGURE_ARGS+= --without-ffmpeg + sox-install: $(INSTALL_DIR) $(IDIR_SOX)/usr/lib ${CP} ${WRKINST}/usr/lib/libsox.so* $(IDIR_SOX)/usr/lib diff --git a/target/linux/config/Config.in.fs b/target/linux/config/Config.in.fs index 60b331755..88bf02f62 100644 --- a/target/linux/config/Config.in.fs +++ b/target/linux/config/Config.in.fs @@ -45,7 +45,7 @@ config ADK_KERNEL_FAT_DEFAULT_IOCHARSET default "iso8859-1" config ADK_KERNEL_SQUASHFS - prompt ".................................. SquashFS filesystem" + prompt "squashfs.......................... SquashFS filesystem" boolean select ADK_KERNEL_MISC_FILESYSTEMS default n @@ -181,12 +181,13 @@ config ADK_KPACKAGE_KMOD_VFAT_FS config ADK_KERNEL_XFS_FS + prompt "xfs............................... XFS filesystem support (kernel)" boolean select ADK_KERNEL_EXPORTFS default n config ADK_KPACKAGE_KMOD_XFS_FS - prompt "kmod-fs-xfs....................... XFS filesystem support" + prompt "kmod-fs-xfs....................... XFS filesystem support (module)" tristate select ADK_KERNEL_EXPORTFS depends on !ADK_KERNEL_XFS_FS diff --git a/target/packages/pkg-available/development b/target/packages/pkg-available/development index e058676db..449e1a5b5 100644 --- a/target/packages/pkg-available/development +++ b/target/packages/pkg-available/development @@ -10,6 +10,7 @@ config ADK_PKG_DEVELOPMENT select ADK_PACKAGE_GCC select ADK_PACKAGE_GIT select ADK_PACKAGE_GREP + select ADK_PACKAGE_TAR select ADK_PACKAGE_UCLIBC_DEV if ADK_TARGET_LIB_UCLIBC select ADK_PACKAGE_EGLIBC_DEV if ADK_TARGET_LIB_EGLIBC select ADK_PACKAGE_GLIBC_DEV if ADK_TARGET_LIB_GLIBC @@ -30,9 +31,17 @@ config ADK_PKG_DEVELOPMENT select ADK_PACKAGE_ZLIB select ADK_PACKAGE_ZLIB_DEV select ADK_PACKAGE_XZ + # optional + select ADK_PACKAGE_STRACE + select ADK_PACKAGE_GDB + select ADK_PACKAGE_FLEX + select ADK_PACKAGE_BISON + select ADK_PACKAGE_AUTOCONF + select ADK_PACKAGE_AUTOMAKE + select ADK_PACKAGE_LIBTOOL help After bootstrapping a Linux system you might want to switch to native builds with your target. - If you choose this option, all necessary software needed + If you choose this option, all software required for native building will be selected by this option. 
diff --git a/target/packages/pkg-available/laptop b/target/packages/pkg-available/laptop index 9663690fe..79e701127 100644 --- a/target/packages/pkg-available/laptop +++ b/target/packages/pkg-available/laptop @@ -1,5 +1,5 @@ config ADK_PKG_LAPTOP - bool "Choose packages for a laptop system with X" + bool "Choose packages for my laptop system with X" default n select ADK_PACKAGE_SCREEN select ADK_PACKAGE_VIM @@ -16,10 +16,18 @@ config ADK_PKG_LAPTOP select ADK_PACKAGE_XORG_SERVER_WITH_DRI select ADK_PACKAGE_XF86_INPUT_KEYBOARD select ADK_PACKAGE_XF86_INPUT_MOUSE + select ADK_PACKAGE_XF86_VIDEO_SILICONMOTION if ADK_TARGET_SYSTEM_LEMOTE_YEELONG + select ADK_PACKAGE_FONT_MISC_MISC select ADK_PACKAGE_SETXKBMAP select ADK_PACKAGE_XBINDKEYS select ADK_PACKAGE_DISPLAY + select ADK_PACKAGE_FEH select ADK_PACKAGE_XINIT select ADK_PACKAGE_EVILWM select ADK_PACKAGE_FIREFOX + select ADK_PACKAGE_DILLO select ADK_PACKAGE_MPLAYER + select ADK_PACKAGE_MPC + select ADK_PACKAGE_WPA_SUPPLICANT + select ADK_PACKAGE_WPA_SUPPLICANT_WITH_OPENSSL + select ADK_KPACKAGE_KMOD_USB_STORAGE -- cgit v1.2.3 From 8dc9983aee254647b438609a6f915efc10d1fd83 Mon Sep 17 00:00:00 2001 From: Waldemar Brodkorb Date: Mon, 10 Oct 2011 20:49:22 +0200 Subject: add fix for CPU hangs. --- target/config/Config.in | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/target/config/Config.in b/target/config/Config.in index ab651028f..36cad132f 100644 --- a/target/config/Config.in +++ b/target/config/Config.in @@ -371,7 +371,7 @@ config ADK_TARGET_CFLAGS default "-march=amdfam10" if ADK_CPU_AMDFAM10 default "-mcpu=v8" if ADK_CPU_SPARC_V8 default "-m64 -mcpu=v9" if ADK_CPU_SPARC_V9 - default "-march=loongson2f" if ADK_CPU_LOONGSON2F + default "-march=loongson2f -Wa,-mfix-loongson2f-nop" if ADK_CPU_LOONGSON2F default "-march=mips32" if ADK_CPU_MIPS32 default "-march=mips64" if ADK_CPU_MIPS64 default "-march=armv5te -mtune=arm926ej-s" if ADK_CPU_ARMV5 -- cgit v1.2.3 From 559e46aaeab8b018a01e218aac5441424cefe266 Mon Sep 17 00:00:00 2001 From: Waldemar Brodkorb Date: Mon, 10 Oct 2011 20:49:48 +0200 Subject: add suid bits after update --- package/base-files/src/sbin/adkupdate | 8 ++++++++ 1 file changed, 8 insertions(+) diff --git a/package/base-files/src/sbin/adkupdate b/package/base-files/src/sbin/adkupdate index 619faf04f..b5477412f 100755 --- a/package/base-files/src/sbin/adkupdate +++ b/package/base-files/src/sbin/adkupdate @@ -89,6 +89,14 @@ case $1 in ;; esac +# fix permissions +if [ -f /usr/bin/sudo ];then + chmod 4755 /usr/bin/sudo +fi +if [ -f /usr/bin/Xorg ];then + chmod 4755 /usr/bin/Xorg +fi + sync if [ -x /sbin/cfgfs ];then umount /etc -- cgit v1.2.3 From 0ed5cf08edbe2c35e0e68b7895aaca6569eb20b1 Mon Sep 17 00:00:00 2001 From: Waldemar Brodkorb Date: Mon, 10 Oct 2011 21:20:25 +0200 Subject: add libltdl dependency --- package/ImageMagick/Makefile | 3 +-- 1 file changed, 1 insertion(+), 2 deletions(-) diff --git a/package/ImageMagick/Makefile b/package/ImageMagick/Makefile index 46abb9280..2f50ddf20 100644 --- a/package/ImageMagick/Makefile +++ b/package/ImageMagick/Makefile @@ -24,7 +24,7 @@ PKGSC_CONVERT:= multimedia PKGSS_CONVERT:= libimagemagick PKGSD_DISPLAY:= image display application PKGSC_DISPLAY:= multimedia -PKGSS_DISPLAY:= libimagemagick libX11 libXau libXdmcp libICE libSM fontconfig libXt zlib libpng +PKGSS_DISPLAY:= libimagemagick libX11 libXau libXdmcp libICE libSM fontconfig libXt zlib libpng libltdl PKGSB_DISPLAY:= libX11 DISTFILES:= ${PKG_NAME}-${PKG_VERSION}-${PKG_EXTRAVER}.tar.gz @@ -36,7 +36,6 @@ $(eval 
$(call PKG_template,LIBIMAGEMAGICK,libimagemagick,$(PKG_VERSION)-${PKG_RE $(eval $(call PKG_template,CONVERT,convert,$(PKG_VERSION)-${PKG_RELEASE},${PKGSS_CONVERT},${PKGSD_CONVERT},${PKGSC_CONVERT})) $(eval $(call PKG_template,DISPLAY,display,$(PKG_VERSION)-${PKG_RELEASE},${PKGSS_DISPLAY},${PKGSD_DISPLAY},${PKGSC_DISPLAY})) -TARGET_LDFLAGS+= -L$(STAGING_TARGET_DIR)/lib -L$(STAGING_TARGET_DIR)/usr/lib TARGET_CFLAGS:= $(filter-out -flto,$(TARGET_CFLAGS)) CONFIGURE_ENV+= ac_cv_sys_file_offset_bits=yes CONFIGURE_ARGS+= --with-magick-plus-plus=no \ -- cgit v1.2.3 From ebbb7f7e7edd29ddcfede17b8bc4300ff481c10e Mon Sep 17 00:00:00 2001 From: Waldemar Brodkorb Date: Mon, 10 Oct 2011 21:20:55 +0200 Subject: xrdb is useful for Xresource management --- target/packages/pkg-available/laptop | 1 + 1 file changed, 1 insertion(+) diff --git a/target/packages/pkg-available/laptop b/target/packages/pkg-available/laptop index 79e701127..0193703de 100644 --- a/target/packages/pkg-available/laptop +++ b/target/packages/pkg-available/laptop @@ -24,6 +24,7 @@ config ADK_PKG_LAPTOP select ADK_PACKAGE_FEH select ADK_PACKAGE_XINIT select ADK_PACKAGE_EVILWM + select ADK_PACKAGE_XRDB select ADK_PACKAGE_FIREFOX select ADK_PACKAGE_DILLO select ADK_PACKAGE_MPLAYER -- cgit v1.2.3 From 123ac5fa80c94080c531fb4bf69d7eff6db1f4d0 Mon Sep 17 00:00:00 2001 From: Waldemar Brodkorb Date: Mon, 10 Oct 2011 21:43:59 +0200 Subject: add fix from python upstream to allow host build on ubuntu --- package/python2/Makefile | 3 + package/python2/files/setup.py | 2076 ++++++++++++++++++++++++++++++++++++++++ 2 files changed, 2079 insertions(+) create mode 100644 package/python2/files/setup.py diff --git a/package/python2/Makefile b/package/python2/Makefile index 6ab804935..53e770f2d 100644 --- a/package/python2/Makefile +++ b/package/python2/Makefile @@ -42,6 +42,8 @@ CONFIGURE_ARGS:= --with-threads \ --without-cxx-main post-extract: + $(CP) ${WRKBUILD}/setup.py ${WRKBUILD}/setup.py.sav + $(CP) ./files/setup.py ${WRKBUILD}/setup.py $(CP) ./files/posixmodule.c ${WRKBUILD}/Modules/posixmodule.c $(CP) ./files/python-config.in ${WRKBUILD}/Misc/python-config.in (cd ${WRKBUILD}; rm -rf config.{cache,status} ; \ @@ -56,6 +58,7 @@ post-extract: ${CP} ${WRKBUILD}/Parser/pgen ${WRKBUILD}/Parser/hostpgen ${CP} ${WRKBUILD}/python ${WRKBUILD}/hostpython $(MAKE) -C ${WRKBUILD} distclean + $(CP) ${WRKBUILD}/setup.py.sav ${WRKBUILD}/setup.py pre-configure: $(SED) "s#@@CPU_ARCH@@#$(CPU_ARCH)#" ${WRKBUILD}/configure diff --git a/package/python2/files/setup.py b/package/python2/files/setup.py new file mode 100644 index 000000000..baf694e16 --- /dev/null +++ b/package/python2/files/setup.py @@ -0,0 +1,2076 @@ +# Autodetecting setup.py script for building the Python extensions +# + +__version__ = "$Revision: 86041 $" + +import sys, os, imp, re, optparse +from glob import glob +from platform import machine as platform_machine +import sysconfig + +from distutils import log +from distutils import text_file +from distutils.errors import * +from distutils.core import Extension, setup +from distutils.command.build_ext import build_ext +from distutils.command.install import install +from distutils.command.install_lib import install_lib +from distutils.spawn import find_executable + +# Were we compiled --with-pydebug or with #define Py_DEBUG? +COMPILED_WITH_PYDEBUG = hasattr(sys, 'gettotalrefcount') + +# This global variable is used to hold the list of modules to be disabled. 
+disabled_module_list = [] + +def add_dir_to_list(dirlist, dir): + """Add the directory 'dir' to the list 'dirlist' (at the front) if + 1) 'dir' is not already in 'dirlist' + 2) 'dir' actually exists, and is a directory.""" + if dir is not None and os.path.isdir(dir) and dir not in dirlist: + dirlist.insert(0, dir) + +def macosx_sdk_root(): + """ + Return the directory of the current OSX SDK, + or '/' if no SDK was specified. + """ + cflags = sysconfig.get_config_var('CFLAGS') + m = re.search(r'-isysroot\s+(\S+)', cflags) + if m is None: + sysroot = '/' + else: + sysroot = m.group(1) + return sysroot + +def is_macosx_sdk_path(path): + """ + Returns True if 'path' can be located in an OSX SDK + """ + return (path.startswith('/usr/') and not path.startswith('/usr/local')) or path.startswith('/System/') + +def find_file(filename, std_dirs, paths): + """Searches for the directory where a given file is located, + and returns a possibly-empty list of additional directories, or None + if the file couldn't be found at all. + + 'filename' is the name of a file, such as readline.h or libcrypto.a. + 'std_dirs' is the list of standard system directories; if the + file is found in one of them, no additional directives are needed. + 'paths' is a list of additional locations to check; if the file is + found in one of them, the resulting list will contain the directory. + """ + if sys.platform == 'darwin': + # Honor the MacOSX SDK setting when one was specified. + # An SDK is a directory with the same structure as a real + # system, but with only header files and libraries. + sysroot = macosx_sdk_root() + + # Check the standard locations + for dir in std_dirs: + f = os.path.join(dir, filename) + + if sys.platform == 'darwin' and is_macosx_sdk_path(dir): + f = os.path.join(sysroot, dir[1:], filename) + + if os.path.exists(f): return [] + + # Check the additional directories + for dir in paths: + f = os.path.join(dir, filename) + + if sys.platform == 'darwin' and is_macosx_sdk_path(dir): + f = os.path.join(sysroot, dir[1:], filename) + + if os.path.exists(f): + return [dir] + + # Not found anywhere + return None + +def find_library_file(compiler, libname, std_dirs, paths): + result = compiler.find_library_file(std_dirs + paths, libname) + if result is None: + return None + + if sys.platform == 'darwin': + sysroot = macosx_sdk_root() + + # Check whether the found file is in one of the standard directories + dirname = os.path.dirname(result) + for p in std_dirs: + # Ensure path doesn't end with path separator + p = p.rstrip(os.sep) + + if sys.platform == 'darwin' and is_macosx_sdk_path(p): + if os.path.join(sysroot, p[1:]) == dirname: + return [ ] + + if p == dirname: + return [ ] + + # Otherwise, it must have been in one of the additional directories, + # so we have to figure out which one. + for p in paths: + # Ensure path doesn't end with path separator + p = p.rstrip(os.sep) + + if sys.platform == 'darwin' and is_macosx_sdk_path(p): + if os.path.join(sysroot, p[1:]) == dirname: + return [ p ] + + if p == dirname: + return [p] + else: + assert False, "Internal error: Path not found in std_dirs or paths" + +def module_enabled(extlist, modname): + """Returns whether the module 'modname' is present in the list + of extensions 'extlist'.""" + extlist = [ext for ext in extlist if ext.name == modname] + return len(extlist) + +def find_module_file(module, dirlist): + """Find a module in a set of possible folders. 
If it is not found + return the unadorned filename""" + list = find_file(module, [], dirlist) + if not list: + return module + if len(list) > 1: + log.info("WARNING: multiple copies of %s found"%module) + return os.path.join(list[0], module) + +class PyBuildExt(build_ext): + + def __init__(self, dist): + build_ext.__init__(self, dist) + self.failed = [] + + def build_extensions(self): + + # Detect which modules should be compiled + missing = self.detect_modules() + + # Remove modules that are present on the disabled list + extensions = [ext for ext in self.extensions + if ext.name not in disabled_module_list] + # move ctypes to the end, it depends on other modules + ext_map = dict((ext.name, i) for i, ext in enumerate(extensions)) + if "_ctypes" in ext_map: + ctypes = extensions.pop(ext_map["_ctypes"]) + extensions.append(ctypes) + self.extensions = extensions + + # Fix up the autodetected modules, prefixing all the source files + # with Modules/ and adding Python's include directory to the path. + (srcdir,) = sysconfig.get_config_vars('srcdir') + if not srcdir: + # Maybe running on Windows but not using CYGWIN? + raise ValueError("No source directory; cannot proceed.") + srcdir = os.path.abspath(srcdir) + moddirlist = [os.path.join(srcdir, 'Modules')] + + # Platform-dependent module source and include directories + incdirlist = [] + platform = self.get_platform() + if platform == 'darwin' and ("--disable-toolbox-glue" not in + sysconfig.get_config_var("CONFIG_ARGS")): + # Mac OS X also includes some mac-specific modules + macmoddir = os.path.join(srcdir, 'Mac/Modules') + moddirlist.append(macmoddir) + incdirlist.append(os.path.join(srcdir, 'Mac/Include')) + + # Fix up the paths for scripts, too + self.distribution.scripts = [os.path.join(srcdir, filename) + for filename in self.distribution.scripts] + + # Python header files + headers = [sysconfig.get_config_h_filename()] + headers += glob(os.path.join(sysconfig.get_path('platinclude'), "*.h")) + for ext in self.extensions[:]: + ext.sources = [ find_module_file(filename, moddirlist) + for filename in ext.sources ] + if ext.depends is not None: + ext.depends = [find_module_file(filename, moddirlist) + for filename in ext.depends] + else: + ext.depends = [] + # re-compile extensions if a header file has been changed + ext.depends.extend(headers) + + # platform specific include directories + ext.include_dirs.extend(incdirlist) + + # If a module has already been built statically, + # don't build it here + if ext.name in sys.builtin_module_names: + self.extensions.remove(ext) + + # Parse Modules/Setup and Modules/Setup.local to figure out which + # modules are turned on in the file. + remove_modules = [] + for filename in ('Modules/Setup', 'Modules/Setup.local'): + input = text_file.TextFile(filename, join_lines=1) + while 1: + line = input.readline() + if not line: break + line = line.split() + remove_modules.append(line[0]) + input.close() + + for ext in self.extensions[:]: + if ext.name in remove_modules: + self.extensions.remove(ext) + + # When you run "make CC=altcc" or something similar, you really want + # those environment variables passed into the setup.py phase. Here's + # a small set of useful ones. 
+ compiler = os.environ.get('CC') + args = {} + # unfortunately, distutils doesn't let us provide separate C and C++ + # compilers + if compiler is not None: + (ccshared,cflags) = sysconfig.get_config_vars('CCSHARED','CFLAGS') + args['compiler_so'] = compiler + ' ' + ccshared + ' ' + cflags + self.compiler.set_executables(**args) + + build_ext.build_extensions(self) + + longest = max([len(e.name) for e in self.extensions]) + if self.failed: + longest = max(longest, max([len(name) for name in self.failed])) + + def print_three_column(lst): + lst.sort(key=str.lower) + # guarantee zip() doesn't drop anything + while len(lst) % 3: + lst.append("") + for e, f, g in zip(lst[::3], lst[1::3], lst[2::3]): + print "%-*s %-*s %-*s" % (longest, e, longest, f, + longest, g) + + if missing: + print + print ("Python build finished, but the necessary bits to build " + "these modules were not found:") + print_three_column(missing) + print ("To find the necessary bits, look in setup.py in" + " detect_modules() for the module's name.") + print + + if self.failed: + failed = self.failed[:] + print + print "Failed to build these modules:" + print_three_column(failed) + print + + def build_extension(self, ext): + + if ext.name == '_ctypes': + if not self.configure_ctypes(ext): + return + + try: + build_ext.build_extension(self, ext) + except (CCompilerError, DistutilsError), why: + self.announce('WARNING: building of extension "%s" failed: %s' % + (ext.name, sys.exc_info()[1])) + self.failed.append(ext.name) + return + # Workaround for Mac OS X: The Carbon-based modules cannot be + # reliably imported into a command-line Python + if 'Carbon' in ext.extra_link_args: + self.announce( + 'WARNING: skipping import check for Carbon-based "%s"' % + ext.name) + return + + if self.get_platform() == 'darwin' and ( + sys.maxint > 2**32 and '-arch' in ext.extra_link_args): + # Don't bother doing an import check when an extension was + # build with an explicit '-arch' flag on OSX. That's currently + # only used to build 32-bit only extensions in a 4-way + # universal build and loading 32-bit code into a 64-bit + # process will fail. + self.announce( + 'WARNING: skipping import check for "%s"' % + ext.name) + return + + # Workaround for Cygwin: Cygwin currently has fork issues when many + # modules have been imported + if self.get_platform() == 'cygwin': + self.announce('WARNING: skipping import check for Cygwin-based "%s"' + % ext.name) + return + ext_filename = os.path.join( + self.build_lib, + self.get_ext_filename(self.get_ext_fullname(ext.name))) + try: + imp.load_dynamic(ext.name, ext_filename) + except ImportError, why: + self.failed.append(ext.name) + self.announce('*** WARNING: renaming "%s" since importing it' + ' failed: %s' % (ext.name, why), level=3) + assert not self.inplace + basename, tail = os.path.splitext(ext_filename) + newname = basename + "_failed" + tail + if os.path.exists(newname): + os.remove(newname) + os.rename(ext_filename, newname) + + # XXX -- This relies on a Vile HACK in + # distutils.command.build_ext.build_extension(). The + # _built_objects attribute is stored there strictly for + # use here. + # If there is a failure, _built_objects may not be there, + # so catch the AttributeError and move on. 
+ try: + for filename in self._built_objects: + os.remove(filename) + except AttributeError: + self.announce('unable to remove files (ignored)') + except: + exc_type, why, tb = sys.exc_info() + self.announce('*** WARNING: importing extension "%s" ' + 'failed with %s: %s' % (ext.name, exc_type, why), + level=3) + self.failed.append(ext.name) + + def get_platform(self): + # Get value of sys.platform + for platform in ['cygwin', 'beos', 'darwin', 'atheos', 'osf1']: + if sys.platform.startswith(platform): + return platform + return sys.platform + + def add_multiarch_paths(self): + # Debian/Ubuntu multiarch support. + # https://wiki.ubuntu.com/MultiarchSpec + if not find_executable('dpkg-architecture'): + return + tmpfile = os.path.join(self.build_temp, 'multiarch') + if not os.path.exists(self.build_temp): + os.makedirs(self.build_temp) + ret = os.system( + 'dpkg-architecture -qDEB_HOST_MULTIARCH > %s 2> /dev/null' % + tmpfile) + try: + if ret >> 8 == 0: + with open(tmpfile) as fp: + multiarch_path_component = fp.readline().strip() + add_dir_to_list(self.compiler.library_dirs, + '/usr/lib/' + multiarch_path_component) + add_dir_to_list(self.compiler.include_dirs, + '/usr/include/' + multiarch_path_component) + finally: + os.unlink(tmpfile) + + def detect_modules(self): + # Ensure that /usr/local is always used + add_dir_to_list(self.compiler.library_dirs, '/usr/local/lib') + add_dir_to_list(self.compiler.include_dirs, '/usr/local/include') + self.add_multiarch_paths() + + # Add paths specified in the environment variables LDFLAGS and + # CPPFLAGS for header and library files. + # We must get the values from the Makefile and not the environment + # directly since an inconsistently reproducible issue comes up where + # the environment variable is not set even though the value were passed + # into configure and stored in the Makefile (issue found on OS X 10.3). + for env_var, arg_name, dir_list in ( + ('LDFLAGS', '-R', self.compiler.runtime_library_dirs), + ('LDFLAGS', '-L', self.compiler.library_dirs), + ('CPPFLAGS', '-I', self.compiler.include_dirs)): + env_val = sysconfig.get_config_var(env_var) + if env_val: + # To prevent optparse from raising an exception about any + # options in env_val that it doesn't know about we strip out + # all double dashes and any dashes followed by a character + # that is not for the option we are dealing with. + # + # Please note that order of the regex is important! We must + # strip out double-dashes first so that we don't end up with + # substituting "--Long" to "-Long" and thus lead to "ong" being + # used for a library directory. 
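
The comment above explains why the flag strings are scrubbed before being handed to optparse. A simpler standalone sketch that just walks the tokens (not the optparse-based parser the build uses below; it accepts both '-Ldir' and '-L dir' and ignores every other token):

    import shlex

    def dirs_from_flags(flags, prefix='-L'):
        # Collect directories given as '-Ldir' or '-L dir' in a flags string.
        dirs = []
        tokens = shlex.split(flags or '')
        i = 0
        while i < len(tokens):
            token = tokens[i]
            if token == prefix and i + 1 < len(tokens):
                dirs.append(tokens[i + 1])
                i += 2
                continue
            if token.startswith(prefix) and token != prefix:
                dirs.append(token[len(prefix):])
            i += 1
        return dirs

    # dirs_from_flags('-L/opt/ssl/lib -Wl,-rpath -L /usr/local/lib')
    #   -> ['/opt/ssl/lib', '/usr/local/lib']
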
+ env_val = re.sub(r'(^|\s+)-(-|(?!%s))' % arg_name[1], + ' ', env_val) + parser = optparse.OptionParser() + # Make sure that allowing args interspersed with options is + # allowed + parser.allow_interspersed_args = True + parser.error = lambda msg: None + parser.add_option(arg_name, dest="dirs", action="append") + options = parser.parse_args(env_val.split())[0] + if options.dirs: + for directory in reversed(options.dirs): + add_dir_to_list(dir_list, directory) + + if os.path.normpath(sys.prefix) != '/usr' \ + and not sysconfig.get_config_var('PYTHONFRAMEWORK'): + # OSX note: Don't add LIBDIR and INCLUDEDIR to building a framework + # (PYTHONFRAMEWORK is set) to avoid # linking problems when + # building a framework with different architectures than + # the one that is currently installed (issue #7473) + add_dir_to_list(self.compiler.library_dirs, + sysconfig.get_config_var("LIBDIR")) + add_dir_to_list(self.compiler.include_dirs, + sysconfig.get_config_var("INCLUDEDIR")) + + try: + have_unicode = unicode + except NameError: + have_unicode = 0 + + # lib_dirs and inc_dirs are used to search for files; + # if a file is found in one of those directories, it can + # be assumed that no additional -I,-L directives are needed. + lib_dirs = self.compiler.library_dirs + [ + '/lib64', '/usr/lib64', + '/lib', '/usr/lib', + ] + inc_dirs = self.compiler.include_dirs + ['/usr/include'] + exts = [] + missing = [] + + config_h = sysconfig.get_config_h_filename() + config_h_vars = sysconfig.parse_config_h(open(config_h)) + + platform = self.get_platform() + srcdir = sysconfig.get_config_var('srcdir') + + # Check for AtheOS which has libraries in non-standard locations + if platform == 'atheos': + lib_dirs += ['/system/libs', '/atheos/autolnk/lib'] + lib_dirs += os.getenv('LIBRARY_PATH', '').split(os.pathsep) + inc_dirs += ['/system/include', '/atheos/autolnk/include'] + inc_dirs += os.getenv('C_INCLUDE_PATH', '').split(os.pathsep) + + # OSF/1 and Unixware have some stuff in /usr/ccs/lib (like -ldb) + if platform in ['osf1', 'unixware7', 'openunix8']: + lib_dirs += ['/usr/ccs/lib'] + + if platform == 'darwin': + # This should work on any unixy platform ;-) + # If the user has bothered specifying additional -I and -L flags + # in OPT and LDFLAGS we might as well use them here. + # NOTE: using shlex.split would technically be more correct, but + # also gives a bootstrap problem. Let's hope nobody uses directories + # with whitespace in the name to store libraries. + cflags, ldflags = sysconfig.get_config_vars( + 'CFLAGS', 'LDFLAGS') + for item in cflags.split(): + if item.startswith('-I'): + inc_dirs.append(item[2:]) + + for item in ldflags.split(): + if item.startswith('-L'): + lib_dirs.append(item[2:]) + + # Check for MacOS X, which doesn't need libm.a at all + math_libs = ['m'] + if platform in ['darwin', 'beos']: + math_libs = [] + + # XXX Omitted modules: gl, pure, dl, SGI-specific modules + + # + # The following modules are all pretty straightforward, and compile + # on pretty much any POSIXish platform. + # + + # Some modules that are normally always on: + #exts.append( Extension('_weakref', ['_weakref.c']) ) + + # array objects + exts.append( Extension('array', ['arraymodule.c']) ) + # complex math library functions + exts.append( Extension('cmath', ['cmathmodule.c', '_math.c'], + depends=['_math.h'], + libraries=math_libs) ) + # math library functions, e.g. 
sin() + exts.append( Extension('math', ['mathmodule.c', '_math.c'], + depends=['_math.h'], + libraries=math_libs) ) + # fast string operations implemented in C + exts.append( Extension('strop', ['stropmodule.c']) ) + # time operations and variables + exts.append( Extension('time', ['timemodule.c'], + libraries=math_libs) ) + exts.append( Extension('datetime', ['datetimemodule.c', 'timemodule.c'], + libraries=math_libs) ) + # fast iterator tools implemented in C + exts.append( Extension("itertools", ["itertoolsmodule.c"]) ) + # code that will be builtins in the future, but conflict with the + # current builtins + exts.append( Extension('future_builtins', ['future_builtins.c']) ) + # random number generator implemented in C + exts.append( Extension("_random", ["_randommodule.c"]) ) + # high-performance collections + exts.append( Extension("_collections", ["_collectionsmodule.c"]) ) + # bisect + exts.append( Extension("_bisect", ["_bisectmodule.c"]) ) + # heapq + exts.append( Extension("_heapq", ["_heapqmodule.c"]) ) + # operator.add() and similar goodies + exts.append( Extension('operator', ['operator.c']) ) + # Python 3.1 _io library + exts.append( Extension("_io", + ["_io/bufferedio.c", "_io/bytesio.c", "_io/fileio.c", + "_io/iobase.c", "_io/_iomodule.c", "_io/stringio.c", "_io/textio.c"], + depends=["_io/_iomodule.h"], include_dirs=["Modules/_io"])) + # _functools + exts.append( Extension("_functools", ["_functoolsmodule.c"]) ) + # _json speedups + exts.append( Extension("_json", ["_json.c"]) ) + # Python C API test module + exts.append( Extension('_testcapi', ['_testcapimodule.c'], + depends=['testcapi_long.h']) ) + # profilers (_lsprof is for cProfile.py) + exts.append( Extension('_hotshot', ['_hotshot.c']) ) + exts.append( Extension('_lsprof', ['_lsprof.c', 'rotatingtree.c']) ) + # static Unicode character database + if have_unicode: + exts.append( Extension('unicodedata', ['unicodedata.c']) ) + else: + missing.append('unicodedata') + # access to ISO C locale support + data = open('pyconfig.h').read() + m = re.search(r"#s*define\s+WITH_LIBINTL\s+1\s*", data) + if m is not None: + locale_libs = ['intl'] + else: + locale_libs = [] + if platform == 'darwin': + locale_extra_link_args = ['-framework', 'CoreFoundation'] + else: + locale_extra_link_args = [] + + + exts.append( Extension('_locale', ['_localemodule.c'], + libraries=locale_libs, + extra_link_args=locale_extra_link_args) ) + + # Modules with some UNIX dependencies -- on by default: + # (If you have a really backward UNIX, select and socket may not be + # supported...) + + # fcntl(2) and ioctl(2) + libs = [] + if (config_h_vars.get('FLOCK_NEEDS_LIBBSD', False)): + # May be necessary on AIX for flock function + libs = ['bsd'] + exts.append( Extension('fcntl', ['fcntlmodule.c'], libraries=libs) ) + # pwd(3) + exts.append( Extension('pwd', ['pwdmodule.c']) ) + # grp(3) + exts.append( Extension('grp', ['grpmodule.c']) ) + # spwd, shadow passwords + if (config_h_vars.get('HAVE_GETSPNAM', False) or + config_h_vars.get('HAVE_GETSPENT', False)): + exts.append( Extension('spwd', ['spwdmodule.c']) ) + else: + missing.append('spwd') + + # select(2); not on ancient System V + exts.append( Extension('select', ['selectmodule.c']) ) + + # Fred Drake's interface to the Python parser + exts.append( Extension('parser', ['parsermodule.c']) ) + + # cStringIO and cPickle + exts.append( Extension('cStringIO', ['cStringIO.c']) ) + exts.append( Extension('cPickle', ['cPickle.c']) ) + + # Memory-mapped files (also works on Win32). 
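
Several probes above key off pyconfig.h macros (FLOCK_NEEDS_LIBBSD, HAVE_GETSPNAM, HAVE_GETSPENT). A minimal sketch of that check using the stdlib sysconfig helpers instead of an ad-hoc regex over the file:

    import sysconfig

    def have_config_macro(name):
        # True if pyconfig.h defines `name` to a non-empty, non-zero value.
        with open(sysconfig.get_config_h_filename()) as fp:
            config_vars = sysconfig.parse_config_h(fp)
        return bool(config_vars.get(name, 0))

    # have_config_macro('HAVE_GETSPNAM') decides whether spwd gets built
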
+ if platform not in ['atheos']: + exts.append( Extension('mmap', ['mmapmodule.c']) ) + else: + missing.append('mmap') + + # Lance Ellinghaus's syslog module + # syslog daemon interface + exts.append( Extension('syslog', ['syslogmodule.c']) ) + + # George Neville-Neil's timing module: + # Deprecated in PEP 4 http://www.python.org/peps/pep-0004.html + # http://mail.python.org/pipermail/python-dev/2006-January/060023.html + #exts.append( Extension('timing', ['timingmodule.c']) ) + + # + # Here ends the simple stuff. From here on, modules need certain + # libraries, are platform-specific, or present other surprises. + # + + # Multimedia modules + # These don't work for 64-bit platforms!!! + # These represent audio samples or images as strings: + + # Operations on audio samples + # According to #993173, this one should actually work fine on + # 64-bit platforms. + exts.append( Extension('audioop', ['audioop.c']) ) + + # Disabled on 64-bit platforms + if sys.maxint != 9223372036854775807L: + # Operations on images + exts.append( Extension('imageop', ['imageop.c']) ) + else: + missing.extend(['imageop']) + + # readline + do_readline = self.compiler.find_library_file(lib_dirs, 'readline') + readline_termcap_library = "" + curses_library = "" + # Determine if readline is already linked against curses or tinfo. + if do_readline and find_executable('ldd'): + fp = os.popen("ldd %s" % do_readline) + ldd_output = fp.readlines() + ret = fp.close() + if ret is None or ret >> 8 == 0: + for ln in ldd_output: + if 'curses' in ln: + readline_termcap_library = re.sub( + r'.*lib(n?cursesw?)\.so.*', r'\1', ln + ).rstrip() + break + if 'tinfo' in ln: # termcap interface split out from ncurses + readline_termcap_library = 'tinfo' + break + # Issue 7384: If readline is already linked against curses, + # use the same library for the readline and curses modules. + if 'curses' in readline_termcap_library: + curses_library = readline_termcap_library + elif self.compiler.find_library_file(lib_dirs, 'ncursesw'): + curses_library = 'ncursesw' + elif self.compiler.find_library_file(lib_dirs, 'ncurses'): + curses_library = 'ncurses' + elif self.compiler.find_library_file(lib_dirs, 'curses'): + curses_library = 'curses' + + if platform == 'darwin': + os_release = int(os.uname()[2].split('.')[0]) + dep_target = sysconfig.get_config_var('MACOSX_DEPLOYMENT_TARGET') + if dep_target and dep_target.split('.') < ['10', '5']: + os_release = 8 + if os_release < 9: + # MacOSX 10.4 has a broken readline. Don't try to build + # the readline module unless the user has installed a fixed + # readline package + if find_file('readline/rlconf.h', inc_dirs, []) is None: + do_readline = False + if do_readline: + if platform == 'darwin' and os_release < 9: + # In every directory on the search path search for a dynamic + # library and then a static library, instead of first looking + # for dynamic libraries on the entiry path. + # This way a staticly linked custom readline gets picked up + # before the (possibly broken) dynamic library in /usr/lib. + readline_extra_link_args = ('-Wl,-search_paths_first',) + else: + readline_extra_link_args = () + + readline_libs = ['readline'] + if readline_termcap_library: + pass # Issue 7384: Already linked against curses or tinfo. 
+ elif curses_library: + readline_libs.append(curses_library) + elif self.compiler.find_library_file(lib_dirs + + ['/usr/lib/termcap'], + 'termcap'): + readline_libs.append('termcap') + exts.append( Extension('readline', ['readline.c'], + library_dirs=['/usr/lib/termcap'], + extra_link_args=readline_extra_link_args, + libraries=readline_libs) ) + else: + missing.append('readline') + + # crypt module. + + if self.compiler.find_library_file(lib_dirs, 'crypt'): + libs = ['crypt'] + else: + libs = [] + exts.append( Extension('crypt', ['cryptmodule.c'], libraries=libs) ) + + # CSV files + exts.append( Extension('_csv', ['_csv.c']) ) + + # socket(2) + exts.append( Extension('_socket', ['socketmodule.c'], + depends = ['socketmodule.h']) ) + # Detect SSL support for the socket module (via _ssl) + search_for_ssl_incs_in = [ + '/usr/local/ssl/include', + '/usr/contrib/ssl/include/' + ] + ssl_incs = find_file('openssl/ssl.h', inc_dirs, + search_for_ssl_incs_in + ) + if ssl_incs is not None: + krb5_h = find_file('krb5.h', inc_dirs, + ['/usr/kerberos/include']) + if krb5_h: + ssl_incs += krb5_h + ssl_libs = find_library_file(self.compiler, 'ssl',lib_dirs, + ['/usr/local/ssl/lib', + '/usr/contrib/ssl/lib/' + ] ) + + if (ssl_incs is not None and + ssl_libs is not None): + exts.append( Extension('_ssl', ['_ssl.c'], + include_dirs = ssl_incs, + library_dirs = ssl_libs, + libraries = ['ssl', 'crypto'], + depends = ['socketmodule.h']), ) + else: + missing.append('_ssl') + + # find out which version of OpenSSL we have + openssl_ver = 0 + openssl_ver_re = re.compile( + '^\s*#\s*define\s+OPENSSL_VERSION_NUMBER\s+(0x[0-9a-fA-F]+)' ) + + # look for the openssl version header on the compiler search path. + opensslv_h = find_file('openssl/opensslv.h', [], + inc_dirs + search_for_ssl_incs_in) + if opensslv_h: + name = os.path.join(opensslv_h[0], 'openssl/opensslv.h') + if sys.platform == 'darwin' and is_macosx_sdk_path(name): + name = os.path.join(macosx_sdk_root(), name[1:]) + try: + incfile = open(name, 'r') + for line in incfile: + m = openssl_ver_re.match(line) + if m: + openssl_ver = eval(m.group(1)) + except IOError, msg: + print "IOError while reading opensshv.h:", msg + pass + + min_openssl_ver = 0x00907000 + have_any_openssl = ssl_incs is not None and ssl_libs is not None + have_usable_openssl = (have_any_openssl and + openssl_ver >= min_openssl_ver) + + if have_any_openssl: + if have_usable_openssl: + # The _hashlib module wraps optimized implementations + # of hash functions from the OpenSSL library. + exts.append( Extension('_hashlib', ['_hashopenssl.c'], + include_dirs = ssl_incs, + library_dirs = ssl_libs, + libraries = ['ssl', 'crypto']) ) + else: + print ("warning: openssl 0x%08x is too old for _hashlib" % + openssl_ver) + missing.append('_hashlib') + if COMPILED_WITH_PYDEBUG or not have_usable_openssl: + # The _sha module implements the SHA1 hash algorithm. + exts.append( Extension('_sha', ['shamodule.c']) ) + # The _md5 module implements the RSA Data Security, Inc. MD5 + # Message-Digest Algorithm, described in RFC 1321. The + # necessary files md5.c and md5.h are included here. 
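
The opensslv.h scan above evaluates the matched hex literal with eval(); the same result can be had with int(..., 16). A standalone sketch (the header path is whichever opensslv.h the include search found):

    import re

    OPENSSL_VER_RE = re.compile(
        r'\s*#\s*define\s+OPENSSL_VERSION_NUMBER\s+(0x[0-9a-fA-F]+)')

    def openssl_version_number(opensslv_h_path):
        # Return OPENSSL_VERSION_NUMBER as an int, or 0 if it is not found.
        with open(opensslv_h_path) as fp:
            for line in fp:
                m = OPENSSL_VER_RE.match(line)
                if m:
                    return int(m.group(1), 16)
        return 0

    # openssl_version_number('/usr/include/openssl/opensslv.h') >= 0x00907000
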
+ exts.append( Extension('_md5', + sources = ['md5module.c', 'md5.c'], + depends = ['md5.h']) ) + + min_sha2_openssl_ver = 0x00908000 + if COMPILED_WITH_PYDEBUG or openssl_ver < min_sha2_openssl_ver: + # OpenSSL doesn't do these until 0.9.8 so we'll bring our own hash + exts.append( Extension('_sha256', ['sha256module.c']) ) + exts.append( Extension('_sha512', ['sha512module.c']) ) + + # Modules that provide persistent dictionary-like semantics. You will + # probably want to arrange for at least one of them to be available on + # your machine, though none are defined by default because of library + # dependencies. The Python module anydbm.py provides an + # implementation independent wrapper for these; dumbdbm.py provides + # similar functionality (but slower of course) implemented in Python. + + # Sleepycat^WOracle Berkeley DB interface. + # http://www.oracle.com/database/berkeley-db/db/index.html + # + # This requires the Sleepycat^WOracle DB code. The supported versions + # are set below. Visit the URL above to download + # a release. Most open source OSes come with one or more + # versions of BerkeleyDB already installed. + + max_db_ver = (4, 8) + min_db_ver = (4, 1) + db_setup_debug = False # verbose debug prints from this script? + + def allow_db_ver(db_ver): + """Returns a boolean if the given BerkeleyDB version is acceptable. + + Args: + db_ver: A tuple of the version to verify. + """ + if not (min_db_ver <= db_ver <= max_db_ver): + return False + # Use this function to filter out known bad configurations. + if (4, 6) == db_ver[:2]: + # BerkeleyDB 4.6.x is not stable on many architectures. + arch = platform_machine() + if arch not in ('i386', 'i486', 'i586', 'i686', + 'x86_64', 'ia64'): + return False + return True + + def gen_db_minor_ver_nums(major): + if major == 4: + for x in range(max_db_ver[1]+1): + if allow_db_ver((4, x)): + yield x + elif major == 3: + for x in (3,): + if allow_db_ver((3, x)): + yield x + else: + raise ValueError("unknown major BerkeleyDB version", major) + + # construct a list of paths to look for the header file in on + # top of the normal inc_dirs. + db_inc_paths = [ + '/usr/include/db4', + '/usr/local/include/db4', + '/opt/sfw/include/db4', + '/usr/include/db3', + '/usr/local/include/db3', + '/opt/sfw/include/db3', + # Fink defaults (http://fink.sourceforge.net/) + '/sw/include/db4', + '/sw/include/db3', + ] + # 4.x minor number specific paths + for x in gen_db_minor_ver_nums(4): + db_inc_paths.append('/usr/include/db4%d' % x) + db_inc_paths.append('/usr/include/db4.%d' % x) + db_inc_paths.append('/usr/local/BerkeleyDB.4.%d/include' % x) + db_inc_paths.append('/usr/local/include/db4%d' % x) + db_inc_paths.append('/pkg/db-4.%d/include' % x) + db_inc_paths.append('/opt/db-4.%d/include' % x) + # MacPorts default (http://www.macports.org/) + db_inc_paths.append('/opt/local/include/db4%d' % x) + # 3.x minor number specific paths + for x in gen_db_minor_ver_nums(3): + db_inc_paths.append('/usr/include/db3%d' % x) + db_inc_paths.append('/usr/local/BerkeleyDB.3.%d/include' % x) + db_inc_paths.append('/usr/local/include/db3%d' % x) + db_inc_paths.append('/pkg/db-3.%d/include' % x) + db_inc_paths.append('/opt/db-3.%d/include' % x) + + # Add some common subdirectories for Sleepycat DB to the list, + # based on the standard include directories. This way DB3/4 gets + # picked up when it is installed in a non-standard prefix and + # the user has added that prefix into inc_dirs. 
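
A condensed sketch of the directory fan-out the comment above describes; the allow_db_ver() gating and the hard-coded /opt, /sw and /pkg prefixes are left out, and the version range is illustrative:

    import os

    def db_header_variants(inc_dirs, majors=(4, 3), max_minor=8):
        # For each include dir, try 'db4', 'db4.7', 'db47', ... subdirectories.
        variants = []
        for dn in inc_dirs:
            for major in majors:
                variants.append(os.path.join(dn, 'db%d' % major))
                for minor in range(max_minor, -1, -1):
                    variants.append(os.path.join(dn, 'db%d.%d' % (major, minor)))
                    variants.append(os.path.join(dn, 'db%d%d' % (major, minor)))
        return [d for d in variants if os.path.isdir(d)]

    # db_header_variants(['/usr/include', '/usr/local/include'])
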
+ std_variants = [] + for dn in inc_dirs: + std_variants.append(os.path.join(dn, 'db3')) + std_variants.append(os.path.join(dn, 'db4')) + for x in gen_db_minor_ver_nums(4): + std_variants.append(os.path.join(dn, "db4%d"%x)) + std_variants.append(os.path.join(dn, "db4.%d"%x)) + for x in gen_db_minor_ver_nums(3): + std_variants.append(os.path.join(dn, "db3%d"%x)) + std_variants.append(os.path.join(dn, "db3.%d"%x)) + + db_inc_paths = std_variants + db_inc_paths + db_inc_paths = [p for p in db_inc_paths if os.path.exists(p)] + + db_ver_inc_map = {} + + if sys.platform == 'darwin': + sysroot = macosx_sdk_root() + + class db_found(Exception): pass + try: + # See whether there is a Sleepycat header in the standard + # search path. + for d in inc_dirs + db_inc_paths: + f = os.path.join(d, "db.h") + + if sys.platform == 'darwin' and is_macosx_sdk_path(d): + f = os.path.join(sysroot, d[1:], "db.h") + + if db_setup_debug: print "db: looking for db.h in", f + if os.path.exists(f): + f = open(f).read() + m = re.search(r"#define\WDB_VERSION_MAJOR\W(\d+)", f) + if m: + db_major = int(m.group(1)) + m = re.search(r"#define\WDB_VERSION_MINOR\W(\d+)", f) + db_minor = int(m.group(1)) + db_ver = (db_major, db_minor) + + # Avoid 4.6 prior to 4.6.21 due to a BerkeleyDB bug + if db_ver == (4, 6): + m = re.search(r"#define\WDB_VERSION_PATCH\W(\d+)", f) + db_patch = int(m.group(1)) + if db_patch < 21: + print "db.h:", db_ver, "patch", db_patch, + print "being ignored (4.6.x must be >= 4.6.21)" + continue + + if ( (db_ver not in db_ver_inc_map) and + allow_db_ver(db_ver) ): + # save the include directory with the db.h version + # (first occurrence only) + db_ver_inc_map[db_ver] = d + if db_setup_debug: + print "db.h: found", db_ver, "in", d + else: + # we already found a header for this library version + if db_setup_debug: print "db.h: ignoring", d + else: + # ignore this header, it didn't contain a version number + if db_setup_debug: + print "db.h: no version number version in", d + + db_found_vers = db_ver_inc_map.keys() + db_found_vers.sort() + + while db_found_vers: + db_ver = db_found_vers.pop() + db_incdir = db_ver_inc_map[db_ver] + + # check lib directories parallel to the location of the header + db_dirs_to_check = [ + db_incdir.replace("include", 'lib64'), + db_incdir.replace("include", 'lib'), + ] + + if sys.platform != 'darwin': + db_dirs_to_check = filter(os.path.isdir, db_dirs_to_check) + + else: + # Same as other branch, but takes OSX SDK into account + tmp = [] + for dn in db_dirs_to_check: + if is_macosx_sdk_path(dn): + if os.path.isdir(os.path.join(sysroot, dn[1:])): + tmp.append(dn) + else: + if os.path.isdir(dn): + tmp.append(dn) + db_dirs_to_check = tmp + + # Look for a version specific db-X.Y before an ambiguoius dbX + # XXX should we -ever- look for a dbX name? Do any + # systems really not name their library by version and + # symlink to more general names? 
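
The db.h scan above reduces to one small helper; a sketch that returns the full version triple, or None when any of the three fields is missing:

    import re

    def berkeleydb_version(db_h_path):
        # Parse '#define DB_VERSION_MAJOR 4' style lines out of db.h.
        with open(db_h_path) as fp:
            text = fp.read()
        parts = []
        for field in ('MAJOR', 'MINOR', 'PATCH'):
            m = re.search(r'#define\s+DB_VERSION_%s\s+(\d+)' % field, text)
            if m is None:
                return None
            parts.append(int(m.group(1)))
        return tuple(parts)

    # berkeleydb_version('/usr/include/db4/db.h') -> e.g. (4, 7, 25)
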
+ for dblib in (('db-%d.%d' % db_ver), + ('db%d%d' % db_ver), + ('db%d' % db_ver[0])): + dblib_file = self.compiler.find_library_file( + db_dirs_to_check + lib_dirs, dblib ) + if dblib_file: + dblib_dir = [ os.path.abspath(os.path.dirname(dblib_file)) ] + raise db_found + else: + if db_setup_debug: print "db lib: ", dblib, "not found" + + except db_found: + if db_setup_debug: + print "bsddb using BerkeleyDB lib:", db_ver, dblib + print "bsddb lib dir:", dblib_dir, " inc dir:", db_incdir + db_incs = [db_incdir] + dblibs = [dblib] + # We add the runtime_library_dirs argument because the + # BerkeleyDB lib we're linking against often isn't in the + # system dynamic library search path. This is usually + # correct and most trouble free, but may cause problems in + # some unusual system configurations (e.g. the directory + # is on an NFS server that goes away). + exts.append(Extension('_bsddb', ['_bsddb.c'], + depends = ['bsddb.h'], + library_dirs=dblib_dir, + runtime_library_dirs=dblib_dir, + include_dirs=db_incs, + libraries=dblibs)) + else: + if db_setup_debug: print "db: no appropriate library found" + db_incs = None + dblibs = [] + dblib_dir = None + missing.append('_bsddb') + + # The sqlite interface + sqlite_setup_debug = False # verbose debug prints from this script? + + # We hunt for #define SQLITE_VERSION "n.n.n" + # We need to find >= sqlite version 3.0.8 + sqlite_incdir = sqlite_libdir = None + sqlite_inc_paths = [ '/usr/include', + '/usr/include/sqlite', + '/usr/include/sqlite3', + '/usr/local/include', + '/usr/local/include/sqlite', + '/usr/local/include/sqlite3', + ] + MIN_SQLITE_VERSION_NUMBER = (3, 0, 8) + MIN_SQLITE_VERSION = ".".join([str(x) + for x in MIN_SQLITE_VERSION_NUMBER]) + + # Scan the default include directories before the SQLite specific + # ones. This allows one to override the copy of sqlite on OSX, + # where /usr/include contains an old version of sqlite. + if sys.platform == 'darwin': + sysroot = macosx_sdk_root() + + for d in inc_dirs + sqlite_inc_paths: + f = os.path.join(d, "sqlite3.h") + + if sys.platform == 'darwin' and is_macosx_sdk_path(d): + f = os.path.join(sysroot, d[1:], "sqlite3.h") + + if os.path.exists(f): + if sqlite_setup_debug: print "sqlite: found %s"%f + incf = open(f).read() + m = re.search( + r'\s*.*#\s*.*define\s.*SQLITE_VERSION\W*"(.*)"', incf) + if m: + sqlite_version = m.group(1) + sqlite_version_tuple = tuple([int(x) + for x in sqlite_version.split(".")]) + if sqlite_version_tuple >= MIN_SQLITE_VERSION_NUMBER: + # we win! 
+ if sqlite_setup_debug: + print "%s/sqlite3.h: version %s"%(d, sqlite_version) + sqlite_incdir = d + break + else: + if sqlite_setup_debug: + print "%s: version %d is too old, need >= %s"%(d, + sqlite_version, MIN_SQLITE_VERSION) + elif sqlite_setup_debug: + print "sqlite: %s had no SQLITE_VERSION"%(f,) + + if sqlite_incdir: + sqlite_dirs_to_check = [ + os.path.join(sqlite_incdir, '..', 'lib64'), + os.path.join(sqlite_incdir, '..', 'lib'), + os.path.join(sqlite_incdir, '..', '..', 'lib64'), + os.path.join(sqlite_incdir, '..', '..', 'lib'), + ] + sqlite_libfile = self.compiler.find_library_file( + sqlite_dirs_to_check + lib_dirs, 'sqlite3') + if sqlite_libfile: + sqlite_libdir = [os.path.abspath(os.path.dirname(sqlite_libfile))] + + if sqlite_incdir and sqlite_libdir: + sqlite_srcs = ['_sqlite/cache.c', + '_sqlite/connection.c', + '_sqlite/cursor.c', + '_sqlite/microprotocols.c', + '_sqlite/module.c', + '_sqlite/prepare_protocol.c', + '_sqlite/row.c', + '_sqlite/statement.c', + '_sqlite/util.c', ] + + sqlite_defines = [] + if sys.platform != "win32": + sqlite_defines.append(('MODULE_NAME', '"sqlite3"')) + else: + sqlite_defines.append(('MODULE_NAME', '\\"sqlite3\\"')) + + # Comment this out if you want the sqlite3 module to be able to load extensions. + sqlite_defines.append(("SQLITE_OMIT_LOAD_EXTENSION", "1")) + + if sys.platform == 'darwin': + # In every directory on the search path search for a dynamic + # library and then a static library, instead of first looking + # for dynamic libraries on the entiry path. + # This way a staticly linked custom sqlite gets picked up + # before the dynamic library in /usr/lib. + sqlite_extra_link_args = ('-Wl,-search_paths_first',) + else: + sqlite_extra_link_args = () + + exts.append(Extension('_sqlite3', sqlite_srcs, + define_macros=sqlite_defines, + include_dirs=["Modules/_sqlite", + sqlite_incdir], + library_dirs=sqlite_libdir, + runtime_library_dirs=sqlite_libdir, + extra_link_args=sqlite_extra_link_args, + libraries=["sqlite3",])) + else: + missing.append('_sqlite3') + + # Look for Berkeley db 1.85. Note that it is built as a different + # module name so it can be included even when later versions are + # available. A very restrictive search is performed to avoid + # accidentally building this module with a later version of the + # underlying db library. May BSD-ish Unixes incorporate db 1.85 + # symbols into libc and place the include file in /usr/include. + # + # If the better bsddb library can be built (db_incs is defined) + # we do not build this one. Otherwise this build will pick up + # the more recent berkeleydb's db.h file first in the include path + # when attempting to compile and it will fail. 
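
The SQLITE_VERSION check above converts the dotted string to an integer tuple before comparing; the whole probe fits in one helper. A sketch, using the same 3.0.8 minimum:

    import re

    MIN_SQLITE = (3, 0, 8)

    def sqlite_header_ok(sqlite3_h_path):
        # Parse '#define SQLITE_VERSION "3.x.y"' and compare as a tuple.
        with open(sqlite3_h_path) as fp:
            m = re.search(r'#\s*define\s+SQLITE_VERSION\s+"([0-9.]+)"',
                          fp.read())
        if m is None:
            return False
        version = tuple(int(p) for p in m.group(1).split('.'))
        return version >= MIN_SQLITE

    # sqlite_header_ok('/usr/include/sqlite3.h')
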
+ f = "/usr/include/db.h" + + if sys.platform == 'darwin': + if is_macosx_sdk_path(f): + sysroot = macosx_sdk_root() + f = os.path.join(sysroot, f[1:]) + + if os.path.exists(f) and not db_incs: + data = open(f).read() + m = re.search(r"#s*define\s+HASHVERSION\s+2\s*", data) + if m is not None: + # bingo - old version used hash file format version 2 + ### XXX this should be fixed to not be platform-dependent + ### but I don't have direct access to an osf1 platform and + ### seemed to be muffing the search somehow + libraries = platform == "osf1" and ['db'] or None + if libraries is not None: + exts.append(Extension('bsddb185', ['bsddbmodule.c'], + libraries=libraries)) + else: + exts.append(Extension('bsddb185', ['bsddbmodule.c'])) + else: + missing.append('bsddb185') + else: + missing.append('bsddb185') + + dbm_order = ['gdbm'] + # The standard Unix dbm module: + if platform not in ['cygwin']: + config_args = [arg.strip("'") + for arg in sysconfig.get_config_var("CONFIG_ARGS").split()] + dbm_args = [arg for arg in config_args + if arg.startswith('--with-dbmliborder=')] + if dbm_args: + dbm_order = [arg.split('=')[-1] for arg in dbm_args][-1].split(":") + else: + dbm_order = "ndbm:gdbm:bdb".split(":") + dbmext = None + for cand in dbm_order: + if cand == "ndbm": + if find_file("ndbm.h", inc_dirs, []) is not None: + # Some systems have -lndbm, others don't + if self.compiler.find_library_file(lib_dirs, + 'ndbm'): + ndbm_libs = ['ndbm'] + else: + ndbm_libs = [] + print "building dbm using ndbm" + dbmext = Extension('dbm', ['dbmmodule.c'], + define_macros=[ + ('HAVE_NDBM_H',None), + ], + libraries=ndbm_libs) + break + + elif cand == "gdbm": + if self.compiler.find_library_file(lib_dirs, 'gdbm'): + gdbm_libs = ['gdbm'] + if self.compiler.find_library_file(lib_dirs, + 'gdbm_compat'): + gdbm_libs.append('gdbm_compat') + if find_file("gdbm/ndbm.h", inc_dirs, []) is not None: + print "building dbm using gdbm" + dbmext = Extension( + 'dbm', ['dbmmodule.c'], + define_macros=[ + ('HAVE_GDBM_NDBM_H', None), + ], + libraries = gdbm_libs) + break + if find_file("gdbm-ndbm.h", inc_dirs, []) is not None: + print "building dbm using gdbm" + dbmext = Extension( + 'dbm', ['dbmmodule.c'], + define_macros=[ + ('HAVE_GDBM_DASH_NDBM_H', None), + ], + libraries = gdbm_libs) + break + elif cand == "bdb": + if db_incs is not None: + print "building dbm using bdb" + dbmext = Extension('dbm', ['dbmmodule.c'], + library_dirs=dblib_dir, + runtime_library_dirs=dblib_dir, + include_dirs=db_incs, + define_macros=[ + ('HAVE_BERKDB_H', None), + ('DB_DBM_HSEARCH', None), + ], + libraries=dblibs) + break + if dbmext is not None: + exts.append(dbmext) + else: + missing.append('dbm') + + # Anthony Baxter's gdbm module. GNU dbm(3) will require -lgdbm: + if ('gdbm' in dbm_order and + self.compiler.find_library_file(lib_dirs, 'gdbm')): + exts.append( Extension('gdbm', ['gdbmmodule.c'], + libraries = ['gdbm'] ) ) + else: + missing.append('gdbm') + + # Unix-only modules + if platform not in ['win32']: + # Steen Lumholt's termios module + exts.append( Extension('termios', ['termios.c']) ) + # Jeremy Hylton's rlimit interface + if platform not in ['atheos']: + exts.append( Extension('resource', ['resource.c']) ) + else: + missing.append('resource') + + # Sun yellow pages. Some systems have the functions in libc. 
+ if (platform not in ['cygwin', 'atheos', 'qnx6'] and + find_file('rpcsvc/yp_prot.h', inc_dirs, []) is not None): + if (self.compiler.find_library_file(lib_dirs, 'nsl')): + libs = ['nsl'] + else: + libs = [] + exts.append( Extension('nis', ['nismodule.c'], + libraries = libs) ) + else: + missing.append('nis') + else: + missing.extend(['nis', 'resource', 'termios']) + + # Curses support, requiring the System V version of curses, often + # provided by the ncurses library. + panel_library = 'panel' + if curses_library.startswith('ncurses'): + if curses_library == 'ncursesw': + # Bug 1464056: If _curses.so links with ncursesw, + # _curses_panel.so must link with panelw. + panel_library = 'panelw' + curses_libs = [curses_library] + exts.append( Extension('_curses', ['_cursesmodule.c'], + libraries = curses_libs) ) + elif curses_library == 'curses' and platform != 'darwin': + # OSX has an old Berkeley curses, not good enough for + # the _curses module. + if (self.compiler.find_library_file(lib_dirs, 'terminfo')): + curses_libs = ['curses', 'terminfo'] + elif (self.compiler.find_library_file(lib_dirs, 'termcap')): + curses_libs = ['curses', 'termcap'] + else: + curses_libs = ['curses'] + + exts.append( Extension('_curses', ['_cursesmodule.c'], + libraries = curses_libs) ) + else: + missing.append('_curses') + + # If the curses module is enabled, check for the panel module + if (module_enabled(exts, '_curses') and + self.compiler.find_library_file(lib_dirs, panel_library)): + exts.append( Extension('_curses_panel', ['_curses_panel.c'], + libraries = [panel_library] + curses_libs) ) + else: + missing.append('_curses_panel') + + # Andrew Kuchling's zlib module. Note that some versions of zlib + # 1.1.3 have security problems. See CERT Advisory CA-2002-07: + # http://www.cert.org/advisories/CA-2002-07.html + # + # zlib 1.1.4 is fixed, but at least one vendor (RedHat) has decided to + # patch its zlib 1.1.3 package instead of upgrading to 1.1.4. For + # now, we still accept 1.1.3, because we think it's difficult to + # exploit this in Python, and we'd rather make it RedHat's problem + # than our problem . + # + # You can upgrade zlib to version 1.1.4 yourself by going to + # http://www.gzip.org/zlib/ + zlib_inc = find_file('zlib.h', [], inc_dirs) + have_zlib = False + if zlib_inc is not None: + zlib_h = zlib_inc[0] + '/zlib.h' + version = '"0.0.0"' + version_req = '"1.1.3"' + fp = open(zlib_h) + while 1: + line = fp.readline() + if not line: + break + if line.startswith('#define ZLIB_VERSION'): + version = line.split()[2] + break + if version >= version_req: + if (self.compiler.find_library_file(lib_dirs, 'z')): + if sys.platform == "darwin": + zlib_extra_link_args = ('-Wl,-search_paths_first',) + else: + zlib_extra_link_args = () + exts.append( Extension('zlib', ['zlibmodule.c'], + libraries = ['z'], + extra_link_args = zlib_extra_link_args)) + have_zlib = True + else: + missing.append('zlib') + else: + missing.append('zlib') + else: + missing.append('zlib') + + # Helper module for various ascii-encoders. Uses zlib for an optimized + # crc32 if we have it. Otherwise binascii uses its own. + if have_zlib: + extra_compile_args = ['-DUSE_ZLIB_CRC32'] + libraries = ['z'] + extra_link_args = zlib_extra_link_args + else: + extra_compile_args = [] + libraries = [] + extra_link_args = [] + exts.append( Extension('binascii', ['binascii.c'], + extra_compile_args = extra_compile_args, + libraries = libraries, + extra_link_args = extra_link_args) ) + + # Gustavo Niemeyer's bz2 module. 
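
The ZLIB_VERSION check above compares the quoted strings directly, which works for '1.1.3' but would misorder a two-digit component such as '1.10.0'. A sketch that compares integer tuples instead (the header path is only a common default):

    import re

    def zlib_version_tuple(zlib_h_path='/usr/include/zlib.h'):
        # Parse '#define ZLIB_VERSION "1.2.11"' into (1, 2, 11).
        with open(zlib_h_path) as fp:
            m = re.search(r'#\s*define\s+ZLIB_VERSION\s+"(\d+(?:\.\d+)*)',
                          fp.read())
        if m is None:
            return None
        return tuple(int(p) for p in m.group(1).split('.'))

    # build zlib when zlib_version_tuple() is not None and >= (1, 1, 3)
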
+ if (self.compiler.find_library_file(lib_dirs, 'bz2')): + if sys.platform == "darwin": + bz2_extra_link_args = ('-Wl,-search_paths_first',) + else: + bz2_extra_link_args = () + exts.append( Extension('bz2', ['bz2module.c'], + libraries = ['bz2'], + extra_link_args = bz2_extra_link_args) ) + else: + missing.append('bz2') + + # Interface to the Expat XML parser + # + # Expat was written by James Clark and is now maintained by a group of + # developers on SourceForge; see www.libexpat.org for more information. + # The pyexpat module was written by Paul Prescod after a prototype by + # Jack Jansen. The Expat source is included in Modules/expat/. Usage + # of a system shared libexpat.so is possible with --with-system-expat + # configure option. + # + # More information on Expat can be found at www.libexpat.org. + # + if '--with-system-expat' in sysconfig.get_config_var("CONFIG_ARGS"): + expat_inc = [] + define_macros = [] + expat_lib = ['expat'] + expat_sources = [] + else: + expat_inc = [os.path.join(os.getcwd(), srcdir, 'Modules', 'expat')] + define_macros = [ + ('HAVE_EXPAT_CONFIG_H', '1'), + ] + expat_lib = [] + expat_sources = ['expat/xmlparse.c', + 'expat/xmlrole.c', + 'expat/xmltok.c'] + + exts.append(Extension('pyexpat', + define_macros = define_macros, + include_dirs = expat_inc, + libraries = expat_lib, + sources = ['pyexpat.c'] + expat_sources + )) + + # Fredrik Lundh's cElementTree module. Note that this also + # uses expat (via the CAPI hook in pyexpat). + + if os.path.isfile(os.path.join(srcdir, 'Modules', '_elementtree.c')): + define_macros.append(('USE_PYEXPAT_CAPI', None)) + exts.append(Extension('_elementtree', + define_macros = define_macros, + include_dirs = expat_inc, + libraries = expat_lib, + sources = ['_elementtree.c'], + )) + else: + missing.append('_elementtree') + + # Hye-Shik Chang's CJKCodecs modules. + if have_unicode: + exts.append(Extension('_multibytecodec', + ['cjkcodecs/multibytecodec.c'])) + for loc in ('kr', 'jp', 'cn', 'tw', 'hk', 'iso2022'): + exts.append(Extension('_codecs_%s' % loc, + ['cjkcodecs/_codecs_%s.c' % loc])) + else: + missing.append('_multibytecodec') + for loc in ('kr', 'jp', 'cn', 'tw', 'hk', 'iso2022'): + missing.append('_codecs_%s' % loc) + + # Dynamic loading module + if sys.maxint == 0x7fffffff: + # This requires sizeof(int) == sizeof(long) == sizeof(char*) + dl_inc = find_file('dlfcn.h', [], inc_dirs) + if (dl_inc is not None) and (platform not in ['atheos']): + exts.append( Extension('dl', ['dlmodule.c']) ) + else: + missing.append('dl') + else: + missing.append('dl') + + # Thomas Heller's _ctypes module + self.detect_ctypes(inc_dirs, lib_dirs) + + # Richard Oudkerk's multiprocessing module + if platform == 'win32': # Windows + macros = dict() + libraries = ['ws2_32'] + + elif platform == 'darwin': # Mac OSX + macros = dict() + libraries = [] + + elif platform == 'cygwin': # Cygwin + macros = dict() + libraries = [] + + elif platform in ('freebsd4', 'freebsd5', 'freebsd6', 'freebsd7', 'freebsd8'): + # FreeBSD's P1003.1b semaphore support is very experimental + # and has many known problems. 
(as of June 2008) + macros = dict() + libraries = [] + + elif platform.startswith('openbsd'): + macros = dict() + libraries = [] + + elif platform.startswith('netbsd'): + macros = dict() + libraries = [] + + else: # Linux and other unices + macros = dict() + libraries = ['rt'] + + if platform == 'win32': + multiprocessing_srcs = [ '_multiprocessing/multiprocessing.c', + '_multiprocessing/semaphore.c', + '_multiprocessing/pipe_connection.c', + '_multiprocessing/socket_connection.c', + '_multiprocessing/win32_functions.c' + ] + + else: + multiprocessing_srcs = [ '_multiprocessing/multiprocessing.c', + '_multiprocessing/socket_connection.c' + ] + if (sysconfig.get_config_var('HAVE_SEM_OPEN') and not + sysconfig.get_config_var('POSIX_SEMAPHORES_NOT_ENABLED')): + multiprocessing_srcs.append('_multiprocessing/semaphore.c') + + if sysconfig.get_config_var('WITH_THREAD'): + exts.append ( Extension('_multiprocessing', multiprocessing_srcs, + define_macros=macros.items(), + include_dirs=["Modules/_multiprocessing"])) + else: + missing.append('_multiprocessing') + + # End multiprocessing + + + # Platform-specific libraries + if platform == 'linux2': + # Linux-specific modules + exts.append( Extension('linuxaudiodev', ['linuxaudiodev.c']) ) + else: + missing.append('linuxaudiodev') + + if (platform in ('linux2', 'freebsd4', 'freebsd5', 'freebsd6', + 'freebsd7', 'freebsd8') + or platform.startswith("gnukfreebsd")): + exts.append( Extension('ossaudiodev', ['ossaudiodev.c']) ) + else: + missing.append('ossaudiodev') + + if platform == 'sunos5': + # SunOS specific modules + exts.append( Extension('sunaudiodev', ['sunaudiodev.c']) ) + else: + missing.append('sunaudiodev') + + if platform == 'darwin': + # _scproxy + exts.append(Extension("_scproxy", [os.path.join(srcdir, "Mac/Modules/_scproxy.c")], + extra_link_args= [ + '-framework', 'SystemConfiguration', + '-framework', 'CoreFoundation' + ])) + + + if platform == 'darwin' and ("--disable-toolbox-glue" not in + sysconfig.get_config_var("CONFIG_ARGS")): + + if int(os.uname()[2].split('.')[0]) >= 8: + # We're on Mac OS X 10.4 or later, the compiler should + # support '-Wno-deprecated-declarations'. This will + # surpress deprecation warnings for the Carbon extensions, + # these extensions wrap the Carbon APIs and even those + # parts that are deprecated. + carbon_extra_compile_args = ['-Wno-deprecated-declarations'] + else: + carbon_extra_compile_args = [] + + # Mac OS X specific modules. + def macSrcExists(name1, name2=''): + if not name1: + return None + names = (name1,) + if name2: + names = (name1, name2) + path = os.path.join(srcdir, 'Mac', 'Modules', *names) + return os.path.exists(path) + + def addMacExtension(name, kwds, extra_srcs=[]): + dirname = '' + if name[0] == '_': + dirname = name[1:].lower() + cname = name + '.c' + cmodulename = name + 'module.c' + # Check for NNN.c, NNNmodule.c, _nnn/NNN.c, _nnn/NNNmodule.c + if macSrcExists(cname): + srcs = [cname] + elif macSrcExists(cmodulename): + srcs = [cmodulename] + elif macSrcExists(dirname, cname): + # XXX(nnorwitz): If all the names ended with module, we + # wouldn't need this condition. ibcarbon is the only one. 
+ srcs = [os.path.join(dirname, cname)] + elif macSrcExists(dirname, cmodulename): + srcs = [os.path.join(dirname, cmodulename)] + else: + raise RuntimeError("%s not found" % name) + + # Here's the whole point: add the extension with sources + exts.append(Extension(name, srcs + extra_srcs, **kwds)) + + # Core Foundation + core_kwds = {'extra_compile_args': carbon_extra_compile_args, + 'extra_link_args': ['-framework', 'CoreFoundation'], + } + addMacExtension('_CF', core_kwds, ['cf/pycfbridge.c']) + addMacExtension('autoGIL', core_kwds) + + + + # Carbon + carbon_kwds = {'extra_compile_args': carbon_extra_compile_args, + 'extra_link_args': ['-framework', 'Carbon'], + } + CARBON_EXTS = ['ColorPicker', 'gestalt', 'MacOS', 'Nav', + 'OSATerminology', 'icglue', + # All these are in subdirs + '_AE', '_AH', '_App', '_CarbonEvt', '_Cm', '_Ctl', + '_Dlg', '_Drag', '_Evt', '_File', '_Folder', '_Fm', + '_Help', '_Icn', '_IBCarbon', '_List', + '_Menu', '_Mlte', '_OSA', '_Res', '_Qd', '_Qdoffs', + '_Scrap', '_Snd', '_TE', + ] + for name in CARBON_EXTS: + addMacExtension(name, carbon_kwds) + + # Workaround for a bug in the version of gcc shipped with Xcode 3. + # The _Win extension should build just like the other Carbon extensions, but + # this actually results in a hard crash of the linker. + # + if '-arch ppc64' in cflags and '-arch ppc' in cflags: + win_kwds = {'extra_compile_args': carbon_extra_compile_args + ['-arch', 'i386', '-arch', 'ppc'], + 'extra_link_args': ['-framework', 'Carbon', '-arch', 'i386', '-arch', 'ppc'], + } + addMacExtension('_Win', win_kwds) + else: + addMacExtension('_Win', carbon_kwds) + + + # Application Services & QuickTime + app_kwds = {'extra_compile_args': carbon_extra_compile_args, + 'extra_link_args': ['-framework','ApplicationServices'], + } + addMacExtension('_Launch', app_kwds) + addMacExtension('_CG', app_kwds) + + exts.append( Extension('_Qt', ['qt/_Qtmodule.c'], + extra_compile_args=carbon_extra_compile_args, + extra_link_args=['-framework', 'QuickTime', + '-framework', 'Carbon']) ) + + + self.extensions.extend(exts) + + # Call the method for detecting whether _tkinter can be compiled + self.detect_tkinter(inc_dirs, lib_dirs) + + if '_tkinter' not in [e.name for e in self.extensions]: + missing.append('_tkinter') + + return missing + + def detect_tkinter_darwin(self, inc_dirs, lib_dirs): + # The _tkinter module, using frameworks. Since frameworks are quite + # different the UNIX search logic is not sharable. + from os.path import join, exists + framework_dirs = [ + '/Library/Frameworks', + '/System/Library/Frameworks/', + join(os.getenv('HOME'), '/Library/Frameworks') + ] + + sysroot = macosx_sdk_root() + + # Find the directory that contains the Tcl.framework and Tk.framework + # bundles. + # XXX distutils should support -F! + for F in framework_dirs: + # both Tcl.framework and Tk.framework should be present + + + for fw in 'Tcl', 'Tk': + if is_macosx_sdk_path(F): + if not exists(join(sysroot, F[1:], fw + '.framework')): + break + else: + if not exists(join(F, fw + '.framework')): + break + else: + # ok, F is now directory with both frameworks. Continure + # building + break + else: + # Tk and Tcl frameworks not found. Normal "unix" tkinter search + # will now resume. + return 0 + + # For 8.4a2, we must add -I options that point inside the Tcl and Tk + # frameworks. In later release we should hopefully be able to pass + # the -F option to gcc, which specifies a framework lookup path. 
+ # + include_dirs = [ + join(F, fw + '.framework', H) + for fw in 'Tcl', 'Tk' + for H in 'Headers', 'Versions/Current/PrivateHeaders' + ] + + # For 8.4a2, the X11 headers are not included. Rather than include a + # complicated search, this is a hard-coded path. It could bail out + # if X11 libs are not found... + include_dirs.append('/usr/X11R6/include') + frameworks = ['-framework', 'Tcl', '-framework', 'Tk'] + + # All existing framework builds of Tcl/Tk don't support 64-bit + # architectures. + cflags = sysconfig.get_config_vars('CFLAGS')[0] + archs = re.findall('-arch\s+(\w+)', cflags) + + if is_macosx_sdk_path(F): + fp = os.popen("file %s/Tk.framework/Tk | grep 'for architecture'"%(os.path.join(sysroot, F[1:]),)) + else: + fp = os.popen("file %s/Tk.framework/Tk | grep 'for architecture'"%(F,)) + + detected_archs = [] + for ln in fp: + a = ln.split()[-1] + if a in archs: + detected_archs.append(ln.split()[-1]) + fp.close() + + for a in detected_archs: + frameworks.append('-arch') + frameworks.append(a) + + ext = Extension('_tkinter', ['_tkinter.c', 'tkappinit.c'], + define_macros=[('WITH_APPINIT', 1)], + include_dirs = include_dirs, + libraries = [], + extra_compile_args = frameworks[2:], + extra_link_args = frameworks, + ) + self.extensions.append(ext) + return 1 + + + def detect_tkinter(self, inc_dirs, lib_dirs): + # The _tkinter module. + + # Rather than complicate the code below, detecting and building + # AquaTk is a separate method. Only one Tkinter will be built on + # Darwin - either AquaTk, if it is found, or X11 based Tk. + platform = self.get_platform() + if (platform == 'darwin' and + self.detect_tkinter_darwin(inc_dirs, lib_dirs)): + return + + # Assume we haven't found any of the libraries or include files + # The versions with dots are used on Unix, and the versions without + # dots on Windows, for detection by cygwin. + tcllib = tklib = tcl_includes = tk_includes = None + for version in ['8.6', '86', '8.5', '85', '8.4', '84', '8.3', '83', + '8.2', '82', '8.1', '81', '8.0', '80']: + tklib = self.compiler.find_library_file(lib_dirs, + 'tk' + version) + tcllib = self.compiler.find_library_file(lib_dirs, + 'tcl' + version) + if tklib and tcllib: + # Exit the loop when we've found the Tcl/Tk libraries + break + + # Now check for the header files + if tklib and tcllib: + # Check for the include files on Debian and {Free,Open}BSD, where + # they're put in /usr/include/{tcl,tk}X.Y + dotversion = version + if '.' not in dotversion and "bsd" in sys.platform.lower(): + # OpenBSD and FreeBSD use Tcl/Tk library names like libtcl83.a, + # but the include subdirs are named like .../include/tcl8.3. + dotversion = dotversion[:-1] + '.' + dotversion[-1] + tcl_include_sub = [] + tk_include_sub = [] + for dir in inc_dirs: + tcl_include_sub += [dir + os.sep + "tcl" + dotversion] + tk_include_sub += [dir + os.sep + "tk" + dotversion] + tk_include_sub += tcl_include_sub + tcl_includes = find_file('tcl.h', inc_dirs, tcl_include_sub) + tk_includes = find_file('tk.h', inc_dirs, tk_include_sub) + + if (tcllib is None or tklib is None or + tcl_includes is None or tk_includes is None): + self.announce("INFO: Can't locate Tcl/Tk libs and/or headers", 2) + return + + # OK... everything seems to be present for Tcl/Tk. 
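
The version loop above probes both dotted and undotted Tcl/Tk library names; only on the BSDs does the undotted form then need re-dotting to find the matching include directories. A small sketch of that pairing, with a shortened candidate list:

    def tcl_tk_probe_names(versions=('8.6', '86', '8.5', '85', '8.4', '84')):
        # Yield (library_suffix, dotted_version) pairs, e.g. ('85', '8.5').
        for v in versions:
            dotted = v if '.' in v else v[:-1] + '.' + v[-1]
            yield v, dotted

    for suffix, dotted in tcl_tk_probe_names():
        print("libs: tcl%s, tk%s   headers: tcl%s/, tk%s/"
              % (suffix, suffix, dotted, dotted))
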
+ + include_dirs = [] ; libs = [] ; defs = [] ; added_lib_dirs = [] + for dir in tcl_includes + tk_includes: + if dir not in include_dirs: + include_dirs.append(dir) + + # Check for various platform-specific directories + if platform == 'sunos5': + include_dirs.append('/usr/openwin/include') + added_lib_dirs.append('/usr/openwin/lib') + elif os.path.exists('/usr/X11R6/include'): + include_dirs.append('/usr/X11R6/include') + added_lib_dirs.append('/usr/X11R6/lib64') + added_lib_dirs.append('/usr/X11R6/lib') + elif os.path.exists('/usr/X11R5/include'): + include_dirs.append('/usr/X11R5/include') + added_lib_dirs.append('/usr/X11R5/lib') + else: + # Assume default location for X11 + include_dirs.append('/usr/X11/include') + added_lib_dirs.append('/usr/X11/lib') + + # If Cygwin, then verify that X is installed before proceeding + if platform == 'cygwin': + x11_inc = find_file('X11/Xlib.h', [], include_dirs) + if x11_inc is None: + return + + # Check for BLT extension + if self.compiler.find_library_file(lib_dirs + added_lib_dirs, + 'BLT8.0'): + defs.append( ('WITH_BLT', 1) ) + libs.append('BLT8.0') + elif self.compiler.find_library_file(lib_dirs + added_lib_dirs, + 'BLT'): + defs.append( ('WITH_BLT', 1) ) + libs.append('BLT') + + # Add the Tcl/Tk libraries + libs.append('tk'+ version) + libs.append('tcl'+ version) + + if platform in ['aix3', 'aix4']: + libs.append('ld') + + # Finally, link with the X11 libraries (not appropriate on cygwin) + if platform != "cygwin": + libs.append('X11') + + ext = Extension('_tkinter', ['_tkinter.c', 'tkappinit.c'], + define_macros=[('WITH_APPINIT', 1)] + defs, + include_dirs = include_dirs, + libraries = libs, + library_dirs = added_lib_dirs, + ) + self.extensions.append(ext) + +## # Uncomment these lines if you want to play with xxmodule.c +## ext = Extension('xx', ['xxmodule.c']) +## self.extensions.append(ext) + + # XXX handle these, but how to detect? + # *** Uncomment and edit for PIL (TkImaging) extension only: + # -DWITH_PIL -I../Extensions/Imaging/libImaging tkImaging.c \ + # *** Uncomment and edit for TOGL extension only: + # -DWITH_TOGL togl.c \ + # *** Uncomment these for TOGL extension only: + # -lGL -lGLU -lXext -lXmu \ + + def configure_ctypes_darwin(self, ext): + # Darwin (OS X) uses preconfigured files, in + # the Modules/_ctypes/libffi_osx directory. + srcdir = sysconfig.get_config_var('srcdir') + ffi_srcdir = os.path.abspath(os.path.join(srcdir, 'Modules', + '_ctypes', 'libffi_osx')) + sources = [os.path.join(ffi_srcdir, p) + for p in ['ffi.c', + 'x86/darwin64.S', + 'x86/x86-darwin.S', + 'x86/x86-ffi_darwin.c', + 'x86/x86-ffi64.c', + 'powerpc/ppc-darwin.S', + 'powerpc/ppc-darwin_closure.S', + 'powerpc/ppc-ffi_darwin.c', + 'powerpc/ppc64-darwin_closure.S', + ]] + + # Add .S (preprocessed assembly) to C compiler source extensions. 
+ self.compiler.src_extensions.append('.S') + + include_dirs = [os.path.join(ffi_srcdir, 'include'), + os.path.join(ffi_srcdir, 'powerpc')] + ext.include_dirs.extend(include_dirs) + ext.sources.extend(sources) + return True + + def configure_ctypes(self, ext): + if not self.use_system_libffi: + if sys.platform == 'darwin': + return self.configure_ctypes_darwin(ext) + + srcdir = sysconfig.get_config_var('srcdir') + ffi_builddir = os.path.join(self.build_temp, 'libffi') + ffi_srcdir = os.path.abspath(os.path.join(srcdir, 'Modules', + '_ctypes', 'libffi')) + ffi_configfile = os.path.join(ffi_builddir, 'fficonfig.py') + + from distutils.dep_util import newer_group + + config_sources = [os.path.join(ffi_srcdir, fname) + for fname in os.listdir(ffi_srcdir) + if os.path.isfile(os.path.join(ffi_srcdir, fname))] + if self.force or newer_group(config_sources, + ffi_configfile): + from distutils.dir_util import mkpath + mkpath(ffi_builddir) + config_args = [] + + # Pass empty CFLAGS because we'll just append the resulting + # CFLAGS to Python's; -g or -O2 is to be avoided. + cmd = "cd %s && env CFLAGS='' '%s/configure' %s" \ + % (ffi_builddir, ffi_srcdir, " ".join(config_args)) + + res = os.system(cmd) + if res or not os.path.exists(ffi_configfile): + print "Failed to configure _ctypes module" + return False + + fficonfig = {} + with open(ffi_configfile) as f: + exec f in fficonfig + + # Add .S (preprocessed assembly) to C compiler source extensions. + self.compiler.src_extensions.append('.S') + + include_dirs = [os.path.join(ffi_builddir, 'include'), + ffi_builddir, + os.path.join(ffi_srcdir, 'src')] + extra_compile_args = fficonfig['ffi_cflags'].split() + + ext.sources.extend(os.path.join(ffi_srcdir, f) for f in + fficonfig['ffi_sources']) + ext.include_dirs.extend(include_dirs) + ext.extra_compile_args.extend(extra_compile_args) + return True + + def detect_ctypes(self, inc_dirs, lib_dirs): + self.use_system_libffi = False + include_dirs = [] + extra_compile_args = [] + extra_link_args = [] + sources = ['_ctypes/_ctypes.c', + '_ctypes/callbacks.c', + '_ctypes/callproc.c', + '_ctypes/stgdict.c', + '_ctypes/cfield.c'] + depends = ['_ctypes/ctypes.h'] + + if sys.platform == 'darwin': + sources.append('_ctypes/malloc_closure.c') + sources.append('_ctypes/darwin/dlfcn_simple.c') + extra_compile_args.append('-DMACOSX') + include_dirs.append('_ctypes/darwin') +# XXX Is this still needed? +## extra_link_args.extend(['-read_only_relocs', 'warning']) + + elif sys.platform == 'sunos5': + # XXX This shouldn't be necessary; it appears that some + # of the assembler code is non-PIC (i.e. it has relocations + # when it shouldn't. The proper fix would be to rewrite + # the assembler code to be PIC. + # This only works with GCC; the Sun compiler likely refuses + # this option. If you want to compile ctypes with the Sun + # compiler, please research a proper solution, instead of + # finding some -z option for the Sun compiler. 
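
configure_ctypes above loads the generated fficonfig.py by exec-ing it into a dictionary. As a standalone sketch (the path is whatever the libffi configure step wrote, and the file is assumed to be trusted build output):

    def load_generated_config(path):
        # Execute a generated config script and return its top-level names.
        namespace = {}
        with open(path) as fp:
            exec(fp.read(), namespace)
        namespace.pop('__builtins__', None)
        return namespace

    # cfg = load_generated_config('build/temp/libffi/fficonfig.py')
    # cfg['ffi_sources'], cfg['ffi_cflags'], ...
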
+ extra_link_args.append('-mimpure-text') + + elif sys.platform.startswith('hp-ux'): + extra_link_args.append('-fPIC') + + ext = Extension('_ctypes', + include_dirs=include_dirs, + extra_compile_args=extra_compile_args, + extra_link_args=extra_link_args, + libraries=[], + sources=sources, + depends=depends) + ext_test = Extension('_ctypes_test', + sources=['_ctypes/_ctypes_test.c']) + self.extensions.extend([ext, ext_test]) + + if not '--with-system-ffi' in sysconfig.get_config_var("CONFIG_ARGS"): + return + + if sys.platform == 'darwin': + # OS X 10.5 comes with libffi.dylib; the include files are + # in /usr/include/ffi + inc_dirs.append('/usr/include/ffi') + + ffi_inc = [sysconfig.get_config_var("LIBFFI_INCLUDEDIR")] + if not ffi_inc or ffi_inc[0] == '': + ffi_inc = find_file('ffi.h', [], inc_dirs) + if ffi_inc is not None: + ffi_h = ffi_inc[0] + '/ffi.h' + fp = open(ffi_h) + while 1: + line = fp.readline() + if not line: + ffi_inc = None + break + if line.startswith('#define LIBFFI_H'): + break + ffi_lib = None + if ffi_inc is not None: + for lib_name in ('ffi_convenience', 'ffi_pic', 'ffi'): + if (self.compiler.find_library_file(lib_dirs, lib_name)): + ffi_lib = lib_name + break + + if ffi_inc and ffi_lib: + ext.include_dirs.extend(ffi_inc) + ext.libraries.append(ffi_lib) + self.use_system_libffi = True + + +class PyBuildInstall(install): + # Suppress the warning about installation into the lib_dynload + # directory, which is not in sys.path when running Python during + # installation: + def initialize_options (self): + install.initialize_options(self) + self.warn_dir=0 + +class PyBuildInstallLib(install_lib): + # Do exactly what install_lib does but make sure correct access modes get + # set on installed directories and files. All installed files with get + # mode 644 unless they are a shared library in which case they will get + # mode 755. All installed directories will get mode 755. + + so_ext = sysconfig.get_config_var("SO") + + def install(self): + outfiles = install_lib.install(self) + self.set_file_modes(outfiles, 0644, 0755) + self.set_dir_modes(self.install_dir, 0755) + return outfiles + + def set_file_modes(self, files, defaultMode, sharedLibMode): + if not self.is_chmod_supported(): return + if not files: return + + for filename in files: + if os.path.islink(filename): continue + mode = defaultMode + if filename.endswith(self.so_ext): mode = sharedLibMode + log.info("changing mode of %s to %o", filename, mode) + if not self.dry_run: os.chmod(filename, mode) + + def set_dir_modes(self, dirname, mode): + if not self.is_chmod_supported(): return + os.path.walk(dirname, self.set_dir_modes_visitor, mode) + + def set_dir_modes_visitor(self, mode, dirname, names): + if os.path.islink(dirname): return + log.info("changing mode of %s to %o", dirname, mode) + if not self.dry_run: os.chmod(dirname, mode) + + def is_chmod_supported(self): + return hasattr(os, 'chmod') + +SUMMARY = """ +Python is an interpreted, interactive, object-oriented programming +language. It is often compared to Tcl, Perl, Scheme or Java. + +Python combines remarkable power with very clear syntax. It has +modules, classes, exceptions, very high level dynamic data types, and +dynamic typing. There are interfaces to many system calls and +libraries, as well as to various windowing systems (X11, Motif, Tk, +Mac, MFC). New built-in modules are easily written in C or C++. Python +is also usable as an extension language for applications that need a +programmable interface. 
+ +The Python implementation is portable: it runs on many brands of UNIX, +on Windows, DOS, OS/2, Mac, Amiga... If your favorite system isn't +listed here, it may still be supported, if there's a C compiler for +it. Ask around on comp.lang.python -- or just try compiling Python +yourself. +""" + +CLASSIFIERS = """ +Development Status :: 6 - Mature +License :: OSI Approved :: Python Software Foundation License +Natural Language :: English +Programming Language :: C +Programming Language :: Python +Topic :: Software Development +""" + +def main(): + # turn off warnings when deprecated modules are imported + import warnings + warnings.filterwarnings("ignore",category=DeprecationWarning) + setup(# PyPI Metadata (PEP 301) + name = "Python", + version = sys.version.split()[0], + url = "http://www.python.org/%s" % sys.version[:3], + maintainer = "Guido van Rossum and the Python community", + maintainer_email = "python-dev@python.org", + description = "A high-level object-oriented programming language", + long_description = SUMMARY.strip(), + license = "PSF license", + classifiers = filter(None, CLASSIFIERS.split("\n")), + platforms = ["Many"], + + # Build info + cmdclass = {'build_ext':PyBuildExt, 'install':PyBuildInstall, + 'install_lib':PyBuildInstallLib}, + # The struct module is defined here, because build_ext won't be + # called unless there's at least one extension module defined. + ext_modules=[Extension('_struct', ['_struct.c'])], + + # Scripts to install + scripts = ['Tools/scripts/pydoc', 'Tools/scripts/idle', + 'Tools/scripts/2to3', + 'Lib/smtpd.py'] + ) + +# --install-platlib +if __name__ == '__main__': + main() -- cgit v1.2.3