
* verify checksum of downloaded archives
* add PYTHON_BUILD_MIRROR_URL to use a mirror site (we don't have a CloudFront setup as of now)
* rbenv 0.4.x style help messages

#!/usr/bin/env bash

PYTHON_BUILD_VERSION="20121023"

set -E
exec 3<&2 # preserve original stderr at fd 3

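# `python-build --lib` prints these function definitions and exits, so another
# script can reuse the parser. parse_options collects long options, bundled short
# flags, and positional arguments into the OPTIONS and ARGUMENTS arrays.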
lib() {
  parse_options() {
    OPTIONS=()
    ARGUMENTS=()
    local arg option index

    for arg in "$@"; do
      if [ "${arg:0:1}" = "-" ]; then
        if [ "${arg:1:1}" = "-" ]; then
          OPTIONS[${#OPTIONS[*]}]="${arg:2}"
        else
          index=1
          while option="${arg:$index:1}"; do
            [ -n "$option" ] || break
            OPTIONS[${#OPTIONS[*]}]="$option"
            index=$(($index+1))
          done
        fi
      else
        ARGUMENTS[${#ARGUMENTS[*]}]="$arg"
      fi
    done
  }

  if [ "$1" == "--$FUNCNAME" ]; then
    declare -f "$FUNCNAME"
    echo "$FUNCNAME \"\$1\";"
    exit
  fi
}
lib "$1"

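# Resolve the real directory of this script (following symlinks) so that
# PYTHON_BUILD_ROOT can be derived from wherever python-build is installed.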
resolve_link() {
  $(type -p greadlink readlink | head -1) "$1"
}

abs_dirname() {
  local cwd="$(pwd)"
  local path="$1"

  while [ -n "$path" ]; do
    cd "${path%/*}"
    local name="${path##*/}"
    path="$(resolve_link "$name" || true)"
  done

  pwd
  cd "$cwd"
}

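# Error handler installed via `trap build_failed ERR`: reports the failure on the
# original stderr (fd 3), points at the working tree and the build log, then exits.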
build_failed() {
  { echo
    echo "BUILD FAILED"
    echo

    if ! rmdir "${BUILD_PATH}" 2>/dev/null; then
      echo "Inspect or clean up the working tree at ${BUILD_PATH}"

      if file_is_not_empty "$LOG_PATH"; then
        echo "Results logged to ${LOG_PATH}"
        echo
        echo "Last 10 log lines:"
        tail -n 10 "$LOG_PATH"
      fi
    fi
  } >&3
  exit 1
}

file_is_not_empty() {
  local filename="$1"
  local line_count="$(wc -l "$filename" 2>/dev/null || true)"

  if [ -n "$line_count" ]; then
    words=( $line_count )
    [ "${words[0]}" -gt 0 ]
  else
    return 1
  fi
}

install_package() {
  install_package_using "tarball" 1 $*
}

install_git() {
  install_package_using "git" 2 $*
}

install_jar() {
  install_package_using "jar" 1 $*
}

install_package_using() {
  local package_type="$1"
  local package_type_nargs="$2"
  local package_name="$3"
  shift 3

  pushd "$BUILD_PATH" >&4
  "fetch_${package_type}" "$package_name" $*
  shift $(($package_type_nargs))
  make_package "$package_name" $*
  popd >&4

  { echo "Installed ${package_name} to ${PREFIX_PATH}"
    echo
  } >&2
}

make_package() {
  local package_name="$1"
  shift

  pushd "$package_name" >&4
  before_install_package "$package_name"
  build_package "$package_name" $*
  after_install_package "$package_name"
  fix_directory_permissions
  popd >&4
}

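# Compute an MD5 digest of stdin using whichever tool is available
# (BSD md5, openssl, or GNU md5sum); fails if none of them is present.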
compute_md5() {
  if type md5 &>/dev/null; then
    md5 -q
  elif type openssl &>/dev/null; then
    local output="$(openssl md5)"
    echo "${output##* }"
  elif type md5sum &>/dev/null; then
    local output="$(md5sum -b)"
    echo "${output% *}"
  else
    return 1
  fi
}

verify_checksum() {
  # If there's no MD5 support, return success
  [ -n "$HAS_MD5_SUPPORT" ] || return 0

  # If the specified filename doesn't exist, return success
  local filename="$1"
  [ -e "$filename" ] || return 0

  # If there's no expected checksum, return success
  local expected_checksum="$2"
  [ -n "$expected_checksum" ] || return 0

  # If the computed checksum is empty, return failure
  local computed_checksum="$(compute_md5 < "$filename")"
  [ -n "$computed_checksum" ] || return 1

  if [ "$expected_checksum" != "$computed_checksum" ]; then
    { echo
      echo "checksum mismatch: ${filename} (file is corrupt)"
      echo "expected $expected_checksum, got $computed_checksum"
      echo
    } >&4
    return 1
  fi
}

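# Thin HTTP wrapper: `http get URL` / `http head URL` dispatch to curl if it is
# installed, otherwise to wget, and abort the build if neither is available.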
http() {
  local method="$1"
  local url="$2"
  [ -n "$url" ] || return 1

  if type curl &>/dev/null; then
    "http_${method}_curl" "$url"
  elif type wget &>/dev/null; then
    "http_${method}_wget" "$url"
  else
    echo "error: please install \`curl\` or \`wget\` and try again" >&2
    exit 1
  fi
}

http_head_curl() {
  curl -sILf "$1" >&4 2>&1
}

http_get_curl() {
  curl -sSLf "$1"
}

http_head_wget() {
  wget -q --spider "$1" >&4 2>&1
}

http_get_wget() {
  wget -nv -O- "$1"
}

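# Definitions may append "#<md5>" to a package URL; the fragment is split off as
# the expected checksum and, when PYTHON_BUILD_MIRROR_URL is set, also used as
# the path of the mirror copy ("$PYTHON_BUILD_MIRROR_URL/<md5>").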
fetch_tarball() {
  local package_name="$1"
  local package_url="$2"
  local mirror_url
  local checksum

  if [ "$package_url" != "${package_url/\#}" ]; then
    checksum="${package_url#*#}"
    package_url="${package_url%%#*}"

    if [ -n "$PYTHON_BUILD_MIRROR_URL" ]; then
      mirror_url="${PYTHON_BUILD_MIRROR_URL}/$checksum"
    fi
  fi

  local package_filename="${package_name}.tar" # later, tar can read the compression algorithm from the file
  symlink_tarball_from_cache "$package_filename" "$checksum" || {
    echo "Downloading ${package_filename}..." >&2
    { http head "$mirror_url" &&
      download_tarball "$mirror_url" "$package_filename" "$checksum"
    } ||
    download_tarball "$package_url" "$package_filename" "$checksum"
  }

  { tar xvf "$package_filename"
    rm -f "$package_filename"
  } >&4 2>&1
}

symlink_tarball_from_cache() {
  [ -n "$PYTHON_BUILD_CACHE_PATH" ] || return 1

  local package_filename="$1"
  local cached_package_filename="${PYTHON_BUILD_CACHE_PATH}/$package_filename"
  local checksum="$2"

  [ -e "$cached_package_filename" ] || return 1
  verify_checksum "$cached_package_filename" "$checksum" >&4 2>&1 || return 1
  ln -s "$cached_package_filename" "$package_filename" >&4 2>&1 || return 1
}

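# Download a tarball, verify its checksum, and (if PYTHON_BUILD_CACHE_PATH is set)
# move it into the cache directory, leaving a symlink in the build directory.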
download_tarball() {
  local package_url="$1"
  [ -n "$package_url" ] || return 1

  local package_filename="$2"
  local checksum="$3"

  echo "-> $package_url" >&2

  { http get "$package_url" > "$package_filename"
    verify_checksum "$package_filename" "$checksum"
  } >&4 2>&1 || return 1

  if [ -n "$PYTHON_BUILD_CACHE_PATH" ]; then
    local cached_package_filename="${PYTHON_BUILD_CACHE_PATH}/$package_filename"
    { mv "$package_filename" "$cached_package_filename"
      ln -s "$cached_package_filename" "$package_filename"
    } >&4 2>&1 || return 1
  fi
}

fetch_git() {
  local package_name="$1"
  local git_url="$2"
  local git_ref="$3"

  echo "Cloning ${git_url}..." >&2

  if type git &>/dev/null; then
    git clone --depth 1 --branch "$git_ref" "$git_url" "${package_name}" >&4 2>&1
  else
    echo "error: please install \`git\` and try again" >&2
    exit 1
  fi
}

fetch_svn() {
  local package_name="$1"
  local svn_url="$2"
  local svn_rev="$3"

  echo "Checking out ${svn_url}..." >&2

  if type svn &>/dev/null; then
    svn co -r "$svn_rev" "$svn_url" "${package_name}" >&4 2>&1
  else
    echo "error: please install \`svn\` and try again" >&2
    exit 1
  fi
}

fetch_jar() {
  local package_name="$1"
  local package_url="$2"

  echo "Downloading ${package_url}..." >&2
  # `fetch_url` is not defined anywhere in this script; download the installer
  # jar through the `http` wrapper, as the other fetchers do.
  { http get "$package_url" > "${package_name}.jar"
    $JAVA -jar "${package_name}.jar" -s -d "${package_name}"
  } >&4 2>&1
}

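# Run the build commands named by the definition file (default: "standard") and,
# if no `python` executable was produced, symlink the versioned interpreter
# (e.g. python2.7) to "python" inside the prefix.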
build_package() {
  local package_name="$1"
  shift

  if [ "$#" -eq 0 ]; then
    local commands="standard"
  else
    local commands="$*"
  fi

  echo "Installing ${package_name}..." >&2

  for command in $commands; do
    "build_package_${command}"
  done

  if [ ! -f "$PYTHON_BIN" ]; then
    for python in ${PREFIX_PATH}/bin/python*; do
      if basename "$python" | grep '^python[0-9][0-9]*\.[0-9][0-9]*$' >/dev/null; then
        {
          cd "${PREFIX_PATH}/bin"
          ln -fs "$(basename "$python")" python
        }
        break
      fi
    done
  fi
}

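# Standard configure/make/make install build. MAKEOPTS (or MAKE_OPTS) overrides
# the make flags; otherwise the build defaults to "-j 2".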
build_package_standard() {
  local package_name="$1"

  if [ "${MAKEOPTS+defined}" ]; then
    MAKE_OPTS="$MAKEOPTS"
  elif [ -z "${MAKE_OPTS+defined}" ]; then
    MAKE_OPTS="-j 2"
  fi

  { ./configure --prefix="$PREFIX_PATH" $CONFIGURE_OPTS
    make $MAKE_OPTS
    make install
  } >&4 2>&1
}

build_package_autoconf() {
  { autoconf
  } >&4 2>&1
}

build_package_python() {
  local package_name="$1"

  {
    "$PYTHON_BIN" setup.py install
  } >&4 2>&1
}

build_package_jython() {
  {
    build_package_copy
    cd "${PREFIX_PATH}/bin"
    ln -fs jython python
  }
}

build_package_pypy() {
  {
    mkdir -p "$PREFIX_PATH"
    cp -R . "$PREFIX_PATH"
    cd "${PREFIX_PATH}/bin"
    ln -fs pypy python
  }
}

build_package_copy() {
  mkdir -p "$PREFIX_PATH"
  cp -R . "$PREFIX_PATH"
}

before_install_package() {
  local stub=1
}

after_install_package() {
  local stub=1
}

fix_directory_permissions() {
  # Ensure installed directories are not world-writable to avoid Bundler warnings
  find "$PREFIX_PATH" -type d \( -perm -020 -o -perm -002 \) -exec chmod go-w {} \;
}

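# Find a usable `gcc` (checking $CC, `gcc` on PATH, then any versioned gcc-*
# binaries on PATH) and export it as CC; the error text explains why the stock
# `gcc` on OS X with Xcode >= 4.2 (a symlink to llvm-gcc) does not qualify.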
require_gcc() {
  local gcc="$(locate_gcc || true)"

  if [ -z "$gcc" ]; then
    local esc=$'\033'
    { echo
      echo "${esc}[1mERROR${esc}[0m: This package must be compiled with GCC, but python-build couldn't"
      echo "find a suitable \`gcc\` executable on your system. Please install GCC"
      echo "and try again."
      echo

      if [ "$(uname -s)" = "Darwin" ]; then
        echo "${esc}[1mDETAILS${esc}[0m: Apple no longer includes the official GCC compiler with Xcode"
        echo "as of version 4.2. Instead, the \`gcc\` executable is a symlink to"
        echo "\`llvm-gcc\`, a modified version of GCC which outputs LLVM bytecode."
      fi
    } >&3
    return 1
  fi

  export CC="$gcc"
}

locate_gcc() {
  local gcc gccs
  IFS=: gccs=($(gccs_in_path))

  verify_gcc "$CC" ||
  verify_gcc "$(command -v gcc || true)" || {
    for gcc in "${gccs[@]}"; do
      verify_gcc "$gcc" && break || true
    done
  }

  return 1
}

gccs_in_path() {
  local gcc path paths
  local gccs=()
  IFS=: paths=($PATH)

  shopt -s nullglob
  for path in "${paths[@]}"; do
    for gcc in "$path"/gcc-*; do
      gccs["${#gccs[@]}"]="$gcc"
    done
  done
  shopt -u nullglob

  printf :%s "${gccs[@]}"
}

verify_gcc() {
  local gcc="$1"
  if [ -z "$gcc" ]; then
    return 1
  fi

  local version="$("$gcc" --version || true)"
  if [ -z "$version" ]; then
    return 1
  fi

  echo "$gcc"
}

require_java() {
  local java="$(locate_java || true)"

  if [ -z "$java" ]; then
    local esc=$'\033'
    { echo
      echo "${esc}[1mERROR${esc}[0m: This package must be installed with java, but python-build couldn't"
      echo "find a suitable \`java\` executable on your system. Please install Java"
      echo "and try again."
      echo
    } >&3
    return 1
  fi

  export JAVA="$java"
}

locate_java() {
  local java javas
  IFS=: javas=($(javas_in_path))

  verify_java "$JAVA" ||
  verify_java "$(command -v java || true)" || {
    for java in "${javas[@]}"; do
      verify_java "$java" && break || true
    done
  }

  return 1
}

javas_in_path() {
  local java path paths
  local javas=()
  IFS=: paths=($PATH)

  shopt -s nullglob
  for path in "${paths[@]}"; do
    local java="$path"/java
    if [ -x "$java" ]; then
      javas["${#javas[@]}"]="$java"
    fi
  done
  shopt -u nullglob

  printf :%s "${javas[@]}"
}

verify_java() {
  local java="$1"
  if [ -z "$java" ]; then
    return 1
  fi

  if [ ! -x "$java" ]; then
    return 1
  fi

  echo "$java"
}

version() {
  echo "python-build ${PYTHON_BUILD_VERSION}"
}

usage() {
  { version
    echo "usage: python-build [-k|--keep] [-v|--verbose] definition prefix"
    echo "       python-build --definitions"
  } >&2

  if [ -z "$1" ]; then
    exit 1
  fi
}

list_definitions() {
  { for definition in "${PYTHON_BUILD_ROOT}/share/python-build/"*; do
      test -f "${definition}" && echo "${definition##*/}"
    done
  } | sort
}

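# Main program: parse command-line options, resolve the definition file and the
# install prefix, then run the build with all output captured in a log file.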
unset VERBOSE
unset KEEP_BUILD_PATH
PYTHON_BUILD_ROOT="$(abs_dirname "$0")/.."

parse_options "$@"

for option in "${OPTIONS[@]}"; do
  case "$option" in
  "h" | "help" )
    usage without_exiting
    { echo
      echo "  -k/--keep        Do not remove source tree after installation"
      echo "  -v/--verbose     Verbose mode: print compilation status to stdout"
      echo "  --definitions    List all built-in definitions"
      echo
    } >&2
    exit 0
    ;;
  "definitions" )
    list_definitions
    exit 0
    ;;
  "k" | "keep" )
    KEEP_BUILD_PATH=true
    ;;
  "v" | "verbose" )
    VERBOSE=true
    ;;
  "version" )
    version
    exit 0
    ;;
  esac
done

DEFINITION_PATH="${ARGUMENTS[0]}"
if [ -z "$DEFINITION_PATH" ]; then
  usage
elif [ ! -e "$DEFINITION_PATH" ]; then
  BUILTIN_DEFINITION_PATH="${PYTHON_BUILD_ROOT}/share/python-build/${DEFINITION_PATH}"
  if [ -e "$BUILTIN_DEFINITION_PATH" ]; then
    DEFINITION_PATH="$BUILTIN_DEFINITION_PATH"
  else
    echo "python-build: definition not found: ${DEFINITION_PATH}" >&2
    exit 1
  fi
fi

PREFIX_PATH="${ARGUMENTS[1]}"
if [ -z "$PREFIX_PATH" ]; then
  usage
fi

if [ -z "$TMPDIR" ]; then
  TMP="/tmp"
else
  TMP="${TMPDIR%/}"
fi

if [ -z "$MAKE" ]; then
  MAKE="make"
fi

if [ -n "$PYTHON_BUILD_CACHE_PATH" ] && [ -d "$PYTHON_BUILD_CACHE_PATH" ]; then
  PYTHON_BUILD_CACHE_PATH="${PYTHON_BUILD_CACHE_PATH%/}"
else
  unset PYTHON_BUILD_CACHE_PATH
fi

if [ -z "$PYTHON_BUILD_MIRROR_URL" ]; then
  PYTHON_BUILD_MIRROR_URL="" # FIXME: set up a mirror site
else
  PYTHON_BUILD_MIRROR_URL="${PYTHON_BUILD_MIRROR_URL%/}"
fi

if [ -n "$PYTHON_BUILD_SKIP_MIRROR" ]; then
  unset PYTHON_BUILD_MIRROR_URL
fi

# Without MD5 support we can neither verify downloads nor address mirror copies
# by checksum, so disable the mirror as well.
if echo test | compute_md5 >/dev/null; then
  HAS_MD5_SUPPORT=1
else
  unset HAS_MD5_SUPPORT
  unset PYTHON_BUILD_MIRROR_URL
fi

SEED="$(date "+%Y%m%d%H%M%S").$$"
LOG_PATH="${TMP}/python-build.${SEED}.log"
PYTHON_BIN="${PREFIX_PATH}/bin/python"
CWD="$(pwd)"

if [ -z "$PYTHON_BUILD_BUILD_PATH" ]; then
  BUILD_PATH="${TMP}/python-build.${SEED}"
else
  BUILD_PATH="$PYTHON_BUILD_BUILD_PATH"
fi

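# fd 4 collects all build output; in verbose mode the log is tailed to the
# terminal in the background for the lifetime of the script.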
exec 4<> "$LOG_PATH" # open the log file at fd 4
if [ -n "$VERBOSE" ]; then
  tail -f "$LOG_PATH" &
  trap "kill 0" SIGINT SIGTERM EXIT
fi

export LDFLAGS="-L'${PREFIX_PATH}/lib' ${LDFLAGS}"
export CPPFLAGS="-I'${PREFIX_PATH}/include' ${CPPFLAGS}"

unset PYTHONHOME
unset PYTHONPATH

trap build_failed ERR
mkdir -p "$BUILD_PATH"
source "$DEFINITION_PATH"
[ -z "${KEEP_BUILD_PATH}" ] && rm -fr "$BUILD_PATH"
trap - ERR