first commit based on psycopg2 2.9 version

This commit is contained in:
lishifu_db
2021-07-05 21:34:17 +08:00
parent d126b6ec53
commit 3553ed0e30
178 changed files with 51253 additions and 0 deletions

View File

@ -0,0 +1,22 @@
This file is a simple placeholder for forcing the appveyor build cache
to invalidate itself since appveyor.yml changes more frequently than
the cache needs updating. Note, the versions list here can be
different than what is indicated in appveyor.yml.
To invalidate the cache, update this file and check it into git.
Currently used modules built in the cache:
OpenSSL
Version: 1.1.1k
PostgreSQL
Version: 13.3
NOTE: to zap the cache manually you can also use:
curl -X DELETE -H "Authorization: Bearer $APPVEYOR_TOKEN" -H "Content-Type: application/json" https://ci.appveyor.com/api/projects/psycopg/psycopg2/buildcache
with the token from https://ci.appveyor.com/api-token

848
scripts/build/appveyor.py Executable file
View File

@ -0,0 +1,848 @@
#!/usr/bin/env python3
"""
Build steps for the windows binary packages.

The script is designed to be called by appveyor. Subcommands map the steps in
'appveyor.yml'.
"""

import re
import os
import sys
import json
import shutil
import logging
import subprocess as sp
from glob import glob
from pathlib import Path
from zipfile import ZipFile
from argparse import ArgumentParser
from tempfile import NamedTemporaryFile
from urllib.request import urlopen

# Global Options instance, populated in main() from the parsed command line
# and used by every step function.
opt = None
# Prefix used to discover the step_* functions exposed as subcommands.
STEP_PREFIX = 'step_'

logger = logging.getLogger()
logging.basicConfig(
    level=logging.INFO, format='%(asctime)s %(levelname)s %(message)s'
)
def main():
    """Entry point: parse the command line and run the requested step."""
    global opt
    opt = parse_cmdline()
    logger.setLevel(opt.loglevel)

    # Dispatch to the step_* function selected on the command line.
    step = globals()[STEP_PREFIX + opt.step]
    step()
def setup_build_env():
    """
    Set the environment variables according to the build environment
    """
    setenv('VS_VER', opt.vs_ver)

    # Prepend the target Python, its scripts dir, Perl, the git tools and
    # the built OpenSSL binaries to the PATH.
    path = [
        str(opt.py_dir),
        str(opt.py_dir / 'Scripts'),
        r'C:\Strawberry\Perl\bin',
        r'C:\Program Files\Git\mingw64\bin',
        str(opt.ssl_build_dir / 'bin'),
        os.environ['PATH'],
    ]
    setenv('PATH', os.pathsep.join(path))

    logger.info("Configuring compiler")
    # Import the MSVC environment (vcvarsall.bat) into this process.
    bat_call([opt.vc_dir / "vcvarsall.bat", 'x86' if opt.arch_32 else 'amd64'])
def python_info():
    """Log version and architecture of the target Python."""
    logger.info("Python Information")
    run_python(['--version'], stderr=sp.STDOUT)
    bits_script = "import sys; print('64bit: %s' % (sys.maxsize > 2**32))"
    run_python(['-c', bits_script])
def step_install():
    """'install' step: log the Python in use, prepare SDK and Postgres."""
    python_info()
    configure_sdk()
    configure_postgres()

    if opt.is_wheel:
        install_wheel_support()
def install_wheel_support():
    """
    Install an up-to-date pip wheel package to build wheels.
    """
    for cmdline in ("-m pip install --upgrade pip", "-m pip install wheel"):
        run_python(cmdline.split())
def configure_sdk():
    """Work around rc.exe lookup problems when building on 64 bit."""
    # The program rc.exe on 64bit with some versions look in the wrong path
    # location when building postgresql. This cheats by copying the x64 bit
    # files to that location.
    if opt.arch_64:
        for fn in glob(
            r'C:\Program Files\Microsoft SDKs\Windows\v7.0\Bin\x64\rc*'
        ):
            copy_file(
                fn, r"C:\Program Files (x86)\Microsoft SDKs\Windows\v7.0A\Bin"
            )
def configure_postgres():
    """
    Set up PostgreSQL config before the service starts.

    Append test-friendly settings to postgresql.conf and create the
    self-signed certificates needed to test ssl connections.
    """
    logger.info("Configuring Postgres")
    with (opt.pg_data_dir / 'postgresql.conf').open('a') as f:
        # allow > 1 prepared transactions for test cases
        print("max_prepared_transactions = 10", file=f)
        print("ssl = on", file=f)

    # Create openssl certificate to allow ssl connection
    cwd = os.getcwd()
    os.chdir(opt.pg_data_dir)
    try:
        run_openssl(
            'req -new -x509 -days 365 -nodes -text '
            '-out server.crt -keyout server.key -subj /CN=initd.org'.split()
        )
        run_openssl(
            'req -new -nodes -text -out root.csr -keyout root.key '
            '-subj /CN=initd.org'.split()
        )
        run_openssl(
            'x509 -req -in root.csr -text -days 3650 -extensions v3_ca '
            '-signkey root.key -out root.crt'.split()
        )
        run_openssl(
            'req -new -nodes -text -out server.csr -keyout server.key '
            '-subj /CN=initd.org'.split()
        )
        run_openssl(
            'x509 -req -in server.csr -text -days 365 -CA root.crt '
            '-CAkey root.key -CAcreateserial -out server.crt'.split()
        )
    finally:
        # FIX: restore the previous working dir even if a command fails,
        # so a failure here doesn't leave the process in the data dir.
        os.chdir(cwd)
def run_openssl(args):
    """Run the appveyor-installed openssl with some args."""
    # https://www.appveyor.com/docs/windows-images-software/
    exe = Path(r"C:\OpenSSL-v111-Win64") / 'bin' / 'openssl'
    return run_command([exe, *args])
def step_build_script():
    """'build_script' step: build the dependencies, psycopg, the packages."""
    setup_build_env()
    build_openssl()
    build_libpq()
    build_psycopg()

    if opt.is_wheel:
        build_binary_packages()
def build_openssl():
    """Build the OpenSSL libraries, unless found in the build cache."""
    top = opt.ssl_build_dir
    if (top / 'lib' / 'libssl.lib').exists():
        # Cached from a previous build: nothing to do.
        return

    logger.info("Building OpenSSL")

    # Setup directories for building OpenSSL libraries
    ensure_dir(top / 'include' / 'openssl')
    ensure_dir(top / 'lib')

    # Setup OpenSSL Environment Variables based on processor architecture
    if opt.arch_32:
        target = 'VC-WIN32'
        setenv('VCVARS_PLATFORM', 'x86')
    else:
        target = 'VC-WIN64A'
        setenv('VCVARS_PLATFORM', 'amd64')
        setenv('CPU', 'AMD64')

    ver = os.environ['OPENSSL_VERSION']

    # Download OpenSSL source
    zipname = f'OpenSSL_{ver}.zip'
    zipfile = opt.cache_dir / zipname
    if not zipfile.exists():
        download(
            f"https://github.com/openssl/openssl/archive/{zipname}", zipfile
        )

    with ZipFile(zipfile) as z:
        z.extractall(path=opt.build_dir)

    sslbuild = opt.build_dir / f"openssl-OpenSSL_{ver}"
    os.chdir(sslbuild)
    run_command(
        ['perl', 'Configure', target, 'no-asm']
        + ['no-shared', 'no-zlib', f'--prefix={top}', f'--openssldir={top}']
    )

    run_command("nmake build_libs install_sw".split())

    # Sanity check: the library the cache test above looks for must exist.
    assert (top / 'lib' / 'libssl.lib').exists()

    os.chdir(opt.clone_dir)
    shutil.rmtree(sslbuild)
def build_libpq():
    """Build the libpq library and pg_config, unless found in the cache."""
    top = opt.pg_build_dir
    if (top / 'lib' / 'libpq.lib').exists():
        # Cached from a previous build: nothing to do.
        return

    logger.info("Building libpq")

    # Setup directories for building PostgreSQL libraries
    ensure_dir(top / 'include')
    ensure_dir(top / 'lib')
    ensure_dir(top / 'bin')

    ver = os.environ['POSTGRES_VERSION']

    # Download PostgreSQL source
    zipname = f'postgres-REL_{ver}.zip'
    zipfile = opt.cache_dir / zipname
    if not zipfile.exists():
        download(
            f"https://github.com/postgres/postgres/archive/REL_{ver}.zip",
            zipfile,
        )

    with ZipFile(zipfile) as z:
        z.extractall(path=opt.build_dir)

    pgbuild = opt.build_dir / f"postgres-REL_{ver}"
    os.chdir(pgbuild)

    # Setup build config file (config.pl)
    os.chdir("src/tools/msvc")
    with open("config.pl", 'w') as f:
        print(
            """\
$config->{ldap} = 0;
$config->{openssl} = "%s";
1;
"""
            % str(opt.ssl_build_dir).replace('\\', '\\\\'),
            file=f,
        )

    # Hack the Mkvcbuild.pm file so we build the lib version of libpq
    file_replace('Mkvcbuild.pm', "'libpq', 'dll'", "'libpq', 'lib'")

    # Build libpgport, libpgcommon, libpq
    run_command([which("build"), "libpgport"])
    run_command([which("build"), "libpgcommon"])
    run_command([which("build"), "libpq"])

    # Install includes
    # (gram.h is not needed to build psycopg: an empty file satisfies
    # the include machinery)
    with (pgbuild / "src/backend/parser/gram.h").open("w") as f:
        print("", file=f)

    # Copy over built libraries
    file_replace("Install.pm", "qw(Install)", "qw(Install CopyIncludeFiles)")
    run_command(
        ["perl", "-MInstall=CopyIncludeFiles", "-e"]
        + [f"chdir('../../..'); CopyIncludeFiles('{top}')"]
    )
    for lib in ('libpgport', 'libpgcommon', 'libpq'):
        copy_file(pgbuild / f'Release/{lib}/{lib}.lib', top / 'lib')

    # Prepare local include directory for building from
    for dir in ('win32', 'win32_msvc'):
        merge_dir(pgbuild / f"src/include/port/{dir}", pgbuild / "src/include")

    # Build pg_config in place
    os.chdir(pgbuild / 'src/bin/pg_config')
    run_command(
        ['cl', 'pg_config.c', '/MT', '/nologo', fr'/I{pgbuild}\src\include']
        + ['/link', fr'/LIBPATH:{top}\lib']
        + ['libpgcommon.lib', 'libpgport.lib', 'advapi32.lib']
        + ['/NODEFAULTLIB:libcmt.lib']
        + [fr'/OUT:{top}\bin\pg_config.exe']
    )

    # Sanity check: both artifacts the rest of the build relies upon.
    assert (top / 'lib' / 'libpq.lib').exists()
    assert (top / 'bin' / 'pg_config.exe').exists()

    os.chdir(opt.clone_dir)
    shutil.rmtree(pgbuild)
def build_psycopg():
    """Build the psycopg C extension and the Python files."""
    os.chdir(opt.package_dir)
    patch_package_name()
    add_pg_config_path()

    # Build the C extension linking the libs built above.
    run_python(
        ["setup.py", "build_ext", "--have-ssl"]
        + ["-l", "libpgcommon libpgport"]
        + ["-L", opt.ssl_build_dir / 'lib']
        + ['-I', opt.ssl_build_dir / 'include']
    )
    run_python(["setup.py", "build_py"])
def patch_package_name():
    """Change the psycopg2 package name in the setup.py if required."""
    if opt.package_name == 'psycopg2':
        return

    logger.info("changing package name to %s", opt.package_name)

    setup_py = opt.package_dir / 'setup.py'
    data = setup_py.read_text()

    # Replace the name of the package with what desired
    rex = re.compile(r"""name=["']psycopg2["']""")
    found = rex.findall(data)
    assert len(found) == 1, found
    setup_py.write_text(rex.sub(f'name="{opt.package_name}"', data))
def build_binary_packages():
    """Create wheel/exe binary packages."""
    os.chdir(opt.package_dir)

    add_pg_config_path()

    # Build .exe packages for whom still use them
    # (only for the vanilla package name: wininst doesn't rename well)
    if opt.package_name == 'psycopg2':
        run_python(['setup.py', 'bdist_wininst', "-d", opt.dist_dir])

    # Build .whl packages
    run_python(['setup.py', 'bdist_wheel', "-d", opt.dist_dir])
def step_after_build():
    """'after_build' step: install the package just built."""
    if opt.is_wheel:
        install_binary_package()
    else:
        install_built_package()
def install_built_package():
    """Install the package just built by setup build."""
    os.chdir(opt.package_dir)

    # Install the psycopg just built
    add_pg_config_path()
    run_python(["setup.py", "install"])
    # Drop the egg-info dir so later builds start from a clean state.
    shutil.rmtree("psycopg2.egg-info")
def install_binary_package():
    """Install the package from a packaged wheel."""
    # --no-index -f dist_dir ensures pip installs the wheel just built,
    # not a version from PyPI.
    run_python(
        ['-m', 'pip', 'install', '--no-index', '-f', opt.dist_dir]
        + [opt.package_name]
    )
def add_pg_config_path():
    """Allow finding in the path the pg_config just built."""
    pg_path = str(opt.pg_build_dir / 'bin')
    # Prepend only once: the step functions may call this repeatedly.
    if pg_path not in os.environ['PATH'].split(os.pathsep):
        setenv('PATH', os.pathsep.join([pg_path, os.environ['PATH']]))
def step_before_test():
    """'before_test' step: create the test database and its extensions."""
    print_psycopg2_version()

    # Create and setup PostgreSQL database for the tests
    run_command([opt.pg_bin_dir / 'createdb', os.environ['PSYCOPG2_TESTDB']])
    run_command(
        [opt.pg_bin_dir / 'psql', '-d', os.environ['PSYCOPG2_TESTDB']]
        + ['-c', "CREATE EXTENSION hstore"]
    )
def print_psycopg2_version():
    """Print psycopg2 and libpq versions installed."""
    exprs = (
        'psycopg2.__version__',
        'psycopg2.__libpq_version__',
        'psycopg2.extensions.libpq_version()',
    )
    for expr in exprs:
        output = out_python(['-c', f"import psycopg2; print({expr})"])
        logger.info("built %s: %s", expr, output.decode('ascii'))
def step_test_script():
    """'test_script' step: verify the libpq linked, run the test suite."""
    check_libpq_version()
    run_test_suite()
def check_libpq_version():
    """
    Fail if the package installed is not using the expected libpq version.
    """
    # POSTGRES_VERSION is e.g. "13_3"; libpq_version() returns e.g. 130003,
    # so format the wanted version as major + zero-padded minor.
    want_ver = tuple(map(int, os.environ['POSTGRES_VERSION'].split('_')))
    want_ver = "%d%04d" % want_ver
    got_ver = (
        out_python(
            ['-c']
            + ["import psycopg2; print(psycopg2.extensions.libpq_version())"]
        )
        .decode('ascii')
        .rstrip()
    )
    assert want_ver == got_ver, f"libpq version mismatch: {want_ver!r} != {got_ver!r}"
def run_test_suite():
    """Run the psycopg2 test suite with the target Python."""
    # Remove this var, which would make a badly configured OpenSSL 1.1 work
    os.environ.pop('OPENSSL_CONF', None)

    # Run the unit test
    args = [
        '-c',
        "import tests; tests.unittest.main(defaultTest='tests.test_suite')",
    ]

    if opt.is_wheel:
        # Reduced feedback for wheel builds: run the fast variant.
        os.environ['PSYCOPG2_TEST_FAST'] = '1'
    else:
        args.append('--verbose')

    os.chdir(opt.package_dir)
    run_python(args)
def step_on_success():
    """'on_success' step: print the hashes and upload the packages built."""
    print_sha1_hashes()
    if setup_ssh():
        upload_packages()
def print_sha1_hashes():
    """
    Print the packages sha1 so their integrity can be checked upon signing.
    """
    logger.info("artifacts SHA1 hashes:")

    os.chdir(opt.package_dir / 'dist')
    run_command([which('sha1sum'), '-b', 'psycopg2-*/*'])
def setup_ssh():
    """
    Configure ssh to upload built packages where they can be retrieved.

    Return False if we can't configure, so the upload should be skipped.
    """
    # If we are not on the psycopg AppVeyor account, the environment variable
    # REMOTE_KEY will not be decrypted. In that case skip uploading.
    if os.environ['APPVEYOR_ACCOUNT_NAME'] != 'psycopg':
        # FIX: logger.warn() is deprecated; use warning()
        logger.warning("skipping artifact upload: you are not psycopg")
        return False

    pkey = os.environ.get('REMOTE_KEY', None)
    if not pkey:
        logger.warning("skipping artifact upload: no remote key")
        return False

    # Write SSH Private Key file from environment variable
    # (the key is stored with spaces instead of newlines)
    pkey = pkey.replace(' ', '\n')
    with (opt.clone_dir / 'data/id_rsa-psycopg-upload').open('w') as f:
        f.write(
            f"""\
-----BEGIN RSA PRIVATE KEY-----
{pkey}
-----END RSA PRIVATE KEY-----
"""
        )

    # Make a directory to please MinGW's version of ssh
    ensure_dir(r"C:\MinGW\msys\1.0\home\appveyor\.ssh")

    return True
def upload_packages():
    """Upload the built artifacts using MinGW's rsync over ssh."""
    logger.info("uploading artifacts")
    os.chdir(opt.clone_dir)

    run_command(
        [r"C:\MinGW\msys\1.0\bin\rsync", "-avr"]
        + ["-e", r"C:\MinGW\msys\1.0\bin\ssh -F data/ssh_config"]
        + ["psycopg2/dist/", "upload:"]
    )
def download(url, fn):
    """Download the content of *url* into the local file *fn*."""
    logger.info("downloading %s", url)
    with open(fn, 'wb') as fo, urlopen(url) as fi:
        # Stream in chunks via the stdlib helper instead of a manual
        # read/write loop (same 8 KiB chunk size as before).
        shutil.copyfileobj(fi, fo, length=8192)

    logger.info("file downloaded: %s", fn)
def file_replace(fn, s1, s2):
    """
    Replace all the occurrences of the string s1 into s2 in the file fn.
    """
    assert os.path.exists(fn)
    with open(fn) as f:
        content = f.read()
    with open(fn, 'w') as f:
        f.write(content.replace(s1, s2))
def merge_dir(src, tgt):
    """
    Merge the content of the directory src into the directory tgt

    Reproduce the semantic of "XCOPY /Y /S src/* tgt"
    """
    src = str(src)
    for dp, _dns, fns in os.walk(src):
        logger.debug("dirpath %s", dp)
        if not fns:
            continue
        assert dp.startswith(src)
        # Path of the current dir relative to the merge root.
        subdir = dp[len(src) :].lstrip(os.sep)
        tgtdir = ensure_dir(os.path.join(tgt, subdir))
        for fn in fns:
            copy_file(os.path.join(dp, fn), tgtdir)
def bat_call(cmdline):
    """
    Simulate 'CALL' from a batch file

    Execute CALL *cmdline* and export the changed environment to the current
    environment.

    nana-nana-nana-nana...
    """
    if not isinstance(cmdline, str):
        cmdline = map(str, cmdline)
        cmdline = ' '.join(c if ' ' not in c else '"%s"' % c for c in cmdline)

    # The batch script runs the command, then dumps the resulting environment
    # as json so it can be parsed and imported into this process below.
    data = f"""\
CALL {cmdline}
{opt.py_exe} -c "import os, sys, json; \
json.dump(dict(os.environ), sys.stdout, indent=2)"
"""

    logger.debug("preparing file to batcall:\n\n%s", data)

    # NOTE(review): the tempfile seems only used to reserve a unique name:
    # it is closed (and deleted) on exiting the 'with', then the name is
    # re-created with open() — presumably because Windows can't reopen an
    # already-open NamedTemporaryFile. TODO confirm.
    with NamedTemporaryFile(suffix='.bat') as tmp:
        fn = tmp.name

    with open(fn, "w") as f:
        f.write(data)

    try:
        out = out_command(fn)
        # be vewwy vewwy caweful to print the env var as it might contain
        # secwet things like your pwecious pwivate key.
        # logger.debug("output of command:\n\n%s", out.decode('utf8', 'replace'))

        # The output has some useless crap on stdout, because sure, and json
        # indented so the last { on column 1 is where we have to start parsing
        m = list(re.finditer(b'^{', out, re.MULTILINE))[-1]
        out = out[m.start() :]
        env = json.loads(out)
        for k, v in env.items():
            if os.environ.get(k) != v:
                setenv(k, v)
    finally:
        os.remove(fn)
def ensure_dir(dir):
    """Create the directory *dir* if missing; return it as a Path."""
    path = dir if isinstance(dir, Path) else Path(dir)
    if not path.is_dir():
        logger.info("creating directory %s", path)
        path.mkdir(parents=True)

    return path
def run_command(cmdline, **kwargs):
    """Run a command, raise on error."""
    if not isinstance(cmdline, str):
        cmdline = [str(part) for part in cmdline]
    logger.info("running command: %s", cmdline)
    sp.check_call(cmdline, **kwargs)
def out_command(cmdline, **kwargs):
    """Run a command, return its output, raise on error."""
    if not isinstance(cmdline, str):
        cmdline = [str(part) for part in cmdline]
    logger.info("running command: %s", cmdline)
    return sp.check_output(cmdline, **kwargs)
def run_python(args, **kwargs):
    """
    Run a script in the target Python.
    """
    # Note: opt.py_exe is the Python being built for, not the interpreter
    # running this script.
    return run_command([opt.py_exe] + args, **kwargs)
def out_python(args, **kwargs):
    """
    Return the output of a script run in the target Python.
    """
    return out_command([opt.py_exe] + args, **kwargs)
def copy_file(src, dst):
    """Copy the file *src* to *dst*, logging the operation."""
    logger.info("copying file %s -> %s", src, dst)
    shutil.copy(src, dst)
def setenv(k, v):
    """Set the environment variable *k* to *v*, logging the operation."""
    logger.debug("setting %s=%s", k, v)
    os.environ[k] = v
def which(name):
    """
    Return the full path of a command found on the path
    """
    base, ext = os.path.splitext(name)
    # With no explicit extension, try the usual Windows executable ones.
    exts = (ext,) if ext else ('.com', '.exe', '.bat', '.cmd')

    folders = ['.'] + os.environ['PATH'].split(os.pathsep)
    for folder in folders:
        for candidate_ext in exts:
            candidate = os.path.join(folder, base + candidate_ext)
            if os.path.isfile(candidate):
                return candidate

    raise Exception(f"couldn't find program on path: {name}")
class Options:
    """
    An object exposing the script configuration from env vars and command line.
    """

    @property
    def py_ver(self):
        """The Python version to build as 2 digits string."""
        rv = os.environ['PY_VER']
        assert rv in ('36', '37', '38', '39'), rv
        return rv

    @property
    def py_arch(self):
        """The Python architecture to build, 32 or 64."""
        rv = os.environ['PY_ARCH']
        assert rv in ('32', '64'), rv
        return int(rv)

    @property
    def arch_32(self):
        """True if the Python architecture to build is 32 bits."""
        return self.py_arch == 32

    @property
    def arch_64(self):
        """True if the Python architecture to build is 64 bits."""
        return self.py_arch == 64

    @property
    def package_name(self):
        """The name of the package to build (e.g. psycopg2, psycopg2-binary)."""
        return os.environ.get('CONFIGURATION', 'psycopg2')

    @property
    def package_version(self):
        """The psycopg2 version number to build."""
        with (self.package_dir / 'setup.py').open() as f:
            data = f.read()

        # Parse the version out of the PSYCOPG_VERSION constant in setup.py.
        m = re.search(
            r"""^PSYCOPG_VERSION\s*=\s*['"](.*)['"]""", data, re.MULTILINE
        )
        return m.group(1)

    @property
    def is_wheel(self):
        """Are we building the wheel packages or just the extension?"""
        workflow = os.environ["WORKFLOW"]
        return workflow == "packages"

    @property
    def py_dir(self):
        """
        The path to the target python binary to execute.
        """
        dirname = ''.join(
            [r"C:\Python", self.py_ver, '-x64' if self.arch_64 else '']
        )
        return Path(dirname)

    @property
    def py_exe(self):
        """
        The full path of the target python executable.
        """
        return self.py_dir / 'python.exe'

    @property
    def vc_dir(self):
        """
        The path of the Visual C compiler.
        """
        if self.vs_ver == '16.0':
            path = Path(
                r"C:\Program Files (x86)\Microsoft Visual Studio\2019"
                r"\Community\VC\Auxiliary\Build"
            )
        else:
            path = Path(
                r"C:\Program Files (x86)\Microsoft Visual Studio %s\VC"
                % self.vs_ver
            )
        return path

    @property
    def vs_ver(self):
        """The Visual Studio version matching the target Python version."""
        # https://wiki.python.org/moin/WindowsCompilers
        # https://www.appveyor.com/docs/windows-images-software/#python
        # Py 3.6--3.8 = VS Ver. 14.0 (VS 2015)
        # Py 3.9 = VS Ver. 16.0 (VS 2019)
        vsvers = {
            '36': '14.0',
            '37': '14.0',
            '38': '14.0',
            '39': '16.0',
        }
        return vsvers[self.py_ver]

    @property
    def clone_dir(self):
        """The directory where the repository is cloned."""
        return Path(r"C:\Project")

    @property
    def appveyor_pg_dir(self):
        """The directory of the postgres service made available by Appveyor."""
        return Path(os.environ['POSTGRES_DIR'])

    @property
    def pg_data_dir(self):
        """The data dir of the appveyor postgres service."""
        return self.appveyor_pg_dir / 'data'

    @property
    def pg_bin_dir(self):
        """The bin dir of the appveyor postgres service."""
        return self.appveyor_pg_dir / 'bin'

    @property
    def pg_build_dir(self):
        """The directory where to build the postgres libraries for psycopg."""
        return self.cache_arch_dir / 'postgresql'

    @property
    def ssl_build_dir(self):
        """The directory where to build the openssl libraries for psycopg."""
        return self.cache_arch_dir / 'openssl'

    @property
    def cache_arch_dir(self):
        """Arch- and compiler-version-specific subdir of the build cache."""
        rv = self.cache_dir / str(self.py_arch) / self.vs_ver
        return ensure_dir(rv)

    @property
    def cache_dir(self):
        """The root of the directory cached between appveyor builds."""
        return Path(r"C:\Others")

    @property
    def build_dir(self):
        """The directory where the sources are unpacked and built."""
        rv = self.cache_arch_dir / 'Builds'
        return ensure_dir(rv)

    @property
    def package_dir(self):
        """The directory containing the package source (the repos clone)."""
        return self.clone_dir

    @property
    def dist_dir(self):
        """The directory where to build packages to distribute."""
        return (
            self.package_dir / 'dist' / (f'psycopg2-{self.package_version}')
        )
def parse_cmdline():
    """Parse the command line, return an Options instance."""
    parser = ArgumentParser(description=__doc__)

    g = parser.add_mutually_exclusive_group()
    g.add_argument(
        '-q',
        '--quiet',
        help="Talk less",
        dest='loglevel',
        action='store_const',
        const=logging.WARN,
        default=logging.INFO,
    )
    g.add_argument(
        '-v',
        '--verbose',
        help="Talk more",
        dest='loglevel',
        action='store_const',
        const=logging.DEBUG,
        default=logging.INFO,
    )

    # Discover the available steps from the step_* functions defined above.
    steps = [
        n[len(STEP_PREFIX) :]
        for n in globals()
        if n.startswith(STEP_PREFIX) and callable(globals()[n])
    ]
    parser.add_argument(
        'step', choices=steps, help="the appveyor step to execute"
    )

    # Parse into an Options namespace so its properties are on the result.
    opt = parser.parse_args(namespace=Options())
    return opt
# Script entry point.
if __name__ == '__main__':
    sys.exit(main())

130
scripts/build/build_libpq.sh Executable file
View File

@ -0,0 +1,130 @@
#!/bin/bash

# Build a modern version of libpq and depending libs from source on Centos 5

set -euo pipefail
set -x

# Versions of the dependencies to build.
openssl_version="1.1.1k"
ldap_version="2.4.59"
sasl_version="2.1.27"
postgres_version="13.3"

yum install -y zlib-devel krb5-devel pam-devel

# Build openssl if needed
openssl_tag="OpenSSL_${openssl_version//./_}"
openssl_dir="openssl-${openssl_tag}"
if [ ! -d "${openssl_dir}" ]; then
    curl -sL \
        https://github.com/openssl/openssl/archive/${openssl_tag}.tar.gz \
        | tar xzf -

    cd "${openssl_dir}"

    ./config --prefix=/usr/local/ --openssldir=/usr/local/ \
        zlib -fPIC shared
    make depend
    make
else
    cd "${openssl_dir}"
fi

# Install openssl
make install_sw
cd ..

# Build libsasl2 if needed
# The system package (cyrus-sasl-devel) causes an amazing error on i686:
# "unsupported version 0 of Verneed record"
# https://github.com/pypa/manylinux/issues/376
sasl_tag="cyrus-sasl-${sasl_version}"
sasl_dir="cyrus-sasl-${sasl_tag}"
if [ ! -d "${sasl_dir}" ]; then
    curl -sL \
        https://github.com/cyrusimap/cyrus-sasl/archive/${sasl_tag}.tar.gz \
        | tar xzf -

    cd "${sasl_dir}"

    autoreconf -i
    ./configure
    make
else
    cd "${sasl_dir}"
fi

# Install libsasl2
# requires missing nroff to build
touch saslauthd/saslauthd.8
make install
cd ..

# Build openldap if needed
ldap_tag="${ldap_version}"
ldap_dir="openldap-${ldap_tag}"
if [ ! -d "${ldap_dir}" ]; then
    curl -sL \
        https://www.openldap.org/software/download/OpenLDAP/openldap-release/openldap-${ldap_tag}.tgz \
        | tar xzf -

    cd "${ldap_dir}"

    # Only the client libraries are needed: no server backends.
    ./configure --enable-backends=no --enable-null

    make depend
    make -C libraries/liblutil/
    make -C libraries/liblber/
    make -C libraries/libldap/
    make -C libraries/libldap_r/
else
    cd "${ldap_dir}"
fi

# Install openldap
make -C libraries/liblber/ install
make -C libraries/libldap/ install
make -C libraries/libldap_r/ install
make -C include/ install
chmod +x /usr/local/lib/{libldap,liblber}*.so*
cd ..

# Build libpq if needed
postgres_tag="REL_${postgres_version//./_}"
postgres_dir="postgres-${postgres_tag}"
if [ ! -d "${postgres_dir}" ]; then
    curl -sL \
        https://github.com/postgres/postgres/archive/${postgres_tag}.tar.gz \
        | tar xzf -

    cd "${postgres_dir}"

    # Match the default unix socket dir default with what defined on Ubuntu and
    # Red Hat, which seems the most common location
    sed -i 's|#define DEFAULT_PGSOCKET_DIR .*'\
'|#define DEFAULT_PGSOCKET_DIR "/var/run/postgresql"|' \
        src/include/pg_config_manual.h

    # Without this, libpq ./configure fails on i686
    if [[ "$(uname -m)" == "i686" ]]; then
        export LD_LIBRARY_PATH=/usr/local/lib
    fi

    ./configure --prefix=/usr/local --without-readline \
        --with-gssapi --with-openssl --with-pam --with-ldap
    make -C src/interfaces/libpq
    make -C src/bin/pg_config
    make -C src/include
else
    cd "${postgres_dir}"
fi

# Install libpq
make -C src/interfaces/libpq install
make -C src/bin/pg_config install
make -C src/include install
cd ..

# Strip debug symbols to keep the installed libraries small.
find /usr/local/ -name \*.so.\* -type f -exec strip --strip-unneeded {} \;

79
scripts/build/build_macos.sh Executable file
View File

@ -0,0 +1,79 @@
#!/bin/bash

# Create macOS wheels for psycopg2
#
# Following instructions from https://github.com/MacPython/wiki/wiki/Spinning-wheels
# Cargoculting pieces of implementation from https://github.com/matthew-brett/multibuild

set -euo pipefail
set -x

dir="$( cd "$( dirname "${BASH_SOURCE[0]}" )" && pwd )"
prjdir="$( cd "${dir}/../.." && pwd )"

brew install gnu-sed postgresql@13

# Start the database for testing
brew services start postgresql

# Wait (up to 10 attempts, 5 seconds apart) for the server to be ready.
for i in $(seq 10 -1 0); do
    eval pg_isready && break
    if [ $i == 0 ]; then
        echo "PostgreSQL service not ready, giving up"
        exit 1
    fi
    echo "PostgreSQL service not ready, waiting a bit, attempts left: $i"
    sleep 5
done

# Find psycopg version
version=$(grep -e ^PSYCOPG_VERSION "${prjdir}/setup.py" | gsed "s/.*'\(.*\)'/\1/")
# A gratuitous comment to fix broken vim syntax file: '")

distdir="${prjdir}/dist/psycopg2-$version"
mkdir -p "$distdir"

# Install required python packages
pip install -U pip wheel delocate

# Replace the package name
if [[ "${PACKAGE_NAME:-}" ]]; then
    gsed -i "s/^setup(name=\"psycopg2\"/setup(name=\"${PACKAGE_NAME}\"/" \
        "${prjdir}/setup.py"
fi

# Build the wheels
wheeldir="${prjdir}/wheels"
pip wheel -w ${wheeldir} .
delocate-listdeps ${wheeldir}/*.whl

# Check where is the libpq. I'm gonna kill it for testing
if [[ -z "${LIBPQ:-}" ]]; then
    export LIBPQ=$(delocate-listdeps ${wheeldir}/*.whl | grep libpq)
fi

delocate-wheel ${wheeldir}/*.whl
# https://github.com/MacPython/wiki/wiki/Spinning-wheels#question-will-pip-give-me-a-broken-wheel
delocate-addplat --rm-orig -x 10_9 -x 10_10 ${wheeldir}/*.whl
cp ${wheeldir}/*.whl ${distdir}

# kill the libpq to make sure tests don't depend on it
mv "$LIBPQ" "${LIBPQ}-bye"

# Install and test the built wheel
pip install ${PACKAGE_NAME:-psycopg2} --no-index -f "$distdir"

# Print psycopg and libpq versions
python -c "import psycopg2; print(psycopg2.__version__)"
python -c "import psycopg2; print(psycopg2.__libpq_version__)"
python -c "import psycopg2; print(psycopg2.extensions.libpq_version())"

# fail if we are not using the expected libpq library
# Disabled as we just use what's available on the system on macOS
# if [[ "${WANT_LIBPQ:-}" ]]; then
#     python -c "import psycopg2, sys; sys.exit(${WANT_LIBPQ} != psycopg2.extensions.libpq_version())"
# fi

python -c "import tests; tests.unittest.main(defaultTest='tests.test_suite')"

# just because I'm a boy scout
mv "${LIBPQ}-bye" "$LIBPQ"

View File

@ -0,0 +1,75 @@
#!/bin/bash

# Create manylinux2014 wheels for psycopg2
#
# manylinux2014 is built on CentOS 7, which packages an old version of the
# libssl, (1.0, which has concurrency problems with the Python libssl). So we
# need to build these libraries from source.
#
# Look at the .github/workflows/packages.yml file for hints about how to use it.

set -euo pipefail
set -x

dir="$( cd "$( dirname "${BASH_SOURCE[0]}" )" && pwd )"
prjdir="$( cd "${dir}/../.." && pwd )"

# Build all the available versions, or just the ones specified in PYVERS
if [ ! "${PYVERS:-}" ]; then
    PYVERS="$(ls /opt/python/)"
fi

# Find psycopg version
version=$(grep -e ^PSYCOPG_VERSION "${prjdir}/setup.py" | sed "s/.*'\(.*\)'/\1/")
# A gratuitous comment to fix broken vim syntax file: '")

distdir="${prjdir}/dist/psycopg2-$version"

# Replace the package name
if [[ "${PACKAGE_NAME:-}" ]]; then
    sed -i "s/^setup(name=\"psycopg2\"/setup(name=\"${PACKAGE_NAME}\"/" \
        "${prjdir}/setup.py"
fi

# Build depending libraries
"${dir}/build_libpq.sh" > /dev/null

# Create the wheel packages
for pyver in $PYVERS; do
    pybin="/opt/python/${pyver}/bin"
    "${pybin}/pip" wheel "${prjdir}" -w "${prjdir}/dist/"
done

# Bundle external shared libraries into the wheels
for whl in "${prjdir}"/dist/*.whl; do
    auditwheel repair "$whl" -w "$distdir"
done

# Make sure the libpq is not in the system
# (move it away so the tests use the copy bundled in the wheels)
for f in $(find /usr/local/lib -name libpq\*) ; do
    mkdir -pv "/libpqbak/$(dirname $f)"
    mv -v "$f" "/libpqbak/$(dirname $f)"
done

# Install packages and test
cd "${prjdir}"
for pyver in $PYVERS; do
    pybin="/opt/python/${pyver}/bin"
    "${pybin}/pip" install ${PACKAGE_NAME:-psycopg2} --no-index -f "$distdir"

    # Print psycopg and libpq versions
    "${pybin}/python" -c "import psycopg2; print(psycopg2.__version__)"
    "${pybin}/python" -c "import psycopg2; print(psycopg2.__libpq_version__)"
    "${pybin}/python" -c "import psycopg2; print(psycopg2.extensions.libpq_version())"

    # Fail if we are not using the expected libpq library
    if [[ "${WANT_LIBPQ:-}" ]]; then
        "${pybin}/python" -c "import psycopg2, sys; sys.exit(${WANT_LIBPQ} != psycopg2.extensions.libpq_version())"
    fi

    "${pybin}/python" -c "import tests; tests.unittest.main(defaultTest='tests.test_suite')"
done

# Restore the libpq packages
for f in $(cd /libpqbak/ && find . -not -type d); do
    mv -v "/libpqbak/$f" "/$f"
done

View File

@ -0,0 +1,75 @@
#!/bin/bash

# Create manylinux_2_24 wheels for psycopg2
#
# Look at the .github/workflows/packages.yml file for hints about how to use it.

set -euo pipefail
set -x

dir="$( cd "$( dirname "${BASH_SOURCE[0]}" )" && pwd )"
prjdir="$( cd "${dir}/../.." && pwd )"

# Build all the available versions, or just the ones specified in PYVERS
if [ ! "${PYVERS:-}" ]; then
    PYVERS="$(ls /opt/python/)"
fi

# Find psycopg version
version=$(grep -e ^PSYCOPG_VERSION "${prjdir}/setup.py" | sed "s/.*'\(.*\)'/\1/")
# A gratuitous comment to fix broken vim syntax file: '")

distdir="${prjdir}/dist/psycopg2-$version"

# Replace the package name
if [[ "${PACKAGE_NAME:-}" ]]; then
    sed -i "s/^setup(name=\"psycopg2\"/setup(name=\"${PACKAGE_NAME}\"/" \
        "${prjdir}/setup.py"
fi

# Install prerequisite libraries
# (manylinux_2_24 is Debian-based: use the PGDG apt repository)
curl -s https://www.postgresql.org/media/keys/ACCC4CF8.asc | apt-key add -
echo "deb http://apt.postgresql.org/pub/repos/apt stretch-pgdg main" \
    > /etc/apt/sources.list.d/pgdg.list
apt-get -y update
apt-get install -y libpq-dev

# Create the wheel packages
for pyver in $PYVERS; do
    pybin="/opt/python/${pyver}/bin"
    "${pybin}/pip" wheel "${prjdir}" -w "${prjdir}/dist/"
done

# Bundle external shared libraries into the wheels
for whl in "${prjdir}"/dist/*.whl; do
    auditwheel repair "$whl" -w "$distdir"
done

# Make sure the libpq is not in the system
# (move it away so the tests use the copy bundled in the wheels)
for f in $(find /usr/lib /usr/lib64 -name libpq\*) ; do
    mkdir -pv "/libpqbak/$(dirname $f)"
    mv -v "$f" "/libpqbak/$(dirname $f)"
done

# Install packages and test
cd "${prjdir}"
for pyver in $PYVERS; do
    pybin="/opt/python/${pyver}/bin"
    "${pybin}/pip" install ${PACKAGE_NAME:-psycopg2} --no-index -f "$distdir"

    # Print psycopg and libpq versions
    "${pybin}/python" -c "import psycopg2; print(psycopg2.__version__)"
    "${pybin}/python" -c "import psycopg2; print(psycopg2.__libpq_version__)"
    "${pybin}/python" -c "import psycopg2; print(psycopg2.extensions.libpq_version())"

    # Fail if we are not using the expected libpq library
    if [[ "${WANT_LIBPQ:-}" ]]; then
        "${pybin}/python" -c "import psycopg2, sys; sys.exit(${WANT_LIBPQ} != psycopg2.extensions.libpq_version())"
    fi

    "${pybin}/python" -c "import tests; tests.unittest.main(defaultTest='tests.test_suite')"
done

# Restore the libpq packages
for f in $(cd /libpqbak/ && find . -not -type d); do
    mv -v "/libpqbak/$f" "/$f"
done

26
scripts/build/build_sdist.sh Executable file
View File

@ -0,0 +1,26 @@
#!/bin/bash

# Build, install and test the psycopg2 source distribution package.

set -euo pipefail
set -x

dir="$( cd "$( dirname "${BASH_SOURCE[0]}" )" && pwd )"
prjdir="$( cd "${dir}/../.." && pwd )"

# Find psycopg version
# FIX: use the absolute path, consistently with the other build scripts,
# so the script also works when invoked from outside the project root.
version=$(grep -e ^PSYCOPG_VERSION "${prjdir}/setup.py" | sed "s/.*'\(.*\)'/\1/")
# A gratuitous comment to fix broken vim syntax file: '")

distdir="${prjdir}/dist/psycopg2-$version"

# Replace the package name
if [[ "${PACKAGE_NAME:-}" ]]; then
    sed -i "s/^setup(name=\"psycopg2\"/setup(name=\"${PACKAGE_NAME}\"/" \
        "${prjdir}/setup.py"
fi

# Build the source package (setup.py must run from the project dir)
cd "${prjdir}"
python setup.py sdist -d "$distdir"

# install and test
pip install "${distdir}"/*.tar.gz
python -c "import tests; tests.unittest.main(defaultTest='tests.test_suite')"

View File

@ -0,0 +1,103 @@
#!/usr/bin/env python
"""Download packages from the AppVeyor build artifacts.
"""

import os
import re
import sys
import logging
import datetime as dt
from pathlib import Path

import requests

logger = logging.getLogger()
logging.basicConfig(level=logging.INFO, format="%(asctime)s %(levelname)s %(message)s")

# AppVeyor REST API endpoint and the project whose artifacts to download.
API_URL = "https://ci.appveyor.com/api"
REPOS = "psycopg/psycopg2"
WORKFLOW_NAME = "Build packages"
# Raised for expected failures: reported as a plain error message (no
# traceback) with exit status 1 by the entry point below.
class ScriptError(Exception):
    """Controlled exception raised by the script."""
def main():
    """Download the artifacts of the last AppVeyor build into ./packages.

    Raise ScriptError if the build or one of its jobs is not successful.
    """
    try:
        token = os.environ["APPVEYOR_TOKEN"]
    except KeyError:
        raise ScriptError("please set a APPVEYOR_TOKEN to download artifacts")

    s = requests.Session()
    s.headers["Content-Type"] = "application/json"
    s.headers["Authorization"] = f"Bearer {token}"

    logger.info("fetching last run")
    resp = s.get(f"{API_URL}/projects/{REPOS}/")
    resp.raise_for_status()
    data = resp.json()

    # Strip the fractional seconds, which fromisoformat can't parse.
    updated_at = dt.datetime.fromisoformat(
        re.sub(r"\.\d+", "", data["build"]["finished"])
    )
    now = dt.datetime.now(dt.timezone.utc)
    age = now - updated_at
    logger.info(
        f"found build {data['build']['version']} updated {pretty_interval(age)} ago"
    )
    if age > dt.timedelta(hours=6):
        logger.warning("maybe it's a bit old?")

    jobs = data["build"]["jobs"]
    for job in jobs:
        if job["status"] != "success":
            # BUG FIX: the message was missing the f prefix, so the job
            # details were printed verbatim instead of interpolated.
            raise ScriptError(
                f"status for job {job['jobId']} is {job['status']}"
            )

        logger.info(f"fetching artifacts info for {job['name']}")
        resp = s.get(f"{API_URL}/buildjobs/{job['jobId']}/artifacts/")
        resp.raise_for_status()
        afs = resp.json()
        for af in afs:
            fn = af["fileName"]
            if fn.startswith("dist/"):
                fn = fn.split("/", 1)[1]
            dest = Path("packages") / fn
            logger.info(f"downloading {dest}")
            resp = s.get(
                f"{API_URL}/buildjobs/{job['jobId']}/artifacts/{af['fileName']}"
            )
            resp.raise_for_status()
            if not dest.parent.exists():
                dest.parent.mkdir()
            with dest.open("wb") as f:
                f.write(resp.content)

    logger.info("now you can run: 'twine upload -s packages/*'")
def pretty_interval(td):
    """Render the timedelta *td* as a human-readable English string."""
    total_mins = int(td.total_seconds()) // 60
    hours, mins = divmod(total_mins, 60)
    days, hours = divmod(hours, 24)
    if days:
        return f"{days} days, {hours} hours, {mins} minutes"
    elif hours:
        return f"{hours} hours, {mins} minutes"
    else:
        return f"{mins} minutes"
if __name__ == "__main__":
try:
sys.exit(main())
except ScriptError as e:
logger.error("%s", e)
sys.exit(1)
except KeyboardInterrupt:
logger.info("user interrupt")
sys.exit(1)

View File

@ -0,0 +1,99 @@
#!/usr/bin/env python
"""Download packages from github actions artifacts
"""

import io
import os
import sys
import logging
import datetime as dt
from pathlib import Path
from zipfile import ZipFile

import requests

logger = logging.getLogger()
logging.basicConfig(level=logging.INFO, format="%(asctime)s %(levelname)s %(message)s")

# Repository and workflow whose artifacts to download.
REPOS = "psycopg/psycopg2"
WORKFLOW_NAME = "Build packages"
# Raised for expected failures: reported as a plain error message (no
# traceback) with exit status 1 by the entry point below.
class ScriptError(Exception):
    """Controlled exception raised by the script."""
def main():
    """Download the artifacts of the last packages workflow into ./packages.

    Raise ScriptError if a recent completed run cannot be found.
    """
    try:
        token = os.environ["GITHUB_TOKEN"]
    except KeyError:
        raise ScriptError("please set a GITHUB_TOKEN to download artifacts")

    s = requests.Session()
    s.headers["Accept"] = "application/vnd.github.v3+json"
    s.headers["Authorization"] = f"token {token}"

    logger.info("looking for recent runs")
    resp = s.get(f"https://api.github.com/repos/{REPOS}/actions/runs?per_page=10")
    resp.raise_for_status()
    for run in resp.json()["workflow_runs"]:
        if run["name"] == WORKFLOW_NAME:
            break
    else:
        raise ScriptError(f"couldn't find {WORKFLOW_NAME!r} in recent runs")

    if run["status"] != "completed":
        raise ScriptError(f"run #{run['run_number']} is in status {run['status']}")

    # Github timestamps end in "Z": convert to an offset fromisoformat groks.
    updated_at = dt.datetime.fromisoformat(run["updated_at"].replace("Z", "+00:00"))
    now = dt.datetime.now(dt.timezone.utc)
    age = now - updated_at
    logger.info(f"found run #{run['run_number']} updated {pretty_interval(age)} ago")
    if age > dt.timedelta(hours=6):
        logger.warning("maybe it's a bit old?")

    logger.info(f"looking for run #{run['run_number']} artifacts")
    resp = s.get(f"{run['url']}/artifacts")
    resp.raise_for_status()
    artifacts = resp.json()["artifacts"]

    dest = Path("packages")
    if not dest.exists():
        logger.info(f"creating dir {dest}")
        dest.mkdir()

    for artifact in artifacts:
        logger.info(f"downloading {artifact['name']} archive")
        zip_url = artifact["archive_download_url"]
        resp = s.get(zip_url)
        # FIX: fail explicitly on a bad download instead of letting ZipFile
        # raise a confusing BadZipFile error on an HTTP error payload.
        resp.raise_for_status()
        with ZipFile(io.BytesIO(resp.content)) as zf:
            logger.info("extracting archive content")
            zf.extractall(dest)

    logger.info(f"now you can run: 'twine upload -s {dest}/*'")
def pretty_interval(td):
    """Render the timedelta *td* as a human-readable English string."""
    secs = td.total_seconds()
    mins, secs = divmod(secs, 60)
    hours, mins = divmod(mins, 60)
    days, hours = divmod(hours, 24)
    days, hours, mins = int(days), int(hours), int(mins)

    # Include larger units only when non-zero.
    if days:
        parts = [f"{days} days", f"{hours} hours", f"{mins} minutes"]
    elif hours:
        parts = [f"{hours} hours", f"{mins} minutes"]
    else:
        parts = [f"{mins} minutes"]
    return ", ".join(parts)
if __name__ == "__main__":
try:
sys.exit(main())
except ScriptError as e:
logger.error("%s", e)
sys.exit(1)
except KeyboardInterrupt:
logger.info("user interrupt")
sys.exit(1)