Skip to content
This repository has been archived by the owner on Jul 23, 2024. It is now read-only.

Commit

Permalink
HAWQ-1780. Add GitHub Action Step to Test against Running Instance
Browse files Browse the repository at this point in the history
It fixes the install_name of libhdfs and libyarn with @rpath prefix,
which is required when loading executable in different path, and
enforces searching headers and libraries in those self-contained copies first.
  • Loading branch information
chiyang10000 authored and wcl14 committed Jan 22, 2021
1 parent a8c0940 commit 4cbd805
Show file tree
Hide file tree
Showing 7 changed files with 201 additions and 8 deletions.
34 changes: 30 additions & 4 deletions .github/workflows/build.yml
Original file line number Diff line number Diff line change
Expand Up @@ -15,7 +15,7 @@

name: Apache HAWQ

on: [push]
on: [push, pull_request]

jobs:
build-on-macOS:
Expand Down Expand Up @@ -44,6 +44,7 @@ jobs:
install_name_tool -add_rpath $GITHUB_WORKSPACE/dependency-Darwin/package/lib/perl5/5.28.0/darwin-thread-multi-2level/CORE/ $GITHUB_WORKSPACE/dependency-Darwin/package/bin/perl
- name: configure
timeout-minutes: 10
run: |
source $GITHUB_WORKSPACE/dependency-Darwin/package/env.sh
export CFLAGS="$CFLAGS -w"
Expand All @@ -55,16 +56,41 @@ jobs:
- name: build hawq
run: |
source $GITHUB_WORKSPACE/dependency-Darwin/package/env.sh
make -j$(nproc)
make -j$(nproc) install
make -j$(sysctl -n hw.ncpu)
make -j$(sysctl -n hw.ncpu) install
- name: build feature-test
run: |
source $GITHUB_WORKSPACE/dependency-Darwin/package/env.sh
make -j$(nproc) feature-test
make -j$(sysctl -n hw.ncpu) feature-test
- name: test executable
run: |
for file in $(find /tmp/hawq/bin -name '*' -type f); do
if [[ $(file $file | grep Mach-O) ]]; then
install_name_tool -add_rpath /tmp/hawq/lib $file;
fi
done
source /tmp/hawq/greenplum_path.sh
postgres -V
src/test/feature/feature-test --gtest_list_tests
- name: install HDFS
run: |
export HOMEBREW_NO_INSTALL_CLEANUP=1
brew install hadoop
- name: initialize macOS
run: .github/workflows/scripts/init_macos.sh

- name: initialize HDFS
run: |
export HADOOP_HOME=/usr/local/opt/hadoop/libexec
.github/workflows/scripts/init_hdfs.sh
- name: initialize HAWQ
run: |
source /tmp/hawq/greenplum_path.sh
.github/workflows/scripts/init_hawq.sh
psql -d postgres -c 'create database hawq_feature_test_db;'
src/test/feature/feature-test --gtest_filter=TestDatabase.BasicTest
64 changes: 64 additions & 0 deletions .github/workflows/scripts/init_hawq.sh
Original file line number Diff line number Diff line change
@@ -0,0 +1,64 @@
#!/bin/bash
# Licensed to the Apache Software Foundation (ASF) under one or more
# contributor license agreements. See the NOTICE file distributed with
# this work for additional information regarding copyright ownership.
# The ASF licenses this file to You under the Apache License, Version 2.0
# (the "License"); you may not use this file except in compliance with
# the License. You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.

# Writes a minimal single-node HAWQ configuration and initializes the
# cluster for CI testing.
# NOTE(review): relies on $GPHOME being set by the caller — the workflow
# sources /tmp/hawq/greenplum_path.sh before running this script. Confirm
# before invoking standalone.

# Abort on the first failing command so CI reports the real error.
set -e



# Configure
# Write hawq-site.xml into the installed tree. The heredoc body contains
# no shell variables, so the unquoted delimiter expands nothing.
# hawq_dfs_url points at the HDFS NameNode started by init_hdfs.sh.
tee $GPHOME/etc/hawq-site.xml << EOF_hawq_site
<configuration>
<property>
<name>hawq_dfs_url</name>
<value>localhost:8020/hawq_default</value>
<description>URL for accessing HDFS.</description>
</property>
<property>
<name>hawq_master_address_host</name>
<value>localhost</value>
</property>
<property>
<name>hawq_master_address_port</name>
<value>5432</value>
</property>
<property>
<name>hawq_segment_address_port</name>
<value>40000</value>
</property>
<property>
<name>hawq_master_directory</name>
<value>/tmp/db_data/hawq-data-directory/masterdd</value>
</property>
<property>
<name>hawq_segment_directory</name>
<value>/tmp/db_data/hawq-data-directory/segmentdd</value>
</property>
<property>
<name>hawq_master_temp_directory</name>
<value>/tmp</value>
</property>
<property>
<name>hawq_segment_temp_directory</name>
<value>/tmp</value>
</property>
</configuration>
EOF_hawq_site

# Initialize
# Remove stale dependency bundles and any leftover data directories, then
# recreate empty master/segment data directories for a clean init.
rm -rf /opt/dependency*
rm -rf /tmp/db_data/hawq-data-directory
install -d /tmp/db_data/hawq-data-directory/masterdd
install -d /tmp/db_data/hawq-data-directory/segmentdd
# -a answers all prompts automatically so the init is non-interactive.
hawq init cluster -a
52 changes: 52 additions & 0 deletions .github/workflows/scripts/init_hdfs.sh
Original file line number Diff line number Diff line change
@@ -0,0 +1,52 @@
#!/bin/bash
# Licensed to the Apache Software Foundation (ASF) under one or more
# contributor license agreements. See the NOTICE file distributed with
# this work for additional information regarding copyright ownership.
# The ASF licenses this file to You under the Apache License, Version 2.0
# (the "License"); you may not use this file except in compliance with
# the License. You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.

# Configures, formats, and starts a single-node (pseudo-distributed) HDFS
# for CI, then smoke-tests connectivity.
# NOTE(review): requires $HADOOP_HOME to be exported by the caller — the
# workflow sets it to the Homebrew hadoop libexec directory. Also assumes
# passphraseless ssh to localhost is already working (see init_macos.sh).

# Abort on the first failing command so CI reports the real error.
set -e



# Configure
# Point HDFS clients at a NameNode on localhost:8020 (matches the
# hawq_dfs_url used by init_hawq.sh).
tee $HADOOP_HOME/etc/hadoop/core-site.xml << EOF_core_site
<configuration>
<property>
<name>fs.defaultFS</name>
<value>hdfs://localhost:8020</value>
</property>
</configuration>
EOF_core_site
# Keep NameNode metadata and DataNode block storage under /tmp/db_data.
tee $HADOOP_HOME/etc/hadoop/hdfs-site.xml << EOF_hdfs_site
<configuration>
<property>
<name>dfs.namenode.name.dir</name>
<value>file:///tmp/db_data/hdfs/name</value>
</property>
<property>
<name>dfs.datanode.data.dir</name>
<value>file:///tmp/db_data/hdfs/data</value>
</property>
</configuration>
EOF_hdfs_site

# Initialize
# Create the storage directories, then format a fresh NameNode filesystem.
install -d /tmp/db_data/hdfs/name
install -d /tmp/db_data/hdfs/data
hdfs namenode -format

# Start
# start-dfs.sh launches the NameNode/DataNode daemons (via ssh to localhost).
$HADOOP_HOME/sbin/start-dfs.sh

# Connect
# Smoke test: report daemon/cluster status and list the root directory.
hdfs dfsadmin -report
hdfs dfs -ls /
49 changes: 49 additions & 0 deletions .github/workflows/scripts/init_macos.sh
Original file line number Diff line number Diff line change
@@ -0,0 +1,49 @@
#!/bin/bash
# Licensed to the Apache Software Foundation (ASF) under one or more
# contributor license agreements. See the NOTICE file distributed with
# this work for additional information regarding copyright ownership.
# The ASF licenses this file to You under the Apache License, Version 2.0
# (the "License"); you may not use this file except in compliance with
# the License. You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.

# Prepares a macOS CI runner: enables loopback ssh (needed by later steps
# such as HDFS's start-dfs.sh), raises kernel limits, and creates the
# shared data directory.

# Abort on the first failing command so CI reports the real error.
set -e



# Setup passphraseless ssh
# Enable the macOS Remote Login (sshd) service.
sudo systemsetup -setremotelogin on
# Generate an unencrypted RSA key (-P '') and authorize it for this user;
# sshd requires the strict permissions set below.
ssh-keygen -t rsa -P '' -f ~/.ssh/id_rsa
cat ~/.ssh/id_rsa.pub >> ~/.ssh/authorized_keys
chmod 0700 ~/.ssh
chmod 0600 ~/.ssh/authorized_keys

# Disable host-key prompts so the first ssh to localhost is non-interactive.
tee -a ~/.ssh/config <<EOF_ssh_config
Host *
StrictHostKeyChecking no
UserKnownHostsFile=/dev/null
EOF_ssh_config

# Verify loopback login works before anything depends on it (-v logs the
# handshake for debugging if it fails).
ssh -v localhost whoami

# Configure system kernel state
# Raise SysV shared-memory and file-descriptor limits (values the database
# processes need) and route core dumps to /cores.
sudo tee /etc/sysctl.conf << EOF_sysctl
kern.sysv.shmmax=2147483648
kern.sysv.shmmin=1
kern.sysv.shmmni=64
kern.sysv.shmseg=16
kern.sysv.shmall=524288
kern.maxfiles=65535
kern.maxfilesperproc=65536
kern.corefile=/cores/core.%N.%P
EOF_sysctl
# Apply the settings now: redirect the file just written into xargs, which
# passes each key=value line as an argument to sysctl.
</etc/sysctl.conf xargs sudo sysctl

# Add data folder
# Owned by the CI user (-o $USER) so later scripts can write without sudo.
sudo install -o $USER -d /tmp/db_data/
1 change: 1 addition & 0 deletions depends/libhdfs3/CMakeLists.txt
Original file line number Diff line number Diff line change
Expand Up @@ -3,6 +3,7 @@ CMAKE_MINIMUM_REQUIRED(VERSION 2.8)
PROJECT(libhdfs3)

SET(CMAKE_VERBOSE_MAKEFILE ON CACHE STRING "Verbose build." FORCE)
SET(CMAKE_MACOSX_RPATH 1)

IF(${CMAKE_SOURCE_DIR} STREQUAL ${CMAKE_BINARY_DIR})
MESSAGE(FATAL_ERROR "cannot build the project in the source directory! Out-of-source build is enforced!")
Expand Down
1 change: 1 addition & 0 deletions depends/libyarn/CMakeLists.txt
Original file line number Diff line number Diff line change
Expand Up @@ -4,6 +4,7 @@ PROJECT(libyarn)

SET(CMAKE_VERBOSE_MAKEFILE ON CACHE STRING "Verbose build." FORCE)
SET(CMAKE_FIND_ROOT_PATH "/Users/weikui/Documents/project/osx106_x86")
SET(CMAKE_MACOSX_RPATH 1)

IF(${CMAKE_SOURCE_DIR} STREQUAL ${CMAKE_BINARY_DIR})
MESSAGE(FATAL_ERROR "cannot build the project in the source directory! Out-of-source build is enforced!")
Expand Down
8 changes: 4 additions & 4 deletions src/Makefile.global.in
Original file line number Diff line number Diff line change
Expand Up @@ -221,8 +221,6 @@ COLLATEINDEX = @COLLATEINDEX@

CPP = @CPP@
CPPFLAGS = @CPPFLAGS@
CPPFLAGS += -I$(abs_top_srcdir)/depends/libhdfs3/build/install$(prefix)/include
CPPFLAGS += -I$(abs_top_srcdir)/depends/libyarn/build/install$(prefix)/include
CPPFLAGS += -I/usr/local/hawq/include

ifdef PGXS
Expand All @@ -237,7 +235,9 @@ endif # not PGXS
CC = @CC@
GCC = @GCC@
SUN_STUDIO_CC = @SUN_STUDIO_CC@
CFLAGS = @CFLAGS@
CFLAGS = -I$(abs_top_srcdir)/depends/libhdfs3/build/install$(prefix)/include
CFLAGS += -I$(abs_top_srcdir)/depends/libyarn/build/install$(prefix)/include
CFLAGS += @CFLAGS@
CFLAGS_SSE42 = @CFLAGS_SSE42@

# Kind-of compilers
Expand Down Expand Up @@ -272,9 +272,9 @@ ifdef PGXS
else
LDFLAGS = -L$(top_builddir)/src/port
endif
LDFLAGS += @LDFLAGS@
LDFLAGS += -L$(abs_top_srcdir)/depends/libhdfs3/build/install$(prefix)/lib
LDFLAGS += -L$(abs_top_srcdir)/depends/libyarn/build/install$(prefix)/lib
LDFLAGS += @LDFLAGS@

LDFLAGS_EX = @LDFLAGS_EX@
# LDFLAGS_SL might have already been assigned by calling makefile
Expand Down

0 comments on commit 4cbd805

Please sign in to comment.