Initialize github action for python unit testing
Signed-off-by: Harry <quanghai.ng1512@gmail.com>
sudohainguyen committed Apr 28, 2024
1 parent 5cbbdc3 commit f235c71
Showing 23 changed files with 206 additions and 120 deletions.
66 changes: 66 additions & 0 deletions .github/workflows/python.yml
@@ -0,0 +1,66 @@
#
# Licensed to the Apache Software Foundation (ASF) under one or more
# contributor license agreements. See the NOTICE file distributed with
# this work for additional information regarding copyright ownership.
# The ASF licenses this file to You under the Apache License, Version 2.0
# (the "License"); you may not use this file except in compliance with
# the License. You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#

name: Python Client

on:
  push:
    branches:
      - master
      - branch-*
  pull_request:
    branches:
      - master
      - branch-*

concurrency:
  group: python-${{ github.head_ref || github.run_id }}
  cancel-in-progress: true

jobs:
  unit-test:
    runs-on: ubuntu-latest
    strategy:
      fail-fast: false
      matrix:
        python-version: ["3.8", "3.9", "3.10"]
    env:
      PYTHONHASHSEED: random
    steps:
      - uses: actions/checkout@v2
      - name: Set up Python ${{ matrix.python-version }}
        uses: actions/setup-python@v2
        with:
          python-version: ${{ matrix.python-version }}
      - name: Set up Java
        uses: actions/setup-java@v4
        with:
          distribution: "temurin"
          java-version: "8"
      - uses: hoverkraft-tech/compose-action@v1.5.1
        with:
          compose-file: "./python/docker/docker-compose.yml"
      - name: Install dependencies
        run: |
          cd python
          ./scripts/install-deps.sh
          pip install --upgrade pip
          pip install -e .
          pip install -r dev_requirements.txt
      - name: Run tests
        run: |
          pytest -v
19 changes: 19 additions & 0 deletions python/.gitignore
@@ -0,0 +1,19 @@
cover/
.coverage
/dist/
/build/
.DS_Store
*.egg
/env/
/htmlcov/
.idea/
.project
*.pyc
.pydevproject
/*.egg-info/
.settings
.cache/
*.iml
/scripts/.thrift_gen
venv/
.envrc
10 changes: 5 additions & 5 deletions python/dev_requirements.txt
@@ -2,13 +2,13 @@
flake8==3.4.1
mock==2.0.0
pycodestyle==2.3.1
pytest==3.2.1
pytest-cov==2.5.1
pytest-flake8==0.8.1
pytest-random==0.2
pytest-timeout==1.2.0
pytest>=7.4.4,<8.0
pytest-cov>=2.8.1
pytest-randomly>=3.11.0
pytest-timeouts>=1.2.0

# actual dependencies: let things break if a package changes
sqlalchemy>=1.3.0
requests>=1.0.0
requests_kerberos>=0.12.0
sasl>=0.2.1
2 changes: 2 additions & 0 deletions python/docker/conf/presto/catalog/hive.properties
@@ -0,0 +1,2 @@
connector.name=hive-hadoop2
hive.metastore.uri=thrift://hive-metastore:9083
File renamed without changes.
File renamed without changes.
File renamed without changes.
2 changes: 2 additions & 0 deletions python/docker/conf/trino/catalog/hive.properties
@@ -0,0 +1,2 @@
connector.name=hive-hadoop2
hive.metastore.uri=thrift://hive-metastore:9083
File renamed without changes.
File renamed without changes.
File renamed without changes.
62 changes: 62 additions & 0 deletions python/docker/docker-compose.yml
@@ -0,0 +1,62 @@
version: "3"

services:
  namenode:
    image: bde2020/hadoop-namenode:2.0.0-hadoop2.7.4-java8
    volumes:
      - namenode:/hadoop/dfs/name
    environment:
      - CLUSTER_NAME=test
    env_file:
      - hadoop-hive.env
    ports:
      - "50070:50070"
  datanode:
    image: bde2020/hadoop-datanode:2.0.0-hadoop2.7.4-java8
    volumes:
      - datanode:/hadoop/dfs/data
    env_file:
      - hadoop-hive.env
    environment:
      SERVICE_PRECONDITION: "namenode:50070"
    ports:
      - "50075:50075"
  hive-server:
    image: bde2020/hive:2.3.2-postgresql-metastore
    env_file:
      - hadoop-hive.env
    command: /opt/hive/scripts/make_test_tables.sh
    volumes:
      - ../scripts:/opt/hive/scripts
    environment:
      HIVE_CORE_CONF_javax_jdo_option_ConnectionURL: "jdbc:postgresql://hive-metastore/metastore"
      SERVICE_PRECONDITION: "hive-metastore:9083"
    ports:
      - "10000:10000"
  hive-metastore:
    image: bde2020/hive:2.3.2-postgresql-metastore
    env_file:
      - hadoop-hive.env
    command: /opt/hive/bin/hive --service metastore
    environment:
      SERVICE_PRECONDITION: "namenode:50070 datanode:50075 hive-metastore-postgresql:5432"
    ports:
      - "9083:9083"
  hive-metastore-postgresql:
    image: bde2020/hive-metastore-postgresql:2.3.0
  presto-coordinator:
    image: shawnzhu/prestodb:0.181
    ports:
      - "8080:8080"
    volumes:
      - ./conf/presto/:/etc/presto
  trino:
    image: trinodb/trino:351
    ports:
      - "18080:18080"
    volumes:
      - ./conf/trino:/etc/trino

volumes:
  namenode:
  datanode:
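
As an aside (not part of this commit): a minimal sketch of how a local session could reach the HiveServer2 container that this compose file publishes on localhost:10000, assuming the stack is up and pyhive is installed. The username is a placeholder, and the one_row table is assumed to be created by the make_test_tables.sh script referenced above.

import contextlib

from pyhive import hive

# hive-server is mapped to localhost:10000 in the compose file above;
# the username here is an arbitrary placeholder.
connection = hive.connect(host="localhost", port=10000, username="ci")
with contextlib.closing(connection.cursor()) as cursor:
    cursor.execute("SELECT * FROM one_row")  # test table assumed from make_test_tables.sh
    print(cursor.fetchall())                 # expected: [(1,)], as in the tests below

Presto (localhost:8080) and Trino (localhost:18080) should be reachable the same way through the pyhive.presto and pyhive.trino modules, using the port mappings above.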
30 changes: 30 additions & 0 deletions python/docker/hadoop-hive.env
@@ -0,0 +1,30 @@
HIVE_SITE_CONF_javax_jdo_option_ConnectionURL=jdbc:postgresql://hive-metastore-postgresql/metastore
HIVE_SITE_CONF_javax_jdo_option_ConnectionDriverName=org.postgresql.Driver
HIVE_SITE_CONF_javax_jdo_option_ConnectionUserName=hive
HIVE_SITE_CONF_javax_jdo_option_ConnectionPassword=hive
HIVE_SITE_CONF_datanucleus_autoCreateSchema=false
HIVE_SITE_CONF_hive_metastore_uris=thrift://hive-metastore:9083
HDFS_CONF_dfs_namenode_datanode_registration_ip___hostname___check=false

CORE_CONF_fs_defaultFS=hdfs://namenode:8020
CORE_CONF_hadoop_http_staticuser_user=root
CORE_CONF_hadoop_proxyuser_hue_hosts=*
CORE_CONF_hadoop_proxyuser_hue_groups=*

HDFS_CONF_dfs_webhdfs_enabled=true
HDFS_CONF_dfs_permissions_enabled=false

YARN_CONF_yarn_log___aggregation___enable=true
YARN_CONF_yarn_resourcemanager_recovery_enabled=true
YARN_CONF_yarn_resourcemanager_store_class=org.apache.hadoop.yarn.server.resourcemanager.recovery.FileSystemRMStateStore
YARN_CONF_yarn_resourcemanager_fs_state___store_uri=/rmstate
YARN_CONF_yarn_nodemanager_remote___app___log___dir=/app-logs
YARN_CONF_yarn_log_server_url=http://historyserver:8188/applicationhistory/logs/
YARN_CONF_yarn_timeline___service_enabled=true
YARN_CONF_yarn_timeline___service_generic___application___history_enabled=true
YARN_CONF_yarn_resourcemanager_system___metrics___publisher_enabled=true
YARN_CONF_yarn_resourcemanager_hostname=resourcemanager
YARN_CONF_yarn_timeline___service_hostname=historyserver
YARN_CONF_yarn_resourcemanager_address=resourcemanager:8032
YARN_CONF_yarn_resourcemanager_scheduler_address=resourcemanager:8030
YARN_CONF_yarn_resourcemanager_resource__tracker_address=resourcemanager:8031
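
These variables are consumed by the bde2020 images' entrypoints, which (as I understand their convention; this is not defined anywhere in this commit) rewrite each prefixed variable into a property of the matching Hadoop/Hive XML file, with single underscores standing for dots and triple underscores for dashes. A rough Python sketch of that mapping:

# Illustration only; the prefix-to-file mapping and separator rules are
# assumptions about the bde2020 images, not part of this commit.
PREFIXES = {
    "HIVE_SITE_CONF_": "hive-site.xml",
    "CORE_CONF_": "core-site.xml",
    "HDFS_CONF_": "hdfs-site.xml",
    "YARN_CONF_": "yarn-site.xml",
}

def env_to_property(var):
    """e.g. HIVE_SITE_CONF_hive_metastore_uris -> ('hive-site.xml', 'hive.metastore.uris')."""
    for prefix, conf_file in PREFIXES.items():
        if var.startswith(prefix):
            name = var[len(prefix):].replace("___", "-").replace("_", ".")
            return conf_file, name
    raise ValueError("unrecognized prefix: " + var)

Under that reading, HDFS_CONF_dfs_namenode_datanode_registration_ip___hostname___check above would end up as dfs.namenode.datanode.registration.ip-hostname-check in hdfs-site.xml.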
13 changes: 8 additions & 5 deletions python/pyhive/tests/test_hive.py
@@ -17,6 +17,7 @@
from decimal import Decimal

import mock
import pytest
import thrift.transport.TSocket
import thrift.transport.TTransport
import thrift_sasl
@@ -151,10 +152,11 @@ def test_no_result_set(self, cursor):
self.assertIsNone(cursor.description)
self.assertRaises(hive.ProgrammingError, cursor.fetchone)

@pytest.mark.skip
def test_ldap_connection(self):
rootdir = os.path.dirname(os.path.dirname(os.path.dirname(__file__)))
orig_ldap = os.path.join(rootdir, 'scripts', 'travis-conf', 'hive', 'hive-site-ldap.xml')
orig_none = os.path.join(rootdir, 'scripts', 'travis-conf', 'hive', 'hive-site.xml')
orig_ldap = os.path.join(rootdir, 'scripts', 'conf', 'hive', 'hive-site-ldap.xml')
orig_none = os.path.join(rootdir, 'scripts', 'conf', 'hive', 'hive-site.xml')
des = os.path.join('/', 'etc', 'hive', 'conf', 'hive-site.xml')
try:
subprocess.check_call(['sudo', 'cp', orig_ldap, des])
@@ -209,11 +211,12 @@ def test_custom_transport(self):
with contextlib.closing(conn.cursor()) as cursor:
cursor.execute('SELECT * FROM one_row')
self.assertEqual(cursor.fetchall(), [(1,)])


@pytest.mark.skip
def test_custom_connection(self):
rootdir = os.path.dirname(os.path.dirname(os.path.dirname(__file__)))
orig_ldap = os.path.join(rootdir, 'scripts', 'travis-conf', 'hive', 'hive-site-custom.xml')
orig_none = os.path.join(rootdir, 'scripts', 'travis-conf', 'hive', 'hive-site.xml')
orig_ldap = os.path.join(rootdir, 'scripts', 'conf', 'hive', 'hive-site-custom.xml')
orig_none = os.path.join(rootdir, 'scripts', 'conf', 'hive', 'hive-site.xml')
des = os.path.join('/', 'etc', 'hive', 'conf', 'hive-site.xml')
try:
subprocess.check_call(['sudo', 'cp', orig_ldap, des])
File renamed without changes.
File renamed without changes.
11 changes: 11 additions & 0 deletions python/scripts/install-deps.sh
@@ -0,0 +1,11 @@
#!/bin/bash -eux

source /etc/lsb-release

sudo apt-get -q update
sudo apt-get -q install -y g++ libsasl2-dev libkrb5-dev

while ! nc -vz localhost 9083; do sleep 1; done
while ! nc -vz localhost 10000; do sleep 1; done

# sudo -Eu hive $(dirname $0)/make_test_tables.sh
2 changes: 0 additions & 2 deletions python/scripts/travis-conf/presto/catalog/hive.properties

This file was deleted.

2 changes: 0 additions & 2 deletions python/scripts/travis-conf/trino/catalog/hive.properties

This file was deleted.

96 changes: 0 additions & 96 deletions python/scripts/travis-install.sh

This file was deleted.

11 changes: 1 addition & 10 deletions python/setup.cfg
@@ -4,18 +4,9 @@ tag_build =
[tool:pytest]
timeout = 100
timeout_method = thread
addopts = --random --tb=short --cov pyhive --cov-report html --cov-report term --flake8
addopts = --random --tb=short --cov pyhive --cov-report html --cov-report term
norecursedirs = env
python_files = test_*.py
flake8-max-line-length = 100
flake8-ignore =
TCLIService/*.py ALL
pyhive/sqlalchemy_backports.py ALL
presto-server/** ALL
pyhive/hive.py F405
pyhive/presto.py F405
pyhive/trino.py F405
W503
filterwarnings =
error
# For Python 2 flake8
