Skip to content

Update php csv #79

New issue

Have a question about this project? Sign up for a free GitHub account to open an issue and contact its maintainers and the community.

By clicking “Sign up for GitHub”, you agree to our terms of service and privacy statement. We’ll occasionally send you account related emails.

Already on GitHub? Sign in to your account

Open
wants to merge 4 commits into
base: master
Choose a base branch
from
Open
Show file tree
Hide file tree
Changes from all commits
Commits
File filter

Filter by extension

Filter by extension


Conversations
Failed to load comments.
Loading
Jump to
Jump to file
Failed to load files.
Loading
Diff view
Diff view
68 changes: 46 additions & 22 deletions Dockerfile
Original file line number Diff line number Diff line change
@@ -1,27 +1,46 @@
FROM php:7.1
FROM php:7.1-cli

ENV DEBIAN_FRONTEND noninteractive
ARG COMPOSER_FLAGS="--prefer-dist --no-interaction"
ARG DEBIAN_FRONTEND=noninteractive
ENV COMPOSER_ALLOW_SUPERUSER 1
ENV COMPOSER_PROCESS_TIMEOUT 3600
# snowflake - charset settings
ENV LANG en_US.UTF-8

ARG SQLSRV_VERSION=5.6.1
ARG SNOWFLAKE_ODBC_VERSION=2.21.1
ARG SNOWFLAKE_GPG_KEY=EC218558EABB25A1

RUN apt-get update \
&& apt-get install -y unzip \
git \
unixodbc \
unixodbc-dev \
libpq-dev \
gpg \
debsig-verify \
dirmngr \
gpg-agent \
&& rm -r /var/lib/apt/lists/*
WORKDIR /code/

RUN echo "memory_limit = -1" >> /usr/local/etc/php/php.ini
COPY docker/php-prod.ini /usr/local/etc/php/php.ini
COPY docker/composer-install.sh /tmp/composer-install.sh

RUN apt-get update -q \
&& apt-get install gnupg -y --no-install-recommends \
&& curl https://packages.microsoft.com/keys/microsoft.asc | apt-key add - \
&& curl https://packages.microsoft.com/config/debian/10/prod.list > /etc/apt/sources.list.d/mssql-release.list \
&& apt-get update -q \
&& ACCEPT_EULA=Y apt-get install -y --no-install-recommends\
git \
locales \
unzip \
unixodbc \
unixodbc-dev \
libpq-dev \
gpg \
debsig-verify \
dirmngr \
gpg-agent \
libonig-dev \
libxml2-dev \
&& rm -r /var/lib/apt/lists/* \
&& sed -i 's/^# *\(en_US.UTF-8\)/\1/' /etc/locale.gen \
&& locale-gen \
&& chmod +x /tmp/composer-install.sh \
&& /tmp/composer-install.sh

ENV LANGUAGE=en_US.UTF-8
ENV LANG=en_US.UTF-8
ENV LC_ALL=en_US.UTF-8

RUN docker-php-ext-install pdo_pgsql pdo_mysql

Expand All @@ -40,21 +59,26 @@ RUN set -ex; \
docker-php-ext-install odbc; \
docker-php-source delete

## install snowflake drivers
COPY ./docker/snowflake/generic.pol /etc/debsig/policies/$SNOWFLAKE_GPG_KEY/generic.pol
ADD https://sfc-repo.snowflakecomputing.com/odbc/linux/$SNOWFLAKE_ODBC_VERSION/snowflake-odbc-$SNOWFLAKE_ODBC_VERSION.x86_64.deb /tmp/snowflake-odbc.deb
COPY ./docker/snowflake/simba.snowflake.ini /usr/lib/snowflake/odbc/lib/simba.snowflake.ini

RUN mkdir -p ~/.gnupg \
&& chmod 700 ~/.gnupg \
&& echo "disable-ipv6" >> ~/.gnupg/dirmngr.conf \
&& mkdir -p /usr/share/debsig/keyrings/$SNOWFLAKE_GPG_KEY \
&& gpg --keyserver hkp://keys.gnupg.net --recv-keys $SNOWFLAKE_GPG_KEY \
&& gpg --keyserver hkp://keyserver.ubuntu.com --recv-keys $SNOWFLAKE_GPG_KEY \
&& gpg --export $SNOWFLAKE_GPG_KEY > /usr/share/debsig/keyrings/$SNOWFLAKE_GPG_KEY/debsig.gpg \
&& curl https://sfc-repo.snowflakecomputing.com/odbc/linux/$SNOWFLAKE_ODBC_VERSION/snowflake-odbc-$SNOWFLAKE_ODBC_VERSION.x86_64.deb --output /tmp/snowflake-odbc.deb \
&& debsig-verify /tmp/snowflake-odbc.deb \
&& gpg --batch --delete-key --yes $SNOWFLAKE_GPG_KEY \
&& dpkg -i /tmp/snowflake-odbc.deb

RUN cd \
&& curl -sS https://getcomposer.org/installer | php \
&& ln -s /root/composer.phar /usr/local/bin/composer
## Composer - deps always cached unless changed
# First copy only composer files
COPY composer.* /code/
# Download dependencies, but don't run scripts or init autoloaders as the app is missing
RUN composer install $COMPOSER_FLAGS --no-scripts --no-autoloader
# copy rest of the app
COPY . /code/
# run normal composer - all deps are cached already
RUN composer install $COMPOSER_FLAGS
2 changes: 1 addition & 1 deletion composer.json
Original file line number Diff line number Diff line change
Expand Up @@ -4,7 +4,7 @@
"license": "MIT",
"require": {
"PHP": "^7.1",
"keboola/csv": "^1.1",
"keboola/csv": "^2.3",
"tracy/tracy": "^2.3",
"aws/aws-sdk-php": "^3.11"
},
Expand Down
17 changes: 17 additions & 0 deletions docker/composer-install.sh
Original file line number Diff line number Diff line change
@@ -0,0 +1,17 @@
#!/bin/sh
# Download and install Composer into /usr/local/bin/composer, verifying the
# installer script against the SHA-384 signature published by the Composer
# project before executing it. Exits non-zero on signature mismatch or if
# the installer itself fails.

expected_sig=$(curl -s https://composer.github.io/installer.sig)
php -r "copy('https://getcomposer.org/installer', 'composer-setup.php');"
actual_sig=$(php -r "echo hash_file('SHA384', 'composer-setup.php');")

# Refuse to run an installer whose hash does not match the published signature.
if [ "$expected_sig" != "$actual_sig" ]; then
    >&2 echo 'ERROR: Invalid installer signature'
    rm composer-setup.php
    exit 1
fi

php composer-setup.php --quiet --install-dir=/usr/local/bin/ --filename=composer
install_status=$?
rm composer-setup.php
exit $install_status
19 changes: 19 additions & 0 deletions docker/php-prod.ini
Original file line number Diff line number Diff line change
@@ -0,0 +1,19 @@
; Recommended production values
; (mirrors the php.ini-production defaults shipped with PHP)
display_errors = Off
display_startup_errors = Off
error_reporting = E_ALL & ~E_DEPRECATED & ~E_STRICT
html_errors = On
log_errors = On
max_input_time = 60
output_buffering = 4096
register_argc_argv = Off
request_order = "GP"
session.gc_divisor = 1000
session.sid_bits_per_character = 5
short_open_tag = Off
track_errors = Off
variables_order = "GPCS"

; Custom
; Fixed timezone so date handling is deterministic inside the container.
date.timezone = UTC
; No memory cap: large CSV imports are expected to use significant memory.
memory_limit = -1
4 changes: 2 additions & 2 deletions docker/xdebug/Dockerfile
Original file line number Diff line number Diff line change
@@ -1,4 +1,4 @@
FROM keboola/php-db-import

RUN pecl install xdebug \
&& docker-php-ext-enable xdebug
RUN pecl install xdebug-2.9.8 \
&& docker-php-ext-enable xdebug
31 changes: 31 additions & 0 deletions src/Helper/CsvFile.php
Original file line number Diff line number Diff line change
@@ -0,0 +1,31 @@
<?php

declare(strict_types=1);

namespace Keboola\Db\Import\Helper;

use Keboola\Csv\CsvReader;

/**
 * Backwards-compatibility shim for keboola/csv v2: CsvReader no longer
 * extends \SplFileInfo, so this subclass re-implements the SplFileInfo
 * methods that the importers in this project still call.
 */
class CsvFile extends CsvReader
{
    /**
     * Returns the base name of the file, without any path information.
     *
     * @param string $suffix Optional suffix to strip from the base name,
     *                       matching \SplFileInfo::getBasename($suffix).
     * @return string the base name without path information.
     */
    public function getBasename(string $suffix = ''): string
    {
        return basename($this->fileName, $suffix);
    }

    /**
     * Gets the path to the file.
     *
     * @return string The path to the file.
     */
    public function getPathname(): string
    {
        return $this->fileName;
    }
}
2 changes: 1 addition & 1 deletion src/RedshiftBaseCsv.php
Original file line number Diff line number Diff line change
Expand Up @@ -4,7 +4,7 @@

namespace Keboola\Db\Import;

use Keboola\Csv\CsvFile;
use Keboola\Db\Import\Helper\CsvFile;
use Tracy\Debugger;

abstract class RedshiftBaseCsv extends RedshiftBase
Expand Down
2 changes: 1 addition & 1 deletion src/Snowflake/CsvImportBase.php
Original file line number Diff line number Diff line change
Expand Up @@ -4,8 +4,8 @@

namespace Keboola\Db\Import\Snowflake;

use Keboola\Csv\CsvFile;
use Keboola\Db\Import\Exception;
use Keboola\Db\Import\Helper\CsvFile;
use Tracy\Debugger;
use Aws\Exception\AwsException;

Expand Down
12 changes: 6 additions & 6 deletions tests/Redshift/ImportTest.php
Original file line number Diff line number Diff line change
Expand Up @@ -4,7 +4,7 @@

namespace Keboola\DbImportTest\Redshift;

use Keboola\Csv\CsvFile;
use Keboola\Db\Import\Helper\CsvFile;
use Keboola\Db\Import\Exception;

class ImportTest extends \PHPUnit_Framework_TestCase
Expand Down Expand Up @@ -418,8 +418,8 @@ public function testCopyInvalidSourceDataShouldThrowException(): void
public function testInvalidManifestImport(): void
{
$s3bucket = getenv(self::AWS_S3_BUCKET_ENV);
$initialFile = new \Keboola\Csv\CsvFile(__DIR__ . "/../_data/csv-import/tw_accounts.csv");
$importFile = new \Keboola\Csv\CsvFile("s3://{$s3bucket}/02_tw_accounts.csv.invalid.manifest");
$initialFile = new \Keboola\Db\Import\Helper\CsvFile(__DIR__ . "/../_data/csv-import/tw_accounts.csv");
$importFile = new \Keboola\Db\Import\Helper\CsvFile("s3://{$s3bucket}/02_tw_accounts.csv.invalid.manifest");

$import = $this->getImport('manifest');
$import->setIgnoreLines(1);
Expand Down Expand Up @@ -622,7 +622,7 @@ public function tables(): array
{

$expectedEscaping = [];
$file = new \Keboola\Csv\CsvFile(__DIR__ . '/../_data/csv-import/escaping/standard-with-enclosures.csv');
$file = new \Keboola\Db\Import\Helper\CsvFile(__DIR__ . '/../_data/csv-import/escaping/standard-with-enclosures.csv');
foreach ($file as $row) {
$expectedEscaping[] = $row;
}
Expand All @@ -631,14 +631,14 @@ public function tables(): array


$expectedAccounts = [];
$file = new \Keboola\Csv\CsvFile(__DIR__ . '/../_data/csv-import/tw_accounts.csv');
$file = new \Keboola\Db\Import\Helper\CsvFile(__DIR__ . '/../_data/csv-import/tw_accounts.csv');
foreach ($file as $row) {
$expectedAccounts[] = $row;
}
$accountsHeader = array_shift($expectedAccounts); // remove header
$expectedAccounts = array_values($expectedAccounts);

$file = new \Keboola\Csv\CsvFile(__DIR__ . '/../_data/csv-import/tw_accounts.changedColumnsOrder.csv');
$file = new \Keboola\Db\Import\Helper\CsvFile(__DIR__ . '/../_data/csv-import/tw_accounts.changedColumnsOrder.csv');
$accountChangedColumnsOrderHeader = $file->getHeader();

$s3bucket = getenv(self::AWS_S3_BUCKET_ENV);
Expand Down
22 changes: 11 additions & 11 deletions tests/Snowflake/ImportTest.php
Original file line number Diff line number Diff line change
Expand Up @@ -4,7 +4,7 @@

namespace Keboola\DbImportTest\Snowflake;

use Keboola\Csv\CsvFile;
use Keboola\Db\Import\Helper\CsvFile;
use Keboola\Db\Import\Exception;
use Keboola\Db\Import\Helper\TableHelper;
use Keboola\Db\Import\Snowflake\Connection;
Expand Down Expand Up @@ -212,25 +212,25 @@ public function incrementalImportData(): array
public function fullImportData(): array
{
$expectedEscaping = [];
$file = new \Keboola\Csv\CsvFile(__DIR__ . '/../_data/csv-import/escaping/standard-with-enclosures.csv');
$file = new \Keboola\Db\Import\Helper\CsvFile(__DIR__ . '/../_data/csv-import/escaping/standard-with-enclosures.csv');
foreach ($file as $row) {
$expectedEscaping[] = $row;
}
$escapingHeader = array_shift($expectedEscaping); // remove header
$expectedEscaping = array_values($expectedEscaping);

$expectedAccounts = [];
$file = new \Keboola\Csv\CsvFile(__DIR__ . '/../_data/csv-import/tw_accounts.csv');
$file = new \Keboola\Db\Import\Helper\CsvFile(__DIR__ . '/../_data/csv-import/tw_accounts.csv');
foreach ($file as $row) {
$expectedAccounts[] = $row;
}
$accountsHeader = array_shift($expectedAccounts); // remove header
$expectedAccounts = array_values($expectedAccounts);

$file = new \Keboola\Csv\CsvFile(__DIR__ . '/../_data/csv-import/tw_accounts.changedColumnsOrder.csv');
$file = new \Keboola\Db\Import\Helper\CsvFile(__DIR__ . '/../_data/csv-import/tw_accounts.changedColumnsOrder.csv');
$accountChangedColumnsOrderHeader = $file->getHeader();

$file = new \Keboola\Csv\CsvFile(__DIR__ . '/../_data/csv-import/lemma.csv');
$file = new \Keboola\Db\Import\Helper\CsvFile(__DIR__ . '/../_data/csv-import/lemma.csv');
$expectedLemma = [];
foreach ($file as $row) {
$expectedLemma[] = $row;
Expand Down Expand Up @@ -309,7 +309,7 @@ public function fullImportData(): array
public function testInvalidCsvImport(): void
{
$s3bucket = getenv(self::AWS_S3_BUCKET_ENV);
$importFile = new \Keboola\Csv\CsvFile("s3://{$s3bucket}/tw_accounts.csv");
$importFile = new \Keboola\Db\Import\Helper\CsvFile("s3://{$s3bucket}/tw_accounts.csv");

$import = $this->getImport();
$import->setIgnoreLines(1);
Expand All @@ -324,7 +324,7 @@ public function testInvalidCsvImport(): void
public function testImportShouldNotFailOnColumnNameRowNumber(): void
{
$s3bucket = getenv(self::AWS_S3_BUCKET_ENV);
$importFile = new \Keboola\Csv\CsvFile("s3://{$s3bucket}/column-name-row-number.csv");
$importFile = new \Keboola\Db\Import\Helper\CsvFile("s3://{$s3bucket}/column-name-row-number.csv");

$import = $this->getImport();
$import->setIncremental(false);
Expand All @@ -334,8 +334,8 @@ public function testImportShouldNotFailOnColumnNameRowNumber(): void
public function testInvalidManifestImport(): void
{
$s3bucket = getenv(self::AWS_S3_BUCKET_ENV);
$initialFile = new \Keboola\Csv\CsvFile(__DIR__ . "/../_data/csv-import/tw_accounts.csv");
$importFile = new \Keboola\Csv\CsvFile("s3://{$s3bucket}/02_tw_accounts.csv.invalid.manifest");
$initialFile = new \Keboola\Db\Import\Helper\CsvFile(__DIR__ . "/../_data/csv-import/tw_accounts.csv");
$importFile = new \Keboola\Db\Import\Helper\CsvFile("s3://{$s3bucket}/02_tw_accounts.csv.invalid.manifest");

$import = $this->getImport('manifest');
$import->setIgnoreLines(1);
Expand All @@ -351,7 +351,7 @@ public function testInvalidManifestImport(): void
public function testMoreColumnsShouldThrowException(): void
{
$s3bucket = getenv(self::AWS_S3_BUCKET_ENV);
$importFile = new \Keboola\Csv\CsvFile("s3://{$s3bucket}/tw_accounts.csv");
$importFile = new \Keboola\Db\Import\Helper\CsvFile("s3://{$s3bucket}/tw_accounts.csv");

$import = $this->getImport();
$import->setIgnoreLines(1);
Expand All @@ -368,7 +368,7 @@ public function testMoreColumnsShouldThrowException(): void
public function testMoreColumnsShouldNotThrowExceptionWhenCheckDisabled(): void
{
$s3bucket = getenv(self::AWS_S3_BUCKET_ENV);
$importFile = new \Keboola\Csv\CsvFile("s3://{$s3bucket}/tw_accounts.csv");
$importFile = new \Keboola\Db\Import\Helper\CsvFile("s3://{$s3bucket}/tw_accounts.csv");

$import = $this->getImport('csv', true);
$import->setIgnoreLines(1);
Expand Down