From 2a3691703c1f5aa5109881455cfc909f80c7f50f Mon Sep 17 00:00:00 2001 From: Nikita Titov Date: Tue, 14 May 2019 14:50:27 +0300 Subject: [PATCH 1/6] [ci] added swig compilation on macOS and Windows with artifacts (#2170) * compile SWIG artifact on Windows * compile SWIG artifact on Windows * combined swig and sdist jobs for Linux * added conditions for artifacts publishing * added SWIG artifact compilation on macOS * hotfix * test: switch compilers * harsh workaround for OpenMP library conflict * switch compilers back * consistent order of tasks in condition * dummy commit to trigger CI and fix CLA --- .ci/setup.sh | 3 +++ .ci/test.sh | 26 +++++++++++++------------- .ci/test_windows.ps1 | 9 +++++++++ .vsts-ci.yml | 11 +++++------ 4 files changed, 30 insertions(+), 19 deletions(-) diff --git a/.ci/setup.sh b/.ci/setup.sh index a91a944a8814..f8bc41726bc5 100755 --- a/.ci/setup.sh +++ b/.ci/setup.sh @@ -22,6 +22,9 @@ if [[ $OS_NAME == "macos" ]]; then if [[ $TASK == "mpi" ]]; then brew install open-mpi fi + if [[ $AZURE == "true" ]] && [[ $TASK == "sdist" ]]; then + brew install https://raw.githubusercontent.com/Homebrew/homebrew-core/f3544543a3115023fc7ca962c21d14b443f419d0/Formula/swig.rb # swig 3.0.12 + fi wget -q -O conda.sh https://repo.continuum.io/miniconda/Miniconda${PYTHON_VERSION:0:1}-latest-MacOSX-x86_64.sh else # Linux if [[ $AZURE == "true" ]] && [[ $COMPILER == "clang" ]]; then diff --git a/.ci/test.sh b/.ci/test.sh index 6830bebbd974..f397df135bc5 100755 --- a/.ci/test.sh +++ b/.ci/test.sh @@ -18,19 +18,6 @@ else CMAKE_OPTS=() fi -if [[ $AZURE == "true" ]] && [[ $OS_NAME == "linux" ]] && [[ $TASK == "swig" ]]; then - mkdir $BUILD_DIRECTORY/build && cd $BUILD_DIRECTORY/build - cmake -DUSE_SWIG=ON "${CMAKE_OPTS[@]}" .. - make -j4 || exit -1 - if [[ $COMPILER == "gcc" ]]; then - objdump -T $BUILD_DIRECTORY/lib_lightgbm.so > $BUILD_DIRECTORY/objdump.log || exit -1 - objdump -T $BUILD_DIRECTORY/lib_lightgbm_swig.so >> $BUILD_DIRECTORY/objdump.log || exit -1 - python $BUILD_DIRECTORY/helpers/check_dynamic_dependencies.py $BUILD_DIRECTORY/objdump.log || exit -1 - fi - cp $BUILD_DIRECTORY/build/lightgbmlib.jar $BUILD_ARTIFACTSTAGINGDIRECTORY/lightgbmlib.jar - exit 0 -fi - conda create -q -y -n $CONDA_ENV python=$PYTHON_VERSION source activate $CONDA_ENV @@ -91,6 +78,19 @@ if [[ $TASK == "sdist" ]]; then pip install --user $BUILD_DIRECTORY/python-package/dist/lightgbm-$LGB_VER.tar.gz -v || exit -1 if [[ $AZURE == "true" ]]; then cp $BUILD_DIRECTORY/python-package/dist/lightgbm-$LGB_VER.tar.gz $BUILD_ARTIFACTSTAGINGDIRECTORY + mkdir $BUILD_DIRECTORY/build && cd $BUILD_DIRECTORY/build + if [[ $OS_NAME == "macos" ]]; then + cmake -DUSE_SWIG=ON -DAPPLE_OUTPUT_DYLIB=ON "${CMAKE_OPTS[@]}" .. + else + cmake -DUSE_SWIG=ON "${CMAKE_OPTS[@]}" .. 
+ fi + make -j4 || exit -1 + if [[ $OS_NAME == "linux" ]] && [[ $COMPILER == "gcc" ]]; then + objdump -T $BUILD_DIRECTORY/lib_lightgbm.so > $BUILD_DIRECTORY/objdump.log || exit -1 + objdump -T $BUILD_DIRECTORY/lib_lightgbm_swig.so >> $BUILD_DIRECTORY/objdump.log || exit -1 + python $BUILD_DIRECTORY/helpers/check_dynamic_dependencies.py $BUILD_DIRECTORY/objdump.log || exit -1 + fi + cp $BUILD_DIRECTORY/build/lightgbmlib.jar $BUILD_ARTIFACTSTAGINGDIRECTORY/lightgbmlib_$OS_NAME.jar fi pytest $BUILD_DIRECTORY/tests/python_package_test || exit -1 exit 0 diff --git a/.ci/test_windows.ps1 b/.ci/test_windows.ps1 index e4425266695e..eececf18cdcd 100644 --- a/.ci/test_windows.ps1 +++ b/.ci/test_windows.ps1 @@ -20,6 +20,15 @@ elseif ($env:TASK -eq "sdist") { cd $env:BUILD_SOURCESDIRECTORY/python-package python setup.py sdist --formats gztar ; Check-Output $? cd dist; pip install @(Get-ChildItem *.gz) -v ; Check-Output $? + + $env:JAVA_HOME = $env:JAVA_HOME_8_X64 # there is pre-installed Zulu OpenJDK-8 somewhere + Invoke-WebRequest -Uri "https://sourceforge.net/projects/swig/files/swigwin/swigwin-3.0.12/swigwin-3.0.12.zip/download" -OutFile $env:BUILD_SOURCESDIRECTORY/swig/swigwin.zip -UserAgent "NativeHost" + Add-Type -AssemblyName System.IO.Compression.FileSystem + [System.IO.Compression.ZipFile]::ExtractToDirectory("$env:BUILD_SOURCESDIRECTORY/swig/swigwin.zip", "$env:BUILD_SOURCESDIRECTORY/swig") + $env:PATH += ";$env:BUILD_SOURCESDIRECTORY/swig/swigwin-3.0.12" + mkdir $env:BUILD_SOURCESDIRECTORY/build; cd $env:BUILD_SOURCESDIRECTORY/build + cmake -A x64 -DUSE_SWIG=ON .. ; cmake --build . --target ALL_BUILD --config Release ; Check-Output $? + cp $env:BUILD_SOURCESDIRECTORY/build/lightgbmlib.jar $env:BUILD_ARTIFACTSTAGINGDIRECTORY/lightgbmlib_win.jar } elseif ($env:TASK -eq "bdist") { cd $env:BUILD_SOURCESDIRECTORY/python-package diff --git a/.vsts-ci.yml b/.vsts-ci.yml index 1f52e0ec8bcb..b9e3a1ac49f7 100644 --- a/.vsts-ci.yml +++ b/.vsts-ci.yml @@ -15,7 +15,7 @@ jobs: vmImage: 'ubuntu-16.04' container: ubuntu1404 strategy: - maxParallel: 7 + maxParallel: 6 matrix: regular: TASK: regular @@ -25,8 +25,6 @@ jobs: bdist: TASK: bdist PYTHON_VERSION: 3.6 - swig: - TASK: swig inference: TASK: if-else mpi_source: @@ -56,7 +54,7 @@ jobs: - bash: $(Build.SourcesDirectory)/.ci/test.sh displayName: Test - task: PublishBuildArtifacts@1 - condition: and(succeeded(), not(startsWith(variables['Build.SourceBranch'], 'refs/pull/'))) + condition: and(succeeded(), in(variables['TASK'], 'regular', 'sdist', 'bdist'), not(startsWith(variables['Build.SourceBranch'], 'refs/pull/'))) inputs: pathtoPublish: '$(Build.ArtifactStagingDirectory)' artifactName: PackageAssets @@ -89,13 +87,14 @@ jobs: CONDA=$AGENT_HOMEDIRECTORY/miniconda echo "##vso[task.setvariable variable=CONDA]$CONDA" echo "##vso[task.prependpath]$CONDA/bin" + echo "##vso[task.setvariable variable=JAVA_HOME]$JAVA_HOME_8_X64" displayName: 'Set variables' - bash: $(Build.SourcesDirectory)/.ci/setup.sh displayName: Setup - bash: $(Build.SourcesDirectory)/.ci/test.sh displayName: Test - task: PublishBuildArtifacts@1 - condition: and(succeeded(), not(startsWith(variables['Build.SourceBranch'], 'refs/pull/'))) + condition: and(succeeded(), in(variables['TASK'], 'regular', 'sdist', 'bdist'), not(startsWith(variables['Build.SourceBranch'], 'refs/pull/'))) inputs: pathtoPublish: '$(Build.ArtifactStagingDirectory)' artifactName: PackageAssets @@ -126,7 +125,7 @@ jobs: cmd /c "activate %CONDA_ENV% & powershell -ExecutionPolicy Bypass -File 
%BUILD_SOURCESDIRECTORY%/.ci/test_windows.ps1" displayName: Test - task: PublishBuildArtifacts@1 - condition: and(succeeded(), not(startsWith(variables['Build.SourceBranch'], 'refs/pull/'))) + condition: and(succeeded(), in(variables['TASK'], 'regular', 'sdist', 'bdist'), not(startsWith(variables['Build.SourceBranch'], 'refs/pull/'))) inputs: pathtoPublish: '$(Build.ArtifactStagingDirectory)' artifactName: PackageAssets From f91e5644a3ab5104a942e2fb23f6f9239ef08a54 Mon Sep 17 00:00:00 2001 From: Nikita Titov Date: Wed, 15 May 2019 15:44:37 +0300 Subject: [PATCH 2/6] [python] added ability to pass first_metric_only in params (#2175) * added ability to pass first_metric_only in params * simplified tests * fixed test * fixed punctuation --- docs/Python-Intro.rst | 2 +- python-package/lightgbm/engine.py | 18 +++++++++--------- python-package/lightgbm/sklearn.py | 4 ++-- tests/python_package_test/test_consistency.py | 2 +- tests/python_package_test/test_engine.py | 15 +++++++-------- 5 files changed, 20 insertions(+), 21 deletions(-) diff --git a/docs/Python-Intro.rst b/docs/Python-Intro.rst index e0622e45f858..edb7043f9a10 100644 --- a/docs/Python-Intro.rst +++ b/docs/Python-Intro.rst @@ -207,7 +207,7 @@ Note that ``train()`` will return a model from the best iteration. This works with both metrics to minimize (L2, log loss, etc.) and to maximize (NDCG, AUC, etc.). Note that if you specify more than one evaluation metric, all of them will be used for early stopping. -However, you can change this behavior and make LightGBM check only the first metric for early stopping by creating ``early_stopping`` callback with ``first_metric_only=True``. +However, you can change this behavior and make LightGBM check only the first metric for early stopping by passing ``first_metric_only=True`` in ``param`` or ``early_stopping`` callback constructor. Prediction ---------- diff --git a/python-package/lightgbm/engine.py b/python-package/lightgbm/engine.py index c6bfb70c6460..a04aa30fc69f 100644 --- a/python-package/lightgbm/engine.py +++ b/python-package/lightgbm/engine.py @@ -66,8 +66,7 @@ def train(params, train_set, num_boost_round=100, to continue training. Requires at least one validation data and one metric. If there's more than one, will check all of them. But the training data is ignored anyway. - To check only the first metric you can pass in ``callbacks`` - ``early_stopping`` callback with ``first_metric_only=True``. + To check only the first metric, set the ``first_metric_only`` parameter to ``True`` in ``params``. The index of iteration that has the best performance will be saved in the ``best_iteration`` field if early stopping logic is enabled by setting ``early_stopping_rounds``. evals_result: dict or None, optional (default=None) @@ -116,14 +115,15 @@ def train(params, train_set, num_boost_round=100, for alias in ["num_iterations", "num_iteration", "n_iter", "num_tree", "num_trees", "num_round", "num_rounds", "num_boost_round", "n_estimators"]: if alias in params: - num_boost_round = int(params.pop(alias)) + num_boost_round = params.pop(alias) warnings.warn("Found `{}` in params. Will use it instead of argument".format(alias)) break for alias in ["early_stopping_round", "early_stopping_rounds", "early_stopping"]: - if alias in params and params[alias] is not None: - early_stopping_rounds = int(params.pop(alias)) + if alias in params: + early_stopping_rounds = params.pop(alias) warnings.warn("Found `{}` in params. 
Will use it instead of argument".format(alias)) break + first_metric_only = params.pop('first_metric_only', False) if num_boost_round <= 0: raise ValueError("num_boost_round should be greater than zero.") @@ -181,7 +181,7 @@ def train(params, train_set, num_boost_round=100, callbacks.add(callback.print_evaluation(verbose_eval)) if early_stopping_rounds is not None: - callbacks.add(callback.early_stopping(early_stopping_rounds, verbose=bool(verbose_eval))) + callbacks.add(callback.early_stopping(early_stopping_rounds, first_metric_only, verbose=bool(verbose_eval))) if learning_rates is not None: callbacks.add(callback.reset_parameter(learning_rate=learning_rates)) @@ -400,8 +400,7 @@ def cv(params, train_set, num_boost_round=100, CV score needs to improve at least every ``early_stopping_rounds`` round(s) to continue. Requires at least one metric. If there's more than one, will check all of them. - To check only the first metric you can pass in ``callbacks`` - ``early_stopping`` callback with ``first_metric_only=True``. + To check only the first metric, set the ``first_metric_only`` parameter to ``True`` in ``params``. Last entry in evaluation history is the one from the best iteration. fpreproc : callable or None, optional (default=None) Preprocessing function that takes (dtrain, dtest, params) @@ -449,6 +448,7 @@ def cv(params, train_set, num_boost_round=100, warnings.warn("Found `{}` in params. Will use it instead of argument".format(alias)) early_stopping_rounds = params.pop(alias) break + first_metric_only = params.pop('first_metric_only', False) if num_boost_round <= 0: raise ValueError("num_boost_round should be greater than zero.") @@ -480,7 +480,7 @@ def cv(params, train_set, num_boost_round=100, cb.__dict__.setdefault('order', i - len(callbacks)) callbacks = set(callbacks) if early_stopping_rounds is not None: - callbacks.add(callback.early_stopping(early_stopping_rounds, verbose=False)) + callbacks.add(callback.early_stopping(early_stopping_rounds, first_metric_only, verbose=False)) if verbose_eval is True: callbacks.add(callback.print_evaluation(show_stdv=show_stdv)) elif isinstance(verbose_eval, integer_types): diff --git a/python-package/lightgbm/sklearn.py b/python-package/lightgbm/sklearn.py index c31f2ff5b646..6f3694a53e10 100644 --- a/python-package/lightgbm/sklearn.py +++ b/python-package/lightgbm/sklearn.py @@ -376,8 +376,8 @@ def fit(self, X, y, to continue training. Requires at least one validation data and one metric. If there's more than one, will check all of them. But the training data is ignored anyway. - To check only the first metric you can pass in ``callbacks`` - ``early_stopping`` callback with ``first_metric_only=True``. + To check only the first metric, set the ``first_metric_only`` parameter to ``True`` + in additional parameters ``**kwargs`` of the model constructor. verbose : bool or int, optional (default=True) Requires at least one evaluation data. If True, the eval metric on the eval set is printed at each boosting stage. 
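For reference, a minimal sketch of how the ``first_metric_only`` flag added in this patch is meant to be used from the Python package; the synthetic data, the two-metric setup, and all variable names are illustrative assumptions, not code from the patch itself:

    import numpy as np
    import lightgbm as lgb

    # Illustrative synthetic regression data.
    X = np.random.rand(500, 10)
    y = np.random.rand(500)
    train = lgb.Dataset(X[:400], y[:400])
    valid = lgb.Dataset(X[400:], y[400:], reference=train)

    # Two metrics are evaluated, but with first_metric_only=True in params
    # only the first metric gates early stopping.
    params = {
        'objective': 'regression',
        'metric': ['l2', 'l1'],
        'first_metric_only': True,
        'verbose': -1,
    }
    booster = lgb.train(params, train, num_boost_round=100,
                        valid_sets=[valid], early_stopping_rounds=5,
                        verbose_eval=False)
    print(booster.best_iteration)

As the engine.py hunk above shows, the flag is popped from ``params`` and forwarded to the ``early_stopping`` callback, so passing it in ``params`` is equivalent to constructing the callback with ``first_metric_only=True`` yourself.
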
diff --git a/tests/python_package_test/test_consistency.py b/tests/python_package_test/test_consistency.py index 89e2e5ce0140..27de80e1b1e0 100644 --- a/tests/python_package_test/test_consistency.py +++ b/tests/python_package_test/test_consistency.py @@ -21,7 +21,7 @@ def __init__(self, directory, prefix, config_file='train.conf'): if line and not line.startswith('#'): key, value = [token.strip() for token in line.split('=')] if 'early_stopping' not in key: # disable early_stopping - self.params[key] = value + self.params[key] = value if key != 'num_trees' else int(value) def load_dataset(self, suffix, is_sparse=False): filename = self.path(suffix) diff --git a/tests/python_package_test/test_engine.py b/tests/python_package_test/test_engine.py index 35ac2c2856af..c68f17320008 100644 --- a/tests/python_package_test/test_engine.py +++ b/tests/python_package_test/test_engine.py @@ -1379,24 +1379,23 @@ def constant_metric(preds, train_data): return ('constant_metric', 0.0, False) # test that all metrics are checked (default behaviour) - early_stop_callback = lgb.early_stopping(5, verbose=False) gbm = lgb.train(params, lgb_train, num_boost_round=20, valid_sets=[lgb_eval], feval=lambda preds, train_data: [decreasing_metric(preds, train_data), constant_metric(preds, train_data)], - callbacks=[early_stop_callback]) + early_stopping_rounds=5, verbose_eval=False) self.assertEqual(gbm.best_iteration, 1) # test that only the first metric is checked - early_stop_callback = lgb.early_stopping(5, first_metric_only=True, verbose=False) - gbm = lgb.train(params, lgb_train, num_boost_round=20, valid_sets=[lgb_eval], + gbm = lgb.train(dict(params, first_metric_only=True), lgb_train, + num_boost_round=20, valid_sets=[lgb_eval], feval=lambda preds, train_data: [decreasing_metric(preds, train_data), constant_metric(preds, train_data)], - callbacks=[early_stop_callback]) + early_stopping_rounds=5, verbose_eval=False) self.assertEqual(gbm.best_iteration, 20) # ... 
change the order of metrics - early_stop_callback = lgb.early_stopping(5, first_metric_only=True, verbose=False) - gbm = lgb.train(params, lgb_train, num_boost_round=20, valid_sets=[lgb_eval], + gbm = lgb.train(dict(params, first_metric_only=True), lgb_train, + num_boost_round=20, valid_sets=[lgb_eval], feval=lambda preds, train_data: [constant_metric(preds, train_data), decreasing_metric(preds, train_data)], - callbacks=[early_stop_callback]) + early_stopping_rounds=5, verbose_eval=False) self.assertEqual(gbm.best_iteration, 1) From b3c1ffbf9f0e7fd2c98b40b8b6ffb86308d8f408 Mon Sep 17 00:00:00 2001 From: Nikita Titov Date: Wed, 15 May 2019 16:40:07 +0300 Subject: [PATCH 3/6] [ci] create GitHub release automatically (#2171) * added GitHub Release task * add connection name * test * added triggers for tags * hotfixes --- .vsts-ci.yml | 24 ++++++++++++++++++++++++ 1 file changed, 24 insertions(+) diff --git a/.vsts-ci.yml b/.vsts-ci.yml index b9e3a1ac49f7..08df1e7f89a2 100644 --- a/.vsts-ci.yml +++ b/.vsts-ci.yml @@ -1,3 +1,10 @@ +trigger: + branches: + include: + - '*' + tags: + include: + - v* variables: PYTHON_VERSION: 3.7 CONDA_ENV: test-env @@ -161,3 +168,20 @@ jobs: pathtoPublish: '$(Build.ArtifactStagingDirectory)' artifactName: NuGet artifactType: container + - task: GitHubRelease@0 + displayName: 'Create GitHub Release' + condition: and(succeeded(), startsWith(variables['Build.SourceBranch'], 'refs/tags/v')) + inputs: + gitHubConnection: guolinke + repositoryName: '$(Build.Repository.Name)' + action: 'create' + target: '$(Build.SourceVersion)' + tagSource: 'auto' + title: '$(Build.SourceBranchName)' + assets: | + $(Build.SourcesDirectory)/binaries/PackageAssets/* + $(Build.ArtifactStagingDirectory)/*.nupkg + assetUploadMode: 'delete' + isDraft: true + isPreRelease: false + addChangeLog: false From 3d8770aff7c4cb4c5bc4774802b68fde0500c381 Mon Sep 17 00:00:00 2001 From: Laurae Date: Wed, 15 May 2019 16:04:50 +0200 Subject: [PATCH 4/6] [docs] fixing max_depth param description (#2155) * PR #1879 * Update docs with parameter_generator.py * Update wrapper doc for sklearn --- docs/Parameters.rst | 2 +- include/LightGBM/config.h | 2 +- python-package/lightgbm/sklearn.py | 2 +- 3 files changed, 3 insertions(+), 3 deletions(-) diff --git a/docs/Parameters.rst b/docs/Parameters.rst index 99d5a2bb1c82..28c2860efedd 100644 --- a/docs/Parameters.rst +++ b/docs/Parameters.rst @@ -190,7 +190,7 @@ Learning Control Parameters - limit the max depth for tree model. This is used to deal with over-fitting when ``#data`` is small. Tree still grows leaf-wise - - ``< 0`` means no limit + - ``<= 0`` means no limit - ``min_data_in_leaf`` :raw-html:`🔗︎`, default = ``20``, type = int, aliases: ``min_data_per_leaf``, ``min_data``, ``min_child_samples``, constraints: ``min_data_in_leaf >= 0`` diff --git a/include/LightGBM/config.h b/include/LightGBM/config.h index 2a6896f82dc0..cb552f65c023 100644 --- a/include/LightGBM/config.h +++ b/include/LightGBM/config.h @@ -212,7 +212,7 @@ struct Config { #pragma region Learning Control Parameters // desc = limit the max depth for tree model. This is used to deal with over-fitting when ``#data`` is small. 
Tree still grows leaf-wise - // desc = ``< 0`` means no limit + // desc = ``<= 0`` means no limit int max_depth = -1; // alias = min_data_per_leaf, min_data, min_child_samples diff --git a/python-package/lightgbm/sklearn.py b/python-package/lightgbm/sklearn.py index 6f3694a53e10..60ac9bf60f26 100644 --- a/python-package/lightgbm/sklearn.py +++ b/python-package/lightgbm/sklearn.py @@ -152,7 +152,7 @@ def __init__(self, boosting_type='gbdt', num_leaves=31, max_depth=-1, num_leaves : int, optional (default=31) Maximum tree leaves for base learners. max_depth : int, optional (default=-1) - Maximum tree depth for base learners, -1 means no limit. + Maximum tree depth for base learners, <=0 means no limit. learning_rate : float, optional (default=0.1) Boosting learning rate. You can use ``callbacks`` parameter of ``fit`` method to shrink/adapt learning rate From 6f3fae51fe98c71a5c54e0fe27d7a05d5195c3f6 Mon Sep 17 00:00:00 2001 From: Ilya Matiach Date: Wed, 15 May 2019 11:26:46 -0400 Subject: [PATCH 5/6] [doc] minor doc fix for gamma param (#2180) --- docs/Parameters.rst | 2 +- include/LightGBM/config.h | 2 +- 2 files changed, 2 insertions(+), 2 deletions(-) diff --git a/docs/Parameters.rst b/docs/Parameters.rst index 28c2860efedd..d1c1f476906d 100644 --- a/docs/Parameters.rst +++ b/docs/Parameters.rst @@ -51,7 +51,7 @@ Core Parameters - **Note**: can be used only in CLI version; for language-specific packages you can use the correspondent functions -- ``objective`` :raw-html:`🔗︎`, default = ``regression``, type = enum, options: ``regression``, ``regression_l1``, ``huber``, ``fair``, ``poisson``, ``quantile``, ``mape``, ``gammma``, ``tweedie``, ``binary``, ``multiclass``, ``multiclassova``, ``xentropy``, ``xentlambda``, ``lambdarank``, aliases: ``objective_type``, ``app``, ``application`` +- ``objective`` :raw-html:`🔗︎`, default = ``regression``, type = enum, options: ``regression``, ``regression_l1``, ``huber``, ``fair``, ``poisson``, ``quantile``, ``mape``, ``gamma``, ``tweedie``, ``binary``, ``multiclass``, ``multiclassova``, ``xentropy``, ``xentlambda``, ``lambdarank``, aliases: ``objective_type``, ``app``, ``application`` - regression application diff --git a/include/LightGBM/config.h b/include/LightGBM/config.h index cb552f65c023..e1c1427d1760 100644 --- a/include/LightGBM/config.h +++ b/include/LightGBM/config.h @@ -102,7 +102,7 @@ struct Config { // [doc-only] // type = enum - // options = regression, regression_l1, huber, fair, poisson, quantile, mape, gammma, tweedie, binary, multiclass, multiclassova, xentropy, xentlambda, lambdarank + // options = regression, regression_l1, huber, fair, poisson, quantile, mape, gamma, tweedie, binary, multiclass, multiclassova, xentropy, xentlambda, lambdarank // alias = objective_type, app, application // desc = regression application // descl2 = ``regression_l2``, L2 loss, aliases: ``regression``, ``mean_squared_error``, ``mse``, ``l2_root``, ``root_mean_squared_error``, ``rmse`` From c9d681ac0d8c7e242b2fff217db268c11c83c745 Mon Sep 17 00:00:00 2001 From: Nikita Titov Date: Wed, 15 May 2019 18:41:27 +0300 Subject: [PATCH 6/6] [ci] migrate to new Dockerfile (#2174) * migrate Dockerfile * switch tag back --- .ci/README.md | 6 +++ .ci/dockers/ubuntu-14.04/Dockerfile | 79 ----------------------------- 2 files changed, 6 insertions(+), 79 deletions(-) create mode 100644 .ci/README.md delete mode 100644 .ci/dockers/ubuntu-14.04/Dockerfile diff --git a/.ci/README.md b/.ci/README.md new file mode 100644 index 000000000000..3805703d0f8c --- /dev/null +++ 
b/.ci/README.md @@ -0,0 +1,6 @@ +Helper Scripts for CI +===================== + +This folder contains scripts which are run on CI services. + +Dockerfile used on CI service is maintained in a separate [GitHub repository](https://github.com/guolinke/lightgbm-ci-docker) and can be pulled from [Docker Hub](https://hub.docker.com/r/lightgbm/vsts-agent). diff --git a/.ci/dockers/ubuntu-14.04/Dockerfile b/.ci/dockers/ubuntu-14.04/Dockerfile deleted file mode 100644 index fe6527b1d17d..000000000000 --- a/.ci/dockers/ubuntu-14.04/Dockerfile +++ /dev/null @@ -1,79 +0,0 @@ -FROM microsoft/vsts-agent:ubuntu-14.04 - -# Install basic command-line utilities -RUN apt-get update \ - && apt-get install -y --no-install-recommends \ - curl \ - locales \ - sudo \ - unzip \ - wget \ - zip \ - && rm -rf /var/lib/apt/lists/* - -# Setup the locale -ENV LANG en_US.UTF-8 -ENV LC_ALL $LANG -RUN locale-gen $LANG \ - && update-locale - -# Install essential build tools -RUN apt-get update \ - && apt-get install -y --no-install-recommends \ - build-essential \ - && rm -rf /var/lib/apt/lists/* - -# Install clang 7.0 -RUN add-apt-repository ppa:ubuntu-toolchain-r/test -y \ - && cd /tmp \ - && wget -O - https://apt.llvm.org/llvm-snapshot.gpg.key | apt-key add - \ - && add-apt-repository "deb http://apt.llvm.org/trusty/ llvm-toolchain-trusty-7 main" -y \ - && apt-get update \ - && apt-get install -y --no-install-recommends \ - clang-7 \ - libomp-7-dev \ - && rm -rf /var/lib/apt/lists/* - -# Install CMake -RUN curl -sL https://cmake.org/files/v3.14/cmake-3.14.1-Linux-x86_64.sh -o cmake.sh \ - && chmod +x cmake.sh \ - && ./cmake.sh --prefix=/usr/local --exclude-subdir \ - && rm cmake.sh - -# Install Java -RUN add-apt-repository ppa:openjdk-r/ppa -y \ - && apt-get update \ - && apt-get install -y --no-install-recommends \ - openjdk-8-jdk \ - && rm -rf /var/lib/apt/lists/* - -ENV JAVA_HOME=/usr/lib/jvm/java-8-openjdk-amd64/ - -# Install SWIG -RUN curl -sL https://downloads.sourceforge.net/project/swig/swig/swig-3.0.12/swig-3.0.12.tar.gz -o swig.tar.gz \ - && tar -xzf swig.tar.gz \ - && cd swig-3.0.12 \ - && ./configure --prefix=/usr/local --without-pcre \ - && make \ - && make install \ - && cd .. \ - && rm swig.tar.gz \ - && rm -rf swig-3.0.12 - -# Install Miniconda -RUN curl -sL https://repo.continuum.io/miniconda/Miniconda3-latest-Linux-x86_64.sh -o miniconda.sh \ - && chmod +x miniconda.sh \ - && ./miniconda.sh -b -p /opt/conda \ - && rm miniconda.sh \ - && /opt/conda/bin/conda install python=3 -q -y \ - && /opt/conda/bin/conda install mkl qt -q -y \ - && /opt/conda/bin/conda clean -a -y \ - && chmod -R 777 /opt/conda - -ENV CONDA=/opt/conda/ - -# Clean system -RUN apt-get clean \ - && rm -rf /var/lib/apt/lists/* \ - && rm -rf /etc/apt/sources.list.d/* \ - && rm -rf /tmp/*
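
For completeness, a small Python sketch exercising the two parameter fixes documented earlier in this series (``gamma`` as the corrected objective spelling, and ``max_depth <= 0`` meaning no depth limit); the synthetic Gamma-distributed targets and all names here are illustrative assumptions, not code from these patches:

    import numpy as np
    import lightgbm as lgb

    # Positive targets, as the Gamma objective requires (illustrative data).
    X = np.random.rand(200, 5)
    y = np.random.gamma(shape=2.0, scale=1.0, size=200)

    train = lgb.Dataset(X, y)
    # 'gamma' (not 'gammma') is the objective name;
    # max_depth=-1, i.e. <= 0, disables the depth limit.
    params = {'objective': 'gamma', 'max_depth': -1, 'verbose': -1}
    booster = lgb.train(params, train, num_boost_round=10)
    print(booster.num_trees())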