Skip to content
New issue

Have a question about this project? Sign up for a free GitHub account to open an issue and contact its maintainers and the community.

By clicking “Sign up for GitHub”, you agree to our terms of service and privacy statement. We’ll occasionally send you account related emails.

Already on GitHub? Sign in to your account

Integrate met #25

Merged
merged 2 commits into from
Jun 28, 2022
Merged
Show file tree
Hide file tree
Changes from all commits
Commits
File filter

Filter by extension

Filter by extension

Conversations
Failed to load comments.
Loading
Jump to
Jump to file
Failed to load files.
Loading
Diff view
Diff view
33 changes: 33 additions & 0 deletions code/all_dependencies.py
Original file line number Diff line number Diff line change
@@ -0,0 +1,33 @@
from sklearn.ensemble import RandomForestRegressor
import numpy as np
import pandas as pd
import matplotlib.pyplot as plt
import seaborn as sns
from sklearn.model_selection import train_test_split
from sklearn.preprocessing import MinMaxScaler
from sklearn.metrics import mean_squared_error
from sklearn import metrics
from sklearn import tree
import joblib
import os
from pathlib import Path
import json
import geopandas as gpd
import geojson
import os.path
import math
from sklearn.model_selection import RandomizedSearchCV

import json
import pandas as pd
import ee
import seaborn as sns
import matplotlib.pyplot as plt
import os
import geopandas as gpd
import geojson
import numpy as np
import os.path

#pd.set_option('display.max_columns', None)

8 changes: 8 additions & 0 deletions code/process.json
Original file line number Diff line number Diff line change
Expand Up @@ -190,4 +190,12 @@
"lang" : "python",
"owner" : "111111",
"confidential" : "FALSE"
},{
"id" : "ilbqzg",
"name" : "all_dependencies",
"description" : null,
"code" : "from sklearn.ensemble import RandomForestRegressor\nimport numpy as np\nimport pandas as pd\nimport matplotlib.pyplot as plt\nimport seaborn as sns\nfrom sklearn.model_selection import train_test_split\nfrom sklearn.preprocessing import MinMaxScaler\nfrom sklearn.metrics import mean_squared_error\nfrom sklearn import metrics \nfrom sklearn import tree\nimport joblib\nimport os\nfrom pathlib import Path\nimport json\nimport geopandas as gpd\nimport geojson\nimport os.path\nimport math\nfrom sklearn.model_selection import RandomizedSearchCV\n\nimport json\nimport pandas as pd\nimport ee\nimport seaborn as sns\nimport matplotlib.pyplot as plt\nimport os\nimport geopandas as gpd\nimport geojson\nimport numpy as np\nimport os.path\n\n#pd.set_option('display.max_columns', None)\n",
"lang" : "python",
"owner" : "111111",
"confidential" : "FALSE"
}]
183,091 changes: 183,091 additions & 0 deletions data/sat_testing/modis/mod10a1_ndsi_2022-06-01_2022-06-22.csv

Large diffs are not rendered by default.

58,892 changes: 58,892 additions & 0 deletions data/sat_testing/sentinel1/s1_grd_vv_2022-06-05_2022-06-22.csv

Large diffs are not rendered by default.

121 changes: 121 additions & 0 deletions history/process_2wkl6e.json
Original file line number Diff line number Diff line change
@@ -0,0 +1,121 @@
[{
"history_id" : "jh9ydimw0cm",
"history_input" : "# Deploy model to service\n\nprint(\"deploy model to service\")\n",
"history_output" : "deploy model to service\n",
"history_begin_time" : 1654521115025,
"history_end_time" : 1654521115144,
"history_notes" : null,
"history_process" : "2wkl6e",
"host_id" : "100001",
"indicator" : "Done"
},{
"history_id" : "3fy8xfm8567",
"history_input" : "# Deploy model to service\n\nprint(\"deploy model to service\")\n",
"history_output" : "deploy model to service\n",
"history_begin_time" : 1647826117273,
"history_end_time" : 1647826117386,
"history_notes" : null,
"history_process" : "2wkl6e",
"host_id" : "100001",
"indicator" : "Done"
},{
"history_id" : "nf2n7gtg3kz",
"history_input" : "# Deploy model to service\n\nprint(\"deploy model to service\")\n",
"history_output" : "deploy model to service\n",
"history_begin_time" : 1647225952602,
"history_end_time" : 1647225952698,
"history_notes" : null,
"history_process" : "2wkl6e",
"host_id" : "100001",
"indicator" : "Done"
},{
"history_id" : "148a3mugiz2",
"history_input" : "# Deploy model to service\n\nprint(\"deploy model to service\")\n",
"history_output" : "deploy model to service\n",
"history_begin_time" : 1646693033766,
"history_end_time" : 1646693033896,
"history_notes" : null,
"history_process" : "2wkl6e",
"host_id" : "100001",
"indicator" : "Done"
},{
"history_id" : "3buye4neevn",
"history_input" : "# Deploy model to service\n\nprint(\"deploy model to service\")\n",
"history_output" : "deploy model to service\n",
"history_begin_time" : 1646604648073,
"history_end_time" : 1646604648235,
"history_notes" : null,
"history_process" : "2wkl6e",
"host_id" : "100001",
"indicator" : "Done"
},{
"history_id" : "56y4ftkt5yi",
"history_input" : "# Deploy model to service\n\nprint(\"deploy model to service\")\n",
"history_output" : "deploy model to service\n",
"history_begin_time" : 1646272337859,
"history_end_time" : 1646272337989,
"history_notes" : null,
"history_process" : "2wkl6e",
"host_id" : "100001",
"indicator" : "Done"
},{
"history_id" : "k84eh83rkz6",
"history_input" : "# Deploy model to service\n\nprint(\"deploy model to service\")\n",
"history_output" : "deploy model to service\n",
"history_begin_time" : 1645977675714,
"history_end_time" : 1645977676764,
"history_notes" : null,
"history_process" : "2wkl6e",
"host_id" : "je29f6",
"indicator" : "Done"
},{
"history_id" : "qcg3ken9a69",
"history_input" : "# Deploy model to service\n\nprint(\"deploy model to service\")\n",
"history_output" : "deploy model to service\n",
"history_begin_time" : 1645977445591,
"history_end_time" : 1645977446592,
"history_notes" : null,
"history_process" : "2wkl6e",
"host_id" : "je29f6",
"indicator" : "Done"
},{
"history_id" : "argt5yqt0ad",
"history_input" : "# Deploy model to service\n\nprint(\"deploy model to service\")\n",
"history_output" : "deploy model to service\n",
"history_begin_time" : 1642977866710,
"history_end_time" : 1642977866827,
"history_notes" : null,
"history_process" : "2wkl6e",
"host_id" : "100001",
"indicator" : "Done"
},{
"history_id" : "rilhyfolrey",
"history_input" : "# Deploy model to service\n\nprint(\"deploy model to service\")\n",
"history_output" : "deploy model to service\n",
"history_begin_time" : 1642969781031,
"history_end_time" : 1642969781150,
"history_notes" : null,
"history_process" : "2wkl6e",
"host_id" : "100001",
"indicator" : "Done"
},{
"history_id" : "hs2g7ll5vhi",
"history_input" : "# Deploy model to service\n\nprint(\"deploy model to service\")\n",
"history_output" : "deploy model to service\n",
"history_begin_time" : 1642455232511,
"history_end_time" : 1642455232636,
"history_notes" : null,
"history_process" : "2wkl6e",
"host_id" : "100001",
"indicator" : "Done"
},{
"history_id" : "67w94uwybuf",
"history_input" : "# Deploy model to service\n\nprint(\"deploy model to service\")\n",
"history_output" : "deploy model to service\n",
"history_begin_time" : 1642454682946,
"history_end_time" : 1642454683090,
"history_notes" : null,
"history_process" : "2wkl6e",
"host_id" : "100001",
"indicator" : "Done"
}]
21 changes: 21 additions & 0 deletions history/process_4i0sop.json
Original file line number Diff line number Diff line change
@@ -0,0 +1,21 @@
[{
"history_id" : "xnp8macj97a",
"history_input" : "from sklearn.ensemble import RandomForestRegressor\nimport numpy as np\nimport pandas as pd\nimport matplotlib.pyplot as plt\nimport seaborn as sns\nfrom sklearn.model_selection import train_test_split\nfrom sklearn.preprocessing import MinMaxScaler\nfrom sklearn.metrics import mean_squared_error\nfrom sklearn import metrics \nfrom sklearn import tree\nimport joblib\nimport os\nfrom pathlib import Path\nimport json\nimport geopandas as gpd\nimport geojson\nimport os.path\nimport math\nfrom sklearn.pipeline import Pipeline\nfrom sklearn.ensemble import RandomForestRegressor\nfrom sklearn.preprocessing import OneHotEncoder, StandardScaler\nfrom sklearn.model_selection import RandomizedSearchCV\nfrom base_hole import BaseHole\nfrom sklearn.model_selection import train_test_split\nfrom datetime import datetime\nfrom model_creation_rf import RandomForestHole\nfrom sklearn.ensemble import ExtraTreesRegressor\n\nclass XGBoostHole(RandomForestHole):\n\n def get_model(self):\n \"\"\"\n rfc_pipeline = Pipeline(steps = [\n ('data_scaling', StandardScaler()),\n ('model', RandomForestRegressor(max_depth = 15,\n min_samples_leaf = 0.004,\n min_samples_split = 0.008,\n n_estimators = 25))])\n #return rfc_pipeline\n \t\"\"\"\n etmodel = ExtraTreesRegressor(bootstrap=False, ccp_alpha=0.0, criterion='mse',\n max_depth=None, max_features='auto', max_leaf_nodes=None,\n max_samples=None, min_impurity_decrease=0.0,\n #min_impurity_split=None, \n min_samples_leaf=1,\n min_samples_split=2, min_weight_fraction_leaf=0.0,\n n_estimators=100, n_jobs=-1, oob_score=False,\n random_state=123, verbose=0, warm_start=False)\n return etmodel\n\n\n",
"history_output" : "",
"history_begin_time" : 1654519485116,
"history_end_time" : 1654519485335,
"history_notes" : null,
"history_process" : "4i0sop",
"host_id" : "100001",
"indicator" : "Done"
},{
"history_id" : "erkkycwyu8r",
"history_input" : "from sklearn.ensemble import RandomForestRegressor\nimport numpy as np\nimport pandas as pd\nimport matplotlib.pyplot as plt\nimport seaborn as sns\nfrom sklearn.model_selection import train_test_split\nfrom sklearn.preprocessing import MinMaxScaler\nfrom sklearn.metrics import mean_squared_error\nfrom sklearn import metrics \nfrom sklearn import tree\nimport joblib\nimport os\nfrom pathlib import Path\nimport json\nimport geopandas as gpd\nimport geojson\nimport os.path\nimport math\nfrom sklearn.pipeline import Pipeline\nfrom sklearn.ensemble import RandomForestRegressor\nfrom sklearn.preprocessing import OneHotEncoder, StandardScaler\nfrom sklearn.model_selection import RandomizedSearchCV\nfrom base_hole import BaseHole\nfrom sklearn.model_selection import train_test_split\nfrom datetime import datetime\nfrom model_creation_rf import RandomForestHole\n\n\nclass XGBoostHole(RandomForestHole):\n\n def get_model(self):\n rfc_pipeline = Pipeline(steps = [\n ('data_scaling', StandardScaler()),\n ('model', RandomForestRegressor(max_depth = 15,\n min_samples_leaf = 0.004,\n min_samples_split = 0.008,\n n_estimators = 25))])\n return rfc_pipeline\n\n\n",
"history_output" : "",
"history_begin_time" : 1647826066086,
"history_end_time" : 1647826068072,
"history_notes" : null,
"history_process" : "4i0sop",
"host_id" : "100001",
"indicator" : "Done"
}]
Loading