Skip to content

Commit

Permalink
Browse files Browse the repository at this point in the history
  • Loading branch information
randersenYB committed Jul 24, 2024
2 parents 5b01744 + 8754424 commit 6d2f139
Show file tree
Hide file tree
Showing 14 changed files with 1,838 additions and 629 deletions.
4 changes: 4 additions & 0 deletions .gitignore
Original file line number Diff line number Diff line change
Expand Up @@ -19,4 +19,8 @@ venv
.idea
aerospike/data/*.hdf5
aerospike/data/*
aerospike/results/*
118 changes: 84 additions & 34 deletions .vscode/launch.json
Original file line number Diff line number Diff line change
Expand Up @@ -62,36 +62,64 @@
"console": "integratedTerminal"
},
{
"name": "Python Debugger: hdf_import",
"name": "Python Debugger: hdf_import (prompt DS)",
"type": "debugpy",
"request": "launch",
"program": "${workspaceFolder}/aerospike/hdf_import.py",
"cwd": "${workspaceFolder}/aerospike",
"args": [
"--dataset", "random-xs-20-angular",
"--dataset", "${input:enterDataset}",
"--concurrency", "5000",
"--idxdrop",
"--logfile", "./hdfimport.log",
"--loglevel", "DEBUG"
"--logfile", "./hdfimport.log"
],
"justMyCode": false,
"console": "integratedTerminal"
},
{
"name": "Python Debugger: hdf_import (prompt DS)",
"name": "Python Debugger: hdf_import (prompt HDF)",
"type": "debugpy",
"request": "launch",
"program": "${workspaceFolder}/aerospike/hdf_import.py",
"cwd": "${workspaceFolder}/aerospike",
"args": [
"--hdf", "${input:enterDataset}",
"--hdf", "${input:enterHDFFile}",
"--concurrency", "5000",
"--idxdrop",
"--logfile", "./hdfimport.log",
"--loglevel", "DEBUG"
"--logfile", "./hdfimport-hdf.log"
],
"console": "integratedTerminal"
},
{
"name": "Python Debugger: hdf_import Sleep 10 mins (prompt HDF)",
"type": "debugpy",
"request": "launch",
"program": "${workspaceFolder}/aerospike/hdf_import.py",
"cwd": "${workspaceFolder}/aerospike",
"args": [
"--hdf", "${input:enterHDFFile}",
"--concurrency", "5000",
"--idxdrop",
"--logfile", "./hdfimport-hdf.log",
"--exhaustedevt", "600"
],
"console": "integratedTerminal"
},
{
"name": "Python Debugger: hdf_import angular (prompt HDF)",
"type": "debugpy",
"request": "launch",
"program": "${workspaceFolder}/aerospike/hdf_import.py",
"cwd": "${workspaceFolder}/aerospike",
"args": [
"--hdf", "${input:enterHDFFile}",
"--concurrency", "5000",
"--distancetype", "COSINE",
"--idxdrop",
"--logfile", "./hdfimport-angular.log",
],
"console": "integratedTerminal"
},
{
"name": "Python Debugger: hdf_import LB",
"type": "debugpy",
Expand Down Expand Up @@ -130,27 +158,39 @@
"args": [
"--dataset", "glove-100-angular",
"--concurrency", "10000",
"--logfile", "./hdfimport.log",
"--idxdrop",
"--loglevel", "DEBUG"
"--logfile", "./hdfimport-glove.log",
"--idxdrop"
],
"console": "integratedTerminal"
},
{
"name": "Python Debugger: hdf_query",
"name": "Python Debugger: hdf_query (prompt DS)",
"type": "debugpy",
"request": "launch",
"program": "${workspaceFolder}/aerospike/hdf_query.py",
"cwd": "${workspaceFolder}/aerospike",
"args": [
"--hdf", "random-xs-20-angular",
"--dataset", "${input:enterDataset}",
"--logfile", "./hdfquery.log",
"-r", "10"
],
"console": "integratedTerminal"
},
{
"name": "Python Debugger: hdf_query (proimpt HDF)",
"type": "debugpy",
"request": "launch",
"program": "${workspaceFolder}/aerospike/hdf_query.py",
"cwd": "${workspaceFolder}/aerospike",
"args": [
"--hdf", "${input:enterDataset}",
"--logfile", "./hdfquery.log",
"-r", "10"
],
"console": "integratedTerminal"
},
{
"name": "Python Debugger: hdf_query (check prompt)",
"name": "Python Debugger: hdf_query (check prompt HDF)",
"type": "debugpy",
"request": "launch",
"program": "${workspaceFolder}/aerospike/hdf_query.py",
Expand All @@ -171,7 +211,7 @@
"cwd": "${workspaceFolder}/aerospike",
"args": [
"--dataset", "glove-100-angular",
"--logfile", "./hdfquery.log",
"--logfile", "./hdfquery-glove.log",
"--check",
"-r", "10",
"--searchparams", "{\"ef\":10}"
Expand All @@ -186,60 +226,64 @@
"cwd": "${workspaceFolder}/aerospike",
"args": [
"--dataset", "glove-100-angular",
"--logfile", "./hdfquery.log",
"--logfile", "./hdfquery-glove.log",
"--check",
"-r", "10",
"--limit", "10"
],
"console": "integratedTerminal"
},
{
"name": "Python Debugger: hdf_create_dataset (prompt)",
"name": "Python Debugger: hdf_query (gist 960)",
"type": "debugpy",
"request": "launch",
"program": "${workspaceFolder}/aerospike/hdf_create_dataset.py",
"program": "${workspaceFolder}/aerospike/hdf_query.py",
"cwd": "${workspaceFolder}/aerospike",
"args": [
"--hosts", "localhost:3000",
"--hdf", "${input:enterHDFFile}",
"--logfile", "./hdfcreatedataset.log",
"--indexname", "HDF-data_Idx",
"--records", "-1"
"--dataset", "gist-960-euclidean",
"--logfile", "./hdfquery-gist1.log",
"-r", "10",
"--limit", "10",
"--idxname", "ANN-data_euclidean_SQUARED_EUCLIDEAN_960_16_100_100_Idx"
],
"justMyCode": false,
"console": "integratedTerminal"
},
{
"name": "Python Debugger: bigann_download random-xs",
"name": "Python Debugger: hdf_create_dataset (prompt HDF)",
"type": "debugpy",
"request": "launch",
"program": "${workspaceFolder}/aerospike/bigann_download.py",
"program": "${workspaceFolder}/aerospike/hdf_create_dataset.py",
"cwd": "${workspaceFolder}/aerospike",
"args": [
"--dataset", "random-xs"
"--hosts", "localhost:3000",
"--hdf", "${input:enterHDFFile}",
"--logfile", "./hdfcreatedataset.log",
"--indexname", "HDF-data_Idx",
"--records", "-1"
],
"justMyCode": false,
"console": "integratedTerminal"
},
{
"name": "Python Debugger: bigann_download deep-10M",
"name": "Python Debugger: bigann_download (prompt)",
"type": "debugpy",
"request": "launch",
"program": "${workspaceFolder}/aerospike/bigann_download.py",
"cwd": "${workspaceFolder}/aerospike",
"args": [
"--dataset", "deep-10M"
"--dataset", "${input:enterBigDataset}"
],
"console": "integratedTerminal"
},
{
"name": "Python Debugger: bigann_convert",
"name": "Python Debugger: bigann_convert (prompt DS)",
"type": "debugpy",
"request": "launch",
"program": "${workspaceFolder}/aerospike/bigann_convert_hdf.py",
"cwd": "${workspaceFolder}/aerospike",
"args": [
"--dataset", "${input:enterBigDataset}",
"--hdf", "bigtest.hdf5"
"--hdf", "${input:enterBigHDFFile}"
],
"console": "integratedTerminal"
},
Expand All @@ -256,12 +300,18 @@
"type": "promptString",
"description": "Enter Big Dataset",
"default": "random-xs"
},
{
"id": "enterHDFFile",
"type": "promptString",
"description": "Enter HDF Path",
"default": "test"
},
{
"id": "enterBigHDFFile",
"type": "promptString",
"description": "Enter HDF Path",
"default": "bigtest"
}
]
}
Loading

0 comments on commit 6d2f139

Please sign in to comment.