Skip to content
New issue

Have a question about this project? Sign up for a free GitHub account to open an issue and contact its maintainers and the community.

By clicking “Sign up for GitHub”, you agree to our terms of service and privacy statement. We’ll occasionally send you account related emails.

Already on GitHub? Sign in to your account

Trapi15 #130

Merged
merged 11 commits into from
Apr 12, 2024
Merged
Show file tree
Hide file tree
Changes from all commits
Commits
File filter

Filter by extension

Filter by extension


Conversations
Failed to load comments.
Loading
Jump to
Jump to file
Failed to load files.
Loading
Diff view
Diff view
2 changes: 1 addition & 1 deletion openapi-config.yaml
Original file line number Diff line number Diff line change
Expand Up @@ -21,7 +21,7 @@ x-translator:
- Ranking Agent
infores: "infores:aragorn-ranker"
x-trapi:
version: 1.4.0
version: 1.5.0
operations:
- Rank by OmniCorp overlay
- Rank by score
Expand Down
3 changes: 1 addition & 2 deletions ranker/modules/omnicorp_overlay.py
Original file line number Diff line number Diff line change
Expand Up @@ -216,11 +216,10 @@ async def query(request: PDResponse):

#Now we want to find the publication count for every pair. But: we only want to do that for pairs that
# are part of the same answer
#Note, this will be affected by TRAPI 1.4
t1 = datetime.now()
pair_to_answer = await generate_curie_pairs(answers, qgraph_setnodes, node_pub_counts, message)
t2 = datetime.now()
logger.info(f"generate_curie_pairs time: {t2 - t1}")
logger.info(f"generate_curie_pairs time: {t2 - t1}. Number of pairs: {len(pair_to_answer)}")

# get all pair supports
keypairs = {make_key(x,node_indices):x for x in pair_to_answer.keys()}
Expand Down
2 changes: 1 addition & 1 deletion ranker/server.py
Original file line number Diff line number Diff line change
Expand Up @@ -16,7 +16,7 @@
from ranker.util.omnicorp_get_node_pmids import get_node_pmids

# set the app version
APP_VERSION = '3.2.8'
APP_VERSION = '3.3.0'

APP = FastAPI(title='ARAGORN Ranker', version=APP_VERSION)

Expand Down
14 changes: 7 additions & 7 deletions requirements.txt
Original file line number Diff line number Diff line change
@@ -1,15 +1,15 @@
asyncpg==0.27.0
asyncpg==0.29.0
fastapi==0.63.0
gunicorn==20.0.4
httpx==0.16.1
jsonschema==3.2.0
lru-dict==1.1.8
lru-dict==1.3.0
pydantic>=1.8.1
pyyaml==5.3.1
redis==4.5.4
uvicorn==0.17.6
uvloop==0.17.0
numpy==1.24.1
httptools==0.5.0
reasoner-pydantic==4.0.5
yappi==1.4.0
uvloop==0.19.0
numpy==1.26.4
httptools==0.6.0
reasoner-pydantic==5.0.0
yappi==1.6.0
85 changes: 85 additions & 0 deletions tests/InputJson_15/UpdateJSON.py
Original file line number Diff line number Diff line change
@@ -0,0 +1,85 @@
import os
import json

# To run this, grab the JSONs you want from an earlier version, copy them into this directory (renaming them to
# whatever_original.json) and then run it. There are cleaner ways to do this...

def get_originals():
    """Return the names of all ``*_original.json`` files in the current directory.

    Returns:
        list[str]: filenames (not paths) ending in ``_original.json``.
    """
    # A comprehension replaces the manual append loop (same result, one expression).
    return [f for f in os.listdir('.') if f.endswith("_original.json")]

def go():
    """Upgrade every ``*_original.json`` file found in the working directory."""
    for filename in get_originals():
        parse(filename)

def add_logs(trapi):
    """Ensure the TRAPI response dict has a top-level ``"logs"`` list.

    Args:
        trapi (dict): a TRAPI response; mutated in place.
    """
    # setdefault replaces the `"logs" not in trapi.keys()` membership test
    # (testing membership on .keys() is redundant — `in trapi` suffices anyway).
    trapi.setdefault("logs", [])

def add_sources_and_attributes_to_edges(trapi):
    """Backfill required TRAPI 1.4+ edge fields on the knowledge graph.

    Every edge missing ``"sources"`` gets a single made-up primary knowledge
    source; every edge missing ``"attributes"`` gets an empty list.
    The response is mutated in place.
    """
    for edge in trapi["message"]["knowledge_graph"]["edges"].values():
        if "sources" not in edge:
            # Fresh list per edge so edges never share a sources object.
            edge["sources"] = [{
                "resource_id": "infores:madeup",
                "resource_role": "primary_knowledge_source",
            }]
        edge.setdefault("attributes", [])

def add_attributes_to_nodes(trapi):
    """Backfill required node fields: a default category and empty attributes.

    Mutates the TRAPI response in place.
    """
    nodes = trapi["message"]["knowledge_graph"]["nodes"]
    for node in nodes.values():
        node.setdefault("categories", ["biolink:NamedThing"])
        node.setdefault("attributes", [])

def add_attributes_to_bindings(trapi):
    """Ensure every node binding and every analysis edge binding has ``"attributes"``.

    Mutates the TRAPI response in place. Assumes each result already carries
    an ``"analyses"`` list (fix_analysis runs first in parse()).
    """
    for result in trapi["message"]["results"]:
        for bindings in result["node_bindings"].values():
            for binding in bindings:
                binding.setdefault("attributes", [])
        for analysis in result["analyses"]:
            for bindings in analysis["edge_bindings"].values():
                for binding in bindings:
                    binding.setdefault("attributes", [])

def add_attributes_to_auxgraphs(trapi):
    """Ensure each auxiliary graph (when present) carries an ``"attributes"`` list.

    Messages without ``"auxiliary_graphs"`` are left untouched.
    """
    message = trapi["message"]
    if "auxiliary_graphs" not in message:
        return
    for auxgraph in message["auxiliary_graphs"].values():
        auxgraph.setdefault("attributes", [])

def fix_analysis(trapi):
    """Convert pre-TRAPI-1.4 ``result["edge_bindings"]`` into a single analysis.

    Each result gains an ``"analyses"`` list if it lacks one; any top-level
    ``"edge_bindings"`` is moved into a new analysis with an empty attribute
    list and a placeholder resource id. Mutates the response in place.

    Bug fix: the original tested ``"analysis" not in result`` (singular), a key
    that never exists, so the branch always fired and clobbered any
    pre-existing ``"analyses"`` list with ``[]``.
    """
    for result in trapi["message"]["results"]:
        if "analyses" not in result:  # was "analysis" — always true, wiping existing analyses
            result["analyses"] = []
        if "edge_bindings" in result:
            # pop() moves the bindings and removes the legacy key in one step.
            analysis = {
                "edge_bindings": result.pop("edge_bindings"),
                "attributes": [],
                "resource_id": "fake:thing",
            }
            result["analyses"].append(analysis)

def parse(original):
    """Upgrade one ``*_original.json`` file and write the result next to it.

    Reads the file, applies every TRAPI fixup in order, and writes the
    upgraded message to the same name with the ``_original`` suffix removed.

    Args:
        original (str): filename ending in ``_original.json``.
    """
    with open(original, "r") as handle:
        trapi = json.load(handle)
    # Order matters: fix_analysis must create result["analyses"] before
    # add_attributes_to_bindings walks it.
    for fixup in (
        add_logs,
        add_sources_and_attributes_to_edges,
        add_attributes_to_nodes,
        fix_analysis,
        add_attributes_to_bindings,
        add_attributes_to_auxgraphs,
    ):
        fixup(trapi)
    suffix = "_original.json"
    target = original[:-len(suffix)] + ".json"
    with open(target, "w") as handle:
        json.dump(trapi, handle, indent=2)

# Entry point: upgrade every *_original.json in the current working directory.
if __name__ == "__main__":
    go()
Loading
Loading