Add option --show-prefixes for example-queries command
On the side, make sure that for the `for-each` of `MULTI_INPUT_JSON`,
the input streams are created in the lexicographical order of the
input files.
Hannah Bast committed Dec 15, 2024
1 parent dd74d18 commit b0c29de
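
For context, a hedged usage sketch of the new option (the `qlever example-queries` invocation below is an assumption about the CLI entry point; the flag names and the preset URL are taken from the diff):

    qlever example-queries \
        --sparql-endpoint-preset https://qlever.dev/api/wikidata \
        --show-query always --show-prefixes

With `--show-query always`, every query is pretty-printed before it is launched; `--show-prefixes` additionally keeps the PREFIX declarations, which are otherwise stripped from the pretty-printed output.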
Showing 2 changed files with 25 additions and 15 deletions.
38 changes: 24 additions & 14 deletions src/qlever/commands/example_queries.py
@@ -21,10 +21,7 @@ class ExampleQueriesCommand(QleverCommand):
"""

def __init__(self):
self.presets = {
"virtuoso-wikidata": "https://wikidata.demo.openlinksw.com/sparql",
"qlever-wikidata": "https://qlever.cs.uni-freiburg.de/api/wikidata",
}
pass

def description(self) -> str:
return "Show how much of the cache is currently being used"
@@ -41,8 +38,15 @@ def additional_arguments(self, subparser) -> None:
)
subparser.add_argument(
"--sparql-endpoint-preset",
choices=self.presets.keys(),
help="Shortcut for setting the SPARQL endpoint",
choices=[
"https://qlever.dev/api/wikidata",
"https://qlever.dev/api/uniprot",
"https://qlever.dev/api/pubchem",
"https://qlever.dev/api/osm-planet",
"https://wikidata.demo.openlinksw.com/sparql",
"https://sparql.uniprot.org/sparql",
],
help="SPARQL endpoint from fixed list (to save typing)",
)
subparser.add_argument(
"--get-queries-cmd",
@@ -86,7 +90,7 @@ def additional_arguments(self, subparser) -> None:
"application/sparql-results+json",
"text/turtle",
],
default="text/tab-separated-values",
default="application/sparql-results+json",
help="Accept header for the SPARQL query",
)
subparser.add_argument(
@@ -119,12 +123,19 @@ def additional_arguments(self, subparser) -> None:
default="never",
help="Show the queries that will be executed (always, never, on error)",
)
subparser.add_argument(
"--show-prefixes",
action="store_true",
default=False,
help="When showing the query, also show the prefixes",
)

def pretty_print_query(self, query: str) -> None:
def pretty_print_query(self, query: str, show_prefixes: bool) -> None:
remove_prefixes_cmd = " | sed '/^PREFIX /Id'" if not show_prefixes else ""
pretty_print_query_cmd = (
f"echo {shlex.quote(query)}"
f" | docker run -i --rm sparqling/sparql-formatter"
f" | sed '/^PREFIX /Id' | grep -v '^$'"
f"{remove_prefixes_cmd} | grep -v '^$'"
)
try:
query_pp = run_command(pretty_print_query_cmd, return_output=True)
@@ -154,9 +165,8 @@ def execute(self, args) -> bool:
return False

# Handle shortcuts for SPARQL endpoint.
if args.sparql_endpoint_preset in self.presets:
args.sparql_endpoint = self.presets[args.sparql_endpoint_preset]
args.ui_config = args.sparql_endpoint_preset.split("-")[1]
if args.sparql_endpoint_preset:
args.sparql_endpoint = args.sparql_endpoint_preset

# Limit only works with full result.
if args.limit and args.download_or_count == "count":
@@ -286,7 +296,7 @@ def execute(self, args) -> bool:
query = re.sub(r"\s*\.\s*\}", " }", query)
if args.show_query == "always":
log.info("")
self.pretty_print_query(query)
self.pretty_print_query(query, args.show_prefixes)

# Launch query.
try:
@@ -420,7 +430,7 @@ def execute(self, args) -> bool:
f"{colored(error_msg['long'], 'red')}"
)
if args.show_query == "on-error":
self.pretty_print_query(query)
self.pretty_print_query(query, args.show_prefixes)
log.info("")

# Check that each query has a time and a result size, or it failed.
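
The pretty-printing in `pretty_print_query` is done by a shell pipeline (the sparql-formatter Docker image, then sed and grep). As a minimal sketch of what the new conditional filter does, here is the same logic in plain Python; the function name is hypothetical and not part of the repository:

    def strip_prefixes(pretty_query: str, show_prefixes: bool) -> str:
        lines = pretty_query.splitlines()
        if not show_prefixes:
            # Mirrors sed '/^PREFIX /Id': drop PREFIX declarations, case-insensitively.
            lines = [line for line in lines if not line.upper().startswith("PREFIX ")]
        # Mirrors grep -v '^$': drop empty lines.
        return "\n".join(line for line in lines if line != "")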
2 changes: 1 addition & 1 deletion src/qlever/commands/index.py
@@ -123,7 +123,7 @@ def get_input_options_for_json(self, args) -> str:
input_cmds = [input_spec["cmd"]]
else:
try:
files = glob.glob(input_spec["for-each"])
files = sorted(glob.glob(input_spec["for-each"]))
except Exception as e:
raise self.InvalidInputJson(
f"Element {i} in `MULTI_INPUT_JSON` contains an "
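
The one-line change in index.py matters because `glob.glob` gives no ordering guarantee: the matched files come back in filesystem-dependent order, so the input streams for `for-each` could differ between machines and runs. Sorting makes the order lexicographical and therefore deterministic. A minimal sketch with a hypothetical pattern:

    import glob

    files = glob.glob("wikidata-part-*.ttl.gz")           # arbitrary, filesystem-dependent order
    files = sorted(glob.glob("wikidata-part-*.ttl.gz"))   # stable lexicographical order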
