
Merge branch 'release-0.4.0'

Arkadiusz Ryś, 2 years ago
parent
commit
0b657d52b5
7 changed files with 15 additions and 11 deletions
  1. .editorconfig (+2 -2)
  2. Dockerfile (+2 -1)
  3. README.rst (+5 -3)
  4. data/configuration.toml (+2 -1)
  5. spendpoint/__init__.py (+1 -1)
  6. spendpoint/bridge.py (+2 -2)
  7. spendpoint/service.py (+1 -1)

+ 2 - 2
.editorconfig

@@ -8,10 +8,10 @@ indent_style = space
 insert_final_newline = true
 trim_trailing_whitespace = true
 
-[*.{css, html, yml, yaml, js, xml}]
+[*.{css,html,yml,yaml,js,xml}]
 indent_size = 2
 
-[{*.log, LICENSE}]
+[{*.log,LICENSE}]
 insert_final_newline = false
 
 [*.rst]

+ 2 - 1
Dockerfile

@@ -7,4 +7,5 @@ WORKDIR ${APP_HOME}
 COPY ./requirements.txt ${APP_HOME}/requirements.txt
 RUN pip install --no-cache-dir --upgrade -r requirements.txt
 COPY . ${APP_HOME}
-CMD ["uvicorn", "spendpoint.main:app", "--host", "0.0.0.0", "--port", "80", "--proxy-headers"]
+#CMD ["uvicorn", "spendpoint.main:app", "--host", "0.0.0.0", "--port", "80", "--proxy-headers"]
+CMD ["python3", "-m", "spendpoint"]

+ 5 - 3
README.rst

@@ -3,9 +3,11 @@ SpEndPoint
 ##########
 
 Creates a SPARQL endpoint supporting custom services.
-Default access at `http://127.0.0.1:8000`.
+The default access point is at `http://127.0.0.1:8000`.
+This endpoint can be configured in the `configuration.toml <data/configuration.toml>`_ file.
+The Docker image uses uvicorn to host the application at `0.0.0.0:80`. Feel free to map this to any port of your liking.
 
-Currently supports 3 services:
+We currently support 3 services out of the box:
 
 .. code-block::
 
@@ -32,7 +34,7 @@ Installation
 Configuration
 -------------
 
-A configuration file at `data/configuration.toml` holds all user configurable data.
+A configuration file at `configuration.toml <data/configuration.toml>`_ holds all user configurable data.
 You can set the `host` and `port` the server will listen on.
 A more advanced use is to import extra services.
 These services need to be defined in the `service.py` file as well.
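
The README's note about defining extra services in `service.py` maps onto the five-argument call convention visible in the `service.py` hunk further down in this diff. The snippet below is a hypothetical extra service following that convention; the import locations for `_eval` and `Literal` are assumptions based on common rdflib layouts and may differ from what `service.py` actually imports.

.. code-block:: python

    # Hypothetical custom service; name and behaviour are purely illustrative.
    import logging

    from rdflib import Literal
    from rdflib.plugins.sparql.evaluate import _eval  # assumed import path


    def uppercase_service(query_results, ctx, part, eval_part, service_configuration):
        """Illustrative service returning the upper-cased form of its first argument."""
        value = str(_eval(part.expr.expr[0], eval_part.forget(ctx, _except=part.expr._vars)))
        logging.info(f"Upper-casing '{value}' via '{service_configuration.namespace}'.")
        query_results.append(eval_part.merge({part.var: Literal(value.upper())}))

A matching `[[services]]` entry in the configuration file would then point `call` at this function name.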

+ 2 - 1
data/configuration.toml

@@ -7,6 +7,7 @@ name = "outliers"
 namespace = "https://ontology.rys.app/dt/function/outlier"
 call = "outlier_service"
 endpoint = "http://127.0.0.1:9090/api/csv/outlier"
+#endpoint = "https://outlier.rys.app/api/csv/outlier"
 timeout = 60 # How many seconds we wait for a result
 
 [[services]]
@@ -15,6 +16,6 @@ namespace = "https://ontology.rys.app/dt/function/example"
 call = "example_service"
 
 [[services]]
-name = "outliers"
+name = "conversion"
 namespace = "https://ontology.rys.app/dt/function/conversion"
 call = "conversion_service"

+ 1 - 1
spendpoint/__init__.py

@@ -1,3 +1,3 @@
 """SPARQL endpoint for ontologies."""
-__version__ = "0.3.0"
+__version__ = "0.4.0"
 __version_info__ = tuple((int(num) if num.isdigit() else num for num in __version__.replace("-", ".", 1).split(".")))

+ 2 - 2
spendpoint/bridge.py

@@ -7,7 +7,7 @@ from typing import Union
 arklog.set_config_logging()
 
 
-def fetch_outliers(file_name: str, column: Union[str, int], iri: str, outlier_service_url: str) -> Graph:
+def fetch_outliers(file_name: str, column: Union[str, int], iri: str, outlier_service_url: str, timeout: int) -> Graph:
     """"""
     try:
         column = column if isinstance(column, int) else int(column)
@@ -16,7 +16,7 @@ def fetch_outliers(file_name: str, column: Union[str, int], iri: str, outlier_se
         raise
     parameters = {"iri": iri, "column" : column, "file" : file_name}
     try:
-        outliers_result = requests.post(outlier_service_url, json=parameters, timeout=60)
+        outliers_result = requests.post(outlier_service_url, json=parameters, timeout=timeout)
         outliers_result.raise_for_status()
     except requests.exceptions.InvalidSchema as e:
         logging.error(f"Invalid schema for '{outlier_service_url}'.")

+ 1 - 1
spendpoint/service.py

@@ -47,7 +47,7 @@ def outlier_service(query_results, ctx, part, eval_part, service_configuration):
     column = str(_eval(part.expr.expr[1], eval_part.forget(ctx, _except=part.expr._vars)))
     iri = str(_eval(part.expr.expr[2], eval_part.forget(ctx, _except=part.expr._vars)))
     logging.info(f"Looking for outlier in '{file_name}' at column '{column}' for '{iri}'.")
-    outlier_graph = fetch_outliers(file_name, column, iri, service_configuration.endpoint)
+    outlier_graph = fetch_outliers(file_name, column, iri, service_configuration.endpoint, service_configuration.timeout)
     for stmt in outlier_graph:
         query_results.append(eval_part.merge({
             part.var: stmt[0],