
Merge branch 'release-0.1.0'

Arkadiusz Ryś committed 2 years ago
Commit 813f87d858

+ 0 - 1
.dockerignore

@@ -3,7 +3,6 @@ __pycache__/
 /.git
 /.idea
 /docs
-/data
 /.dockerignore
 /.gitignore
 /.gitlab-ci.yml

+ 2 - 0
.gitignore

@@ -6,3 +6,5 @@ dist/
 *.log
 
 .idea
+data/rail_image.jpg
+data/rail_image_annotated.jpg

+ 9 - 0
HISTORY.rst

@@ -2,6 +2,15 @@
 History
 =======
 
+0.1.0 (2023-10-11)
+------------------
+* Add octiva rail detection router
+* Add notch filter simulation router
+* Add example router
+* Add template router
+* Add file router
+* Refactor router creation
+
 0.0.1 (2023-09-22)
 ------------------
 * Initial version of data exchange specification implemented.

+ 18 - 2
README.md

@@ -11,23 +11,26 @@ This endpoint expects this context in the form of a json body following a highly
             "<name of artefact port going into the activity>": {
                 "type": "<inline|reference>",
                 "content": <the contents of the artefact going into this port in case it's inlined>,
+                "name": "<the file name>",
                 "encoding": "<the encoding of said artefact>"
             }
         }
     }
 
 Anything between `<` and `>` is to be filled in by the requester.
-Only `inline` type artefacts are supported at the moment.
+Only `inline` and `reference` type artefacts are supported at the moment.
+Which one you should use depends on the activity.
+A good rule to follow is: "If the file type is text-like, use inline."
 
 Barring any errors, mocka will retaliate with a json response in the same vein.
 
-
     {
         "ctrl": "<the name of the control port which should be taken out of the activity>",
         "output": {
             "<the name of the artefact which got generated>": {
                 "type": "<inline|reference>",
                 "content": <the contents of the generated artefact>,
+                "name": "<the file name>",
                 "encoding": "<the encoding of said artefact>"
             }
         }
@@ -47,7 +50,20 @@ Drop into a shell and sing the magic incantation `python3 -m mocka`.
 This will leave you haunted with an endpoint lingering on port `7999` by default.
 From this point onward you're on your own and can perform any request you want.
 
+### Extra dependencies
+
+There are multiple mock routers.
+The `notch` and `octiva` routers require external software to be installed.
+OpenCV in the case of `octiva` and OpenModelica in the case of `notch`.
+Installing the Python `opencv-python` package will do the trick for the first one.
+Good luck with the OpenModelica one.
+
 ## Wishful thinking
 
 It would be nice if this endpoint would support gradual progress updates.
 This would also require the Workflow Enactment Engine to do the same.
+
+The storage backend location is hardcoded. It should probably be configurable instead.
+
+Not all the possible options are available yet when sending data.
+An image, for example, currently always has to be sent as a reference.
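
For concreteness, a minimal sketch of a request following the body format described above, aimed at the template router on the default port. The payload contents and the local URL are illustrative only.

    import requests

    # Illustrative inline artefact following the request body described in the README.
    body = {
        "ctrl": "cin",
        "input": {
            "din": {
                "type": "inline",
                "content": "requirement one\nrequirement two",
                "name": "mock_requirements.txt",
                "encoding": "text/plain",
            }
        }
    }
    # Assumes mocka is listening locally on the default port 7999.
    response = requests.post("http://localhost:7999/template/", json=body)
    print(response.json()["ctrl"])
    print(response.json()["output"]["dout"]["content"])  # original content with "Checked!" appended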

+ 3 - 0
data/configuration.toml

@@ -1,3 +1,6 @@
 [server]
 host = "0.0.0.0"
 port = 7999
+[octiva]
+storage_base_uri = "http://localhost:5000"
+#storage_base_uri = "https://dtb.rys.app"

+ 1 - 0
docs/templates/pyproject.toml

@@ -7,6 +7,7 @@ name = "mocka"
 authors = [
     {name = "Arkadiusz Michał Ryś", email = "Arkadiusz.Michal.Rys@gmail.com"},
     {name = "Lucas Albertins de Lima", email = "lucas.albertinsdelima@uantwerpen.be"},
+    {name = "Rakshit Mittal", email = "rakshit.mittal@uantwerpen.be"},
 ]
 readme = "README.md"
 requires-python = ">=3.9"

+ 1 - 1
mocka/__init__.py

@@ -1,3 +1,3 @@
 """Mock Activity Endpoint."""
-__version__ = "0.0.1"
+__version__ = "0.1.0"
 __version_info__ = tuple((int(num) if num.isdigit() else num for num in __version__.replace("-", ".", 1).split(".")))

+ 27 - 0
mocka/artefact.py

@@ -0,0 +1,27 @@
+from dataclasses import dataclass, fields, Field
+from typing import Tuple
+
+
+@dataclass
+class Artefact:
+    """Artefact passed into or out of a mock activity over a data port."""
+    type: str # TODO Create enum with possibilities or maybe Subclasses (InlineArtefact, ...)
+    content: str
+    name: str
+    encoding: str
+
+    def as_dict(self):
+        """Returns the artefact in a dict format."""
+        cls_fields: Tuple[Field, ...] = fields(self.__class__)
+        return {field.name: getattr(self, field.name) for field in cls_fields}
+
+# def fill_artefact(data: dict) -> Artefact:
+#     """Build an Artefact from a request dictionary."""
+#     type = data.get("type")
+#     content = data.get("content")
+#     encoding = data.get("encoding")
+#     if type and content and encoding:
+#         return Artefact(type, content, data.get("name"), encoding)
+#     raise ValueError
+
+# TODO Add a 'set from file' method. It will fill out the attributes and do the base64 stuff if type is inline
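
A short usage sketch of the dataclass, mirroring how the routers serialise artefacts into the json bodies; the values are made up.

    from mocka.artefact import Artefact

    # Build an inline artefact and serialise it for a request or response body.
    artefact = Artefact(type="inline", content="Don't panic.", name="note.txt", encoding="text/plain")
    assert artefact.as_dict() == {
        "type": "inline",
        "content": "Don't panic.",
        "name": "note.txt",
        "encoding": "text/plain",
    }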

+ 5 - 0
mocka/configuration.py

@@ -1,4 +1,5 @@
 from dataclasses import dataclass
+from typing import Optional
 
 
 @dataclass(init=True, repr=True, order=False, frozen=True)
@@ -6,7 +7,11 @@ class Server:
     host: str
     port: int
 
+@dataclass(init=True, repr=True, order=False, frozen=True)
+class Octiva:
+    storage_base_uri: str
 
 @dataclass(init=True, repr=True, order=False, frozen=True)
 class Configuration:
     server: Server
+    octiva: Optional[Octiva] = None
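
The loader itself is not part of this diff; a minimal sketch of how the new `[octiva]` table from `data/configuration.toml` could map onto these dataclasses, assuming Python 3.11's `tomllib` is available (the project itself only requires 3.9).

    import tomllib  # assumption: Python 3.11+, not required by the project

    from mocka.configuration import Configuration, Octiva, Server

    with open("data/configuration.toml", "rb") as handle:
        raw = tomllib.load(handle)

    # The [octiva] table is optional, matching Configuration.octiva's default of None.
    configuration = Configuration(
        server=Server(**raw["server"]),
        octiva=Octiva(**raw["octiva"]) if "octiva" in raw else None,
    )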

+ 8 - 3
mocka/endpoint.py

@@ -4,7 +4,7 @@ import time
 from typing import Any
 from fastapi import FastAPI, Request, Response
 from fastapi.middleware.cors import CORSMiddleware
-from mocka.router import MockRouter
+from mocka.routers import *
 
 arklog.set_config_logging()
 
@@ -19,8 +19,13 @@ class MockActivityEndpoint(FastAPI):
         self.configuration = configuration
         super().__init__(*args, title=title, description=description, version=version, **kwargs)
         logging.debug(self.description)
-        sparql_router = MockRouter(title=title, description=description, version=version, configuration=configuration)
-        self.include_router(sparql_router)
+
+        self.include_router(ExampleRouter(title=title, description=description, version=version, configuration=configuration))
+        self.include_router(OctivaRouter(title=title, description=description, version=version, configuration=configuration), prefix="/octiva")
+        self.include_router(TemplateRouter(title=title, description=description, version=version, configuration=configuration), prefix="/template")
+        self.include_router(NotchRouter(title=title, description=description, version=version, configuration=configuration), prefix="/notch")
+        self.include_router(FileRouter(title=title, description=description, version=version, configuration=configuration), prefix="/files")
+
         self.add_middleware(
             CORSMiddleware,
             allow_origins=["*"],
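
Each router above ends up behind its listed prefix; a quick sketch using FastAPI's `TestClient`, in the same style as the existing tests (the `query=get` shortcut comes from the example and template routers).

    from fastapi.testclient import TestClient

    from mocka.configuration import Configuration, Server
    from mocka.main import get_application

    client = TestClient(get_application(Configuration(Server("localhost", 8585))))

    # The example router sits at the root, the template router behind /template.
    assert client.get("/", params={"query": "get"}).json() == {"ctrl": "ok"}
    assert client.get("/template/", params={"query": "get"}).json() == {"ctrl": "ok"}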

+ 13 - 0
mocka/routers/__init__.py

@@ -0,0 +1,13 @@
+from mocka.routers.octiva import OctivaRouter
+from mocka.routers.template import TemplateRouter
+from mocka.routers.example import ExampleRouter
+from mocka.routers.notch import NotchRouter
+from mocka.routers.file import FileRouter
+
+__all__ = [
+    "OctivaRouter",
+    "TemplateRouter",
+    "ExampleRouter",
+    "NotchRouter",
+    "FileRouter",
+]

+ 78 - 0
mocka/routers/example.py

@@ -0,0 +1,78 @@
+# https://fastapi.tiangolo.com/tutorial/body-multiple-params/#singular-values-in-body
+from json import JSONDecodeError
+from pathlib import Path
+from typing import Any
+import magic
+import arklog
+from fastapi import APIRouter, Query, Request, Response
+from fastapi.responses import JSONResponse
+import asyncio
+from io import StringIO
+import csv
+
+
+class ExampleRouter(APIRouter):
+    """Example mock activity router that performs a canned requirements check."""
+
+    def __init__(self, *args: Any, title: str, description: str, version: str, configuration, **kwargs: Any):
+        self.title = title
+        self.description = description
+        self.version = version
+        self.configuration = configuration
+        super().__init__(*args, **kwargs)
+
+        @self.get("/")
+        async def root_mock(request: Request, query: str | None = Query(None)) -> Response:
+            """Example request response for a simulated activity."""
+            if query:
+                match query.lower():
+                    case "error":
+                        return JSONResponse(status_code=400, content={"ctrl": "error"})
+                    case "get":
+                        return JSONResponse(status_code=200, content={"ctrl": "ok"})
+            try:
+                body = await request.json()
+            except JSONDecodeError:
+                return JSONResponse(status_code=500, content={"ctrl": "error", "debug": "Could not parse json input."})
+            control = body.get("ctrl")
+            arklog.debug(control)
+            content = body.get("input").get("din").get("content")
+            f = StringIO(content)
+            reader = csv.reader(f, delimiter=',')
+            file_path = Path(__file__).parent.parent.parent / Path("data/mock_requirements.txt")
+            requirements = file_path.read_text() + "\n\nChecked!"
+            mime = magic.Magic(mime=True).from_file(file_path)
+            await asyncio.sleep(3)
+            if list(reader)[5][1] == '0.0004':
+                return JSONResponse(status_code=200, content={
+                    "ctrl": "ok",
+                    "output": {
+                        "dout": {
+                            "type": "inline",
+                            "content": requirements,
+                            "name": file_path.name,
+                            "encoding": mime
+                        }
+                    }
+                })
+            else:
+                return JSONResponse(status_code=200, content={
+                    "ctrl": "notOk",
+                    "output": {
+                        "dout": {
+                            "type": "inline",
+                            "content": requirements,
+                            "name": file_path.name,
+                            "encoding": mime
+                        }
+                    }
+                })
+
+
+        @self.put("/")
+        async def root_put_mock(request: Request, query: str | None = Query(None)) -> Response:
+            return await root_mock(request, query)
+
+        @self.post("/")
+        async def root_post_mock(request: Request, query: str | None = Query(None)) -> Response:
+            return await root_mock(request, query)

+ 26 - 0
mocka/routers/file.py

@@ -0,0 +1,26 @@
+from pathlib import Path
+from typing import Any
+import arklog
+from fastapi import APIRouter, Response
+from fastapi.responses import JSONResponse, StreamingResponse, FileResponse
+
+
+class FileRouter(APIRouter):
+    """Router serving files from the tests directory as a mock storage backend."""
+
+    def __init__(self, *args: Any, title: str, description: str, version: str, configuration, **kwargs: Any):
+        self.title = title
+        self.description = description
+        self.version = version
+        self.configuration = configuration
+        super().__init__(*args, **kwargs)
+
+        @self.get("/{file_path:path}")
+        async def file_access(file_path: str) -> Response:
+            """Return the requested file from the tests directory, or a json 404."""
+            looking_for = Path(__file__).parent.parent.parent / Path("tests") / Path(file_path)
+            if looking_for.exists():
+                arklog.debug(f"Returning mock file {looking_for.name}.")
+                return FileResponse(looking_for)
+            arklog.warning(f"File {looking_for.name} not found.")
+            return JSONResponse(status_code=404, content={"error": f"File {looking_for.name} not found."})
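
A sketch of fetching one of the bundled test fixtures through this router; it assumes mocka is running locally on the default port 7999.

    import requests

    # Paths are resolved relative to the repository's tests/ directory.
    response = requests.get("http://localhost:7999/files/octiva/rails_0.jpg")
    response.raise_for_status()  # a missing file yields a 404 with a json error body
    print(len(response.content), "bytes")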

+ 141 - 0
mocka/routers/notch.py

@@ -0,0 +1,141 @@
+import json
+from json import JSONDecodeError
+from pathlib import Path
+from typing import Any
+import arklog
+import magic
+import requests
+from fastapi import APIRouter, Query, Request, Response
+from fastapi.responses import JSONResponse
+from OMPython import OMCSessionZMQ, ModelicaSystem
+from datetime import datetime
+import struct
+import pyarrow
+from pyarrow import parquet
+
+from mocka.artefact import Artefact
+
+
+def single_simulation_om_python(model_file_path, test_frequency, test_amplitude, sampling_ratio, tolerance, data_cycles, output_stabilisation_time, output):
+    arklog.debug(f"Performing simulation")
+    arklog.debug(f"\t{test_frequency=}")
+    arklog.debug(f"\t{test_amplitude=}")
+    arklog.debug(f"\t{sampling_ratio=}")
+    sampling_frequency = test_frequency * sampling_ratio
+    omc = OMCSessionZMQ()
+    model = ModelicaSystem(str(model_file_path), model_file_path.stem)
+    # model.buildModel("(V_in.v)|(V_out.v)")
+    model.buildModel()
+    model.setSimulationOptions([
+        f"stepSize={1/sampling_frequency}",
+        f"tolerance={tolerance}",
+        f"startTime={output_stabilisation_time}",
+        f"stopTime={output_stabilisation_time + (data_cycles / test_frequency)}",
+    ])
+    model.setParameters([
+        f"Vt={test_amplitude}",
+        f"Ft={test_frequency}",
+    ])
+    model.simulate()
+    samples = model.getSolutions(output)
+    # for fileName in os.listdir():
+    #     if re.search("notchFilter.*", fileName) and fileName != "notchFilter.mo":
+    #         os.remove(fileName)
+    return save_data(samples, test_frequency, test_amplitude, sampling_ratio, model_file_path)
+
+
+def save_data(samples, test_frequency, test_amplitude, sampling_ratio, file_path):
+    time = datetime.timestamp(datetime.now())
+    common_column_metadata = {
+        "test_frequency": struct.pack("d", test_frequency),
+        "test_amplitude": struct.pack("d", test_amplitude),
+        "sampling_ratio": struct.pack("d", sampling_ratio),
+        "time": struct.pack("d", time),
+        "type": b"input"
+    }
+    input_field = pyarrow.field(f"i_{time}", pyarrow.float64(), False, metadata=common_column_metadata)
+    common_column_metadata["type"] = b"output"
+    output_field = pyarrow.field(f"o_{time}", pyarrow.float64(), False, metadata=common_column_metadata)
+    common_column_metadata["type"] = b"time"
+    time_field = pyarrow.field(f"ft_{time}", pyarrow.float64(), False, metadata=common_column_metadata)
+    # TODO In FTGPM terms we don't want to append to existing experiments
+    storage_file_path = Path(__file__).parent.parent.parent / Path("data") / Path(f"{file_path.stem}_results.parquet")
+    if storage_file_path.exists():
+        experiment_data = parquet.read_table(str(storage_file_path))
+        experiment_data = experiment_data.append_column(time_field, [samples[0]]).append_column(output_field, [samples[1]]).append_column(input_field, [samples[2]])
+    else:
+        experiment_data = pyarrow.Table.from_arrays(samples, schema=pyarrow.schema([time_field, output_field, input_field]))
+    parquet.write_table(experiment_data, str(storage_file_path))
+    return storage_file_path
+
+
+class NotchRouter(APIRouter):
+    """"""
+
+    def __init__(self, *args: Any, title: str, description: str, version: str, configuration, **kwargs: Any):
+        self.title = title
+        self.description = description
+        self.version = version
+        self.configuration = configuration
+        super().__init__(*args, **kwargs)
+
+        @self.get("/simulation/")
+        async def simulation_notch(request: Request, query: str | None = Query(None)) -> Response:
+            """Notch simulation request response for a simulated activity."""
+            try:
+                body = await request.json()
+            except JSONDecodeError:
+                return JSONResponse(status_code=500, content={"ctrl": "error", "debug": "Could not parse json input."})
+            control = body.get("ctrl")
+            # The predefined activity expects the control to be on the cin port.
+            if control not in ["cin"]:
+                return JSONResponse(status_code=400, content={"ctrl": "error", "debug": "This activity expects the control flow input on port 'cin'."})
+            body_artefacts = body.get("input", {})
+            model = body_artefacts.get("model")
+            parameters = body_artefacts.get("parameters")
+            if not model or not parameters:
+                return JSONResponse(status_code=400, content={"ctrl": "error", "debug": "Input artefacts are missing."})
+
+            model_file_path = Path(__file__).parent.parent.parent / Path("data") / Path(model.get("name"))
+            model_file_path.write_text(model.get("content"))
+            parsed_parameters = json.loads(parameters.get("content"))
+            frequency = parsed_parameters.get("frequency")
+            amplitude = parsed_parameters.get("amplitude")
+            sampling_ratio = parsed_parameters.get("sampling_ratio")
+            tolerance = parsed_parameters.get("tolerance")
+            data_cycles = parsed_parameters.get("data_cycles")
+            output_stabilisation_time = parsed_parameters.get("output_stabilisation_time")
+            output = parsed_parameters.get("output")
+            arklog.debug(f"{frequency=}")
+            arklog.debug(f"{amplitude=}")
+            arklog.debug(f"{sampling_ratio=}")
+            arklog.debug(f"{tolerance=}")
+            arklog.debug(f"{data_cycles=}")
+            arklog.debug(f"{output_stabilisation_time=}")
+            arklog.debug(f"{output=}")
+            artefact_file_path = single_simulation_om_python(model_file_path, frequency, amplitude, sampling_ratio, tolerance, data_cycles, output_stabilisation_time, output)
+            requests.put(f"http://localhost:5000/files/file/{artefact_file_path.name}", data=artefact_file_path.read_bytes())
+            mime = magic.Magic(mime=True).from_file(artefact_file_path)
+            simulation_artefact = Artefact("reference", f"http://localhost:5000/files/file/{artefact_file_path.name}", artefact_file_path.name, mime)
+            return JSONResponse(status_code=200, content={"ctrl": "ok", "output": {"experiment": simulation_artefact.as_dict()}})
+
+        @self.put("/simulation/")
+        async def simulation_put_notch(request: Request, query: str | None = Query(None)) -> Response:
+            return await simulation_notch(request, query)
+
+        @self.post("/simulation/")
+        async def simulation_post_notch(request: Request, query: str | None = Query(None)) -> Response:
+            return await simulation_notch(request, query)
+
+        @self.get("/validation/")
+        async def validation_notch(request: Request, query: str | None = Query(None)) -> Response:
+            """Notch validation request response for a simulated activity."""
+            raise NotImplementedError
+
+        @self.put("/validation/")
+        async def validation_put_notch(request: Request, query: str | None = Query(None)) -> Response:
+            return await validation_notch(request, query)
+
+        @self.post("/validation/")
+        async def validation_post_notch(request: Request, query: str | None = Query(None)) -> Response:
+            return await validation_notch(request, query)
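
The per-run metadata written by `save_data` can be read back from the parquet schema; a sketch, assuming an earlier simulation of the notchFilter model produced `data/notchFilter_results.parquet`.

    import struct

    from pyarrow import parquet

    table = parquet.read_table("data/notchFilter_results.parquet")
    for field in table.schema:
        metadata = field.metadata or {}
        # Scalar settings were packed as doubles with struct.pack("d", ...).
        frequency = struct.unpack("d", metadata[b"test_frequency"])[0]
        print(field.name, metadata[b"type"].decode(), frequency)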

+ 117 - 0
mocka/routers/octiva.py

@@ -0,0 +1,117 @@
+from json import JSONDecodeError
+
+import cv2 as cv
+import numpy as np
+from pathlib import Path
+from typing import Any
+import arklog
+from fastapi import APIRouter, Query, Request, Response
+from fastapi.responses import JSONResponse
+import requests
+from mocka.artefact import Artefact
+
+
+def find_parallel(lines) -> list:
+    """Find parallel lines and return their indices."""
+    parallel_lines = []
+    # TODO Can probably be done using cross-product/zip
+    for i in range(len(lines)):
+        for j in range(len(lines)):
+            if i == j:
+                # Every line is parallel to itself. We don't want all lines...
+                continue
+            if abs(lines[i][0][1] - lines[j][0][1]) == 0:
+                parallel_lines.append(i)
+    return parallel_lines
+
+
+def rail_finder_algo_one(filename: Path, storage_base_uri: str) -> tuple[Artefact, Artefact]:
+    """Create image with the lines laid over."""
+    source_color_image = cv.imread(cv.samples.findFile(str(filename)), cv.IMREAD_COLOR)
+    source_greyscale_image = cv.cvtColor(source_color_image, cv.COLOR_BGR2GRAY)
+    edge_detected_image = cv.Canny(source_greyscale_image, 50, 200, None, 3)
+    detected_lines = cv.HoughLinesP(edge_detected_image, 1, np.pi / 180, 150, None, 250, 0)
+    parallel_line_indices = find_parallel(detected_lines)
+    # Color found lines red
+    for detected_line in detected_lines:
+        x1, y1, x2, y2 = detected_line[0]
+        cv.line(source_color_image, (x1, y1), (x2, y2), (0, 0, 255), 6, cv.LINE_AA)
+    # Color parallel lines green
+    for parallel_line_index in parallel_line_indices:
+        x1, y1, x2, y2 = detected_lines[parallel_line_index][0]
+        cv.line(source_color_image, (x1, y1), (x2, y2), (0, 255, 0), 6, cv.LINE_AA)
+    # TODO Replace with in memory version
+    temp_image_loc = Path(__file__).parent.parent.parent / Path("data") / Path("rail_image_annotated.jpg")
+    cv.imwrite(str(temp_image_loc), source_color_image)
+    r = requests.put(f"{storage_base_uri}/files/file/{temp_image_loc.name}", data=temp_image_loc.read_bytes())
+    return (
+        Artefact("reference", f"{storage_base_uri}/files/file/{temp_image_loc.name}", f"{filename.stem}_annotated{filename.suffix}", "image/jpg"),
+        Artefact("inline", f"D: {detected_lines}\nP: {parallel_line_indices}", f"{filename.stem}_annotated.txt", "text/plain")
+    )
+
+
+class OctivaRouter(APIRouter):
+    """Router running the octiva rail detection mock activity."""
+
+    def __init__(self, *args: Any, title: str, description: str, version: str, configuration, **kwargs: Any):
+        self.title = title
+        self.description = description
+        self.version = version
+        self.configuration = configuration
+        super().__init__(*args, **kwargs)
+        arklog.warning("DO NOT DEPLOY IN PRODUCTION")
+        arklog.warning("ANY SECURE ENV IS COMPROMISED")
+        arklog.warning("WE USE EVAL TO RUN ARBITRARY USER PROVIDED STRINGS")
+
+        @self.get("/")
+        async def root_mock(request: Request, query: str | None = Query(None)) -> Response:
+            """Detect rails on a referenced image and return an annotated copy plus the line data."""
+            # TODO Change some of this to use buffers instead of files
+            try:
+                body = await request.json()
+            except JSONDecodeError:
+                return JSONResponse(status_code=500, content={"ctrl": "error", "debug": "Could not parse json input."})
+            control = body.get("ctrl")
+            # The predefined activity expects the control to be on the cin or rep port.
+            if control not in ["cin", "rep"]:
+                return JSONResponse(status_code=400, content={"ctrl": "error", "debug": "This activity expects the control flow input on port 'cin' or 'rep'."})
+            body_artefacts = body.get("input", {})
+            rail_image_data = body_artefacts.get("image")
+            algorithm_data = body_artefacts.get("algorithm")
+            if not rail_image_data or not algorithm_data:
+                return JSONResponse(status_code=400, content={"ctrl": "error", "debug": "Input artefacts are missing."})
+            if rail_image_data.get("type") not in ["reference"]:
+                return JSONResponse(status_code=400, content={"ctrl": "error", "debug": "Only reference image file types are implemented at the moment."})
+            # TODO Do this in memory
+            try:
+                image_response = requests.get(rail_image_data.get("content"), allow_redirects=True)
+            except requests.exceptions.ConnectionError as e:
+                return JSONResponse(status_code=400, content={"ctrl": "error", "debug": f"Could not connect to the file storage backend. {e}"})
+            if image_response.status_code != 200:
+                arklog.warning(f"{image_response.status_code=} File was likely not found on the storage backend.")
+                return JSONResponse(status_code=500, content={"ctrl": "error", "debug": f"File not found on storage backend."})
+            temp_image_loc = Path(__file__).parent.parent.parent / Path("data") / Path("rail_image.jpg")
+            temp_image_loc.write_bytes(image_response.content)
+            storage_base_uri = self.configuration.octiva.storage_base_uri
+            try:
+                annotated_image_artefact, data_artefact = rail_finder_algo_one(temp_image_loc, storage_base_uri)
+            except requests.exceptions.ConnectionError as e:
+                arklog.exception(e)
+                return JSONResponse(status_code=500, content={"ctrl": "error", "debug": f"Could not connect to endpoint ('{storage_base_uri}'). {e}"})
+
+            response = {
+                "ctrl": "ok",
+                "output": {
+                    "image": annotated_image_artefact.as_dict(),
+                    "data": data_artefact.as_dict()
+                }
+            }
+            return JSONResponse(status_code=200, content=response)
+
+        @self.put("/")
+        async def root_put_mock(request: Request, query: str | None = Query(None)) -> Response:
+            return await root_mock(request, query)
+
+        @self.post("/")
+        async def root_post_mock(request: Request, query: str | None = Query(None)) -> Response:
+            return await root_mock(request, query)
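
A small sketch of what `find_parallel` sees: HoughLinesP returns one `[[x1, y1, x2, y2]]` entry per segment, and segments sharing the same `y1` are reported as parallel. The coordinates below are made up, and the octiva extra dependencies need to be installed for the import to work.

    import numpy as np

    from mocka.routers.octiva import find_parallel

    # Three segments in HoughLinesP layout; the first two share y1 = 100.
    lines = np.array([
        [[10, 100, 200, 100]],
        [[10, 100, 200, 300]],
        [[10, 250, 200, 250]],
    ])
    print(find_parallel(lines))  # reports indices 0 and 1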

+ 19 - 17
mocka/router.py

@@ -1,14 +1,15 @@
-# https://fastapi.tiangolo.com/tutorial/body-multiple-params/#singular-values-in-body
+from json import JSONDecodeError
 from pathlib import Path
 from typing import Any
 import magic
 import arklog
 from fastapi import APIRouter, Query, Request, Response
 from fastapi.responses import JSONResponse
-import time
 
+from mocka.artefact import Artefact
 
-class MockRouter(APIRouter):
+
+class TemplateRouter(APIRouter):
     """"""
 
     def __init__(self, *args: Any, title: str, description: str, version: str, configuration, **kwargs: Any):
@@ -20,32 +21,33 @@ class MockRouter(APIRouter):
 
         @self.get("/")
         async def root_mock(request: Request, query: str | None = Query(None)) -> Response:
-            """Example request response for a simulated activity."""
+            """Template request response for a simulated activity."""
             if query:
                 match query.lower():
                     case "error":
                         return JSONResponse(status_code=400, content={"ctrl": "error"})
                     case "get":
                         return JSONResponse(status_code=200, content={"ctrl": "ok"})
-
-            body = await request.json()
+            try:
+                body = await request.json()
+            except JSONDecodeError:
+                return JSONResponse(status_code=500, content={"ctrl": "error", "debug": "Could not parse json input."})
+            # Check that we were given all the data we need so we can abort early if not.
             control = body.get("ctrl")
             arklog.debug(control)
             content = body.get("input").get("din").get("content")
-            file_path = Path(__file__).parent.parent / Path("data/mock_requirements.txt")
-            requirements = file_path.read_text() + "\n\nChecked!"
-            mime = magic.Magic(mime=True).from_file(file_path)
-            #time.sleep(5)
-            assert content + "\n\nChecked!" == requirements
+            name = body.get("input").get("din").get("name")
+            mime_type = body.get("input").get("din").get("encoding")
+
+            # The example flow uses a mock requirements file.
+            # This activity takes the incoming requirements artefact, appends "Checked!" and sends it back.
+            requirements = content + "\n\nChecked!"
+            return_artefact = Artefact("inline", requirements, name, mime_type)
+
             return JSONResponse(status_code=200, content={
                 "ctrl": "ok",
                 "output": {
-                    "dout": {
-                        "type": "inline",
-                        "content": requirements,
-                        "name": file_path.name,
-                        "encoding": mime
-                    }
+                    "dout": return_artefact.as_dict()
                 }
             })
 

+ 2 - 0
pyproject.toml

@@ -7,6 +7,7 @@ name = "mocka"
 authors = [
     {name = "Arkadiusz Michał Ryś", email = "Arkadiusz.Michal.Rys@gmail.com"},
     {name = "Lucas Albertins de Lima", email = "lucas.albertinsdelima@uantwerpen.be"},
+    {name = "Rakshit Mittal", email = "rakshit.mittal@uantwerpen.be"},
 ]
 readme = "README.md"
 requires-python = ">=3.9"
@@ -45,6 +46,7 @@ dev = [
     "pip~=23.2.1",
     "flit~=3.9.0",
     "twine~=4.0.2",
+    "vermin~=1.5.1",
     "numpy~=1.26.0",
     "invoke~=2.2.0",
     "jinja2~=3.1.2",

+ 4 - 0
requirements.txt

@@ -7,7 +7,10 @@ pyarrow           ~= 13.0.0
 requests          ~= 2.31.0
 starlette         ~= 0.27.0
 python-magic      ~= 0.4.27
+opencv-python     ~= 4.8.0.76 # Octiva
 uvicorn[standard] ~= 0.23.2
+#pydelica         ~= 0.4.5   # VaFL Notch
+ompython          ~= 3.4.0   # VaFL Notch
 # Test
 httpx  ~= 0.25.0
 pytest ~= 7.4.0
@@ -18,6 +21,7 @@ tox      ~= 4.11.3
 pip      ~= 23.2.1
 flit     ~= 3.9.0
 twine    ~= 4.0.2
+vermin   ~= 1.5.1
 numpy    ~= 1.26.0
 invoke   ~= 2.2.0
 jinja2   ~= 3.1.2

+ 10 - 5
tasks.py

@@ -4,6 +4,9 @@ from invoke import task
 from jinja2 import Template
 
 system = "mocka"  # Directory name of the project
+main_branch = "main"  # The release branch on origin
+dev_branch = "dev"  # The main development branch on origin
+
 
 @task
 def lint(c):
@@ -44,8 +47,10 @@ def test(c):
 @task
 def coverage(c):
     """Run coverage from the 'tests' directory."""
-    c.run("coverage run --source . -m unittest discover tests 'test_*' -v")
-    c.run("coverage html")
+    c.run("python3 -m coverage erase")
+    c.run("python3 -m coverage run --source . -m unittest discover tests 'test_*' -v")
+    c.run("python3 -m coverage report -m")
+    c.run("python3 -m coverage html")
 
 
 @task
@@ -96,18 +101,18 @@ def release(c, version):
         _minor = 0
         _patch = 0
 
-    c.run(f"git checkout -b release-{_major}.{_minor}.{_patch} dev")
+    c.run(f"git checkout -b release-{_major}.{_minor}.{_patch} {dev_branch}")
     c.run(f"sed -i 's/{__version__}/{_major}.{_minor}.{_patch}/g' {system}/__init__.py")
     print(f"Update the readme for version {_major}.{_minor}.{_patch}.")
     input("Press enter when ready.")
     c.run(f"git add -u")
     c.run(f'git commit -m "Update changelog version {_major}.{_minor}.{_patch}"')
     c.run(f"git push --set-upstream origin release-{_major}.{_minor}.{_patch}")
-    c.run(f"git checkout main")
+    c.run(f"git checkout {main_branch}")
     c.run(f"git merge --no-ff release-{_major}.{_minor}.{_patch}")
     c.run(f'git tag -a {_major}.{_minor}.{_patch} -m "Release {_major}.{_minor}.{_patch}"')
     c.run(f"git push")
-    c.run(f"git checkout dev")
+    c.run(f"git checkout {dev_branch}")
     c.run(f"git merge --no-ff release-{_major}.{_minor}.{_patch}")
     c.run(f"git push")
     c.run(f"git branch -d release-{_major}.{_minor}.{_patch}")

+ 61 - 0
tests/notch/notchFilter.mo

@@ -0,0 +1,61 @@
+model notchFilter
+// Parameters
+  parameter Modelica.Units.SI.PotentialDifference Vt "testAmplitude";
+  parameter Modelica.Units.SI.Frequency Ft "testFrequency";
+// Components
+  Modelica.Electrical.Analog.Basic.Ground ground_in annotation(
+    Placement(visible = true, transformation(origin = {-80, -30}, extent = {{-10, -10}, {10, 10}}, rotation = 0)));
+  Modelica.Electrical.Analog.Basic.Ground ground annotation(
+    Placement(transformation(origin = {28, -18}, extent = {{-10, -10}, {10, 10}})));
+  Modelica.Electrical.Analog.Basic.Ground ground_out annotation(
+    Placement(visible = true, transformation(origin = {80, -30}, extent = {{-10, -10}, {10, 10}}, rotation = 0)));
+  Modelica.Electrical.Analog.Basic.Capacitor C1(C(displayUnit = "nF") = 1e-9, v(start = 0)) annotation(
+    Placement(visible = true, transformation(origin = {-20, -40}, extent = {{-10, -10}, {10, 10}}, rotation = 0)));
+  Modelica.Electrical.Analog.Basic.Capacitor C2(C(displayUnit = "pF") = 1e-9, v(start = 0)) annotation(
+    Placement(visible = true, transformation(origin = {20, -40}, extent = {{-10, -10}, {10, 10}}, rotation = 0)));
+  Modelica.Electrical.Analog.Basic.Capacitor C3(C(displayUnit = "pF") = 2e-9, v(start = 0)) annotation(
+    Placement(visible = true, transformation(origin = {0, 20}, extent = {{-10, -10}, {10, 10}}, rotation = -90)));
+  Modelica.Electrical.Analog.Basic.Resistor R1(R(displayUnit = "kOhm") = 1600) annotation(
+    Placement(visible = true, transformation(origin = {-20, 40}, extent = {{-10, -10}, {10, 10}}, rotation = 0)));
+  Modelica.Electrical.Analog.Basic.Resistor R2(R(displayUnit = "kOhm") = 1600) annotation(
+    Placement(visible = true, transformation(origin = {20, 40}, extent = {{-10, -10}, {10, 10}}, rotation = 0)));
+  Modelica.Electrical.Analog.Basic.Resistor R3a(R(displayUnit = "kOhm") = 1600) annotation(
+    Placement(transformation(origin = {12, -18}, extent = {{-10, -10}, {10, 10}}, rotation = -90)));
+  Modelica.Electrical.Analog.Basic.Resistor R3b(R(displayUnit = "kOhm") = 1600) annotation(
+    Placement(transformation(origin = {-12, -18}, extent = {{-10, -10}, {10, 10}}, rotation = -90)));
+  Modelica.Electrical.Analog.Sensors.VoltageSensor V_out annotation(
+    Placement(visible = true, transformation(origin = {60, 0}, extent = {{-10, -10}, {10, 10}}, rotation = 0)));
+  Modelica.Electrical.Analog.Sources.SineVoltage V_in(V(displayUnit = "V") = Vt, f(displayUnit = "Hz") = Ft) annotation(
+    Placement(visible = true, transformation(origin = {-60, 0}, extent = {{10, -10}, {-10, 10}}, rotation = 0)));
+// Equations
+  equation
+  connect(C3.n, R3a.p) annotation(
+    Line(points = {{0, 10}, {0, -1}, {12, -1}, {12, -8}}, color = {0, 0, 255}));
+  connect(V_out.p, C2.n) annotation(
+    Line(points = {{50, 0}, {40, 0}, {40, -40}, {30, -40}}, color = {0, 0, 255}));
+  connect(R1.n, R2.p) annotation(
+    Line(points = {{-10, 40}, {10, 40}}, color = {0, 0, 255}));
+  connect(C3.p, R2.p) annotation(
+    Line(points = {{0, 30}, {0, 40}, {10, 40}}, color = {0, 0, 255}));
+  connect(V_out.n, ground_out.p) annotation(
+    Line(points = {{70, 0}, {80, 0}, {80, -20}}, color = {0, 0, 255}));
+  connect(V_in.p, C1.p) annotation(
+    Line(points = {{-50, 0}, {-40, 0}, {-40, -40}, {-30, -40}}, color = {0, 0, 255}));
+  connect(R2.n, C2.n) annotation(
+    Line(points = {{30, 40}, {40, 40}, {40, -40}, {30, -40}}, color = {0, 0, 255}));
+  connect(V_in.n, ground_in.p) annotation(
+    Line(points = {{-70, 0}, {-80, 0}, {-80, -20}}, color = {0, 0, 255}));
+  connect(R1.p, C1.p) annotation(
+    Line(points = {{-30, 40}, {-40, 40}, {-40, -40}, {-30, -40}}, color = {0, 0, 255}));
+  connect(R3b.p, C3.n) annotation(
+    Line(points = {{-12, -8}, {-12, -1}, {0, -1}, {0, 10}}, color = {0, 0, 255}));
+  connect(C1.n, C2.p) annotation(
+    Line(points = {{-10, -40}, {10, -40}}, color = {0, 0, 255}));
+  connect(R3a.n, C2.p) annotation(
+    Line(points = {{12, -28}, {12, -32}, {0, -32}, {0, -40}, {10, -40}}, color = {0, 0, 255}));
+  connect(R3b.n, C1.n) annotation(
+    Line(points = {{-12, -28}, {-12, -32}, {0, -32}, {0, -40}, {-10, -40}}, color = {0, 0, 255}));
+  connect(ground.p, C3.n) annotation(
+    Line(points = {{28, -8}, {28, -1}, {0, -1}, {0, 10}}, color = {0, 0, 255}));
+  annotation(uses(Modelica(version = "4.0.0")), Diagram);
+end notchFilter;

BIN
tests/octiva/rails.psd


BIN
tests/octiva/rails_0.jpg


BIN
tests/octiva/rails_1.jpg


data/mock_requirements.txt → tests/template/mock_requirements.txt


+ 14 - 0
tests/template/test_file.py

@@ -0,0 +1,14 @@
+from pathlib import Path
+from fastapi.testclient import TestClient
+from mocka.main import get_application
+from mocka.configuration import Configuration, Server
+
+client = TestClient(get_application(Configuration(Server("localhost", 8585))))
+
+
+def test_get_file():
+    file_path = Path("octiva/rails_0.jpg")
+    response = client.get(f"/files/{file_path}")
+    assert response.status_code == 200
+    # temp_image_loc = Path(__file__).parent / Path("rail_image_test.jpg")
+    # temp_image_loc.write_bytes(response.content)

+ 6 - 0
tests/test_artefact.py

@@ -0,0 +1,6 @@
+from mocka.artefact import Artefact
+
+
+def test_artefact():
+    artefact = Artefact("inline", "Don't be evil!", "motto.txt", "text/plain")
+    assert artefact.as_dict() == {'type': 'inline', 'content': "Don't be evil!", 'name': 'motto.txt', 'encoding': 'text/plain'}

+ 49 - 0
tests/test_notch.py

@@ -0,0 +1,49 @@
+import json
+from pathlib import Path
+
+import magic
+from fastapi.testclient import TestClient
+from mocka.main import get_application
+from mocka.configuration import Configuration, Server
+from mocka.artefact import Artefact
+
+client = TestClient(get_application(Configuration(Server("localhost", 8585))))
+
+
+def test_post_notch():
+    notch_model_file_path = Path(__file__).parent / Path("notch/notchFilter.mo")
+    mime = magic.Magic(mime=True).from_file(notch_model_file_path)
+    model_artefact = Artefact("inline", notch_model_file_path.read_text(), notch_model_file_path.name, mime)
+
+    parameters = {
+        "frequency": 500,
+        "amplitude": 1,
+        "sampling_ratio": 100,
+        "tolerance": 1e-9,
+        "data_cycles": 2,
+        "output_stabilisation_time": 1.0,
+        "output": ["time", "V_out.v","V_in.v"],
+    }
+    parameters_json = json.dumps(parameters, indent=4)
+    parameters_artefact = Artefact("inline", parameters_json, "parameters.json", "application/json")
+    mock_input = {
+        "ctrl": "cin",
+        "input": {
+            "model": model_artefact.as_dict(),
+            "parameters": parameters_artefact.as_dict(),
+        }
+    }
+    response = client.post("/notch/simulation/", json=mock_input)
+    assert response.status_code == 200
+    # orig = base64.b64decode(rail_image_data.get("content"))
+    # with temp_image_loc.open("wb") as f_output:
+    #     f_output.write(orig)
+    # annotated_image_artefact, data_artefact = rail_finder_algo_one(str(temp_image_loc))
+
+    # data_artefact = Artefact("inline", "", "", "text/plain")
+    # assert response.json() ==  {
+    #     "ctrl": "ok",
+    #     "output": {
+    #         "data": data_artefact.as_dict(),
+    #     }
+    # }

+ 46 - 0
tests/test_octiva.py

@@ -0,0 +1,46 @@
+import arklog
+from pathlib import Path
+
+import requests
+from fastapi.testclient import TestClient
+from mocka.main import get_application
+from mocka.configuration import Configuration, Server
+from mocka.artefact import Artefact
+
+client = TestClient(get_application(Configuration(Server("localhost", 8585))))
+
+
+def test_post_octiva():
+    # NOTE Needs the backend to be running to work... :(
+    image_file_path = Path("octiva/rails_0.jpg")
+    image_file_uri = f"http://localhost:5000/files/file/{image_file_path.name}"
+
+    # Send the file to the endpoint as it might not be there yet.
+    full_image_file_path = Path(__file__).parent / Path("octiva/rails_0.jpg")
+    r = requests.put(f"http://localhost:5000/files/file/{image_file_path.name}", data=full_image_file_path.read_bytes())
+    arklog.debug(r.status_code)
+
+    image_artefact = Artefact("reference", image_file_uri, image_file_path.name, "image/jpg")
+    algo_artefact = Artefact("inline", f"print('hello')", "algorithm.py", "text/plain")
+    mock_input = {
+        "ctrl": "cin",
+        "input": {
+            "image": image_artefact.as_dict(),
+            "algorithm": algo_artefact.as_dict(),
+        }
+    }
+    try:
+        response = client.post("/octiva/", json=mock_input)
+    except requests.exceptions.ConnectionError as e:
+        raise RuntimeError(f"{e} Please run the DTD backend when doing these tests.")
+    assert response.status_code == 200
+    annotated_image_artefact = Artefact("reference", f"http://localhost:5000/files/file/rail_image_annotated.jpg", "rail_image_annotated.jpg", "image/jpg")
+    expected_data_content =  "D: [[[108 847 108   0]]\n\n [[477 847 477   0]]]\nP: [0, 1]"
+    data_artefact = Artefact("inline", expected_data_content, "rail_image_annotated.txt", "text/plain")
+    assert response.json() ==  {
+        "ctrl": "ok",
+        "output": {
+            "image": annotated_image_artefact.as_dict(),
+            "data": data_artefact.as_dict(),
+        }
+    }

+ 13 - 17
tests/test_mock.py

@@ -1,10 +1,12 @@
 from pathlib import Path
 
+import magic
 from fastapi.testclient import TestClient
+
+from mocka.artefact import Artefact
 from mocka.main import get_application
 from mocka.configuration import Configuration, Server
 
-
 client = TestClient(get_application(Configuration(Server("localhost", 8585))))
 
 
@@ -19,31 +21,25 @@ client = TestClient(get_application(Configuration(Server("localhost", 8585))))
 #                 }
 #             }
 
-def test_post_main():
-    file_path = Path(__file__).parent.parent / Path("data/mock_requirements.txt")
+def test_post_template():
+    file_path = Path(__file__).parent / Path("template/mock_requirements.txt")
     requirements = file_path.read_text()
+    mime = magic.Magic(mime=True).from_file(file_path)
+    input_artefact = Artefact("inline", requirements, file_path.name, mime)
+
     mock_input = {
         "ctrl": "cin",
         "input": {
-            "din": {
-                "type": "inline",
-                "content": requirements,
-                "encoding": "text/plain"
-            }
+            "din": input_artefact.as_dict()
         }
     }
-    response = client.post("/", json=mock_input)
+    response = client.post("/template/", json=mock_input)
     assert response.status_code == 200
+
+    expected_artefact = Artefact("inline", requirements + "\n\nChecked!", file_path.name, mime)
     assert response.json() == {
         "ctrl": "ok",
         "output": {
-            "dout": {
-                "type": "inline",
-                "content": requirements + "\n\nChecked!",
-                "name": file_path.name,
-                "encoding": "text/plain"
-            }
+            "dout": expected_artefact.as_dict()
         }
     }
-
-