diff --git a/docs/notebooks/multi_obj_with_constraints.ipynb b/docs/notebooks/multi_obj_with_constraints.ipynb
index 13207d3..fdfb0fd 100644
--- a/docs/notebooks/multi_obj_with_constraints.ipynb
+++ b/docs/notebooks/multi_obj_with_constraints.ipynb
@@ -14,7 +14,7 @@
   },
   {
    "cell_type": "code",
-   "execution_count": null,
+   "execution_count": 1,
    "id": "4c8da843-670c-4f07-bd66-471ec19d3601",
    "metadata": {
     "tags": []
@@ -38,7 +38,7 @@
   },
   {
    "cell_type": "code",
-   "execution_count": null,
+   "execution_count": 2,
    "id": "311c88fa-f757-44f3-8ae5-555f715fc1b4",
    "metadata": {},
    "outputs": [],
@@ -77,10 +77,31 @@
   },
   {
    "cell_type": "code",
-   "execution_count": null,
+   "execution_count": 4,
    "id": "c0dc70de-f14f-42a4-9202-0e4777d33bec",
    "metadata": {},
-   "outputs": [],
+   "outputs": [
+    {
+     "name": "stdout",
+     "output_type": "stream",
+     "text": [
+      "Set parameter WLSAccessID\n",
+      "Set parameter WLSSecret\n",
+      "Set parameter LicenseID to value 2512524\n",
+      "Academic license 2512524 - for non-commercial use only - registered to t.___@imperial.ac.uk\n"
+     ]
+    },
+    {
+     "data": {
+      "text/plain": [
+       "<gurobi.Constr *Awaiting Model Update*>"
+      ]
+     },
+     "execution_count": 4,
+     "metadata": {},
+     "output_type": "execute_result"
+    }
+   ],
    "source": [
     "# get optimization model\n",
     "model_gur = problem_config.get_gurobi_model_core()\n",
@@ -103,7 +124,7 @@
   },
   {
    "cell_type": "code",
-   "execution_count": null,
+   "execution_count": 5,
    "id": "61a8e860-92ee-4535-ad73-9b2acf132ad8",
    "metadata": {},
    "outputs": [],
@@ -121,7 +142,7 @@
   },
   {
    "cell_type": "code",
-   "execution_count": null,
+   "execution_count": 6,
    "id": "15bc6c5d-011f-4c57-8af0-4da9704af41e",
    "metadata": {},
    "outputs": [],
@@ -133,20 +154,120 @@
   },
   {
    "cell_type": "code",
-   "execution_count": null,
+   "execution_count": 7,
    "id": "e987e159-d855-4476-b7f8-3a041ab45d5f",
    "metadata": {},
-   "outputs": [],
+   "outputs": [
+    {
+     "name": "stdout",
+     "output_type": "stream",
+     "text": [
+      "Set parameter MIPGap to value 0\n",
+      "Set parameter NonConvex to value 2\n",
+      "Gurobi Optimizer version 10.0.3 build v10.0.3rc0 (win64)\n",
+      "\n",
+      "CPU model: 11th Gen Intel(R) Core(TM) i7-1165G7 @ 2.80GHz, instruction set [SSE2|AVX|AVX2|AVX512]\n",
+      "Thread count: 4 physical cores, 8 logical processors, using up to 8 threads\n",
+      "\n",
+      "Academic license 2512524 - for non-commercial use only - registered to t.___@imperial.ac.uk\n",
+      "Optimize a model with 3172 rows, 1828 columns and 11438 nonzeros\n",
+      "Model fingerprint: 0xa6d043a3\n",
+      "Model has 100 SOS constraints\n",
+      "Variable types: 1803 continuous, 25 integer (24 binary)\n",
+      "Coefficient statistics:\n",
+      "  Matrix range     [5e-08, 2e+04]\n",
+      "  Objective range  [1e+00, 2e+00]\n",
+      "  Bounds range     [1e+00, 6e+00]\n",
+      "  RHS range        [2e-04, 8e+03]\n",
+      "Presolve removed 367 rows and 248 columns\n",
+      "Presolve time: 0.12s\n",
+      "Presolved: 2805 rows, 1580 columns, 9988 nonzeros\n",
+      "Presolved model has 94 SOS constraint(s)\n",
+      "Variable types: 1557 continuous, 23 integer (23 binary)\n",
+      "\n",
+      "Root relaxation: unbounded, 647 iterations, 0.04 seconds (0.02 work units)\n",
+      "\n",
+      "    Nodes    |    Current Node    |     Objective Bounds      |     Work\n",
+      " Expl Unexpl |  Obj  Depth IntInf | Incumbent    BestBd   Gap | It/Node Time\n",
+      "\n",
+      "     0     0  postponed    0               -          -      -     -    0s\n",
+      "     0     0  postponed    0               -          -      -     -    0s\n",
+      "     0     2  postponed    0               -          -      -     -    0s\n",
+      "H   32    25                      -0.2625355          -      -   421    1s\n",
+      "H   34    25                      -0.3766411          -      -   397    1s\n",
+      "\n",
+      "Explored 145 nodes (22173 simplex iterations) in 1.69 seconds (1.08 work units)\n",
+      "Thread count was 8 (of 8 available processors)\n",
+      "\n",
+      "Solution count 2: -0.376641 -0.262536 \n",
+      "\n",
+      "Optimal solution found (tolerance 0.00e+00)\n",
+      "Best objective -3.766411305508e-01, best bound -3.766411305508e-01, gap 0.0000%\n"
+     ]
+    }
+   ],
    "source": [
     "res_gur = opt_gur.solve(enting, model_core=model_gur)"
    ]
   },
   {
    "cell_type": "code",
-   "execution_count": null,
+   "execution_count": 8,
    "id": "4ae2f026-dded-4376-866b-6ca34adc5bf9",
    "metadata": {},
-   "outputs": [],
+   "outputs": [
+    {
+     "name": "stdout",
+     "output_type": "stream",
+     "text": [
+      "Set parameter MIPGap to value 0\n",
+      "Set parameter NonConvex to value 2\n",
+      "Gurobi Optimizer version 10.0.3 build v10.0.3rc0 (win64)\n",
+      "\n",
+      "CPU model: 11th Gen Intel(R) Core(TM) i7-1165G7 @ 2.80GHz, instruction set [SSE2|AVX|AVX2|AVX512]\n",
+      "Thread count: 4 physical cores, 8 logical processors, using up to 8 threads\n",
+      "\n",
+      "Academic license 2512524 - for non-commercial use only - registered to t.___@imperial.ac.uk\n",
+      "Optimize a model with 3172 rows, 1828 columns and 11438 nonzeros\n",
+      "Model fingerprint: 0x7f777cf7\n",
+      "Model has 100 SOS constraints\n",
+      "Variable types: 1803 continuous, 25 integer (24 binary)\n",
+      "Coefficient statistics:\n",
+      "  Matrix range     [5e-08, 2e+04]\n",
+      "  Objective range  [1e+00, 2e+00]\n",
+      "  Bounds range     [1e+00, 6e+00]\n",
+      "  RHS range        [2e-04, 8e+03]\n",
+      "Presolve removed 367 rows and 248 columns\n",
+      "Presolve time: 0.07s\n",
+      "Presolved: 2805 rows, 1580 columns, 9984 nonzeros\n",
+      "Presolved model has 94 SOS constraint(s)\n",
+      "Variable types: 1557 continuous, 23 integer (23 binary)\n",
+      "\n",
+      "Root relaxation: unbounded, 862 iterations, 0.06 seconds (0.05 work units)\n",
+      "\n",
+      "    Nodes    |    Current Node    |     Objective Bounds      |     Work\n",
+      " Expl Unexpl |  Obj  Depth IntInf | Incumbent    BestBd   Gap | It/Node Time\n",
+      "\n",
+      "     0     0  postponed    0               -          -      -     -    0s\n",
+      "     0     0  postponed    0               -          -      -     -    0s\n",
+      "     0     2  postponed    0               -          -      -     -    0s\n",
+      "H   35    31                      -0.1298538          -      -   261    0s\n",
+      "H   42    31                      -0.3409819          -      -   218    0s\n",
+      "H   72    30                      -0.3462866          -      -   138    1s\n",
+      "H  110    18                      -0.3557482          -      -   138    1s\n",
+      "H  125    16                      -0.4010824          -      -   146    1s\n",
+      "H  144     7                      -0.4035719   -0.79918  98.0%   144    1s\n",
+      "\n",
+      "Explored 167 nodes (25805 simplex iterations) in 1.47 seconds (1.03 work units)\n",
+      "Thread count was 8 (of 8 available processors)\n",
+      "\n",
+      "Solution count 6: -0.403572 -0.401082 -0.355748 ... -0.129854\n",
+      "\n",
+      "Optimal solution found (tolerance 0.00e+00)\n",
+      "Best objective -4.035719394214e-01, best bound -4.035719394214e-01, gap 0.0000%\n"
+     ]
+    }
+   ],
    "source": [
     "# Build GurobiOptimizer object and solve optimization problem\n",
     "params_gurobi = {\"MIPGap\": 0}\n",
@@ -158,6 +279,29 @@
     "assert round(x_opt, 5) == round(y_opt, 5) and round(y_opt, 5) == round(z_opt, 5)"
    ]
   },
+  {
+   "cell_type": "code",
+   "execution_count": 14,
+   "id": "f6e5a95f",
+   "metadata": {},
+   "outputs": [
+    {
+     "ename": "IndexError",
+     "evalue": "list index out of range",
+     "output_type": "error",
+     "traceback": [
+      "\u001b[1;31m---------------------------------------------------------------------------\u001b[0m",
+      "\u001b[1;31mIndexError\u001b[0m                                Traceback (most recent call last)",
+      "Cell \u001b[1;32mIn[14], line 3\u001b[0m\n\u001b[0;32m      1\u001b[0m al \u001b[38;5;241m=\u001b[39m opt_gur\u001b[38;5;241m.\u001b[39m_active_leaves\n\u001b[0;32m      2\u001b[0m \u001b[38;5;66;03m# list[tuple[int, str]]\u001b[39;00m\n\u001b[1;32m----> 3\u001b[0m \u001b[43mal\u001b[49m\u001b[43m[\u001b[49m\u001b[38;5;241;43m2\u001b[39;49m\u001b[43m]\u001b[49m\n",
+      "\u001b[1;31mIndexError\u001b[0m: list index out of range"
+     ]
+    }
+   ],
+   "source": [
+    "al = opt_gur._active_leaves\n",
+    "# list[tuple[int, str]]\n"
+   ]
+  },
   {
    "cell_type": "markdown",
    "id": "24b3d335-d601-41c1-ad46-d80949c0cfcc",
@@ -228,7 +372,7 @@
    "name": "python",
    "nbconvert_exporter": "python",
    "pygments_lexer": "ipython3",
-   "version": "3.10.13"
+   "version": "3.10.11"
   }
  },
  "nbformat": 4,
diff --git a/docs/notebooks/single_obj_maximisation.ipynb b/docs/notebooks/single_obj_maximisation.ipynb
index ad30a95..8421ddf 100644
--- a/docs/notebooks/single_obj_maximisation.ipynb
+++ b/docs/notebooks/single_obj_maximisation.ipynb
@@ -11,7 +11,7 @@
   },
   {
    "cell_type": "code",
-   "execution_count": null,
+   "execution_count": 1,
    "metadata": {},
    "outputs": [],
    "source": [
@@ -21,7 +21,7 @@
   },
   {
    "cell_type": "code",
-   "execution_count": null,
+   "execution_count": 2,
    "metadata": {},
    "outputs": [],
    "source": [
@@ -39,7 +39,7 @@
   },
   {
    "cell_type": "code",
-   "execution_count": null,
+   "execution_count": 3,
    "metadata": {},
    "outputs": [],
    "source": [
@@ -58,16 +58,46 @@
   },
   {
    "cell_type": "code",
-   "execution_count": null,
+   "execution_count": 10,
    "metadata": {},
-   "outputs": [],
+   "outputs": [
+    {
+     "name": "stdout",
+     "output_type": "stream",
+     "text": [
+      "WARNING: The gurobipy module (an optional Pyomo dependency) failed to import:\n",
+      "NameError: name 'GurobiDirect' is not defined\n"
+     ]
+    },
+    {
+     "ename": "ApplicationError",
+     "evalue": "No Python bindings available for <class 'pyomo.solvers.plugins.solvers.gurobi_direct.GurobiDirect'> solver plugin",
+     "output_type": "error",
+     "traceback": [
+      "\u001b[1;31m---------------------------------------------------------------------------\u001b[0m",
+      "\u001b[1;31mApplicationError\u001b[0m                          Traceback (most recent call last)",
+      "Cell \u001b[1;32mIn[10], line 4\u001b[0m\n\u001b[0;32m      1\u001b[0m params_pyomo \u001b[38;5;241m=\u001b[39m {\u001b[38;5;124m\"\u001b[39m\u001b[38;5;124msolver_name\u001b[39m\u001b[38;5;124m\"\u001b[39m: \u001b[38;5;124m\"\u001b[39m\u001b[38;5;124mgurobi_direct\u001b[39m\u001b[38;5;124m\"\u001b[39m}\n\u001b[0;32m      2\u001b[0m opt_pyo \u001b[38;5;241m=\u001b[39m PyomoOptimizer(problem_config, params\u001b[38;5;241m=\u001b[39mparams_pyomo)\n\u001b[1;32m----> 4\u001b[0m res_pyo \u001b[38;5;241m=\u001b[39m \u001b[43mopt_pyo\u001b[49m\u001b[38;5;241;43m.\u001b[39;49m\u001b[43msolve\u001b[49m\u001b[43m(\u001b[49m\u001b[43menting\u001b[49m\u001b[43m)\u001b[49m\n\u001b[0;32m      5\u001b[0m res_pyo\n",
+      "File \u001b[1;32m~\\phd\\entmoot\\entmoot\\optimizers\\pyomo_opt.py:104\u001b[0m, in \u001b[0;36mPyomoOptimizer.solve\u001b[1;34m(self, tree_model, model_core, weights)\u001b[0m\n\u001b[0;32m    102\u001b[0m \u001b[38;5;66;03m# Solve optimization model\u001b[39;00m\n\u001b[0;32m    103\u001b[0m verbose \u001b[38;5;241m=\u001b[39m \u001b[38;5;28mself\u001b[39m\u001b[38;5;241m.\u001b[39m_params\u001b[38;5;241m.\u001b[39mget(\u001b[38;5;124m\"\u001b[39m\u001b[38;5;124mverbose\u001b[39m\u001b[38;5;124m\"\u001b[39m, \u001b[38;5;28;01mTrue\u001b[39;00m)\n\u001b[1;32m--> 104\u001b[0m \u001b[43mopt\u001b[49m\u001b[38;5;241;43m.\u001b[39;49m\u001b[43msolve\u001b[49m\u001b[43m(\u001b[49m\u001b[43mopt_model\u001b[49m\u001b[43m,\u001b[49m\u001b[43m \u001b[49m\u001b[43mtee\u001b[49m\u001b[38;5;241;43m=\u001b[39;49m\u001b[43mverbose\u001b[49m\u001b[43m)\u001b[49m\n\u001b[0;32m    106\u001b[0m \u001b[38;5;66;03m# update current solution\u001b[39;00m\n\u001b[0;32m    107\u001b[0m \u001b[38;5;28mself\u001b[39m\u001b[38;5;241m.\u001b[39m_curr_sol, \u001b[38;5;28mself\u001b[39m\u001b[38;5;241m.\u001b[39m_active_leaves \u001b[38;5;241m=\u001b[39m \u001b[38;5;28mself\u001b[39m\u001b[38;5;241m.\u001b[39m_get_sol(opt_model)\n",
+      "File \u001b[1;32mc:\\Users\\tobyb\\phd\\phdvenv\\lib\\site-packages\\pyomo\\solvers\\plugins\\solvers\\direct_solver.py:75\u001b[0m, in \u001b[0;36mDirectSolver.solve\u001b[1;34m(self, *args, **kwds)\u001b[0m\n\u001b[0;32m     72\u001b[0m \u001b[38;5;28;01mdef\u001b[39;00m \u001b[38;5;21msolve\u001b[39m(\u001b[38;5;28mself\u001b[39m, \u001b[38;5;241m*\u001b[39margs, \u001b[38;5;241m*\u001b[39m\u001b[38;5;241m*\u001b[39mkwds):\n\u001b[0;32m     73\u001b[0m \u001b[38;5;250m    \u001b[39m\u001b[38;5;124;03m\"\"\"Solve the problem\"\"\"\u001b[39;00m\n\u001b[1;32m---> 75\u001b[0m     \u001b[38;5;28;43mself\u001b[39;49m\u001b[38;5;241;43m.\u001b[39;49m\u001b[43mavailable\u001b[49m\u001b[43m(\u001b[49m\u001b[43mexception_flag\u001b[49m\u001b[38;5;241;43m=\u001b[39;49m\u001b[38;5;28;43;01mTrue\u001b[39;49;00m\u001b[43m)\u001b[49m\n\u001b[0;32m     76\u001b[0m     \u001b[38;5;66;03m#\u001b[39;00m\n\u001b[0;32m     77\u001b[0m     \u001b[38;5;66;03m# If the inputs are models, then validate that they have been\u001b[39;00m\n\u001b[0;32m     78\u001b[0m     \u001b[38;5;66;03m# constructed! Collect suffix names to try and import from solution.\u001b[39;00m\n\u001b[0;32m     79\u001b[0m     \u001b[38;5;66;03m#\u001b[39;00m\n\u001b[0;32m     80\u001b[0m     _model \u001b[38;5;241m=\u001b[39m \u001b[38;5;28;01mNone\u001b[39;00m\n",
+      "File \u001b[1;32mc:\\Users\\tobyb\\phd\\phdvenv\\lib\\site-packages\\pyomo\\solvers\\plugins\\solvers\\gurobi_direct.py:215\u001b[0m, in \u001b[0;36mGurobiDirect.available\u001b[1;34m(self, exception_flag)\u001b[0m\n\u001b[0;32m    213\u001b[0m     \u001b[38;5;28;01mif\u001b[39;00m exception_flag:\n\u001b[0;32m    214\u001b[0m         gurobipy\u001b[38;5;241m.\u001b[39mlog_import_warning(logger\u001b[38;5;241m=\u001b[39m\u001b[38;5;18m__name__\u001b[39m)\n\u001b[1;32m--> 215\u001b[0m         \u001b[38;5;28;01mraise\u001b[39;00m ApplicationError(\n\u001b[0;32m    216\u001b[0m             \u001b[38;5;124m\"\u001b[39m\u001b[38;5;124mNo Python bindings available for \u001b[39m\u001b[38;5;132;01m%s\u001b[39;00m\u001b[38;5;124m solver plugin\u001b[39m\u001b[38;5;124m\"\u001b[39m \u001b[38;5;241m%\u001b[39m (\u001b[38;5;28mtype\u001b[39m(\u001b[38;5;28mself\u001b[39m),)\n\u001b[0;32m    217\u001b[0m         )\n\u001b[0;32m    218\u001b[0m     \u001b[38;5;28;01mreturn\u001b[39;00m \u001b[38;5;28;01mFalse\u001b[39;00m\n\u001b[0;32m    220\u001b[0m \u001b[38;5;66;03m# Ensure environment is started to check for a valid license\u001b[39;00m\n",
+      "\u001b[1;31mApplicationError\u001b[0m: No Python bindings available for <class 'pyomo.solvers.plugins.solvers.gurobi_direct.GurobiDirect'> solver plugin"
+     ]
+    }
+   ],
    "source": [
-    "params_pyomo = {\"solver_name\": \"gurobi\"}\n",
+    "params_pyomo = {\"solver_name\": \"gurobi_direct\"}\n",
     "opt_pyo = PyomoOptimizer(problem_config, params=params_pyomo)\n",
     "\n",
     "res_pyo = opt_pyo.solve(enting)\n",
     "res_pyo"
    ]
+  },
+  {
+   "cell_type": "code",
+   "execution_count": null,
+   "metadata": {},
+   "outputs": [],
+   "source": []
   }
  ],
  "metadata": {
diff --git a/entmoot/__init__.py b/entmoot/__init__.py
index df6dde7..9ab9456 100644
--- a/entmoot/__init__.py
+++ b/entmoot/__init__.py
@@ -1,5 +1,10 @@
 from entmoot.problem_config import ProblemConfig
 from entmoot.models.enting import Enting
+from entmoot.models.model_params import (
+    EntingParams,
+    TrainParams,
+    TreeTrainParams,
+    UncParams,
+)
 from entmoot.optimizers.gurobi_opt import GurobiOptimizer
 from entmoot.optimizers.pyomo_opt import PyomoOptimizer
-from entmoot.models.model_params import EntingParams, UncParams, TreeTrainParams, TrainParams
\ No newline at end of file
diff --git a/entmoot/benchmarks.py b/entmoot/benchmarks.py
index 4bd3add..2fcf3ec 100644
--- a/entmoot/benchmarks.py
+++ b/entmoot/benchmarks.py
@@ -1,6 +1,8 @@
+from typing import Sequence
+
 import numpy as np
 from numpy.typing import ArrayLike
-from typing import Iterable
+
 from entmoot import ProblemConfig
 
 
@@ -38,7 +40,7 @@ def eval_small_single_obj_cat_testfunc(X: ArrayLike, no_cat=False) -> np.ndarray
     # without the dtype=object paramer, each entry of X is converted into a string
     X = np.array(X, dtype=object)
 
-    def compute_objectives(xi: Iterable, no_cat=False):
+    def compute_objectives(xi: Sequence, no_cat=False):
         if no_cat:
             return (
                 xi[1] * xi[2] * np.sin(sum(xi[3:]))
@@ -110,7 +112,7 @@ def eval_multi_obj_cat_testfunc(
     # without the dtype=object paramer, each entry of X is converted into a string
     X = np.array(X, dtype=object)
 
-    def compute_objectives(xi: Iterable, no_cat=False):
+    def compute_objectives(xi: Sequence, no_cat=False):
         if no_cat:
             return (
                 xi[1] * xi[2] * np.sin(sum(xi[3:]))
diff --git a/entmoot/constraints.py b/entmoot/constraints.py
index c4f990a..b29919b 100644
--- a/entmoot/constraints.py
+++ b/entmoot/constraints.py
@@ -1,5 +1,5 @@
-from typing import TYPE_CHECKING, Callable
 from abc import ABC, abstractmethod
+from typing import TYPE_CHECKING, Callable
 
 import pyomo.environ as pyo
 
diff --git a/entmoot/models/enting.py b/entmoot/models/enting.py
index 53b7c9f..808ed6b 100644
--- a/entmoot/models/enting.py
+++ b/entmoot/models/enting.py
@@ -1,15 +1,16 @@
+from typing import Optional, Union
+
+import numpy as np
+
 from entmoot import ProblemConfig
 from entmoot.models.base_model import BaseModel
 from entmoot.models.mean_models.tree_ensemble import TreeEnsemble
+from entmoot.models.model_params import EntingParams
 from entmoot.models.uncertainty_models.distance_based_uncertainty import (
     DistanceBasedUncertainty,
 )
 from entmoot.utils import sample
 
-from entmoot.models.model_params import EntingParams
-import numpy as np
-from typing import Union
-
 
 class Enting(BaseModel):
     """
@@ -146,7 +147,7 @@ def predict_acq(self, X: np.ndarray, is_enc=False) -> list:
             acq_pred.append(mean + self._beta * unc)
         return acq_pred
 
-    def add_to_gurobipy_model(self, core_model, weights: tuple = None) -> None:
+    def add_to_gurobipy_model(self, core_model, weights: Optional[tuple[float, ...]] = None) -> None:
         """
         Enriches the core model by adding variables and constraints based on information
         from the tree model.
@@ -157,7 +158,7 @@ def add_to_gurobipy_model(self, core_model, weights: tuple = None) -> None:
         self.unc_model.add_to_gurobipy_model(core_model)
 
         core_model._mu = core_model.addVar(
-            lb=-GRB.INFINITY, ub=GRB.INFINITY, name=f"mean_obj", vtype="C"
+            lb=-GRB.INFINITY, ub=GRB.INFINITY, name="mean_obj", vtype="C"
         )
 
         if len(self._problem_config.obj_list) == 1:
@@ -183,7 +184,7 @@ def add_to_gurobipy_model(self, core_model, weights: tuple = None) -> None:
         core_model.setObjective(core_model._mu + self._beta * core_model._unc)
         core_model.update()
 
-    def add_to_pyomo_model(self, core_model, weights: tuple = None) -> None:
+    def add_to_pyomo_model(self, core_model, weights: Optional[tuple[float, ...]] = None) -> None:
         """
         Enriches the core model by adding variables and constraints based on information
         from the tree model.
diff --git a/entmoot/models/leaf_gp.py b/entmoot/models/leaf_gp.py
deleted file mode 100644
index c8d7202..0000000
--- a/entmoot/models/leaf_gp.py
+++ /dev/null
@@ -1,9 +0,0 @@
-from entmoot.models.base_model import BaseModel
-
-
-class TreeKernelEntmoot(BaseModel):
-    def add_to_gurobipy_model(core_model, gurobi_env):
-        return NotImplementedError()
-
-    def add_to_pyomo_model(core_model):
-        return NotImplementedError()
diff --git a/entmoot/models/mean_models/lgbm_utils.py b/entmoot/models/mean_models/lgbm_utils.py
index 489aefc..ffde0cc 100644
--- a/entmoot/models/mean_models/lgbm_utils.py
+++ b/entmoot/models/mean_models/lgbm_utils.py
@@ -23,7 +23,7 @@ def add_next_nodes(node_list, node, cat_idx):
 
         try:
             new_node["split_var"] = node[-1]["split_feature"]
-            if not new_node["split_var"] in cat_idx:
+            if new_node["split_var"] not in cat_idx:
                 # read numerical variables, solver accuracy 10e-5
                 temp_node_val = round(node[-1]["threshold"], 5)
                 new_node["split_code_pred"] = temp_node_val
diff --git a/entmoot/models/mean_models/tree_ensemble.py b/entmoot/models/mean_models/tree_ensemble.py
index eb5bf98..a9a098b 100644
--- a/entmoot/models/mean_models/tree_ensemble.py
+++ b/entmoot/models/mean_models/tree_ensemble.py
@@ -1,15 +1,18 @@
+import warnings
+from dataclasses import asdict
+from typing import Optional, Union
+
+import numpy as np
+
 from entmoot.models.base_model import BaseModel
 from entmoot.models.mean_models.lgbm_utils import read_lgbm_tree_model_dict
 from entmoot.models.mean_models.meta_tree_ensemble import MetaTreeModel
 from entmoot.models.model_params import TreeTrainParams
-import warnings
-from typing import Union
-from dataclasses import asdict
-import numpy as np
+from entmoot.problem_config import Categorical, ProblemConfig
 
 
 class TreeEnsemble(BaseModel):
-    def __init__(self, problem_config, params: Union[TreeTrainParams, dict, None] = None):
+    def __init__(self, problem_config: ProblemConfig, params: Union[TreeTrainParams, dict, None] = None):
         if params is None:
             params = {}
         if isinstance(params, dict):
@@ -25,7 +28,8 @@ def __init__(self, problem_config, params: Union[TreeTrainParams, dict, None] =
 
         self._tree_dict = None
         self._meta_tree_dict = {}
-        self._min_y, self._max_y = None, None
+        self._min_y: Optional[np.ndarray] = None
+        self._max_y: Optional[np.ndarray] = None
 
     @property
     def tree_dict(self):
@@ -43,10 +47,12 @@ def meta_tree_dict(self):
 
     @property
     def min_y(self):
+        assert self._min_y is not None, "Must first cache min value"
         return self._min_y
 
     @property
     def max_y(self):
+        assert self._max_y is not None, "Must first cache max value"
         return self._max_y
 
     def fit(self, X, y):
@@ -166,7 +172,7 @@ def add_to_gurobipy_model(self, model, add_mu_var=True, normalize_mean=False):
         model._breakpoint_index = []
 
         for idx, feat in enumerate(self._problem_config.feat_list):
-            if feat.is_cat():
+            if isinstance(feat, Categorical):
                 continue
             else:
                 splits = set()
@@ -426,7 +432,7 @@ def add_to_pyomo_model(
         model._breakpoint_index = []
 
         for idx, feat in enumerate(self._problem_config.feat_list):
-            if feat.is_cat():
+            if isinstance(feat, Categorical):
                 continue
             else:
                 splits = set()
diff --git a/entmoot/models/model_params.py b/entmoot/models/model_params.py
index 039edc8..51677da 100644
--- a/entmoot/models/model_params.py
+++ b/entmoot/models/model_params.py
@@ -1,7 +1,8 @@
 """Dataclasses containing the parameters for Enting models"""
 
-from typing import Literal
 from dataclasses import dataclass, field
+from typing import Literal
+
 
 class ParamValidationError(ValueError):
     """A model parameter takes an invalid value."""
@@ -63,7 +64,7 @@ class TreeTrainParams:
     """
     This dataclass contains all parameters needed for the tree training.
     """
-    train_params: "TrainParams" = field(default_factory=dict)
+    train_params: "TrainParams" = field(default_factory=dict)  # type: ignore
     train_lib: Literal["lgbm"] = "lgbm"
 
     def __post_init__(self):
@@ -82,8 +83,8 @@ class EntingParams:
     
     Provides a structured dataclass for the parameters of an Enting model, 
     alongside default values and some light data validation."""
-    unc_params: "UncParams" = field(default_factory=dict)
-    tree_train_params: "TreeTrainParams" = field(default_factory=dict)
+    unc_params: "UncParams" = field(default_factory=dict)  # type: ignore
+    tree_train_params: "TreeTrainParams" = field(default_factory=dict)  # type: ignore
     
     def __post_init__(self):
         if isinstance(self.unc_params, dict):
diff --git a/entmoot/models/uncertainty_models/base_distance.py b/entmoot/models/uncertainty_models/base_distance.py
index 421a9fb..0ad7f39 100644
--- a/entmoot/models/uncertainty_models/base_distance.py
+++ b/entmoot/models/uncertainty_models/base_distance.py
@@ -1,29 +1,36 @@
-from entmoot.models.base_model import BaseModel
+from typing import Optional
+
 import numpy as np
 
+from entmoot.models.base_model import BaseModel
+from entmoot.problem_config import Categorical, ProblemConfig
+
 
 class NonCatDistance(BaseModel):
-    def __init__(self, problem_config, acq_sense, dist_trafo):
+    def __init__(self, problem_config: ProblemConfig, acq_sense, dist_trafo):
         self._problem_config = problem_config
         self._acq_sense = acq_sense
         self._dist_trafo = dist_trafo
 
-        self._shift, self._scale = None, None
+        self._shift: Optional[np.ndarray] = None
+        self._scale: Optional[np.ndarray] = None
         self._x_trafo = None
 
     @property
     def shift(self):
+        assert self._shift is not None, "Must first cache shift"
         return self._shift
 
     @property
     def scale(self):
+        assert self._scale is not None, "Must first cache scale"
         return self._scale
 
     @property
     def x_trafo(self):
         assert (
             self._x_trafo is not None
-        ), f"Uncertainty model needs fit function call before it can predict."
+        ), "Uncertainty model needs fit function call before it can predict."
         return self._x_trafo
 
     def get_big_m(self):
@@ -67,29 +74,36 @@ def get_gurobipy_model_constr(self, model_core):
 
     def get_pyomo_model_constr(self, model_core):
         raise NotImplementedError()
+
+    def get_gurobipy_model_constr_terms(self, model) -> list:
+        # implemented by concrete distances; returns one distance term per cached data point
+        raise NotImplementedError()
+
+    def get_pyomo_model_constr_terms(self, model) -> list:
+        raise NotImplementedError()
 
 
 class CatDistance(BaseModel):
-    def __init__(self, problem_config, acq_sense):
+    def __init__(self, problem_config: ProblemConfig, acq_sense):
         self._problem_config = problem_config
         self._acq_sense = acq_sense
 
-        self._cat_x = None
-        self._sim_map = None
-        self._cache_x = None
+        self._cat_x: Optional[np.ndarray] = None
+        self._sim_map: Optional[dict[int, np.ndarray]] = None
+        self._cache_x: Optional[np.ndarray] = None
 
     @property
     def cache_x(self):
         assert (
             self._cache_x is not None
-        ), f"Uncertainty model needs fit function call before it can predict."
+        ), "Uncertainty model needs fit function call before it can predict."
         return self._cache_x
 
     @property
     def sim_map(self):
         assert (
             self._sim_map is not None
-        ), f"Uncertainty model needs fit function call before it can predict."
+        ), "Uncertainty model needs fit function call before it can predict."
         return self._sim_map
 
     def get_big_m(self):
@@ -118,8 +132,7 @@ def fit(self, X):
         # generate similarity matrix for all data points
         self._sim_map = {}
 
-        for idx in self._problem_config.cat_idx:
-            feat = self._problem_config.feat_list[idx]
+        for idx, feat in self._problem_config.get_idx_and_feat_by_type(Categorical):
             all_cats = feat.enc_cat_list
 
             # creates similarity entries for all categories of all categorical features
@@ -132,31 +145,31 @@ def fit(self, X):
             self._sim_map[idx] = mat
 
     def get_gurobipy_model_constr_terms(self, model):
-        feat = model._all_feat
+        features = model._all_feat
         constr_list = []
         for xi in self.cache_x:
             constr = 0
 
             # iterate through all categories to check which distances are active
-            for idx in self._problem_config.cat_idx:
-                for cat in self._problem_config.feat_list[idx].enc_cat_list:
+            for idx, feat in self._problem_config.get_idx_and_feat_by_type(Categorical):
+                for cat in feat.enc_cat_list:
                     sim = self.sim_map[idx][cat, int(xi[idx])]
-                    constr += (1 - sim) * feat[idx][cat]
+                    constr += (1 - sim) * features[idx][cat]
 
             constr_list.append(constr)
         return constr_list
 
     def get_pyomo_model_constr_terms(self, model):
-        feat = model._all_feat
+        features = model._all_feat
         constr_list = []
         for xi in self.cache_x:
             constr = 0
 
             # iterate through all categories to check which distances are active
-            for idx in self._problem_config.cat_idx:
-                for cat in self._problem_config.feat_list[idx].enc_cat_list:
+            for idx, feat in self._problem_config.get_idx_and_feat_by_type(Categorical):
+                for cat in feat.enc_cat_list:
                     sim = self.sim_map[idx][cat, int(xi[idx])]
-                    constr += (1 - sim) * feat[idx][cat]
+                    constr += (1 - sim) * features[idx][cat]
 
             constr_list.append(constr)
         return constr_list
diff --git a/entmoot/models/uncertainty_models/distance_based_uncertainty.py b/entmoot/models/uncertainty_models/distance_based_uncertainty.py
index f68308b..6ef3f81 100644
--- a/entmoot/models/uncertainty_models/distance_based_uncertainty.py
+++ b/entmoot/models/uncertainty_models/distance_based_uncertainty.py
@@ -1,20 +1,28 @@
+from typing import Literal, Union, overload
+
+import numpy as np
+
 from entmoot.models.base_model import BaseModel
+from entmoot.models.model_params import ParamValidationError, UncParams
 from entmoot.models.uncertainty_models.base_distance import CatDistance, NonCatDistance
 from entmoot.models.uncertainty_models.euclidean_squared_distance import (
     EuclideanSquaredDistance,
 )
+from entmoot.models.uncertainty_models.goodall4_distance import Goodall4Distance
 from entmoot.models.uncertainty_models.l1_distance import L1Distance
 from entmoot.models.uncertainty_models.l2_distance import L2Distance
-
-from entmoot.models.uncertainty_models.overlap_distance import OverlapDistance
-from entmoot.models.uncertainty_models.goodall4_distance import Goodall4Distance
 from entmoot.models.uncertainty_models.of_distance import OfDistance
+from entmoot.models.uncertainty_models.overlap_distance import OverlapDistance
+from entmoot.problem_config import ProblemConfig
 
-from entmoot.models.model_params import UncParams, ParamValidationError
-from typing import Union
-import numpy as np
 
-def distance_func_mapper(dist_name: str, cat: bool) -> Union[CatDistance, NonCatDistance]:
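+# Overloads narrow the return type: categorical lookups resolve to CatDistance
+# subclasses, non-categorical lookups to NonCatDistance subclasses (or None).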
+@overload
+def distance_func_mapper(dist_name: str, cat: Literal[True]) -> type[CatDistance] | None: ...
+
+@overload
+def distance_func_mapper(dist_name: str, cat: Literal[False]) -> type[NonCatDistance] | None: ...
+
+def distance_func_mapper(dist_name: str, cat: bool) -> type[CatDistance] | type[NonCatDistance] | None:
     """Given a string, return the distance function"""
     non_cat_dists = {
         "euclidean_squared": EuclideanSquaredDistance,
@@ -33,7 +41,7 @@ def distance_func_mapper(dist_name: str, cat: bool) -> Union[CatDistance, NonCat
 
 
 class DistanceBasedUncertainty(BaseModel):
-    def __init__(self, problem_config, params: Union[UncParams, dict, None] = None):
+    def __init__(self, problem_config: ProblemConfig, params: Union[UncParams, dict, None] = None):
         if params is None:
             params = {}
         if isinstance(params, dict):
@@ -96,16 +104,21 @@ def __init__(self, problem_config, params: Union[UncParams, dict, None] = None):
                 acq_sense=params.acq_sense,
             )
 
+    @property
+    def bound_coeff(self):
+        assert self._bound_coeff is not None
+        return self._bound_coeff
+
     @property
     def num_cache_x(self):
         assert (
             self._num_cache_x is not None
-        ), f"Uncertainty model needs fit function call before it can predict."
+        ), "Uncertainty model needs fit function call before it can predict."
         return self._num_cache_x
 
     def fit(self, X, y):
         if self._dist_has_var_bound:
-            self._dist_bound = abs(np.var(y) * self._bound_coeff)
+            self._dist_bound = abs(np.var(y) * self.bound_coeff)
 
         self._num_cache_x = len(X)
 
@@ -128,7 +141,7 @@ def predict(self, X):
         return np.asarray(comb_pred)
 
     def add_to_gurobipy_model(self, model):
-        from gurobipy import GRB, quicksum
+        from gurobipy import GRB
 
         # define main uncertainty variables
         if self._dist_has_var_bound:
@@ -158,7 +171,7 @@ def add_to_gurobipy_model(self, model):
                     model.addVar(name=f"bin_penalty_{i}", vtype="B")
                 )
 
-                big_m_term = big_m * (1 - model._bin_penalty[-1])
+                big_m_term = big_m * (1 - model._bin_penalty[-1])  # type: ignore
 
                 if self._dist_metric == "l2":
                     # take sqrt for l2 distance
@@ -211,7 +224,7 @@ def add_to_gurobipy_model(self, model):
             model.params.NonConvex = 2
 
         if self._acq_sense == "penalty":
-            model.addConstr(sum(model._bin_penalty) == 1, name=f"bin_penalty_sum")
+            model.addConstr(sum(model._bin_penalty) == 1, name="bin_penalty_sum")
 
         model.update()
 
diff --git a/entmoot/models/uncertainty_models/euclidean_squared_distance.py b/entmoot/models/uncertainty_models/euclidean_squared_distance.py
index b4d8015..85bbe41 100644
--- a/entmoot/models/uncertainty_models/euclidean_squared_distance.py
+++ b/entmoot/models/uncertainty_models/euclidean_squared_distance.py
@@ -1,6 +1,7 @@
-from entmoot.models.uncertainty_models.base_distance import NonCatDistance
 import numpy as np
 
+from entmoot.models.uncertainty_models.base_distance import NonCatDistance
+
 
 class EuclideanSquaredDistance(NonCatDistance):
     def _get_distance(self, x_left, x_right):
@@ -17,13 +18,13 @@ def get_gurobipy_model_constr_terms(self, model):
 
         from gurobipy import quicksum
 
-        feat = model._all_feat
+        features = model._all_feat
 
         constr_list = []
         for xi in self.x_trafo:
             constr = quicksum(
-                (xi[i] - (feat[idx] - self.shift[i]) / self.scale[i])
-                * (xi[i] - (feat[idx] - self.shift[i]) / self.scale[i])
+                (xi[i] - (features[idx] - self.shift[i]) / self.scale[i])
+                * (xi[i] - (features[idx] - self.shift[i]) / self.scale[i])
                 for i, idx in enumerate(self._problem_config.non_cat_idx)
             )
             constr_list.append(constr)
@@ -31,14 +32,14 @@ def get_gurobipy_model_constr_terms(self, model):
 
     def get_pyomo_model_constr_terms(self, model):
 
-        feat = model._all_feat
+        features = model._all_feat
 
         constr_list = []
 
         for xi in self.x_trafo:
             constr = sum(
-                (xi[i] - (feat[idx] - self.shift[i]) / self.scale[i])
-                * (xi[i] - (feat[idx] - self.shift[i]) / self.scale[i])
+                (xi[i] - (features[idx] - self.shift[i]) / self.scale[i])
+                * (xi[i] - (features[idx] - self.shift[i]) / self.scale[i])
                 for i, idx in enumerate(self._problem_config.non_cat_idx)
             )
             constr_list.append(constr)
diff --git a/entmoot/models/uncertainty_models/goodall4_distance.py b/entmoot/models/uncertainty_models/goodall4_distance.py
index 42a5ac5..bfdd281 100644
--- a/entmoot/models/uncertainty_models/goodall4_distance.py
+++ b/entmoot/models/uncertainty_models/goodall4_distance.py
@@ -1,6 +1,7 @@
-from entmoot.models.uncertainty_models.base_distance import CatDistance
 import numpy as np
 
+from entmoot.models.uncertainty_models.base_distance import CatDistance
+
 
 class Goodall4Distance(CatDistance):
     def _get_pk2(self, cat_rows, cat):
@@ -10,7 +11,7 @@ def _get_pk2(self, cat_rows, cat):
 
     def _sim_mat_rule(self, x_left, x_right, cat_idx):
         return (
-            self._get_pk2(self._cache_x[:, cat_idx], x_left)
+            self._get_pk2(self.cache_x[:, cat_idx], x_left)
             if x_left == x_right
             else 0.0
         )
diff --git a/entmoot/models/uncertainty_models/l1_distance.py b/entmoot/models/uncertainty_models/l1_distance.py
index 0f392b9..be41ff7 100644
--- a/entmoot/models/uncertainty_models/l1_distance.py
+++ b/entmoot/models/uncertainty_models/l1_distance.py
@@ -1,6 +1,7 @@
-from entmoot.models.uncertainty_models.base_distance import NonCatDistance
 import numpy as np
 
+from entmoot.models.uncertainty_models.base_distance import NonCatDistance
+
 
 class L1Distance(NonCatDistance):
     def _get_distance(self, x_left, x_right):
@@ -16,7 +17,7 @@ def _array_predict(self, X):
     def get_gurobipy_model_constr_terms(self, model):
         from gurobipy import GRB, quicksum
 
-        feat = model._all_feat
+        features = model._all_feat
 
         # define auxiliary variables
         feat_dict = {
@@ -34,7 +35,7 @@ def get_gurobipy_model_constr_terms(self, model):
             for i, idx in enumerate(self._problem_config.non_cat_idx):
                 # capture positive and negative contributions
                 model.addConstr(
-                    (xi[i] - (feat[idx] - self.shift[i]) / self.scale[i])
+                    (xi[i] - (features[idx] - self.shift[i]) / self.scale[i])
                     == aux_pos[data_idx, idx] - aux_neg[data_idx, idx],
                     name=f"unc_aux_({data_idx},{idx})",
                 )
@@ -58,7 +59,7 @@ def get_gurobipy_model_constr_terms(self, model):
     def get_pyomo_model_constr_terms(self, model):
         import pyomo.environ as pyo
 
-        feat = model._all_feat
+        features = model._all_feat
 
         # define auxiliary variables
         feat_dict = {
@@ -79,7 +80,7 @@ def get_pyomo_model_constr_terms(self, model):
         def rule_contrs_l1_pos_neg_contr(modelobj, data_idx, i, idx):
             xi = self.x_trafo[data_idx]
             return (
-                xi[i] - (feat[idx] - self.shift[i]) / self.scale[i]
+                xi[i] - (features[idx] - self.shift[i]) / self.scale[i]
             ) == modelobj.aux_pos[data_idx, idx] - modelobj.aux_neg[data_idx, idx]
 
         model.contrs_l1_pos_neg_contr = pyo.Constraint(
@@ -90,11 +91,11 @@ def rule_contrs_l1_pos_neg_contr(modelobj, data_idx, i, idx):
             (data_idx, idx): max(
                 abs(
                     self.x_trafo[data_idx][i]
-                    - (feat[idx].ub - self.shift[i]) / self.scale[i]
+                    - (features[idx].ub - self.shift[i]) / self.scale[i]
                 ),
                 abs(
                     self.x_trafo[data_idx][i]
-                    - (feat[idx].lb - self.shift[i]) / self.scale[i]
+                    - (features[idx].lb - self.shift[i]) / self.scale[i]
                 ),
             )
             for (data_idx, i, idx) in indices_l1_constraints
diff --git a/entmoot/models/uncertainty_models/l2_distance.py b/entmoot/models/uncertainty_models/l2_distance.py
index f8f6a32..97f9384 100644
--- a/entmoot/models/uncertainty_models/l2_distance.py
+++ b/entmoot/models/uncertainty_models/l2_distance.py
@@ -1,6 +1,7 @@
-from entmoot.models.uncertainty_models.base_distance import NonCatDistance
 import numpy as np
 
+from entmoot.models.uncertainty_models.base_distance import NonCatDistance
+
 
 class L2Distance(NonCatDistance):
     def _get_distance(self, x_left, x_right):
@@ -16,13 +17,13 @@ def _array_predict(self, X):
     def get_gurobipy_model_constr_terms(self, model):
         from gurobipy import quicksum
 
-        feat = model._all_feat
+        features = model._all_feat
 
         constr_list = []
         for xi in self.x_trafo:
             constr = quicksum(
-                (xi[i] - (feat[idx] - self.shift[i]) / self.scale[i])
-                * (xi[i] - (feat[idx] - self.shift[i]) / self.scale[i])
+                (xi[i] - (features[idx] - self.shift[i]) / self.scale[i])
+                * (xi[i] - (features[idx] - self.shift[i]) / self.scale[i])
                 for i, idx in enumerate(self._problem_config.non_cat_idx)
             )
             constr_list.append(constr)
@@ -30,13 +31,13 @@ def get_gurobipy_model_constr_terms(self, model):
 
     def get_pyomo_model_constr_terms(self, model):
 
-        feat = model._all_feat
+        features = model._all_feat
 
         constr_list = []
         for xi in self.x_trafo:
             constr = sum(
-                (xi[i] - (feat[idx] - self.shift[i]) / self.scale[i])
-                * (xi[i] - (feat[idx] - self.shift[i]) / self.scale[i])
+                (xi[i] - (features[idx] - self.shift[i]) / self.scale[i])
+                * (xi[i] - (features[idx] - self.shift[i]) / self.scale[i])
                 for i, idx in enumerate(self._problem_config.non_cat_idx)
             )
             constr_list.append(constr)
diff --git a/entmoot/models/uncertainty_models/of_distance.py b/entmoot/models/uncertainty_models/of_distance.py
index 639518f..a2990c8 100644
--- a/entmoot/models/uncertainty_models/of_distance.py
+++ b/entmoot/models/uncertainty_models/of_distance.py
@@ -1,7 +1,9 @@
-from entmoot.models.uncertainty_models.base_distance import CatDistance
-import numpy as np
 from math import log
 
+import numpy as np
+
+from entmoot.models.uncertainty_models.base_distance import CatDistance
+
 
 class OfDistance(CatDistance):
     def _get_of_frac(self, cat_rows, cat_left, cat_right):
@@ -12,7 +14,7 @@ def _get_of_frac(self, cat_rows, cat_left, cat_right):
 
     def _sim_mat_rule(self, x_left, x_right, cat_idx):
         return (
-            self._get_of_frac(self._cache_x[:, cat_idx], x_left, x_right)
+            self._get_of_frac(self.cache_x[:, cat_idx], x_left, x_right)
             if x_left != x_right
             else 1.0
         )
diff --git a/entmoot/optimizers/base_opt.py b/entmoot/optimizers/base_opt.py
index 670127d..1de49a9 100644
--- a/entmoot/optimizers/base_opt.py
+++ b/entmoot/optimizers/base_opt.py
@@ -2,8 +2,8 @@ class BaseOptimizer:
     def __init__(self, space, params):
         raise NotImplementedError()
 
-    def solve(model):
+    def solve(self, model):
         raise NotImplementedError()
 
-    def sample_feas(num_points):
+    def sample_feas(self, num_points):
         raise NotImplementedError()
diff --git a/entmoot/optimizers/gurobi_opt.py b/entmoot/optimizers/gurobi_opt.py
index 0840c4d..9a32909 100644
--- a/entmoot/optimizers/gurobi_opt.py
+++ b/entmoot/optimizers/gurobi_opt.py
@@ -1,9 +1,14 @@
-from collections import namedtuple
+import os
+from typing import Optional
+
+import gurobipy as gur
+import numpy as np
+
 from entmoot import Enting, ProblemConfig
 from entmoot.utils import OptResult
-import gurobipy as gur
-import os
+from entmoot.problem_config import Categorical
 
+# Active leaves of the tree model: one list of (tree index, encoded leaf) pairs per objective
+ActiveLeavesT = list[list[tuple[int, str]]]
 
 class GurobiOptimizer:
     """
@@ -42,31 +47,31 @@ class GurobiOptimizer:
             # As expected, the optimal input of the tree model is near the origin (cf. X_opt_pyo)
             X_opt_pyo, _, _ = opt_gur.solve(enting)
     """
-    def __init__(self, problem_config: ProblemConfig, params: dict = None) -> float:
+    def __init__(self, problem_config: ProblemConfig, params: Optional[dict] = None):
         self._params = {} if params is None else params
         self._problem_config = problem_config
         self._curr_sol = None
-        self._active_leaves = None
+        self._active_leaves: Optional[ActiveLeavesT] = None
 
-    def get_curr_sol(self) -> list:
+    def get_curr_sol(self) -> list | np.ndarray:
         """
         Returns current solution (i.e. optimal points) from optimization run
         """
         assert self._curr_sol is not None, "No solution was generated yet."
         return self._curr_sol
 
-    def get_active_leaf_sol(self) -> list:
+    def get_active_leaf_sol(self) -> ActiveLeavesT:
         """
         Returns active leaves in the tree model based on the current solution
         """
-        assert self._curr_sol is not None, "No solution was generated yet."
+        assert self._active_leaves is not None, "No solution was generated yet."
         return self._active_leaves
 
     def solve(
         self,
         tree_model: Enting,
-        model_core: gur.Model = None,
-        weights: tuple = None,
+        model_core: Optional[gur.Model] = None,
+        weights: Optional[tuple[float, ...]] = None,
         use_env: bool = False,
     ) -> OptResult:
         """
@@ -75,16 +80,18 @@ def solve(
 
         if model_core is None:
             if use_env:
+                env_params = {}
                 if "CLOUDACCESSID" in os.environ:
                     # Use Gurobi Cloud
-                    connection_params_cld = {
-                        "CLOUDACCESSID": os.getenv("CLOUDACCESSID"),
-                        "CLOUDSECRETKEY": os.getenv("CLOUDSECRETKEY"),
-                        "CLOUDPOOL": os.getenv("CLOUDPOOL"),
+                    env_params = {
+                        "CLOUDACCESSID": os.getenv("CLOUDACCESSID", ""),
+                        "CLOUDSECRETKEY": os.getenv("CLOUDSECRETKEY", ""),
+                        "CLOUDPOOL": os.getenv("CLOUDPOOL", ""),
                     }
-                    env_cld = gur.Env(params=connection_params_cld)
-                    env_cld.start()
-                    opt_model = self._problem_config.get_gurobi_model_core(env=env_cld)
+                # TODO: Support passing in env params
+                env_cld = gur.Env(params=env_params)
+                env_cld.start()
+                opt_model = self._problem_config.get_gurobi_model_core(env=env_cld)
 
             else:
                 opt_model = self._problem_config.get_gurobi_model_core()
@@ -121,12 +128,12 @@ def solve(
             self._active_leaves,
         )
 
-    def _get_sol(self, solved_model: gur.Model) -> list:
+    def _get_sol(self, solved_model: gur.Model) -> tuple[list | np.ndarray, ActiveLeavesT]:
         # extract solutions from conti and discrete variables
         res = []
         for idx, feat in enumerate(self._problem_config.feat_list):
             curr_var = solved_model._all_feat[idx]
-            if feat.is_cat():
+            if isinstance(feat, Categorical):
                 # find active category
                 sol_cat = [
                     int(round(curr_var[enc_cat].x)) for enc_cat in feat.enc_cat_list
diff --git a/entmoot/optimizers/pyomo_opt.py b/entmoot/optimizers/pyomo_opt.py
index 40c6ed3..311d0f1 100644
--- a/entmoot/optimizers/pyomo_opt.py
+++ b/entmoot/optimizers/pyomo_opt.py
@@ -1,8 +1,13 @@
-from collections import namedtuple
+from typing import Optional
+
+import numpy as np
+import pyomo.environ as pyo
+
 from entmoot import Enting, ProblemConfig
 from entmoot.utils import OptResult
-import pyomo.environ as pyo
+from entmoot.problem_config import Categorical
 
+ActiveLeavesT = list[list[tuple[int, str]]]
 
 class PyomoOptimizer:
     """
@@ -43,29 +48,29 @@ class PyomoOptimizer:
             # As expected, the optimal input of the tree model is near the origin (cf. X_opt_pyo)
             X_opt_pyo, _, _ = opt_pyo.solve(enting)
     """
-    def __init__(self, problem_config: ProblemConfig, params: dict = None):
+    def __init__(self, problem_config: ProblemConfig, params: Optional[dict] = None):
         self._params = {} if params is None else params
         self._problem_config = problem_config
         self._curr_sol = None
         self._active_leaves = None
 
     @property
-    def get_curr_sol(self) -> list:
+    def get_curr_sol(self) -> list | np.ndarray:
         """
         Returns current solution (i.e. optimal points) from optimization run
         """
         assert self._curr_sol is not None, "No solution was generated yet."
         return self._curr_sol
 
-    def get_active_leaf_sol(self) -> list:
+    def get_active_leaf_sol(self) -> ActiveLeavesT:
         """
         Returns active leaves in the tree model based on the current solution
         """
-        assert self._curr_sol is not None, "No solution was generated yet."
+        assert self._active_leaves is not None, "No solution was generated yet."
         return self._active_leaves
 
     def solve(
-        self, tree_model: Enting, model_core: pyo.ConcreteModel = None, weights: tuple = None
+        self,
+        tree_model: Enting,
+        model_core: Optional[pyo.ConcreteModel] = None,
+        weights: Optional[tuple[float, ...]] = None,
     ) -> OptResult:
         """
         Solves the Pyomo optimization model
@@ -114,12 +119,12 @@ def solve(
             self._active_leaves
         )
 
-    def _get_sol(self, solved_model: pyo.ConcreteModel) -> list:
+    def _get_sol(self, solved_model: pyo.ConcreteModel) -> tuple[list | np.ndarray, ActiveLeavesT]:
         # extract solutions from conti and discrete variables
         res = []
         for idx, feat in enumerate(self._problem_config.feat_list):
             curr_var = solved_model._all_feat[idx]
-            if feat.is_cat():
+            if isinstance(feat, Categorical):
                 # find active category
                 sol_cat = [
                     int(round(pyo.value(curr_var[enc_cat])))
diff --git a/entmoot/optimizers/sampling_opt.py b/entmoot/optimizers/sampling_opt.py
index 83704f3..f2a1bb6 100644
--- a/entmoot/optimizers/sampling_opt.py
+++ b/entmoot/optimizers/sampling_opt.py
@@ -2,8 +2,8 @@ class SamplingOptimizer:
     def __init__(self, space, params):
         raise NotImplementedError()
 
-    def solve(model):
+    def solve(self, model):
         raise NotImplementedError()
 
-    def sample_feas(num_points):
+    def sample_feas(self, num_points):
         raise NotImplementedError()
diff --git a/entmoot/problem_config.py b/entmoot/problem_config.py
index c7ed9f8..4086775 100644
--- a/entmoot/problem_config.py
+++ b/entmoot/problem_config.py
@@ -1,7 +1,129 @@
-from typing import Tuple, List, Optional
+from abc import ABC, abstractmethod
+from typing import List, Optional, TypeVar
+
 import numpy as np
-import random
 
+BoundsT = tuple[float, float]
+CategoriesT = list[str | float | int] | tuple[str | float | int, ...]
+
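+# Feature types used by ProblemConfig; call sites check isinstance(feat, <class>)
+# rather than the is_real()/is_cat()/is_int()/is_bin() flag methods.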
+class FeatureType(ABC):
+    def __init__(self, name: str):
+        self.name = name
+
+    @abstractmethod
+    def get_enc_bnds(self):
+        pass
+
+    def is_real(self):
+        return False
+
+    def is_cat(self):
+        return False
+
+    def is_int(self):
+        return False
+
+    def is_bin(self):
+        return False
+
+    def encode(self, xi):
+        return xi
+
+    def decode(self, xi):
+        return xi
+
+
+class Real(FeatureType):
+    def __init__(self, lb: float, ub: float, name: str):
+        super().__init__(name)
+        self.lb = lb
+        self.ub = ub
+
+    def get_enc_bnds(self):
+        return (self.lb, self.ub)
+
+    def is_real(self):
+        return True
+
+
+class Categorical(FeatureType):
+    def __init__(self, cat_list: CategoriesT, name: str):
+        super().__init__(name)
+        self._cat_list = cat_list
+
+        # encode categories
+        self._enc2str, self._str2enc = {}, {}
+        self._enc_cat_list = []
+        for enc, cat in enumerate(cat_list):
+            self._enc_cat_list.append(enc)
+            self._enc2str[enc] = cat
+            self._str2enc[cat] = enc
+
+    def get_enc_bnds(self):
+        return self._enc_cat_list
+
+    @property
+    def cat_list(self):
+        return self._cat_list
+
+    @property
+    def enc_cat_list(self):
+        return self._enc_cat_list
+
+    def encode(self, xi):
+        return self._str2enc[xi]
+
+    def decode(self, xi):
+        return self._enc2str[xi]
+
+    def is_cat(self):
+        return True
+
+
+class Integer(FeatureType):
+    def __init__(self, lb: int, ub: int, name: str):
+        super().__init__(name)
+        self.lb = lb
+        self.ub = ub
+
+    def get_enc_bnds(self):
+        return (self.lb, self.ub)
+
+    def is_int(self):
+        return True
+
+    def decode(self, xi):
+        return int(xi)
+
+
+class Binary(FeatureType):
+    def __init__(self, name: str):
+        super().__init__(name)
+        self.lb = 0
+        self.ub = 1
+
+    def get_enc_bnds(self):
+        return (self.lb, self.ub)
+
+    def decode(self, xi):
+        return abs(int(xi))
+
+    def is_bin(self):
+        return True
+
+class Objective:
+    def __init__(self, name):
+        self.name = name
+
+class MinObjective(Objective):
+    sign = 1
+
+class MaxObjective(Objective):
+    sign = -1
+
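+# AnyFeatureT/AnyObjectiveT are closed unions of the concrete classes above; the
+# FeatureT type variable lets get_idx_and_feat_by_type keep the requested subtype.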
+AnyFeatureT = Real | Integer | Categorical | Binary
+FeatureT = TypeVar("FeatureT", bound=FeatureType)
+AnyObjectiveT = MinObjective | MaxObjective
 
 class ProblemConfig:
     def __init__(self, rnd_seed: Optional[int] = None):
@@ -12,22 +134,22 @@ def __init__(self, rnd_seed: Optional[int] = None):
 
     @property
     def cat_idx(self):
-        return tuple([i for i, feat in enumerate(self.feat_list) if feat.is_cat()])
+        return tuple([i for i, feat in enumerate(self.feat_list) if isinstance(feat, Categorical)])
 
     @property
     def non_cat_idx(self):
-        return tuple([i for i, feat in enumerate(self.feat_list) if not feat.is_cat()])
+        return tuple([i for i, feat in enumerate(self.feat_list) if not isinstance(feat, Categorical)])
 
     @property
     def non_cat_lb(self):
         return tuple(
-            [feat.lb for i, feat in enumerate(self.feat_list) if not feat.is_cat()]
+            [feat.lb for i, feat in enumerate(self.feat_list) if not isinstance(feat, Categorical)]
         )
 
     @property
     def non_cat_ub(self):
         return tuple(
-            [feat.ub for i, feat in enumerate(self.feat_list) if not feat.is_cat()]
+            [feat.ub for i, feat in enumerate(self.feat_list) if not isinstance(feat, Categorical)]
         )
 
     @property
@@ -36,16 +158,19 @@ def non_cat_bnd_diff(self):
             [
                 feat.ub - feat.lb
                 for i, feat in enumerate(self.feat_list)
-                if not feat.is_cat()
+                if not isinstance(feat, Categorical)
             ]
         )
 
+    def get_idx_and_feat_by_type(self, feature_type: type[FeatureT]) -> tuple[tuple[int, FeatureT], ...]:
+        """Return (index, feature) pairs for every feature of the given type."""
+        return tuple([(i, feat) for i, feat in enumerate(self.feat_list) if isinstance(feat, feature_type)])
+
     @property
-    def feat_list(self):
+    def feat_list(self) -> list[AnyFeatureT]:
         return self._feat_list
 
     @property
-    def obj_list(self):
+    def obj_list(self) -> list[AnyObjectiveT]:
         return self._obj_list
 
     @property
@@ -53,7 +178,7 @@ def rnd_seed(self):
         return self._rnd_seed
 
     def get_enc_bnd(self):
-        return [feat.get_enc_bnds() for feat in self._feat_list]
+        return [feat.get_enc_bnds() for feat in self.feat_list]
 
     def _encode_xi(self, xi: List):
         return np.asarray(
@@ -81,12 +206,15 @@ def decode(self, X: List):
             dec = [self._decode_xi(xi) for xi in X]
             return np.asarray(dec)
 
-    def add_feature(self, feat_type: str, bounds: Tuple = None, name: str = None):
+    def add_feature(self, feat_type: str, bounds: Optional[BoundsT | CategoriesT] = None, name: Optional[str] = None):
         if name is None:
             name = f"feat_{len(self.feat_list)}"
 
-        if bounds is None and feat_type in ("real", "integer", "categorical"):
-            raise IOError(
+        if bounds is None:
+            if feat_type == "binary":
+                self._feat_list.append(Binary(name=name))
+                return
+            raise ValueError(
                 "Please provide bounds for feature types in '(real, integer, categorical)'"
             )
 
@@ -127,10 +255,7 @@ def add_feature(self, feat_type: str, bounds: Tuple = None, name: str = None):
                     f"smaller than upper bound. Check feature '{name}'."
                 )
 
-                self._feat_list.append(Integer(lb=lb, ub=ub, name=name))
-
-        elif feat_type == "binary":
-            self._feat_list.append(Binary(name=name))
+                self._feat_list.append(Integer(lb=lb, ub=ub, name=name))
 
         elif feat_type == "categorical":
             assert len(bounds) > 1, (
@@ -148,20 +273,20 @@ def add_feature(self, feat_type: str, bounds: Tuple = None, name: str = None):
                 set(bounds)
             ), f"Categories of feat_type '{feat_type}' are not all unique."
 
-            self._feat_list.append(Categorical(cat_list=bounds, name=name))
+            self._feat_list.append(Categorical(cat_list=bounds, name=name)) # type: ignore
 
         else:
-            raise IOError(
+            raise ValueError(
                 f"No support for feat_type '{feat_type}'. Check feature '{name}'."
             )
 
-    def add_min_objective(self, name: str = None):
+    def add_min_objective(self, name: Optional[str] = None):
         if name is None:
             name = f"obj_{len(self.obj_list)}"
 
         self._obj_list.append(MinObjective(name=name))
 
-    def add_max_objective(self, name: str = None):
+    def add_max_objective(self, name: Optional[str] = None):
         if name is None:
             name = f"obj_{len(self.obj_list)}"
 
@@ -177,17 +302,18 @@ def transform_objective(self, y: np.ndarray) -> np.ndarray:
 
     def get_rnd_sample_numpy(self, num_samples):
         # returns np.array for faster processing
+        # TODO: defer sample logic to feature
         array_list = []
         for feat in self.feat_list:
-            if feat.is_real():
+            if isinstance(feat, Real):
                 array_list.append(
                     self.rng.uniform(low=feat.lb, high=feat.ub, size=num_samples)
                 )
-            elif feat.is_cat():
+            elif isinstance(feat, Categorical):
                 array_list.append(
                     self.rng.integers(0, len(feat.cat_list), size=num_samples)
                 )
-            elif feat.is_int() or feat.is_bin():
+            else:
                 array_list.append(
                     self.rng.integers(
                         low=feat.lb, high=feat.ub+1, size=num_samples
@@ -201,14 +327,14 @@ def get_rnd_sample_list(self, num_samples=1, cat_enc=False):
         for _ in range(num_samples):
             sample = []
             for feat in self.feat_list:
-                if feat.is_real():
+                if isinstance(feat, Real):
                     sample.append(self.rng.uniform(feat.lb, feat.ub))
-                elif feat.is_cat():
+                elif isinstance(feat, Categorical):
                     if cat_enc:
                         sample.append(self.rng.integers(0, len(feat.cat_list)))
                     else:
                         sample.append(self.rng.choice(feat.cat_list))
-                elif feat.is_int() or feat.is_bin():
+                else:
                     sample.append(self.rng.integers(feat.lb, feat.ub+1))
             sample_list.append(tuple(sample))
         return sample_list if len(sample_list) > 1 else sample_list[0]
@@ -226,20 +352,20 @@ def get_gurobi_model_core(self, env=None):
         model._all_feat = []
 
         for i, feat in enumerate(self.feat_list):
-            if feat.is_real():
+            if isinstance(feat, Real):
                 model._all_feat.append(
                     model.addVar(lb=feat.lb, ub=feat.ub, name=feat.name, vtype="C")
                 )
-            elif feat.is_cat():
+            elif isinstance(feat, Categorical):
                 model._all_feat.append(dict())
                 for enc, cat in zip(feat.enc_cat_list, feat.cat_list):
                     comb_name = f"{feat.name}_{cat}"
                     model._all_feat[i][enc] = model.addVar(name=comb_name, vtype="B")
-            elif feat.is_int():
+            elif isinstance(feat, Integer):
                 model._all_feat.append(
                     model.addVar(lb=feat.lb, ub=feat.ub, name=feat.name, vtype="I")
                 )
-            elif feat.is_bin():
+            elif isinstance(feat, Binary):
                 model._all_feat.append(model.addVar(name=feat.name, vtype="B"))
 
         model.update()
@@ -260,7 +386,7 @@ def copy_gurobi_model_core(self, model_core):
 
         # transfer feature var list to model copy
         for i, feat in enumerate(self.feat_list):
-            if feat.is_cat():
+            if isinstance(feat, Categorical):
                 copy_model_core._all_feat.append(dict())
                 for enc, cat in zip(feat.enc_cat_list, feat.cat_list):
                     var_name = model_core._all_feat[i][enc].VarName
@@ -296,18 +422,18 @@ def get_pyomo_model_core(self):
         index_to_var_domain = {}
         index_to_var_bounds = {}
         for i, feat in enumerate(self.feat_list):
-            if feat.is_real():
+            if isinstance(feat, Real):
                 index_to_var_domain[i] = pyo.Reals
                 index_to_var_bounds[i] = (feat.lb, feat.ub)
-            elif feat.is_cat():
+            elif isinstance(feat, Categorical):
                 for enc, cat in zip(feat.enc_cat_list, feat.cat_list):
                     # We encode the index of this variable by (i, enc, cat), where 'i' is the position in the list of
                     # features, 'enc' is the corresponding encoded numerical value and 'cat' is the category
                     index_to_var_domain[i, enc, cat] = pyo.Binary
-            elif feat.is_int():
+            elif isinstance(feat, Integer):
                 index_to_var_domain[i] = pyo.Integers
                 index_to_var_bounds[i] = (feat.lb, feat.ub)
-            elif feat.is_bin():
+            elif isinstance(feat, Binary):
                 index_to_var_domain[i] = pyo.Binary
 
         # Build Pyomo index set from dictionary keys
@@ -352,11 +478,11 @@ def i_to_bounds(model, i):
         return model
 
     def __str__(self):
-        out_str = list(["\nPROBLEM SUMMARY"])
+        out_str = ["\nPROBLEM SUMMARY"]
         out_str.append(len(out_str[-1][:-1]) * "-")
         out_str.append("features:")
         for feat in self.feat_list:
-            if feat.is_cat():
+            if isinstance(feat, Categorical):
                 out_str.append(
                     f"{feat.name} :: {feat.__class__.__name__} :: {feat.cat_list} "
                 )
@@ -371,104 +497,3 @@ def __str__(self):
         return "\n".join(out_str)
 
 
-class FeatureType:
-    def get_enc_bnds(self):
-        return (self.lb, self.ub)
-
-    def is_real(self):
-        return False
-
-    def is_cat(self):
-        return False
-
-    def is_int(self):
-        return False
-
-    def is_bin(self):
-        return False
-
-    def encode(self, xi):
-        return xi
-
-    def decode(self, xi):
-        return xi
-
-
-class Real(FeatureType):
-    def __init__(self, lb, ub, name):
-        self.lb = lb
-        self.ub = ub
-        self.name = name
-
-    def is_real(self):
-        return True
-
-
-class Categorical(FeatureType):
-    def __init__(self, cat_list, name):
-        self._cat_list = cat_list
-        self.name = name
-
-        # encode categories
-        self._enc2str, self._str2enc = {}, {}
-        self._enc_cat_list = []
-        for enc, cat in enumerate(cat_list):
-            self._enc_cat_list.append(enc)
-            self._enc2str[enc] = cat
-            self._str2enc[cat] = enc
-
-    def get_enc_bnds(self):
-        return self._enc_cat_list
-
-    @property
-    def cat_list(self):
-        return self._cat_list
-
-    @property
-    def enc_cat_list(self):
-        return self._enc_cat_list
-
-    def encode(self, xi):
-        return self._str2enc[xi]
-
-    def decode(self, xi):
-        return self._enc2str[xi]
-
-    def is_cat(self):
-        return True
-
-
-class Integer(FeatureType):
-    def __init__(self, lb, ub, name):
-        self.lb = lb
-        self.ub = ub
-        self.name = name
-
-    def is_int(self):
-        return True
-
-    def decode(self, xi):
-        return int(xi)
-
-
-class Binary(FeatureType):
-    def __init__(self, name):
-        self.lb = 0
-        self.ub = 1
-        self.name = name
-
-    def decode(self, xi):
-        return abs(int(xi))
-
-    def is_bin(self):
-        return True
-
-class Objective:
-    def __init__(self, name):
-        self.name = name
-
-class MinObjective(Objective):
-    sign = 1
-
-class MaxObjective(Objective):
-    sign = -1
\ No newline at end of file
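
A minimal usage sketch of the refactored ProblemConfig API above (the isinstance-based feature handling and the new `get_idx_and_feat_by_type` accessor). The import path, feature names, and bounds here are illustrative assumptions; only the methods and type strings that appear in the diff are used.

```python
# sketch only: the module path below is an assumption, the methods are the
# ones defined in the diff above
from entmoot.problem_config import Categorical, ProblemConfig

problem_config = ProblemConfig(rnd_seed=73)

# bounds are (lb, ub) for "real"/"integer", a list of categories for
# "categorical", and omitted entirely for "binary" (now handled early)
problem_config.add_feature("real", (5.0, 6.0), name="x1")
problem_config.add_feature("integer", (0, 10), name="x2")
problem_config.add_feature("categorical", ("blue", "orange", "gray"), name="x3")
problem_config.add_feature("binary", name="x4")
problem_config.add_min_objective()

# the typed accessor replaces the old feat.is_cat()/is_int()/is_bin() checks
for idx, feat in problem_config.get_idx_and_feat_by_type(Categorical):
    print(idx, feat.cat_list)  # -> 2 ('blue', 'orange', 'gray')

# random samples respect each feature's bounds / categories
print(problem_config.get_rnd_sample_list(num_samples=3))

# a Pyomo model core can be built straight from the config
model_core = problem_config.get_pyomo_model_core()
```
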
diff --git a/entmoot/utils.py b/entmoot/utils.py
index c23e925..8f51520 100644
--- a/entmoot/utils.py
+++ b/entmoot/utils.py
@@ -1,8 +1,9 @@
 from collections import namedtuple
+from typing import Optional
+
 import numpy as np
 from scipy.special import comb
 
-
 OptResult = namedtuple(
     "OptResult",
     ["opt_point", "opt_val", "mu_unscaled", "unc_unscaled", "active_leaf_enc"],
@@ -63,7 +64,7 @@ def grid(dimension: int, levels: int) -> np.ndarray:
     return out / n
 
 
-def sample(dimension: int, n_samples: int = 1, rng: np.random.Generator = None) -> np.ndarray:
+def sample(dimension: int, n_samples: int = 1, rng: Optional[np.random.Generator] = None) -> np.ndarray:
     """Sample uniformly from the unit simplex.
 
     Args:
@@ -73,6 +74,7 @@ def sample(dimension: int, n_samples: int = 1, rng: np.random.Generator = None)
     Returns:
         array, shape=(n_samples, dimension): Random samples from the unit simplex.
     """
-    if rng is None: rng = np.random.default_rng()
+    if rng is None:
+        rng = np.random.default_rng()
     s = rng.standard_exponential((n_samples, dimension))
     return (s.T / s.sum(axis=1)).T
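
A quick check of the re-typed `sample` helper: each row is a uniform draw from the unit simplex, so it is non-negative and sums to 1, and passing an explicit `np.random.Generator` makes the draw reproducible. Only the signature visible in the hunk above is assumed.

```python
import numpy as np

from entmoot.utils import sample

rng = np.random.default_rng(42)
w = sample(dimension=3, n_samples=5, rng=rng)

assert w.shape == (5, 3)
assert np.all(w >= 0)
assert np.allclose(w.sum(axis=1), 1.0)
```
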
diff --git a/pyproject.toml b/pyproject.toml
index eeca066..f995ca4 100644
--- a/pyproject.toml
+++ b/pyproject.toml
@@ -2,4 +2,13 @@
 markers = [
     "pipeline_test: marks tests as test which can be run in the pipeline (deselect with '-m \"not pipeline_test\"')",
     "consistent: marks tests that check consistency of results, i.e. may vary for different machines (deselect with '-m \"not consistent\"')"
-]
\ No newline at end of file
+]
+
+[tool.ruff.lint]
+select = ["E4", "E7", "E9", "F", "I001"]
+ignore = ["E721", "E731", "F722", "F821"]
+ignore-init-module-imports = true
+
+[tool.ruff.lint.per-file-ignores]
+"__init__.py" = ["F401"]
+"conf.py" = ["F401", "I001"]
\ No newline at end of file
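
The new `[tool.ruff.lint]` table only takes effect when ruff is invoked; below is a minimal sketch, assuming ruff is installed in the dev environment, of driving the same check from Python (on the command line this is simply `ruff check .`).

```python
import subprocess

# applies the selected rule sets (E4/E7/E9 pycodestyle errors, pyflakes F,
# import sorting I001) minus the ignored codes; __init__.py and conf.py
# additionally get the per-file ignores configured above
subprocess.run(["ruff", "check", "."], check=False)
```
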
diff --git a/requirements-dev.txt b/requirements-dev.txt
index c4a32d1..a95d6f2 100644
--- a/requirements-dev.txt
+++ b/requirements-dev.txt
@@ -1,6 +1,6 @@
 numpy<2.0.0
 lightgbm>=4.0.0
-pyomo
+pyomo==6.7.0
 gurobipy
 pytest-cov
 IPython
diff --git a/requirements.txt b/requirements.txt
index ad67895..3d8733d 100644
--- a/requirements.txt
+++ b/requirements.txt
@@ -1,4 +1,4 @@
 numpy<2.0.0
 lightgbm>=4.0.0
 gurobipy
-pyomo
\ No newline at end of file
+pyomo==6.7.0
\ No newline at end of file
diff --git a/setup.py b/setup.py
index 6f8d791..16096ec 100644
--- a/setup.py
+++ b/setup.py
@@ -20,12 +20,12 @@
         "numpy<=2.0.0",
         "lightgbm>=4.0.0",
         "gurobipy",
-        "pyomo"
+        "pyomo==6.7.0"
     ],
     setup_requires=[
         "numpy<=2.0.0",
         "lightgbm>=4.0.0",
         "gurobipy",
-        "pyomo"
+        "pyomo==6.7.0"
     ],
 )
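
The test changes below re-enable the `pipeline_test` marker registered in pyproject.toml. A short sketch of selecting those tests from Python, equivalent to `pytest -m pipeline_test` on the command line (use `-m "not pipeline_test"` to deselect them instead):

```python
import pytest

# run only the tests carrying the pipeline_test marker
pytest.main(["-m", "pipeline_test", "tests/test_curr.py"])
```
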
diff --git a/tests/test_curr.py b/tests/test_curr.py
index 35da508..3a4a359 100644
--- a/tests/test_curr.py
+++ b/tests/test_curr.py
@@ -33,7 +33,7 @@ def test_core_model_copy():
     assert len(core_model_pyomo._all_feat) == len(core_model_pyomo_copy._all_feat)
 
 
-# @pytest.mark.pipeline_test
+@pytest.mark.pipeline_test
 def test_multiobj_constraints():
     # define problem
     problem_config = ProblemConfig(rnd_seed=73)
@@ -99,7 +99,7 @@ def test_multiobj_constraints():
     assert round(x_opt, 5) == round(y_opt, 5) and round(y_opt, 5) == round(z_opt, 5)
 
 
-# @pytest.mark.pipeline_test
+@pytest.mark.pipeline_test
 def test_simple_test():
     def my_func(x: float) -> float:
         return x**2 + 1 + random.uniform(-0.2, 0.2)
@@ -173,7 +173,7 @@ def test_compare_pyomo_gurobipy_multiobj():
             assert math.isclose(res_gur.opt_val, res_pyo.opt_val, abs_tol=0.01)
 
 
-# @pytest.mark.pipeline_test
+@pytest.mark.pipeline_test
 def test_compare_pyomo_gurobipy_singleobj():
     """
     Ensures for a single objective example with l1  and l2 uncertainty metric and mixed feature types that optimization