diff --git a/machine-and-deep-learning/ollama/README.md b/machine-and-deep-learning/ollama/README.md
index 3aea1a243db2e74ec72975fec7f9cd3b3440a595..9854fb0aad8cee493e3bcd0d58ece419542d5cb4 100644
--- a/machine-and-deep-learning/ollama/README.md
+++ b/machine-and-deep-learning/ollama/README.md
@@ -16,14 +16,17 @@ Please find more information to Ollama in the following links:
 
 To demonstrate how to use Ollama with the `ollama-python` library, you first need to create a Python virtual environment. Run the following command **ONCE**:
 ```bash
-# Specify the Ollama root directory, where binaries should be placed and where venv should be created, such as:
+# Specify the Ollama root directory
 export OLLAMA_ROOT_DIR=${HOME}/ollama
-
-# initialize environment variables that refer to installation and virtual environment
+# set path variables derived from OLLAMA_ROOT_DIR
 source set_paths.sh
 
 # create the venv
-zsh create_venv.sh
+module load Python
+mkdir -p ${OLLAMA_ROOT_DIR}
+python -m venv ${OLLAMA_VENV_DIR}
+
+# activate the venv and install the ollama-python library
+source ${OLLAMA_VENV_DIR}/bin/activate
+pip install ollama
 ```
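+
+To double-check that the environment was set up correctly, a small, hypothetical sanity check could look like this (run with the venv still activated):
+```bash
+# confirm that the ollama-python library was installed into the venv
+python -c "import ollama; print('ollama-python is available')"
+```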
 
 ## 1. Running Ollama
@@ -32,7 +35,18 @@ zsh create_venv.sh
 
 ## 1.1. Running Ollama with the official container
 
-Since an Ollama container will be centrally provided on our HPC system **very soon**, you can start using the examples right away, either in your current shell or by submitting a batch job to run them on a backend node:
+An Ollama container will be centrally provided on our HPC system **very soon**. However, for now, let's assume we have created one ourselves with the following command:
+```bash
+# Specify the Ollama root directory
+export OLLAMA_ROOT_DIR=${HOME}/ollama
+# set path variables derived from OLLAMA_ROOT_DIR
+source set_paths.sh
+
+# build Ollama apptainer container
+apptainer build ${OLLAMA_COINTAINER_IMAGE} docker://ollama/ollama
+```
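+
+To verify the freshly built image before using it, a quick, hypothetical check could look like this (assuming `apptainer` is available in your shell):
+```bash
+# print the Ollama client version from inside the container image
+apptainer exec ${OLLAMA_COINTAINER_IMAGE} ollama --version
+```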
+
+Afterwards, you can start using the examples right away, either in your current shell or by submitting a batch job to run them on a backend node:
 ```bash
 # run in current active shell
 zsh submit_job_container.sh
@@ -47,14 +61,15 @@ Before beeing able to execute Ollama and run the examples, you need to download
 
 Execute the following instructions **ONCE** to download Ollama:
 ```bash
-# Specify the Ollama root directory, where binaries should be placed and where venv should be created, such as:
+# Specify the Ollama root directory
 export OLLAMA_ROOT_DIR=${HOME}/ollama
-
-# initialize environment variables that refer to installation and virtual environment
+# set path variables derived from OLLAMA_ROOT_DIR
 source set_paths.sh
 
-# download and extract the binariesthe venv
-zsh download_and_extract.sh
+# create required directory and download Ollama binaries
+mkdir -p ${OLLAMA_INSTALL_DIR} && cd ${OLLAMA_INSTALL_DIR}
+curl -L https://ollama.com/download/ollama-linux-amd64.tgz -o ollama-linux-amd64.tgz
+tar -xzf ollama-linux-amd64.tgz
 ```
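+
+Because `set_paths.sh` already prepends `${OLLAMA_INSTALL_DIR}/bin` to your `PATH`, a quick, hypothetical sanity check of the extracted binaries could be (a warning about no running server may appear):
+```bash
+# check that the extracted ollama binary is found on the PATH
+ollama --version
+```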
 
 Now you can execute the examples, either in the current shell or by submitting a batch job that runs the examples on a backend node:
diff --git a/machine-and-deep-learning/ollama/create_venv.sh b/machine-and-deep-learning/ollama/create_venv.sh
deleted file mode 100644
index f37f930ed1c23607da3cc55ff132c41842e1072d..0000000000000000000000000000000000000000
--- a/machine-and-deep-learning/ollama/create_venv.sh
+++ /dev/null
@@ -1,12 +0,0 @@
-#!/usr/bin/zsh
-
-# create required directory
-mkdir -p ${OLLAMA_ROOT_DIR}
-
-# create Python virtual
-module load Python
-python -m venv ${OLLAMA_VENV_DIR}
-# activate the environment
-source ${OLLAMA_VENV_DIR}/bin/activate
-# install the ollama-python library
-pip install ollama
\ No newline at end of file
diff --git a/machine-and-deep-learning/ollama/download_and_extract.sh b/machine-and-deep-learning/ollama/download_and_extract.sh
deleted file mode 100644
index b0d774ed827c50d1a803472533578efe9bd2ef56..0000000000000000000000000000000000000000
--- a/machine-and-deep-learning/ollama/download_and_extract.sh
+++ /dev/null
@@ -1,6 +0,0 @@
-#!/usr/bin/zsh
-
-# create required directory and download Ollama binaries
-mkdir -p ${OLLAMA_INSTALL_DIR} && cd ${OLLAMA_INSTALL_DIR}
-curl -L https://ollama.com/download/ollama-linux-amd64.tgz -o ollama-linux-amd64.tgz
-tar -xzf ollama-linux-amd64.tgz
\ No newline at end of file
diff --git a/machine-and-deep-learning/ollama/set_paths.sh b/machine-and-deep-learning/ollama/set_paths.sh
index 01704559c2f071a63680ac3b12b78c6c55c62929..feca70c6a5732bde07e409f067368d60f494523c 100644
--- a/machine-and-deep-learning/ollama/set_paths.sh
+++ b/machine-and-deep-learning/ollama/set_paths.sh
@@ -7,7 +7,7 @@ export OLLAMA_INSTALL_DIR=${OLLAMA_ROOT_DIR}/install
 export OLLAMA_VENV_DIR=${OLLAMA_ROOT_DIR}/venv_ollama
 
 # path to Ollama container image
-export OLLAMA_COINTAINER_IMAGE=${HOME}/ollama/ollama.sif
+export OLLAMA_COINTAINER_IMAGE=${OLLAMA_ROOT_DIR}/ollama.sif
 
 # extend path to make it executable in the shell
 export PATH="${OLLAMA_INSTALL_DIR}/bin:${PATH}"
\ No newline at end of file
diff --git a/machine-and-deep-learning/ollama/submit_job_container.sh b/machine-and-deep-learning/ollama/submit_job_container.sh
index 89a6e33d109982b847fadc790ef3392450ef37e2..e2d5429c0b8d1b3d60a61cbb18109387000c9452 100644
--- a/machine-and-deep-learning/ollama/submit_job_container.sh
+++ b/machine-and-deep-learning/ollama/submit_job_container.sh
@@ -14,10 +14,9 @@
 ### Load modules or software
 ############################################################
 
-# specify your Ollama root directory
+# Specify the Ollama root directory
 export OLLAMA_ROOT_DIR=${HOME}/ollama
-
-# set dependent paths
+# set path variables derived from OLLAMA_ROOT_DIR
 source set_paths.sh
 
 # load Python and activate venv
diff --git a/machine-and-deep-learning/ollama/submit_job_venv.sh b/machine-and-deep-learning/ollama/submit_job_venv.sh
index ef6521b56c542b69365e2df8cf231fdb462e8acc..b67164a9212cf68fe5c91d71b244213f1589f807 100644
--- a/machine-and-deep-learning/ollama/submit_job_venv.sh
+++ b/machine-and-deep-learning/ollama/submit_job_venv.sh
@@ -14,10 +14,9 @@
 ### Load modules or software
 ############################################################
 
-# specify your Ollama root directory
+# Specify the Ollama root directory
 export OLLAMA_ROOT_DIR=${HOME}/ollama
-
-# set dependent paths
+# set path variables derived from OLLAMA_ROOT_DIR
 source set_paths.sh
 
 # load Python and activate venv