diff --git a/maap_jupyter/maap_notebook.ipynb b/maap_jupyter/maap_notebook.ipynb
new file mode 100644
index 0000000000000000000000000000000000000000..c8afb0defb080c9dad98f3e389ab44e3df67cbb7
--- /dev/null
+++ b/maap_jupyter/maap_notebook.ipynb
@@ -0,0 +1,297 @@
+{
+ "cells": [
+  {
+   "cell_type": "markdown",
+   "id": "1a70970d",
+   "metadata": {},
+   "source": [
+    "# <center> Tree Classification Demo </center>"
+   ]
+  },
+  {
+   "cell_type": "markdown",
+   "id": "bf3c2a51",
+   "metadata": {},
+   "source": [
+    "<center>Tests for geopandas and gdal<center>"
+   ]
+  },
+  {
+   "cell_type": "markdown",
+   "id": "c55216cb",
+   "metadata": {},
+   "source": [
+    "## Import Packages"
+   ]
+  },
+  {
+   "cell_type": "code",
+   "execution_count": 5,
+   "id": "4d9291fa",
+   "metadata": {},
+   "outputs": [],
+   "source": [
+    "from datetime import datetime\n",
+    "import json\n",
+    "import time\n",
+    "import configparser\n",
+    "import requests\n",
+    "# On NASA , install with pip install geopandas\n",
+    "# On ESA; use a PolinSAR stack ( geopandas already embedded) \n",
+    "import geopandas as gpd\n",
+    "import pandas as pd\n",
+    "from creodias_finder import query\n",
+    "from shapely.geometry import Polygon,shape\n"
+   ]
+  },
+  {
+   "cell_type": "markdown",
+   "id": "711e5ecd",
+   "metadata": {},
+   "source": [
+    "## Define Functions"
+   ]
+  },
+  {
+   "cell_type": "code",
+   "execution_count": 6,
+   "id": "2b694047",
+   "metadata": {},
+   "outputs": [],
+   "source": [
+    "def __get_result_S1( geometry_search, str_plateformeS1, str_product_type, str_sensor_mode, start_date, end_date):\n",
+    "    '''\n",
+    "        Recupere dans un dataframe les produits S1 qui intersectent la zone de recherche\n",
+    "\n",
+    "        :geometry_search: geometrie de la zone de recherche\n",
+    "        :str_plateformeS1: nom de la plateforme\n",
+    "        :str_product_type: nom du product type\n",
+    "        :str_sensor_mode: nom du sensor mode\n",
+    "        :start_date: date debut interval recherche au format datetime\n",
+    "        :end_date: date fin interval recherche au format datetime\n",
+    "\n",
+    "\n",
+    "        return: df_groupby\n",
+    "        :rtype: dataframe\n",
+    "    '''\n",
+    "\n",
+    "    # liste des produits de l'api creodias\n",
+    "    results = query.query(\n",
+    "        str_plateformeS1,\n",
+    "        start_date=start_date,\n",
+    "        end_date=end_date,\n",
+    "        productType=str_product_type,\n",
+    "        sensorMode=str_sensor_mode,\n",
+    "        status=\"ONLINE\",\n",
+    "        geometry=geometry_search\n",
+    "    )\n",
+    "\n",
+    "    # init des listes des parametres a conserver \n",
+    "    list_title = []\n",
+    "    list_date = []\n",
+    "    list_orbit = []\n",
+    "    list_orbitDirection = []\n",
+    "    list_geom = []\n",
+    "    list_url = []\n",
+    "\n",
+    "    # remplissage des listes avec les parametres des produits du results de l'api\n",
+    "    for idproduct, dict_product in results.items():\n",
+    "        list_title.append(dict_product['properties']['title'])\n",
+    "        list_orbit.append(dict_product['properties']['relativeOrbitNumber'])\n",
+    "        list_orbitDirection.append(dict_product['properties']['orbitDirection'])\n",
+    "        list_geom.append(shape(dict_product['geometry']))\n",
+    "        list_url.append(dict_product['properties']['productIdentifier'])\n",
+    "        list_date.append(dict_product['properties']['title'][17:25])\n",
+    "\n",
+    "    # genere le gdf avec les produits s1\n",
+    "    gdf_product = gpd.GeoDataFrame({'title' : list_title, 'date' : list_date, 'orbit' : list_orbit, 'direction' : list_orbitDirection,'geometry' : list_geom, 'url' : list_url}, crs = \"4326\")\n",
+    "    # regroupe les produits qui ont les memes parametres de date, orbit et direction\n",
+    "    df_groupby = gdf_product.groupby(['date', 'orbit', 'direction']).agg({'title' : list, 'url' : list, 'geometry' : list}).reset_index()\n",
+    "\n",
+    "    return df_groupby\n",
+    "\n"
+   ]
+  },
+  {
+   "cell_type": "markdown",
+   "id": "2573ab84",
+   "metadata": {},
+   "source": [
+    "## Define the study area or list of tile"
+   ]
+  },
+  {
+   "cell_type": "code",
+   "execution_count": 7,
+   "id": "7e14efe8",
+   "metadata": {},
+   "outputs": [],
+   "source": [
+    "search_zone = [[4.751930185604931,45.37522170186796],[4.468812157001305,45.24092464324643],[4.018935832120382,44.88158298080388],[3.852035966569818,44.75014456444579],[3.898047329546759,44.56923769773521],[4.038870891160542,44.31329708744024],[4.241954580160842,44.26894961414283],[4.674279531570155,44.26246269728098],[4.882500597010154,44.8652889083827],[4.905098845459158,45.06277528749476],[4.782524828980907,45.368587240900965],[4.751930185604931,45.37522170186796]]\n",
+    "search_poly = Polygon(search_zone)\n"
+   ]
+  },
+  {
+   "cell_type": "markdown",
+   "id": "9b8ee178",
+   "metadata": {},
+   "source": [
+    "## Search S1 products"
+   ]
+  },
+  {
+   "cell_type": "code",
+   "execution_count": 8,
+   "id": "f7fd2a1c",
+   "metadata": {},
+   "outputs": [],
+   "source": [
+    "# parametres de recherche\n",
+    "str_plateformeS1 = 'Sentinel1'\n",
+    "str_product_type = 'GRD'\n",
+    "str_sensor_mode = 'IW'\n",
+    "start_date = datetime(2022, 6, 3)\n",
+    "end_date = datetime(2023, 1, 31)"
+   ]
+  },
+  {
+   "cell_type": "code",
+   "execution_count": 9,
+   "id": "3cb6aa8f",
+   "metadata": {},
+   "outputs": [
+    {
+     "name": "stdout",
+     "output_type": "stream",
+     "text": [
+      "query_url http://datahub.creodias.eu/resto/api/collections/Sentinel1/search.json?maxRecords=1000&startDate=2022-06-03T00%3A00%3A00&completionDate=2023-01-31T23%3A59%3A59&geometry=POLYGON+%28%284.751930185604931+45.37522170186796%2C+4.468812157001305+45.24092464324643%2C+4.018935832120382+44.88158298080388%2C+3.852035966569818+44.75014456444579%2C+3.898047329546759+44.56923769773521%2C+4.038870891160542+44.31329708744024%2C+4.241954580160842+44.26894961414283%2C+4.674279531570155+44.26246269728098%2C+4.882500597010154+44.8652889083827%2C+4.905098845459158+45.06277528749476%2C+4.782524828980907+45.368587240900965%2C+4.751930185604931+45.37522170186796%29%29&status=ONLINE&productType=GRD&sensorMode=IW\n",
+      "<Response [200]>\n"
+     ]
+    }
+   ],
+   "source": [
+    "\n",
+    "df_product = __get_result_S1( search_poly, str_plateformeS1, str_product_type, str_sensor_mode, start_date, end_date)\n"
+   ]
+  },
+  {
+   "cell_type": "code",
+   "execution_count": 10,
+   "id": "568856bd",
+   "metadata": {},
+   "outputs": [
+    {
+     "name": "stdout",
+     "output_type": "stream",
+     "text": [
+      "20\n"
+     ]
+    }
+   ],
+   "source": [
+    "df_product\n",
+    "df_product = df_product[df_product['orbit']==37]\n",
+    "df_product\n",
+    "print(len(df_product))"
+   ]
+  },
+  {
+   "cell_type": "markdown",
+   "id": "f85d0273",
+   "metadata": {},
+   "source": [
+    "## Extract list of URL "
+   ]
+  },
+  {
+   "cell_type": "code",
+   "execution_count": 11,
+   "id": "6cfbf9c7",
+   "metadata": {},
+   "outputs": [
+    {
+     "name": "stdout",
+     "output_type": "stream",
+     "text": [
+      "[\"Sentinel-1/SAR/GRD/2022/06/04/S1A_IW_GRDH_1SDV_20220604T055236_20220604T055301_043509_0531E5_DC1B.SAFE,\"Sentinel-1/SAR/GRD/2022/06/16/S1A_IW_GRDH_1SDV_20220616T055236_20220616T055301_043684_05371A_E51A.SAFE,\"Sentinel-1/SAR/GRD/2022/06/28/S1A_IW_GRDH_1SDV_20220628T055237_20220628T055302_043859_053C5F_3096.SAFE,\"Sentinel-1/SAR/GRD/2022/07/22/S1A_IW_GRDH_1SDV_20220722T055238_20220722T055303_044209_0546C9_677E.SAFE,\"Sentinel-1/SAR/GRD/2022/08/03/S1A_IW_GRDH_1SDV_20220803T055239_20220803T055304_044384_054BF3_4ACD.SAFE,\"Sentinel-1/SAR/GRD/2022/08/15/S1A_IW_GRDH_1SDV_20220815T055240_20220815T055305_044559_05516B_B0D3.SAFE,\"Sentinel-1/SAR/GRD/2022/08/27/S1A_IW_GRDH_1SDV_20220827T055240_20220827T055305_044734_055755_77D7.SAFE,\"Sentinel-1/SAR/GRD/2022/09/08/S1A_IW_GRDH_1SDV_20220908T055241_20220908T055306_044909_055D33_8C27.SAFE,\"Sentinel-1/SAR/GRD/2022/09/20/S1A_IW_GRDH_1SDV_20220920T055241_20220920T055306_045084_056320_8ED8.SAFE,\"Sentinel-1/SAR/GRD/2022/10/02/S1A_IW_GRDH_1SDV_20221002T055242_20221002T055307_045259_0568FD_84DA.SAFE,\"Sentinel-1/SAR/GRD/2022/10/14/S1A_IW_GRDH_1SDV_20221014T055242_20221014T055307_045434_056EDB_346E.SAFE,\"Sentinel-1/SAR/GRD/2022/10/26/S1A_IW_GRDH_1SDV_20221026T055242_20221026T055307_045609_05740C_5826.SAFE,\"Sentinel-1/SAR/GRD/2022/11/07/S1A_IW_GRDH_1SDV_20221107T055241_20221107T055306_045784_0579F9_649C.SAFE,\"Sentinel-1/SAR/GRD/2022/11/19/S1A_IW_GRDH_1SDV_20221119T055241_20221119T055306_045959_057FDC_E997.SAFE,\"Sentinel-1/SAR/GRD/2022/12/01/S1A_IW_GRDH_1SDV_20221201T055240_20221201T055305_046134_0585CE_836F.SAFE,\"Sentinel-1/SAR/GRD/2022/12/13/S1A_IW_GRDH_1SDV_20221213T055240_20221213T055305_046309_058BCE_F90F.SAFE,\"Sentinel-1/SAR/GRD/2022/12/25/S1A_IW_GRDH_1SDV_20221225T055239_20221225T055304_046484_0591C3_7C2C.SAFE,\"Sentinel-1/SAR/GRD/2023/01/06/S1A_IW_GRDH_1SDV_20230106T055239_20230106T055304_046659_0597A7_7ED7.SAFE,\"Sentinel-1/SAR/GRD/2023/01/18/S1A_IW_GRDH_1SDV_20230118T055238_20230118T055303_046834_059D93_5C5F.SAFE,\"Sentinel-1/SAR/GRD/2023/01/30/S1A_IW_GRDH_1SDV_20230130T055238_20230130T055303_047009_05A37A_61B6.SAFE\"]\n"
+     ]
+    }
+   ],
+   "source": [
+    "tmp_list_product_path = []\n",
+    "for prd in df_product['url']:\n",
+    "    hour_1 = int(prd[0].split('/')[-1].split('_')[4].split('T')[1])\n",
+    "    hour_2 = int(prd[1].split('/')[-1].split('_')[4].split('T')[1])\n",
+    "    \n",
+    "    if hour_1 > hour_2:\n",
+    "        tmp_list_product_path.append(prd[0])\n",
+    "    else:\n",
+    "        tmp_list_product_path.append(prd[1])\n",
+    "\n",
+    "list_product_path = []\n",
+    "for path_url in tmp_list_product_path:\n",
+    "    list_product_path.append(path_url[8:])\n",
+    "\n",
+    "prod_payload = \"[\\\"\"+\",\\\"\".join(list_product_path)+\"\\\"]\"\n",
+    "print(prod_payload)\n"
+   ]
+  },
+  {
+   "cell_type": "code",
+   "execution_count": 14,
+   "id": "0142ecbc",
+   "metadata": {},
+   "outputs": [],
+   "source": [
+    "## Test Gdal"
+   ]
+  },
+  {
+   "cell_type": "code",
+   "execution_count": 15,
+   "id": "ae1c9c71",
+   "metadata": {},
+   "outputs": [
+    {
+     "name": "stdout",
+     "output_type": "stream",
+     "text": [
+      "Creating output file that is 4680P x 5400L.\n",
+      "0...10...20...30...40...50...60...70...80...90...100 - done.\n",
+      "\n",
+      "[02/28 16:51:44] [INFO dem.py] Bounds: -156.0 18.8 -154.7 20.3\n",
+      "[02/28 16:51:44] [INFO cop_dem.py] Creating /projects/treeclassificationdemo/driver/L1_MSI_pixel_value_Composite_123.tif\n",
+      "[02/28 16:51:44] [INFO cop_dem.py] Fetching remote tiles...\n",
+      "[02/28 16:51:44] [INFO cop_dem.py] Running GDAL command:\n",
+      "[02/28 16:51:44] [INFO cop_dem.py] gdalwarp /vsicurl/https://raw.githubusercontent.com/scottstanie/sardem/master/sardem/data/cop_global.vrt /projects/treeclassificationdemo/driver/L1_MSI_pixel_value_Composite_123.tif -of GTiff -ot Int16 -te -156 18.8000000000000007 -154.699999999999989 20.3000000000000007 -tr 0.000277777777777777778 0.000277777777777777778 -s_srs \"epsg:4326+3855\" -t_srs \"epsg:4326\" -wo NUM_THREADS=4 -r nearest -wm 5000 -multi\n",
+      "\n"
+     ]
+    }
+   ],
+   "source": [
+    "!conda run -n maap sardem --bbox -156 18.8 -154.7 20.3 --data-source COP -o \"/projects/treeclassificationdemo/driver/maap_utils/L1_MSI_pixel_value_Composite_123.tif\" --output-format GTiff\n"
+   ]
+  }
+ ],
+ "metadata": {
+  "kernelspec": {
+   "display_name": "Maap",
+   "language": "python",
+   "name": "maap"
+  },
+  "language_info": {
+   "codemirror_mode": {
+    "name": "ipython",
+    "version": 3
+   },
+   "file_extension": ".py",
+   "mimetype": "text/x-python",
+   "name": "python",
+   "nbconvert_exporter": "python",
+   "pygments_lexer": "ipython3",
+   "version": "3.7.7"
+  }
+ },
+ "nbformat": 4,
+ "nbformat_minor": 5
+}
diff --git a/maap_jupyter/maap_utils/L1_MSI_pixel_value_Composite_123.tif b/maap_jupyter/maap_utils/L1_MSI_pixel_value_Composite_123.tif
new file mode 100644
index 0000000000000000000000000000000000000000..196d9a657e5be335fd1a8828f1b548908f3b4770
Binary files /dev/null and b/maap_jupyter/maap_utils/L1_MSI_pixel_value_Composite_123.tif differ
diff --git a/maap_jupyter/maap_utils/maap_authenticator.py b/maap_jupyter/maap_utils/maap_authenticator.py
new file mode 100644
index 0000000000000000000000000000000000000000..24b94b6a56dd02add5bd109136cf618b439a6f85
--- /dev/null
+++ b/maap_jupyter/maap_utils/maap_authenticator.py
@@ -0,0 +1,140 @@
+import configparser
+import requests
+import json
+import base64
+import hashlib
+import re
+from bs4 import BeautifulSoup
+from urllib.parse import urlparse
+from urllib.parse import parse_qs
+import os
+
+
+class MaapAuthenticator(object):
+
+    def __init__(self,auth_config_path,maap_config_path) -> None:
+
+        config = configparser.ConfigParser()
+        config.read(auth_config_path)
+
+        #Retrieve auth values
+        self.login = config['auth']['email']
+        self.password = config['auth']['password']
+
+        config = configparser.ConfigParser()
+        config.read(maap_config_path)
+
+        #Retrieve maap values
+        self.client_id = config['maap']['client_id']
+        self.url_token = config['maap']['url_token']
+
+
+    
+    def get_esa_token_with_esa_cred(self) -> str:
+
+        response = requests.post(self.url_token, data={'client_id': self.client_id, 'username': self.login, 'password': self.password,
+                                                        "grant_type": "password", "scope": "profile"})
+        data = json.loads(response.text)
+        return data['access_token']
+    
+
+    def get_esa_token_with_nasa_cred(self) -> str:
+        
+        session = requests.Session()
+
+        response = session.get("https://auth.val.esa-maap.org/realms/maap/.well-known/openid-configuration")
+        openid_config = json.loads(response.text)
+
+        response = session.get(openid_config["jwks_uri"])
+        certs = json.loads(response.text)
+
+
+
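+        # PKCE (RFC 7636): generate a random code_verifier and derive its S256 code_challenge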
+        code_verifier = base64.urlsafe_b64encode(os.urandom(40)).decode('utf-8')
+        code_verifier = re.sub('[^a-zA-Z0-9]+', '', code_verifier)
+
+        code_challenge = hashlib.sha256(code_verifier.encode('utf-8')).digest()
+        code_challenge = base64.urlsafe_b64encode(code_challenge).decode('utf-8')
+        code_challenge = code_challenge.replace('=', '')
+
+        response = session.get(openid_config["authorization_endpoint"],
+                                params={"redirect_uri":"https://portal.val.esa-maap.org/portal-val/ESA/home",
+                                        "response_type":"code",
+                                        "client_id":"portal",
+                                        "scope":"openid profile offline_access email",
+                                        "code_challenge_method":"S256",
+                                        "code_challenge":code_challenge})
+
+        soup = BeautifulSoup(response.text, 'html.parser')
+        nasa_broker_url = ""
+        for link in soup.find_all('a'):
+            if link.get('href') and 'broker/NASA/' in link.get('href'):
+                nasa_broker_url = link.get('href')
+
+        # Click on NASA in keycloak
+        response = session.get("https://auth.val.esa-maap.org"+nasa_broker_url)
+
+        soup = BeautifulSoup(response.text, 'html.parser')
+
+        redirect_url = ""
+        for link in soup.find_all('a'):
+            if'EarthData' in link.text:
+                redirect_url = link.get("href")
+
+
+        # Click on Earth data in CAS
+        response = session.get("https://auth.maap-project.org/cas/"+redirect_url)
+
+        soup = BeautifulSoup(response.text, 'html.parser')
+
+        authenticity_token=""
+        client_id=""
+        redirect_uri=""
+
+        for tag in soup.find_all("input", type="hidden"):
+            if tag.get("name") == "authenticity_token":
+                authenticity_token = tag.get("value")
+            if tag.get("name") == "client_id":
+                client_id = tag.get("value")
+            if tag.get("name") == "redirect_uri":
+                redirect_uri = tag.get("value")
+    
+        
+
+        data_urs = {
+            "authenticity_token": authenticity_token,
+            "username":self.login,
+            "password": self.password,
+            "client_id": client_id,
+            "redirect_uri":redirect_uri,
+            "response_type":"code",
+            }
+
+        # Click on login in URS
+        response = session.post("https://urs.earthdata.nasa.gov/login",data = data_urs)
+
+
+        soup = BeautifulSoup(response.text, 'html.parser')
+
+        for tag in soup.find_all("a", id="redir_link"):
+            redirect_url = tag.get("href")
+
+        # Follow redirection
+        response = session.get(redirect_url)
+
+        parsed_url  = urlparse(response.history[-1].headers['Location'])
+        code = parse_qs(parsed_url.query)['code'][0]
+
+
+
+        response = session.post("https://auth.val.esa-maap.org/realms/maap/protocol/openid-connect/token",
+                                data={
+                                    "grant_type":"authorization_code",
+                                    "code":code,
+                                    "client_id":"portal",
+                                    "code_verifier":code_verifier,
+                                    "redirect_uri":"https://portal.val.esa-maap.org/portal-val/ESA/home"
+                                
+        })
+
+        return json.loads(response.text)['access_token']
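+
+
+if __name__ == "__main__":
+    # Minimal usage sketch (not part of the original module). The two config file
+    # names below are placeholders: the first must contain an [auth] section with
+    # 'email' and 'password', the second a [maap] section with 'client_id' and
+    # 'url_token'.
+    authenticator = MaapAuthenticator("auth.conf", "maap.conf")
+    esa_token = authenticator.get_esa_token_with_esa_cred()
+    print("Obtained an ESA MAAP token of length", len(esa_token))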
diff --git a/maap_jupyter/maap_utils/maap_wpst.py b/maap_jupyter/maap_utils/maap_wpst.py
new file mode 100644
index 0000000000000000000000000000000000000000..e9d832d6d76a2380437e2f01a746ec35a5795e4e
--- /dev/null
+++ b/maap_jupyter/maap_utils/maap_wpst.py
@@ -0,0 +1,114 @@
+import requests
+import json
+import time
+from typing import List
+
+
+class MaapProcess(object):
+
+    def __init__(self,id:str, title:str) -> None:
+        self.id = id
+        self.title = title
+
+class MaapJob(object):
+
+    def __init__(self,p_id:str, job_id:str) -> None:
+        self.p_id = p_id
+        self.job_id = job_id
+        self.status = "NONE"
+
+
+
+class MaapWPST(object):
+
+    def __init__(self,copa_backend_url: str,oauth_token: str) -> None:
+
+        self.copa_backend_url = copa_backend_url
+        self.oauth_token = oauth_token
+        self.process_list = self.__load_process()
+        
+
+
+    def __load_process(self) -> List[MaapProcess]:
+
+        response = requests.get(self.copa_backend_url+'wpst/processes',headers = {'Authorization': 'Bearer '+ self.oauth_token})
+        response.raise_for_status()
+
+        results = []
+        for process_json in response.json()['processes']:
+            results.append(MaapProcess(process_json['id'],process_json['title']))
+
+        return results
+    
+    
+    def job_status(self,maap_job: MaapJob) -> str:
+
+        response = requests.get(self.copa_backend_url+'wpst/processes/'+maap_job.p_id+'/jobs/'+maap_job.job_id,headers = {'Authorization': 'Bearer '+ self.oauth_token})
+        response.raise_for_status()
+
+        res_json = response.json()
+        result = None
+        if 'status' in res_json:
+            result = res_json['status']
+
+        return result
+
+
+    def launch_process(self,title,inputs) -> MaapJob:
+
+        p_id = None
+        for process in self.process_list:
+            if title == process.title:
+                p_id = process.id
+
+        job_id = None
+
+        if p_id is not None:                             
+            payload = {'inputs':inputs,'outputs':[],'mode':'ASYNC','response':'RAW'}
+            response = requests.post(self.copa_backend_url+'wpst/processes/'+p_id+'/jobs',json=payload,headers = {'Authorization': 'Bearer '+ self.oauth_token})
+            response.raise_for_status()
+
+            res_json = response.json()
+
+            if 'jobId' in res_json:
+                job_id = res_json['jobId']
+
+            
+        else:
+            print("ERROR : Can not launch job for process :"+title+" !")
+
+        return MaapJob(p_id,job_id)
+    
+    def wait_for_final_status(self, maap_job):
+            
+        job_status = 'RUNNING'
+
+        while job_status not in ['SUCCEEDED','FAILED']:
+
+            response = requests.get(self.copa_backend_url+'wpst/processes/{}/jobs/{}'.format(maap_job.p_id, maap_job.job_id),
+                              headers = {'Authorization': 'Bearer '+ self.oauth_token})
+            job_status = json.loads(response.content).get('status')
+            # poll every 30 s until the job reaches a final status
+            if job_status not in ['SUCCEEDED','FAILED']:
+                time.sleep(30)
+
+        maap_job.status = job_status
+
+    
+    def write_outputs(self,maap_job, out_dir):
+
+        if maap_job.status == 'SUCCEEDED':
+            result_proc = requests.get('{}wpst/processes/{}/jobs/{}/result'.format(self.copa_backend_url, maap_job.p_id, maap_job.job_id), 
+                               headers = {'Authorization': 'Bearer '+ self.oauth_token})
+            json_res = json.loads(result_proc.content)
+            
+            if 'outputs' in json_res :
+
+                for out in json_res['outputs']:
+                    name = out['href'].split('?')[0].split('/')[-1]
+ 
+                    r = requests.get(out['href'])
+                    with open(out_dir+'/'+name, 'wb') as out_file:
+                        out_file.write(r.content)
+    
+    def delete_job(self,maap_job):
+        requests.delete('{}wpst/processes/{}/jobs/{}'.format(self.copa_backend_url, maap_job.p_id, maap_job.job_id),
+                        headers = {'Authorization': 'Bearer '+ self.oauth_token})
+
+
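+
+if __name__ == "__main__":
+    # Minimal usage sketch (not part of the original module). The backend URL,
+    # process title and inputs below are placeholders; a valid bearer token can
+    # be obtained, for instance, with MaapAuthenticator from maap_authenticator.py.
+    oauth_token = "<paste a valid bearer token here>"
+    wpst = MaapWPST("https://copa-backend.example.org/", oauth_token)
+
+    # list the processes exposed by the backend
+    for process in wpst.process_list:
+        print(process.id, "-", process.title)
+
+    inputs = []  # fill with the input structure expected by the chosen process
+    job = wpst.launch_process("my-process-title", inputs)
+    wpst.wait_for_final_status(job)
+    print("Job finished with status:", job.status)
+    if job.status == 'SUCCEEDED':
+        wpst.write_outputs(job, ".")
+    wpst.delete_job(job)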