Skip to content
Snippets Groups Projects
Commit 0a2026c4 authored by edupin's avatar edupin
Browse files

Add conda initialisation and install plugin outside dockerfile

parent f65df6a4
No related branches found
No related tags found
No related merge requests found
auto_activate_base: false
envs_dirs:
- /projects/.conda/envs
......@@ -36,53 +36,40 @@ RUN sudo apt-get clean
RUN apt-get -y install sudo wget
RUN wget https://repo.anaconda.com/archive/Anaconda3-2020.02-Linux-x86_64.sh
RUN chmod +x Anaconda3-2020.02-Linux-x86_64.sh
RUN wget https://repo.anaconda.com/miniconda/Miniconda3-py37_4.9.2-Linux-x86_64.sh
RUN chmod +x Miniconda3-py37_4.9.2-Linux-x86_64.sh
RUN apt-get install -y --no-install-recommends rsync software-properties-common python3-lxml python3-pip python3-dev gdal-bin libgdal-dev octave && \
apt-get install -y libgl1-mesa-glx libegl1-mesa libxrandr2 libxrandr2 libxss1 libxcursor1 libxcomposite1 libasound2 libxi6 libxtst6 && \
pip3 install scipy && \
apt-get -y install sudo wget unzip sqlite3 libsqlite3-dev && \
pip3 install matplotlib && \
pip3 install property && \
pip install pillow && \
#RUN apt-get install -y --no-install-recommends rsync software-properties-common python3-lxml python3-pip python3-dev gdal-bin libgdal-dev octave && \
# apt-get install -y libgl1-mesa-glx libegl1-mesa libxrandr2 libxrandr2 libxss1 libxcursor1 libxcomposite1 libasound2 libxi6 libxtst6 && \
RUN apt-get -y install sudo wget unzip sqlite3 libsqlite3-dev && \
pip install -U scikit-learn && \
pip install pandas && \
pip install --user fiona && \
pip install Shapely && \
pip install opencv-python && \
pip install octave_kernel && \
#apt-get -y install doxygen && \
apt-get -y install jq && \
pip3 install --upgrade pip && \
apt-get -y install gcc && \
apt-get -y install g++ && \
add-apt-repository -y ppa:ubuntugis/ubuntugis-unstable && \
apt-get install gdal-bin && \
apt-get install libgdal-dev && \
pip install requests && \
# add-apt-repository -y ppa:ubuntugis/ubuntugis-unstable && \
# apt-get install gdal-bin && \
# apt-get install libgdal-dev && \
pip install OWSLib && \
apt-get install -y unzip git curl && \
bash Anaconda3-2020.02-Linux-x86_64.sh -b -p $HOME/miniconda && \
export PATH="$HOME/miniconda/bin:$PATH" && \
rm Anaconda3-2020.02-Linux-x86_64.sh && \
bash Miniconda3-py37_4.9.2-Linux-x86_64.sh -b -p $HOME/miniconda3 && \
export PATH="$HOME/miniconda3/bin:$PATH" && \
rm Miniconda3-py37_4.9.2-Linux-x86_64.sh && \
git clone https://github.com/geopandas/geopandas.git && \
cd geopandas && \
cd geopandas && \
pip install .
#RUN $HOME/miniconda3/bin/conda init
#RUN export CPLUS_INCLUDE_PATH=/usr/include/gdal
#RUN export C_INCLUDE_PATH=/usr/include/gdal
RUN pip install --global-option=build_ext --global-option="-I/usr/include/gdal" GDAL==`gdal-config --version`
RUN pip3 install namedlist==1.7 && \
pip install scikit-image && \
pip install equi7grid==0.0.10 && \
pip install numpydoc==0.8.0 && \
pip install packaging==19.0 && \
pip install pyproj==2.1.1 && \
pip install pytileproj
#RUN pip install --global-option=build_ext --global-option="-I/usr/include/gdal" GDAL==`gdal-config --version`
# installation of GDL
RUN apt -y install gnudatalanguage
......@@ -100,25 +87,43 @@ COPY RestClient.py /usr/bmap/RestClient.py
COPY quicklook_raster.py /usr/bmap/quicklook_raster.py
COPY ingestData.py /usr/bmap/ingestData.py
COPY ingestData.sh /usr/bmap/ingestData.sh
COPY OWSLib /usr/modules/OWSLib
COPY maap-s3.py /usr/bmap/maap-s3.py
COPY .condarc /usr/bmap/.condarc
COPY installLib.sh /usr/bmap/.installLib.sh
RUN chmod a+rwx /usr/bmap/
RUN chmod +x /usr/modules/OWSLib
RUN chmod +x /usr/bmap/initTemplate.sh
RUN chmod +x /usr/bmap/shareAlgorithm.sh
RUN chmod +x /usr/bmap/ingestData.sh
RUN chmod +x /usr/bmap/maap-s3.py
ENV PATH="/usr/bmap/:${PATH}"
ENV PATH="/home/user/miniconda3/bin:/usr/bmap/:${PATH}"
ENV PYTHONPATH="/usr/bmap/:${PYTHONPATH}"
ENV PATH="/usr/modules/:${PATH}"
ENV PYTHONPATH="/usr/modules/:${PYTHONPATH}"
#We add env variable to request the back end
ENV BMAP_BACKEND_URL=http://backend-val.biomass-maap.com/bmap-web/
RUN $HOME/miniconda3/bin/conda init && \
$HOME/miniconda3/bin/conda update -n base -c defaults conda -y
RUN conda config --set auto_activate_base false && \
echo "echo 'wait, we create conda environment...'" >> $HOME/.bashrc && \
touch $HOME/.bashrc && \
echo "cp /usr/bmap/.condarc /home/user/miniconda3/" >> $HOME/.bashrc && \
echo "cp /usr/bmap/.installLib.sh /projects" >> $HOME/.bashrc && \
echo "if [[ ! -d "/projects/.conda" ]]; then" >> $HOME/.bashrc && \
echo " conda create -n maap python=3.7 -y 2>&1 >/dev/null" >> $HOME/.bashrc && \
echo "fi" >> $HOME/.bashrc && \
echo "conda activate maap 2>&1 >/dev/null" >> $HOME/.bashrc && \
echo "conda install pip -y 2>&1 >/dev/null" >> $HOME/.bashrc && \
echo "echo 'Workspace is ready'" >> $HOME/.bashrc && \
echo "(&>/dev/null ./.installLib.sh &)" >> $HOME/.bashrc && \
echo "cd /projects >& /dev/null" >> $HOME/.bashrc && \
echo "export PATH=".conda/envs/maap/bin:/usr/bmap/:${PATH}"" >> $HOME/.bashrc
#RUN $HOME/miniconda3/bin/conda init
#USER user
USER 1001
......
#zip -r Project_template.zip Project_template
docker build -t kosted/bmap-stack:0.2 .
docker run --rm kosted/bmap-stack:0.2 pip freeze
docker push kosted/bmap-stack:0.2
export VERSION=0.3
docker build -t kosted/bmap-stack:$VERSION .
docker run --rm kosted/bmap-stack:$VERSION pip freeze
docker push kosted/bmap-stack:$VERSION
#docker tag kosted/bmap-stack:$VERSION registry.eu-west-0.prod-cloud-ocb.orange-business.com/cloud-biomass-maap/bmap-stack:$VERSION
#docker push registry.eu-west-0.prod-cloud-ocb.orange-business.com/cloud-biomass-maap/bmap-stack:$VERSION
wget -O .requirements.txt https://repo.uat.maap-project.org/esa-common/python-librairies/-/raw/master/requirements.txt -q
/projects/.conda/envs/maap/bin/pip install -r .requirements.txt -q
rm .requirements.txt
......@@ -14,17 +14,26 @@ import getopt
MAAP_ENV_TYPE = os.getenv("MAAP_ENV_TYPE")
CLIENT_ID = os.getenv("CLIENT_ID")
BEARER=""
USER_INFO_FILE_PATH="/usr/bmap/maap-s3-userinfo.json"
#If on Windows, store the info files in the current working directory
if sys.platform == 'win32':
USER_INFO_FILE_PATH=os.getcwd()+"\maap-s3-userinfo.json"
USER_LAST_UPLOAD_INFO_FILE_PATH=os.getcwd()+"\maap-s3-multipartinfo.json"
else :
USER_INFO_FILE_PATH="/usr/bmap/maap-s3-userinfo.json"
USER_LAST_UPLOAD_INFO_FILE_PATH="/usr/bmap/maap-s3-multipartinfo.json"
userinfo = {}
multipartinfo = {}
def display_help():
    """Print command-line usage for the maap-s3 tool and exit with status 2."""
    # NOTE(review): this diff view interleaves the legacy getopt-style usage
    # lines (-u/-d/-l/-r/-f) with the newer verb-style ones
    # (upload/download/list/delete/refresh/resume); confirm against the
    # committed file which set is actually kept.
    print('Usage: [option...] {-f|-u|-d|-l|-r}')
    print('Usage: [option...] {upload|download|list|delete|refresh|resume}')
    #print('-i Get a fresh token before any request. It ask for email and password')
    print('-u myFile.tiff locally path/myFile.tiff in the S3 Upload data in the S3')
    print('-d path/in/S3/file.tiff myFileName.tiff Download a data from the S3')
    print('-l folder/path List data in a subfolder')
    print('-r path/in/S3/file.tiff Delete an existing data on S3')
    print('-f refresh credentials and password')
    print('upload myFile.tiff locally path/myFile.tiff in the S3 Upload data in the S3')
    print('download myFileName.tiff path/in/S3/file.tiff Download a data from the S3')
    print('list folder/path List data in a subfolder')
    print('delete path/in/S3/file.tiff Delete an existing data on S3')
    print('refresh Refresh credentials and password')
    print('resume Resume last interrupted multipart upload')
    # Non-zero exit so shell callers see misuse / help as a failure.
    sys.exit(2)
......@@ -32,6 +41,8 @@ def display_help():
# Init the bearer #
#########################
def init():
if os.path.isfile(USER_INFO_FILE_PATH):
print("[INFO] Personal user info is find")
#Check if the file is created less than one hour
......@@ -206,7 +217,7 @@ def upload_multipart(sourceFile, destination):
listPresignedUrl = str1.replace('"','').split(",")
# we load the data
print(listPresignedUrl)
#print(listPresignedUrl)
parts = []
#sys.stdout = open("log.txt", "w")
......@@ -224,6 +235,18 @@ def upload_multipart(sourceFile, destination):
parts.append({'eTag': etag, 'partNumber': int(i+1)})
print(parts)
i = i+1
#We save also the multipart
#So we can resume it if upload failed
multipartinfo = {
'uploadId': uploadId,
'partsUpploaded': parts,
'sourceFile': sourceFile,
'destination': destination
}
#add the json in the file
with open(USER_LAST_UPLOAD_INFO_FILE_PATH, 'w') as outfile:
json.dump(multipartinfo, outfile)
#sys.stdout.close()
......@@ -231,7 +254,109 @@ def upload_multipart(sourceFile, destination):
url = "https://gravitee-gateway."+MAAP_ENV_TYPE.lower()+".esa-maap.org/s3/completeMultiPartUploadRequest"
params={'bucketName': 'bmap-catalogue-data', 'objectKey': key, 'nbParts': nbParts, 'uploadId': uploadId}
response = requests.get(url, data=str(parts), params = params, headers = {'Authorization': 'Bearer '+token})
#delete the file of multipart info because upload was success
os.remove(USER_LAST_UPLOAD_INFO_FILE_PATH)
###################################
# Resume failed multi part upload #
###################################
def resume():
    """Resume the last interrupted S3 multipart upload.

    Reads the multipart metadata saved by ``upload_multipart`` (upload id,
    destination key, source file, already-uploaded parts), requests
    presigned URLs only for the parts still missing, uploads them while
    persisting progress after each part, then completes the multipart
    upload and removes the metadata file. Prints a hint and does nothing
    if no resume metadata exists.
    """
    print("[INFO] Resume the last multipart upload")
    print("[INFO] Check last multipart upload metadata")

    if os.path.isfile(USER_LAST_UPLOAD_INFO_FILE_PATH):
        # Generate or refresh the token before any request.
        init()
        print("[INFO] Previous multi part upload file found")

        # Load the bearer token saved by init().
        token = ''
        with open(USER_INFO_FILE_PATH) as json_file:
            userinfo = json.load(json_file)
        token = userinfo['token']

        # Load the saved multipart upload state.
        with open(USER_LAST_UPLOAD_INFO_FILE_PATH) as json_file:
            multipartinfo = json.load(json_file)
        uploadId = multipartinfo['uploadId']
        destination = multipartinfo['destination']
        sourceFile = multipartinfo['sourceFile']
        partsUpploaded = multipartinfo['partsUpploaded']

        fileSize = os.stat(sourceFile).st_size
        print("Size " + str(fileSize))

        # Parts are fixed at 5 MiB, matching the original upload split.
        max_size = 5 * 1024 * 1024
        nbParts = math.ceil(fileSize / max_size)
        finalPart = nbParts - len(partsUpploaded)
        print("[INFO] We will have " + str(nbParts) + " parts minus already uploaded parts. We have to push " + str(finalPart) + " parts")

        # Ask the gateway for presigned URLs covering only the missing parts.
        url = "https://gravitee-gateway." + MAAP_ENV_TYPE.lower() + ".esa-maap.org/s3/generateListPresignedUrls"
        params = {'bucketName': 'bmap-catalogue-data', 'objectKey': destination, 'nbParts': finalPart, 'uploadId': uploadId}
        response = requests.get(url, params=params, headers={'Authorization': 'Bearer ' + token})
        stringList = response.text
        str1 = stringList.replace(']', '').replace('[', '')
        listPresignedUrl = str1.replace('"', '').split(",")

        # Re-read the file from the start, skipping chunks already uploaded.
        with open(sourceFile, 'rb') as f:
            i = 0
            presignedUrlIndex = 0
            while i < nbParts:
                file_data = f.read(max_size)
                # BUGFIX: was `i > len(partsUpploaded)`, which skipped the
                # first missing part (parts 0..len-1 are already uploaded).
                if i >= len(partsUpploaded):
                    print("Upload part " + str(i))
                    # BUGFIX: advertise the actual chunk size — the final
                    # part is usually smaller than max_size.
                    headers = {'Content-Length': str(len(file_data))}
                    #print(listPresignedUrl[i])
                    response = requests.put(listPresignedUrl[presignedUrlIndex], data=file_data, headers=headers)
                    #print(response.headers)
                    #print(response.text)
                    etag = response.headers['ETag']
                    partsUpploaded.append({'eTag': etag, 'partNumber': int(i + 1)})
                    presignedUrlIndex = presignedUrlIndex + 1

                    # Persist progress after every part so a new failure can
                    # itself be resumed.
                    multipartinfo = {
                        'uploadId': uploadId,
                        'partsUpploaded': partsUpploaded,
                        'sourceFile': sourceFile,
                        'destination': destination
                    }
                    with open(USER_LAST_UPLOAD_INFO_FILE_PATH, 'w') as outfile:
                        json.dump(multipartinfo, outfile)
                i = i + 1

        # Complete the multipart upload.
        # BUGFIX: the original referenced undefined names `key` and `parts`
        # here (NameError); use the saved destination key and the
        # accumulated part list instead.
        url = "https://gravitee-gateway." + MAAP_ENV_TYPE.lower() + ".esa-maap.org/s3/completeMultiPartUploadRequest"
        params = {'bucketName': 'bmap-catalogue-data', 'objectKey': destination, 'nbParts': nbParts, 'uploadId': uploadId}
        response = requests.get(url, data=str(partsUpploaded), params=params, headers={'Authorization': 'Bearer ' + token})

        # Upload succeeded: drop the resume metadata file.
        os.remove(USER_LAST_UPLOAD_INFO_FILE_PATH)
    else:
        print("[INFO] Please run upload. There are no upload to be resume")
###################
# Delete the data #
......@@ -250,9 +375,10 @@ def delete(destination):
#call the api to delete the data
#Get the presigned url to delete the data
url = "https://gravitee-gateway."+MAAP_ENV_TYPE.lower()+".esa-maap.org/s3/"+destination
response = requests.delete(url, headers = {'Authorization': 'Bearer '+token}, allow_redirects=False)
url = "http://gravitee-gateway."+MAAP_ENV_TYPE.lower()+".esa-maap.org/s3/"+destination
print(url)
response = requests.delete(url, headers = {'Authorization': 'Bearer '+token}, allow_redirects=False)
location = response.headers['Location']
#We have the
......@@ -293,9 +419,9 @@ def download(path, name):
if location:
#We download the data using the location
print("[INFO] we are about to download the data")
response = requests.get(location)
open(name, 'wb').write(response.content)
download_file(location, name)
#response = requests.get(location)
#open(name, 'wb').write(response.content)
print("[INFO] Download finished")
else:
......@@ -303,6 +429,22 @@ def download(path, name):
##########################
# download file using url #
##########################
def download_file(url, name):
    """Stream the content at *url* into a local file named *name*.

    Fetches the resource in 8 KiB chunks so arbitrarily large files
    never have to fit in memory, raises on HTTP error statuses, and
    returns the local file name.
    """
    with requests.get(url, stream=True) as resp:
        resp.raise_for_status()
        with open(name, 'wb') as out:
            # iter_content yields decoded chunks of the response body;
            # with a chunk-encoded response, chunk_size=None plus an
            # `if chunk:` guard would be needed instead.
            for block in resp.iter_content(chunk_size=8192):
                out.write(block)
    return name
##########################
# list data in s3 folder #
......@@ -335,52 +477,46 @@ def list(path):
# Store argument variable omitting the script name
argv = sys.argv[1:]
# Initialize result variable
result=0
try:
# Define getopt short and long options
options, args = getopt.getopt(sys.argv[1:], 'u:r:l:d:i:f:h', ['upload=', 'remove=','list=','download=','init=','refresh=','help='])
# Read each option using for loop
for opt, arg in options:
# Calculate the sum if the option is -a or --add
if opt in ('-u', '--upload'):
# Upload a data
if len(argv) != 3:
display_help()
else:
upload(argv[1], argv[2])
# Delete the data
elif opt in ('-r', '--remove'):
# Delete a data
if len(argv) != 2:
display_help()
else:
delete(argv[1])
elif opt in ('-l', '--list'):
# list a folder
if len(argv) != 2:
if len(argv) == 0:
display_help()
else:
if argv[0] == 'resume':
resume()
elif argv[0] == 'refresh':
refresh()
elif argv[0] == 'upload':
# Upload a data
if len(argv) != 3:
display_help()
else:
upload(argv[1], argv[2])
elif argv[0] == 'delete':
# Delete a data
if len(argv) != 2:
display_help()
else:
delete(argv[1])
elif argv[0] == 'download':
# Download a data
if len(argv) != 3:
display_help()
else:
download(argv[1], argv[2])
elif argv[0] == 'list':
# list a folder
if len(argv) != 2:
display_help()
else:
list(argv[1])
elif argv[0] == 'help':
display_help()
else:
list(argv[1])
elif opt in ('-d', '--download'):
# Download a data
if len(argv) != 3:
else:
display_help()
else:
download(argv[1], argv[2])
elif opt in ('-i', '--init'):
# Download a data
init()
elif opt in ('-f', '--refresh'):
# Download a data
refresh()
elif opt in ('-h', '--help'):
# Print the option
display_help()
......
0% or .
You are about to add 0 people to the discussion. Proceed with caution.
Finish editing this message first!
Please register or sign in to comment