Import the Required Libraries#

import openeo
import rasterio
from rasterio.plot import show
import matplotlib.pyplot as plt
import matplotlib.colors
import matplotlib.patches as mpatches
import geopandas as gpd
import json
import folium
import scipy.signal
import numpy as np

Connect to the Cloud Server#

connection = openeo.connect("openeo.dataspace.copernicus.eu").authenticate_oidc()
Authenticated using refresh token.

Define Area of Interest#

def read_json(filename: str) -> dict:
    with open(filename) as f:
        field = json.load(f)
    return field


pre_date = ["2018-03-03", "2018-09-03"]
post_date = ["2018-09-09", "2018-12-09"]
aoi = read_json("aoi/aoi.geojson")
m = folium.Map([42.75, 141.96], zoom_start=11)
folium.GeoJson(aoi).add_to(m)

tile = folium.TileLayer(
    tiles="https://server.arcgisonline.com/ArcGIS/rest/services/World_Imagery/MapServer/tile/{z}/{y}/{x}",
    attr="Esri",
    name="Esri Satellite",
    overlay=False,
    control=True,
).add_to(m)
m
# define a function to identify cloud-free pixels in the available data cube

def getBAP(scl, data, reducer="first"):
    # SCL classes 3, 8, 9 and 10 correspond to cloud shadow, cloud (medium
    # probability), cloud (high probability) and thin cirrus respectively
    mask = (scl == 3) | (scl == 8) | (scl == 9) | (scl == 10)

    # the mask is a bit noisy, so we apply smoothing with a 2D Gaussian kernel
    g = scipy.signal.windows.gaussian(11, std=1.6)
    kernel = np.outer(g, g)
    kernel = kernel / kernel.sum()

    # morphological dilation of the mask: convolution followed by a threshold
    mask = mask.apply_kernel(kernel)
    mask = mask > 0.1

    # apply the dilated cloud mask to the data cube
    data_masked = data.mask(mask)

    # select the Best Available Pixel along the temporal dimension
    return data_masked.reduce_dimension(reducer=reducer, dimension="t")
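To illustrate what the convolution-plus-threshold step does, the small standalone NumPy/SciPy sketch below (not part of the openEO process graph; the toy 5x5 "cloud" patch is purely illustrative) shows how a binary mask grows by roughly two pixels in each direction after smoothing with the same Gaussian kernel and thresholding at 0.1:

import numpy as np
import scipy.signal

# same Gaussian kernel as in getBAP
g = scipy.signal.windows.gaussian(11, std=1.6)
kernel = np.outer(g, g)
kernel = kernel / kernel.sum()

# toy binary mask with a 5x5 "cloudy" patch in the middle
toy_mask = np.zeros((21, 21))
toy_mask[8:13, 8:13] = 1

# convolution + threshold dilates the patch outwards
dilated = scipy.signal.convolve2d(toy_mask, kernel, mode="same") > 0.1
print("masked pixels before:", int(toy_mask.sum()), "after:", int(dilated.sum()))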
# load the Sentinel-2 L2A pre-event collection
s2pre = connection.load_collection(
    "SENTINEL2_L2A",
    temporal_extent=pre_date,
    spatial_extent=aoi,
    bands=["B04", "B08", "B12"],
    max_cloud_cover=90,
)

s2pre_scl = connection.load_collection(
    "SENTINEL2_L2A",
    temporal_extent=pre_date,
    spatial_extent=aoi,
    bands=["SCL"],
    max_cloud_cover=90,
)

# calculate NDVI from the pre-event collection
ndvi_pre = s2pre.ndvi()
# create a pre-event cloud-free mosaic
ndvi_pre = getBAP(s2pre_scl, ndvi_pre, reducer="median")
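For reference, the built-in ndvi() process derives the NDVI as (B08 - B04) / (B08 + B04) from the red and near-infrared bands. A minimal band-math sketch that should give an equivalent result (the ndvi_pre_manual name is illustrative and not used further in this workflow):

# equivalent explicit band arithmetic on the pre-event collection
red = s2pre.band("B04")
nir = s2pre.band("B08")
ndvi_pre_manual = (nir - red) / (nir + red)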

Post-Event Mosaic (For Scientific Purposes)#

# load the Sentinel-2 L2A post-event collection
s2post = connection.load_collection(
    "SENTINEL2_L2A",
    temporal_extent=post_date,
    spatial_extent=aoi,
    bands=["B04", "B08", "B12"],
    max_cloud_cover=90,
)

s2post_scl = connection.load_collection(
    "SENTINEL2_L2A",
    temporal_extent=post_date,
    spatial_extent=aoi,
    bands=["SCL"],
    max_cloud_cover=90,
)

# calculate NDVI from the post-event collection
ndvi_post = s2post.ndvi()
# create a post-event cloud-free mosaic
ndvi_post = getBAP(s2post_scl, ndvi_post, reducer="median")
ndvi_post.download("Result/NDVIDiff_post.tiff")

ndvi_pre.download("Result/NDVIDiff_pre.tiff")
# calculate the relative difference in NDVI: (pre - post) / pre
diff = (ndvi_pre - ndvi_post) / ndvi_pre
# let's execute the process as a batch job
diff.execute_batch(
    title="Landslides in Japan Mosaic",
    outputfile="Result/NDVIDiff.tiff"
)
0:00:00 Job 'j-2411148b64304a90bc89dfa7f770d361': send 'start'
0:00:14 Job 'j-2411148b64304a90bc89dfa7f770d361': created (progress 0%)
0:00:20 Job 'j-2411148b64304a90bc89dfa7f770d361': created (progress 0%)
0:00:26 Job 'j-2411148b64304a90bc89dfa7f770d361': created (progress 0%)
0:00:35 Job 'j-2411148b64304a90bc89dfa7f770d361': created (progress 0%)
0:00:44 Job 'j-2411148b64304a90bc89dfa7f770d361': created (progress 0%)
0:00:57 Job 'j-2411148b64304a90bc89dfa7f770d361': running (progress N/A)
0:01:12 Job 'j-2411148b64304a90bc89dfa7f770d361': running (progress N/A)
0:01:32 Job 'j-2411148b64304a90bc89dfa7f770d361': running (progress N/A)
0:01:56 Job 'j-2411148b64304a90bc89dfa7f770d361': running (progress N/A)
0:02:26 Job 'j-2411148b64304a90bc89dfa7f770d361': running (progress N/A)
0:03:04 Job 'j-2411148b64304a90bc89dfa7f770d361': running (progress N/A)
0:03:51 Job 'j-2411148b64304a90bc89dfa7f770d361': running (progress N/A)
0:04:49 Job 'j-2411148b64304a90bc89dfa7f770d361': running (progress N/A)
0:05:49 Job 'j-2411148b64304a90bc89dfa7f770d361': running (progress N/A)
0:06:49 Job 'j-2411148b64304a90bc89dfa7f770d361': running (progress N/A)
0:07:50 Job 'j-2411148b64304a90bc89dfa7f770d361': running (progress N/A)
0:08:50 Job 'j-2411148b64304a90bc89dfa7f770d361': running (progress N/A)
0:09:50 Job 'j-2411148b64304a90bc89dfa7f770d361': running (progress N/A)
0:10:59 Job 'j-2411148b64304a90bc89dfa7f770d361': finished (progress 100%)
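If the kernel is restarted after the batch job has finished, it is not necessary to rerun it: the job can be reconnected to by its ID and the results downloaded again. A minimal sketch using the job ID from the log above (the target folder is just an example):

# reconnect to the finished batch job and download its result files
job = connection.job("j-2411148b64304a90bc89dfa7f770d361")
job.get_results().download_files("Result/")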

Download the DEM (Separately) for Post-Processing#

dem = connection.load_collection(
    "COPERNICUS_30",
    spatial_extent=aoi,
    bands=["DEM"],
)
dem.download("Result/DEM.tiff")
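The rasterio and matplotlib imports from the beginning of the notebook can now be used to inspect the downloaded results locally. A minimal plotting sketch (assuming the batch job output Result/NDVIDiff.tiff exists; the colour map and value range are illustrative):

# read the relative NDVI difference and plot it
with rasterio.open("Result/NDVIDiff.tiff") as src:
    diff_arr = src.read(1)

fig, ax = plt.subplots(figsize=(8, 8))
im = ax.imshow(diff_arr, cmap="RdYlGn_r", vmin=-1, vmax=1)
fig.colorbar(im, ax=ax, label="Relative NDVI difference")
ax.set_title("Pre- vs post-event NDVI difference")
plt.show()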