Google Earth Engine Classification – Supervised Classification from Collocated Sentinel 1 and 2 Images

google-earth-engine, image-classification

I was trying to perform a supervised classification of a mangrove forest from collocated Sentinel-1 and Sentinel-2 images using the code below. With the help provided by @Xunilk, I finally managed to run the code. However, there is an error:
image: Layer error: Can't encode object: min()

Reduces an image collection by calculating the minimum value of each
pixel across the stack of all matching bands. Bands are matched by
name.

Args: this:collection (ImageCollection): The image collection to
reduce.

Other results seem as expected. I am not understanding the problem!

Best Answer

I checked your code and it now runs without any problems. I also fixed an issue with exporting the classified image to Google Drive.

// Image collections: Sentinel-1 SAR (GRD) and Sentinel-2 optical.
var sentinel1Collection = ee.ImageCollection('COPERNICUS/S1_GRD');
var sentinel2Collection = ee.ImageCollection('COPERNICUS/S2');

// Shared acquisition window for both sensors.
var startDate = '2022-12-01';
var endDate = '2023-12-30';

// Sentinel-1: restrict to the time window and the study area.
var sentinel1Filtered = sentinel1Collection
  .filterDate(startDate, endDate)
  .filterBounds(studyArea);

print(sentinel1Filtered);

// Sentinel-2: same constraints, plus a strict (< 5%) cloud-cover screen.
var sentinel2Filtered = sentinel2Collection
  .filterDate(startDate, endDate)
  .filterBounds(studyArea)
  .filter(ee.Filter.lt('CLOUDY_PIXEL_PERCENTAGE', 5));

print(sentinel2Filtered);

// Pair one Sentinel-2 image with the Sentinel-1 scene acquired on the same
// calendar day, appending its VV/VH polarisation bands.
// Returns the merged image, or the placeholder value 0 when no same-day
// Sentinel-1 acquisition exists (the caller filters those out).
var collocateImages = function(image) {
  var s2Image = ee.Image(image);
  var acquisitionDate = s2Image.date();

  // First Sentinel-1 scene inside the 24 h window that starts at the
  // Sentinel-2 acquisition date (null when there is no match).
  var s1Match = sentinel1Filtered
    .filterDate(acquisitionDate, acquisitionDate.advance(1, 'day'))
    .first();

  // Stack the SAR bands onto the optical image and carry over the
  // Sentinel-1 metadata properties.
  var merged = s2Image
    .addBands(s1Match.select(['VV', 'VH']))
    .copyProperties(s1Match);

  // Server-side guard: only the branch that is taken is evaluated, so the
  // band selection above is safe even when s1Match is null.
  return ee.Algorithms.If(s1Match, merged, 0);
};

// Collocate every Sentinel-2 image with its same-day Sentinel-1 match.
// Mapping over an ee.List (rather than the ImageCollection itself) lets the
// mapped function return the placeholder 0, which removeAll() then drops.
var s2List = sentinel2Filtered.toList(sentinel2Filtered.size());
var pairedList = s2List.map(collocateImages).removeAll([0]);
var collocatedImages = ee.ImageCollection(pairedList);

print(collocatedImages);

// Build a 4-band composite from one collocated image:
//   B2, B3 (Sentinel-2 optical), VV/VH ratio, and VH (Sentinel-1 SAR).
var createComposite = function(collocatedImage) {
  // Optical bands from Sentinel-2.
  var optical = collocatedImage.select(['B2', 'B3']);

  // SAR polarisation bands from Sentinel-1.
  var vv = collocatedImage.select('VV');
  var vh = collocatedImage.select('VH');

  // Per-pixel VV/VH ratio.
  // NOTE(review): S1_GRD bands are delivered in dB; a ratio of linear
  // backscatter would be a subtraction in dB — confirm the division here
  // is the intended feature.
  var polarisationRatio = vv.divide(vh);

  // Stack everything into one image and keep the source metadata.
  var composite = ee.Image.cat([optical, polarisationRatio, vh]);
  return composite.copyProperties(collocatedImage);
};

// Map over the collocated images and create composites
var CIcomposites = collocatedImages.map(createComposite);

print(CIcomposites);

Map.addLayer(studyArea);
Map.addLayer(CIcomposites);

//Supervised classification//
// Median composite of Collocated Image bands
// Pixel-wise median across the whole collection, clipped to the study area;
// this is the single image that is classified below.
var SCcomposite = CIcomposites.median().clip(studyArea);

// Display the input composite (empty {} = default visualization parameters).
Map.addLayer(SCcomposite, {}, 'image');

// Merge all ground-control-point classes into one FeatureCollection and add
// a uniform-random column used for the train/validation split.
// (The original code declared `gcps` twice with `var`; a single chained
// statement avoids the redundant re-declaration.)
var gcps = shallowater.merge(deepwater).merge(mudflats).merge(Barelands)
  .merge(Builtups).merge(Agriculture).merge(Crabfarms).merge(shrimpfarms)
  .merge(trees).merge(shrubs)
  .randomColumn();

// Split: random < 0.6 -> ~60% of points for training, the remaining ~40%
// for validation. (A 70/30 split is more common; adjust the threshold as
// needed. Note the number of points may be low.)
var trainingGcps = gcps.filter(ee.Filter.lt('random', 0.6));
var validationGcps = gcps.filter(ee.Filter.gte('random', 0.6));

// Sample the composite at the training points to build the training set;
// `landcover` is the class-label property carried on each GCP feature.
var training = SCcomposite.sampleRegions({
  collection: trainingGcps,
  properties: ['landcover'],
  scale: 10,       // Sentinel pixel size in metres
  tileScale: 16    // smaller tiles trade speed for memory headroom
});

// Train an SVM classifier on the sampled training data, using every band of
// the composite as a predictor.
var classifier = ee.Classifier.libsvm().train({
  features: training,
  classProperty: 'landcover',
  inputProperties: SCcomposite.bandNames()
});

// Classify every pixel of the composite.
var classified = SCcomposite.classify(classifier);

// Visualization: one palette entry per class code (0-9).
var classVis = {
  min: 0,
  max: 9,
  palette: ['aqua', 'blue', 'brown', 'gray', 'red',
            'yellow', 'orange', 'maroon', 'green', 'olive']
};

Map.addLayer(classified, classVis, 'Mongla_LULC');

// Export the classified map to Google Drive as a batch task.
Export.image.toDrive({
  image: classified,
  description: 'Sundarbans_LULC',
  scale: 10,          // output pixel size in metres
  region: studyArea,
  maxPixels: 1e13     // raise the default pixel limit for a large region
});

//**************************************************************************
// Accuracy Assessment
//**************************************************************************

// Sample the classified map at the held-out validation points; every
// sampled feature then carries both 'landcover' (ground truth) and
// 'classification' (the model's prediction).
var validationSample = classified.sampleRegions({
  collection: validationGcps,
  properties: ['landcover'],
  scale: 10,
  tileScale: 16
});

// Confusion matrix of actual vs. predicted class.
var testConfusionMatrix =
    validationSample.errorMatrix('landcover', 'classification');
// Printing the confusion matrix may time out; alternatively export it as CSV.
print('Confusion Matrix', testConfusionMatrix);
print('Test Accuracy', testConfusionMatrix.accuracy());

// Per-class area measurement: band 0 is the per-pixel area (m²),
// band 1 is the class label.
var areaImage = ee.Image.pixelArea().addBands(classified);

// Sum the pixel areas, grouped by the class band (band index 1).
var areas = areaImage.reduceRegion({
  reducer: ee.Reducer.sum().group({
    groupField: 1,
    groupName: 'class',
  }),
  geometry: studyArea,
  scale: 500,       // coarse scale keeps the reduction tractable
  maxPixels: 1e10
});

print(areas);

// Illustration of ee.List.flatten(), the operation used below to turn the
// per-class [name, area] pairs into a flat key/value list.
var nestedList = ee.List([['a', 'b'], ['c', 'd'], ['e', 'f']]);
print(nestedList);
// Output: [["a","b"],["c","d"],["e","f"]]
print(nestedList.flatten());
// Output: ["a","b","c","d","e","f"]

// Unpack the grouped reducer output into [classKey, areaKm2] pairs.
var classAreas = ee.List(areas.get('groups'));

var classAreaLists = classAreas.map(function(group) {
  var info = ee.Dictionary(group);
  // Class code, formatted as a string so it can serve as a dictionary key.
  var classKey = ee.Number(info.get('class')).format();
  // Area: m² -> km², rounded to the nearest whole number.
  var areaSqKm = ee.Number(info.get('sum')).divide(1e6).round();
  return ee.List([classKey, areaSqKm]);
});

// Flatten [[k1, v1], [k2, v2], ...] into [k1, v1, k2, v2, ...] so it can be
// interpreted as dictionary key/value pairs.
var result = ee.Dictionary(classAreaLists.flatten());
print(result);

Map.centerObject(studyArea, 9);

After running it in the GEE Code Editor, I got the output shown in the picture below. Note that the export task also ran successfully.

enter image description here