Convert a List into a FeatureCollection in Google Earth Engine

feature-collection, google-earth-engine, list

I'm trying to convert a list into a FeatureCollection in Google Earth Engine in order to export it as a CSV file. I'm getting this error:

FeatureCollection (Error)
Collection, argument 'features': Invalid type.
Expected type: List.
Actual type: List<Dictionary>.
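
If I read the message right, ee.FeatureCollection() wants a list of ee.Feature objects, but the function I pass to map() returns plain JavaScript objects, which end up as dictionaries. Here is a minimal sketch (the property name value is just for illustration) that seems to reproduce the same error:

// Minimal sketch: mapping a function that returns a plain JS object over an
// ee.List yields a List<Dictionary>, which ee.FeatureCollection() rejects
// with the error shown above.
var dicts = ee.List.sequence(1, 3).map(function (x) {
  return {value: x};  // a dictionary, not an ee.Feature
});
print(ee.FeatureCollection(dicts));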

How can I solve this problem?
My sample code is below:

https://code.earthengine.google.com/69b6d4dacc25d1b53cebf69793e2a8cb

// Paracou 
var aoi = 
    /* color: #0b4a8b */
    /* shown: false */
    /* displayProperties: [
      {
        "type": "rectangle"
      },
      {
        "type": "rectangle"
      },
      {
        "type": "rectangle"
      },
      {
        "type": "rectangle"
      }
    ] */
    ee.Geometry.MultiPolygon(
        [[[[-52.6965668016105, 4.103697563685889],
           [-52.6965668016105, 4.028527912014533],
           [-52.66257784897378, 4.028527912014533],
           [-52.66257784897378, 4.103697563685889]]],
         [[[-52.944764020870714, 5.289271129398686],
           [-52.944764020870714, 5.247049874677615],
           [-52.91446578783849, 5.247049874677615],
           [-52.91446578783849, 5.289271129398686]]],
         [[[11.550656910465511, -0.15383260074272592],
           [11.550656910465511, -0.240177754768104],
           [11.645242329166683, -0.240177754768104],
           [11.645242329166683, -0.15383260074272592]]],
         [[[9.85547793421202, -1.8999002299831818],
           [9.85547793421202, -1.9469089457924809],
           [9.892556791633895, -1.9469089457924809],
           [9.892556791633895, -1.8999002299831818]]]], null, false);


// Load Sentinel-2 spectral reflectance data.
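// Note: 'point' is assumed to be a FeatureCollection of sample points imported
// in the full script at the link above; it is not defined in this snippet.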
var filter = ee.Filter.and(
  ee.Filter.bounds(point),
  ee.Filter.date('2019-01-01', '2020-01-01')
);
var S2composite = ee.ImageCollection(
    ee.Join.saveFirst('cloudProbability').apply({
        primary: ee.ImageCollection('COPERNICUS/S2_SR').filter(filter),
        secondary: ee.ImageCollection('COPERNICUS/S2_CLOUD_PROBABILITY').filter(filter),
        condition: ee.Filter.equals({leftField: 'system:index', rightField: 'system:index'})
    })
  ).map(function (image) {
  var cloudFree = ee.Image(image.get('cloudProbability')).lt(30)
  return image.updateMask(cloudFree).divide(10000)
  })
   .select(
      ['B2','B3','B4','B8','B11','B12'],
      ['Blue','Green','Red','NIR','SWIR1','SWIR2'])
   .map(function (image) {
     var ndvi = image.expression(
       '((NIR - Red) / (NIR + Red))', {
         'NIR': image.select('NIR'),
         'Red': image.select('Red')
       }).rename('NDVI');
     return image.addBands(ndvi, null, true);
   });
var median = S2composite.median();

var neighborhoods = median.neighborhoodToArray(ee.Kernel.square(1));
var extracted = neighborhoods.reduceRegions({
  collection: point,
  reducer: ee.Reducer.first(),
  scale: 25,  // meters
  tileScale:16
});
Map.centerObject(aoi, 3);
Map.addLayer(point);
print(point.limit(100));
print(extracted.limit(10));

var extracted_list = extracted.toList(extracted.size());

extracted_list = extracted_list.slice(0, 20);
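// The function passed to map() below returns plain JS objects, which are
// serialized as ee.Dictionary, so ee.FeatureCollection() receives a
// List<Dictionary> and raises the error shown above.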
var medianBandDict = ee.FeatureCollection(extracted_list.map(function (ele){
  var id = ee.Feature(ele).id();
  var geometry = ee.Feature(ele).geometry();
  var IDS = ee.Feature(ele).get('IDS');
  
  var blue_median = ee.Array(ee.Feature(ele).get('Blue')).toList()
            .flatten().reduce(ee.Reducer.median());

  var green_median = ee.Array(ee.Feature(ele).get('Green')).toList()
            .flatten().reduce(ee.Reducer.median());

  var ndvi_median = ee.Array(ee.Feature(ele).get('NDVI')).toList()
            .flatten().reduce(ee.Reducer.median());

  var nir_median = ee.Array(ee.Feature(ele).get('NIR')).toList()
            .flatten().reduce(ee.Reducer.median());

  var red_median = ee.Array(ee.Feature(ele).get('Red')).toList()
            .flatten().reduce(ee.Reducer.median());

  var swir1_median = ee.Array(ee.Feature(ele).get('SWIR1')).toList()
            .flatten().reduce(ee.Reducer.median());

  var swir2_median = ee.Array(ee.Feature(ele).get('SWIR2')).toList()
            .flatten().reduce(ee.Reducer.median());
  
  return {'id':id,
          'IDS' : IDS,
          'geometry' :geometry,
          'blue_median': blue_median, 
          'green_median' :green_median, 
          'ndvi_median': ndvi_median, 
          'nir_median': nir_median, 
          'red_median': red_median,
          'swir1_median':swir1_median,
          'swir2_median': swir2_median
          };
  
})).flatten();

print("medianBandDict", medianBandDict);

Export.table.toDrive({
  collection: medianBandDict,
  description:'Kernel',
  fileFormat: 'CSV'
});

Best Answer

The following code does that for all 6037 points in your feature collection (point 1502 was excluded because it has null properties). The key step is at the end: ee.FeatureCollection() expects a list of features, so each dictionary in the list is wrapped in an ee.Feature with a null geometry before the collection is built. Complete code in this link.

// Paracou 
var aoi = ee.Geometry.MultiPolygon(
        [[[[-52.6965668016105, 4.103697563685889],
           [-52.6965668016105, 4.028527912014533],
           [-52.66257784897378, 4.028527912014533],
           [-52.66257784897378, 4.103697563685889]]],
         [[[-52.944764020870714, 5.289271129398686],
           [-52.944764020870714, 5.247049874677615],
           [-52.91446578783849, 5.247049874677615],
           [-52.91446578783849, 5.289271129398686]]],
         [[[11.550656910465511, -0.15383260074272592],
           [11.550656910465511, -0.240177754768104],
           [11.645242329166683, -0.240177754768104],
           [11.645242329166683, -0.15383260074272592]]],
         [[[9.85547793421202, -1.8999002299831818],
           [9.85547793421202, -1.9469089457924809],
           [9.892556791633895, -1.9469089457924809],
           [9.892556791633895, -1.8999002299831818]]]], null, false);

// Load Sentinel-2 spectral reflectance data.
var filter = ee.Filter.and(
  ee.Filter.bounds(point),
  ee.Filter.date('2019-01-01', '2020-01-01')
);
var S2composite = ee.ImageCollection(
    ee.Join.saveFirst('cloudProbability').apply({
        primary: ee.ImageCollection('COPERNICUS/S2_SR').filter(filter),
        secondary: ee.ImageCollection('COPERNICUS/S2_CLOUD_PROBABILITY').filter(filter),
        condition: ee.Filter.equals({leftField: 'system:index', rightField: 'system:index'})
    })
  ).map(function (image) {
  var cloudFree = ee.Image(image.get('cloudProbability')).lt(30)
  return image.updateMask(cloudFree).divide(10000)
  })
   .select(
      ['B2','B3','B4','B8','B11','B12'],
      ['Blue','Green','Red','NIR','SWIR1','SWIR2'])
   .map(function (image) {
     var ndvi = image.expression(
       '((NIR - Red) / (NIR + Red))', {
         'NIR': image.select('NIR'),
         'Red': image.select('Red')
       }).rename('NDVI');
     return image.addBands(ndvi, null, true);
   });
var median = S2composite.median();

var neighborhoods = median.neighborhoodToArray(ee.Kernel.square(1));
var extracted = neighborhoods.reduceRegions({
  collection: point,
  reducer: ee.Reducer.first(),
  scale: 25,  // meters
  tileScale:16
});

Map.centerObject(aoi, 3);
Map.addLayer(point);

var feat1 = extracted.first();

print("coordinates", ee.Feature(feat1).geometry().coordinates());

print("count points", point.size());

print("count extracted", extracted.size());

var extracted_list = extracted.toList(extracted.size());

print(extracted_list);

extracted_list = extracted_list.slice(0, 1502).cat(extracted_list.slice(1503, 6038));

var medianBandDict = extracted_list.map(function (ele){
  
  var id = ee.Feature(ele).id();
  
  var coor = ee.Feature(ele).geometry().coordinates();
  
  var blue_median = ee.Array(ee.Feature(ele).get('Blue')).toList()
            .flatten().reduce(ee.Reducer.median());

  var green_median = ee.Array(ee.Feature(ele).get('Green')).toList()
            .flatten().reduce(ee.Reducer.median());

  var ndvi_median = ee.Array(ee.Feature(ele).get('NDVI')).toList()
            .flatten().reduce(ee.Reducer.median());

  var nir_median = ee.Array(ee.Feature(ele).get('NIR')).toList()
            .flatten().reduce(ee.Reducer.median());

  var red_median = ee.Array(ee.Feature(ele).get('Red')).toList()
            .flatten().reduce(ee.Reducer.median());

  var swir1_median = ee.Array(ee.Feature(ele).get('SWIR1')).toList()
            .flatten().reduce(ee.Reducer.median());

  var swir2_median = ee.Array(ee.Feature(ele).get('SWIR2')).toList()
            .flatten().reduce(ee.Reducer.median());
  
  return {'id':id,
          'long': coor.get(0),
          'lat': coor.get(1),
          'blue_median': blue_median, 
          'green_median' :green_median, 
          'ndvi_median': ndvi_median, 
          'nir_median': nir_median, 
          'red_median': red_median,
          'swir1_median':swir1_median,
          'swir2_median': swir2_median
          };
  
});

print("medianBandDict", medianBandDict);

var myFeatures = ee.FeatureCollection(medianBandDict.map(function(el){
  el = ee.List(el); // cast every element of the list

  return ee.Feature(null, {
    'dictionary': el
  });
}));

Export.table.toDrive({
  collection: myFeatures,
  folder: 'GEE_Folder',
  description:'Kernel',
  fileFormat: 'CSV'
});

The following picture shows the exported CSV opened with LibreOffice. It contains 6037 records. The CSV can be edited for better readability.

[Screenshot: exported CSV opened in LibreOffice]
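
If separate CSV columns are preferred over the single 'dictionary' column produced above, one possible variation is to return ee.Feature objects directly from the map() over the list, so every median becomes its own property. This is only a sketch under the same assumptions as the code above; the helper medianOf and the description Kernel_columns are names introduced here for illustration:

// Variation sketch: build Features directly, so each median is its own
// property and therefore its own column in the exported CSV.
var medianFeatures = ee.FeatureCollection(extracted_list.map(function (ele) {
  var f = ee.Feature(ele);
  var coor = f.geometry().coordinates();
  // Hypothetical helper: median of one neighborhood-array property.
  var medianOf = function (band) {
    return ee.Array(f.get(band)).toList().flatten().reduce(ee.Reducer.median());
  };
  return ee.Feature(null, {
    'id': f.id(),
    'long': coor.get(0),
    'lat': coor.get(1),
    'blue_median': medianOf('Blue'),
    'green_median': medianOf('Green'),
    'ndvi_median': medianOf('NDVI'),
    'nir_median': medianOf('NIR'),
    'red_median': medianOf('Red'),
    'swir1_median': medianOf('SWIR1'),
    'swir2_median': medianOf('SWIR2')
  });
}));

Export.table.toDrive({
  collection: medianFeatures,
  folder: 'GEE_Folder',
  description: 'Kernel_columns',
  fileFormat: 'CSV'
});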