Google Earth Engine – Fixing ‘Image.reduceRegions: Computed Value is Too Large (Error Code: 3)’

extract, google-earth-engine, google-earth-engine-javascript-api, reduceregion

I'm trying to extract about 100,000 data points, each with several bands from a composite, using Image.reduceRegions, but I get this error when I try to export the result as CSV:

Error: Image.reduceRegions: Computed value is too large. (Error code: 3)

Does anyone know how to solve this problem?
My sample code:

var afri =
    /* color: #00ff00 */
    /* displayProperties: [
      {
        "type": "rectangle"
      }
    ] */
    ee.Geometry.Polygon(
        [[[10.752981103928594, 0.7627966289396413],
          [10.752981103928594, -0.7615114430026542],
          [13.104055322678594, -0.7615114430026542],
          [13.104055322678594, 0.7627966289396413]]], null, false);
          
          
var S1composite = ee.ImageCollection('COPERNICUS/S1_GRD_FLOAT')
    .filter(ee.Filter.and(
      ee.Filter.bounds(afri),
      ee.Filter.date('2019-01-01', '2021-01-01'),
      ee.Filter.eq('instrumentMode', 'IW'),
      ee.Filter.listContains('transmitterReceiverPolarisation', 'VV'),
      ee.Filter.listContains('transmitterReceiverPolarisation', 'VH')
    ))
    .map(function (image) {
      return maskBorder(toGamma0(image)).select(['VV', 'VH']);
    })
    .map(function (img) {
      return img.addBands(
        img.select(['VV', 'VH']).log10().multiply(10).rename('VVdb', 'VHdb'));
    })
    .map(function (img) {
      var rvi = img.expression('(4 * VHdb) / (VVdb + VHdb)', {
        'VHdb': img.select('VHdb'),
        'VVdb': img.select('VVdb')
      }).rename('RVI');
      return img.addBands(rvi, null, true);
    })
    .map(function (img) {
      return img.addBands(
        img.select('VVdb').divide(img.select('VHdb')).rename('ratio_VV_VH'));
    })
    .median();
  
    
// Terrain correction
function toGamma0(image) {
  var gamma0 = image.expression('i/(cos(angle * pi / 180))', {
    'i': image.select(['VV', 'VH']),
    'angle': image.select('angle'),
    'pi': Math.PI
  });
  return image.addBands(gamma0, null, true);
}

function maskBorder(image) {
  var angle = image.select('angle');
  return image.updateMask(angle.gt(31).and(angle.lt(45)));
}
///////////////////////////////////////////////////////////////////////////////////////////////////////
// Load Palsar-2 data.

var Pal2composite = ee.ImageCollection('JAXA/ALOS/PALSAR/YEARLY/SAR')
    .filter(ee.Filter.and(
      ee.Filter.bounds(afri),
      ee.Filter.date('2019-01-01', '2021-01-01')
    ))
    .select(['HV', 'HH'])
    .map(function (img) {
      return img.addBands(
        img.select(['HV', 'HH']).pow(2).log10().multiply(10).subtract(83)
          .rename('HVdb', 'HHdb'));
    })
    .map(function (img) {
      var rvipal = img.expression('(4 * HVdb) / (HHdb + HVdb)', {
        'HVdb': img.select('HVdb'),
        'HHdb': img.select('HHdb')
      }).rename('RVIpal');
      return img.addBands(rvipal, null, true);
    })
    .map(function (img) {
      return img.addBands(
        img.select('HVdb').divide(img.select('HHdb')).rename('ratio_HV_HH'));
    })
    .median();
          
// Load Sentinel-2 spectral reflectance data.
var filter = ee.Filter.and(
  ee.Filter.bounds(afri),
  ee.Filter.date('2019-01-01', '2021-01-01')
);
var S2composite = ee.ImageCollection(
    ee.Join.saveFirst('cloudProbability').apply({
      primary: ee.ImageCollection('COPERNICUS/S2_SR').filter(filter),
      secondary: ee.ImageCollection('COPERNICUS/S2_CLOUD_PROBABILITY').filter(filter),
      condition: ee.Filter.equals({leftField: 'system:index', rightField: 'system:index'})
    }))
    .map(function (image) {
      var cloudFree = ee.Image(image.get('cloudProbability')).lt(30);
      return image.updateMask(cloudFree).divide(10000);
    })
    .select(
      ['B2', 'B3', 'B4', 'B8', 'B11', 'B12'],
      ['Blue', 'Green', 'Red', 'NIR', 'SWIR1', 'SWIR2'])
    .map(function (image) {
      var ndvi = image.expression('(NIR - Red) / (NIR + Red)', {
        'NIR': image.select('NIR'),
        'Red': image.select('Red')
      }).rename('NDVI');
      return image.addBands(ndvi, null, true);
    })
    .median();
var composite = S2composite
  .addBands(S1composite.select(['RVI','VVdb','VHdb','ratio_VV_VH']))
  .addBands(Pal2composite.select(['RVIpal','HVdb','HHdb','ratio_HV_HH']));
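// Note: neighborhoodToArray with ee.Kernel.square(1) turns every band into a 3x3 array per pixel,
// so each extracted feature carries nine values per band rather than a single number.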
var neighborhoods = composite.neighborhoodToArray(ee.Kernel.square(1));
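// 'point' is presumably an imported FeatureCollection with the ~100,000 sample locations
// (its definition is not shown in this snippet).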
var extracted = neighborhoods.reduceRegions({
  collection: point,
  reducer: ee.Reducer.first(),
  scale: 30,  // meters
  tileScale:16
});

Map.centerObject(afri, 12)
Map.addLayer(point);
print(point.limit(100));

print(extracted.limit(10));

var extracted_list = extracted.toList(extracted.size());
extracted_list = extracted_list.slice(0, 70000);
var medianBandDict = extracted_list.map(function (ele){
  var id = ee.Feature(ele).id();
  var coor = ee.Feature(ele).geometry().coordinates();
// var IDS = ee.Feature(ele).get('IDS');
  var RVI = ee.Array(ee.Feature(ele).get('RVI')).toList()
            .flatten().reduce(ee.Reducer.median());
  var VVdb = ee.Array(ee.Feature(ele).get('VVdb')).toList()
            .flatten().reduce(ee.Reducer.median());
  var VHdb = ee.Array(ee.Feature(ele).get('VHdb')).toList()
            .flatten().reduce(ee.Reducer.median());
  var ratio_VV_VH = ee.Array(ee.Feature(ele).get('ratio_VV_VH')).toList()
            .flatten().reduce(ee.Reducer.median());
  var RVIpal = ee.Array(ee.Feature(ele).get('RVIpal')).toList()
            .flatten().reduce(ee.Reducer.median());
  var HVdb = ee.Array(ee.Feature(ele).get('HVdb')).toList()
            .flatten().reduce(ee.Reducer.median());
  var HHdb = ee.Array(ee.Feature(ele).get('HHdb')).toList()
            .flatten().reduce(ee.Reducer.median());
  var ratio_HV_HH = ee.Array(ee.Feature(ele).get('ratio_HV_HH')).toList()
            .flatten().reduce(ee.Reducer.median());
  var Blue = ee.Array(ee.Feature(ele).get('Blue')).toList()
            .flatten().reduce(ee.Reducer.median());
  var Green = ee.Array(ee.Feature(ele).get('Green')).toList()
            .flatten().reduce(ee.Reducer.median());
  var NDVI = ee.Array(ee.Feature(ele).get('NDVI')).toList()
            .flatten().reduce(ee.Reducer.median());
  var NIR = ee.Array(ee.Feature(ele).get('NIR')).toList()
            .flatten().reduce(ee.Reducer.median());
  var Red = ee.Array(ee.Feature(ele).get('Red')).toList()
            .flatten().reduce(ee.Reducer.median());
  var SWIR1 = ee.Array(ee.Feature(ele).get('SWIR1')).toList()
            .flatten().reduce(ee.Reducer.median());
  var SWIR2 = ee.Array(ee.Feature(ele).get('SWIR2')).toList()
            .flatten().reduce(ee.Reducer.median());

  return {'id':id,
          //'IDS' : IDS,
          'long': coor.get(0),
          'lat': coor.get(1),          
          'Blue': Blue, 
          'Green': Green,
          'NDVI': NDVI, 
          'NIR': NIR, 
          'Red': Red,
          'SWIR1':SWIR1,
          'SWIR2': SWIR2,
          'RVI':RVI,
          'VVdb':VVdb,
          'VHdb':VHdb,
          'ratio_VV_VH':ratio_VV_VH,
          'RVIpal':RVIpal,
          'HVdb':HVdb,
          'HHdb':HHdb,
          'ratio_HV_HH':ratio_HV_HH,
          };
  
});

print("medianBandDict", medianBandDict);

var myFeatures = ee.FeatureCollection(medianBandDict.map(function (el) {
  // Cast each element of the list to a dictionary and use it directly as the feature properties.
  return ee.Feature(null, ee.Dictionary(el));
}));
Export.table.toDrive({
  collection: myFeatures,
  folder: 'GEE',
  description:'Kernel',
  fileFormat: 'CSV'
});

https://code.earthengine.google.com/9cf6b43a14080feab91cbb97f41ff096

Best Answer

When you have a lot of regions, it's often better to map over them and call reduceRegion() on each one instead of using reduceRegions(). That way, Earth Engine can split up the work better. I still doubt you'll be able to print your medianBandDict, with its 70,000 elements.

var extracted = point.map(function (feature) {
  return ee.Feature(feature.geometry(), neighborhoods.reduceRegion({
    reducer: ee.Reducer.first(),
    geometry: feature.geometry(),
    scale: 30
  }));
});

https://code.earthengine.google.com/754a9e14721a4ac72173f7bc02493ff6
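If the end goal is a CSV rather than something printed in the console, the mapped collection can also be handed straight to Export.table.toDrive. A minimal sketch (the description and folder names below are just placeholders, and the array-valued neighborhood properties would still need to be reduced to scalars, e.g. with your median step, before they are useful as CSV columns):

Export.table.toDrive({
  collection: extracted,            // FeatureCollection built with the per-feature reduceRegion above
  description: 'extracted_points',  // placeholder task name
  folder: 'GEE',                    // placeholder folder
  fileFormat: 'CSV'
});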
