How can I correctly display my slope in Google Earth Engine with the Map.addLayer
function? I don’t quite understand how to set the min and max values for the scale and offset bands. I want to show increasing, decreasing, and no trends.
// Adds the acquisition time as an extra band so it can serve as the
// independent variable in a per-pixel linear regression.
// The timestamp (ms since epoch) is divided by 1e18 so the fitted
// slopes are not vanishingly small numbers.
var addTime = function(image) {
  var scaledTime = image.metadata('system:time_start').divide(1e18);
  return image.addBands(scaledTime);
};
// Region of interest: Zimbabwe country boundary.
// NOTE(review): 'ft:' Fusion Table assets were retired by Google; replace
// with e.g. 'USDOS/LSIB_SIMPLE/2017' filtered on the country name.
var Zimbabwe = ee.FeatureCollection('ft:1tdSwUL7MVpOauSgRzqVTOwdfy17KDbw-1d9omPw').filterMetadata('Country', 'equals', 'Zimbabwe');
Map.addLayer(Zimbabwe);
// Load several years of CHIRPS (2000 to 2017).
// A distinct variable holds the asset id so `collection` is declared only
// once (the original re-declared `var collection` twice, first as a string
// and then as the ImageCollection built from it).
var collectionId = 'UCSB-CHG/CHIRPS/DAILY'; // daily precipitation
var band = 'precipitation';
//var resolution = 500;
var startDate = '2000';
var endDate = '2017';
var startMonthDay = '-11-01';   // cross-year season window start (Nov 1)
var endMonthDay = '-04-30';     // cross-year season window end (Apr 30)
var startMonthDay2 = '-01-01';  // calendar-year window start (Jan 1)
var endMonthDay2 = '-05-01';    // calendar-year window end (May 1)
// Choose whether or not to add layers to map view and generate charts.
var addToMap = 'TRUE';
var printCharts = 'TRUE';
// Daily images with the scaled time band attached to each one.
var collection = ee.ImageCollection(collectionId).map(addTime);
Map.centerObject(Zimbabwe, 6);
// Optionally draw the Zimbabwe outline (3 px wide, dark grey) on the map.
if (addToMap == 'TRUE') {
  var outlineImage = ee.Image().byte().paint({
    featureCollection: Zimbabwe,
    width: 3
  });
  Map.addLayer(outlineImage, {palette: '252525'});
}
// Build one multi-band image: each loop iteration appends one year's
// precipitation total (Jan 1 - May 1) as a new band.
// NOTE(review): linearFit() actually wants an ImageCollection in which
// every image has the same band names, not a single stacked image - see
// the answer below.
var imageblank = ee.Image();
for (var i = startDate; i <= endDate; i++) {
  // Previous year; kept because the chart section after the loop reads it.
  var iMod = i - 1;
  // Annual (Jan 1 - May 1) precipitation total, clipped to Zimbabwe.
  // The original also computed a Nov-Apr seasonal sum here
  // (iMod+startMonthDay .. i+endMonthDay) but immediately overwrote it
  // with this one - that dead computation has been removed.
  var collectionMean = collection
      .filterDate(i + startMonthDay2, i + endMonthDay2)
      .sum()
      .clip(Zimbabwe);
  imageblank = imageblank.addBands(collectionMean);
  // Label each layer with its year so layers are distinguishable (the
  // original gave every year the same 'Prec_Mean-before' label).
  Map.addLayer(collectionMean.select(['precipitation']),
      {min: 400, max: 1295, palette: ['white', 'green', 'blue']},
      'Prec_Mean ' + i);
}
// Optionally chart the regional mean precipitation time series.
// NOTE(review): this block runs AFTER the loop, so `i` and `iMod` leak out
// of it via var hoisting - here `i` has already passed endDate and `iMod`
// holds its last in-loop value, so the chart covers only the final season
// or so, probably not the intended 2000-2017 range; confirm and consider
// using startDate/endDate explicitly instead.
if(printCharts=='TRUE') {
var timeSeries = ee.ImageCollection(collection).select(band).filterDate(iMod+startMonthDay,i+endMonthDay);
var display = {
// NOTE(review): `collection` and `Zimbabwe` are EE objects here (not
// strings), so concatenating them yields their object representation in
// the title - confirm that is what you want.
title: collection+' '+band+' '+iMod+startMonthDay+' through '+i+endMonthDay+' '+Zimbabwe,
fontSize: 12,
hAxis: {title: 'Year'},
vAxis: {title: band},
// Draw a red linear trendline over the series.
trendlines: {0: {color: '#aa3319', visibleInLegend: true}},
series: {0: {color: '#161616'}}};
// Region-mean reducer; the 1444 is the nominal scale in meters.
print(ui.Chart.image.series(timeSeries, Zimbabwe, ee.Reducer.mean(), 1444).setOptions(display));
}
print('imageblank',imageblank);
// Per-pixel linear regression of precipitation against the scaled time band.
// NOTE(review): ee.ImageCollection(imageblank) wraps ONE stacked image, so
// linearFit() reduces over a single element and the fit is degenerate.
// linearFit() expects a collection of images, each containing the x band
// first and the y band second with the same names in every image; build
// that collection inside the loop instead of stacking bands onto
// imageblank (see the answer below).
var linearFit = ee.ImageCollection(imageblank).select(['system:time_start', 'precipitation']).reduce(ee.Reducer.linearFit());
// Display the trend: 'scale' is the fitted slope, 'offset' the intercept.
// With bands ['scale','offset','scale'] mapped to RGB and max [-1, 8e-5, 1],
// negative slopes stretch into the red channel and positive slopes into the
// blue channel, with the offset on green. Tune min/max to the actual slope
// range of your data (e.g. inspect it with reduceRegion and a minMax
// reducer) so increasing, decreasing and no-trend pixels are separable.
Map.addLayer(linearFit,
{min: 0, max: [-1, 8e-5, 1], bands: ['scale', 'offset', 'scale']}, 'fit');
Best Answer
I guess you were trying to follow this example by Google: example
I am not familiar with the results of this regression, but I adapted your code so it works similarly to the example.
In your for loop, you ended up with a single image to which every year's precipitation was added as a new band. However, linearFit, as used in the example, expects an image collection in which every image has the same band names. If you change these lines at the end of your loop, you will get an image collection without the dummy image:
You can then follow the example by Google to calculate and display the linear regression using linearFit:
Link to full code