Here's some python code that does what you want, reading GDAL files that represent data at specific times and writing to a single NetCDF file that is CF-Compliant
#!/usr/bin/env python
'''
Convert a bunch of GDAL readable grids to a NetCDF Time Series.
Here we read a bunch of files that have names like:
/usgs/data0/prism/1890-1899/us_tmin_1895.01
/usgs/data0/prism/1890-1899/us_tmin_1895.02
...
/usgs/data0/prism/1890-1899/us_tmin_1895.12
'''
import numpy as np
import datetime as dt
import os
import gdal
import netCDF4
import re
# --- Discover grid geometry from one sample file -------------------------
ds = gdal.Open('/usgs/data0/prism/1890-1899/us_tmin_1895.01')
a = ds.ReadAsArray()
nlat, nlon = np.shape(a)
# GetGeoTransform() -> (x_origin, dx, rot, y_origin, rot, dy); dy is
# negative for north-up grids, so lat decreases down the rows.
b = ds.GetGeoTransform()  # bbox, interval
lon = np.arange(nlon)*b[1]+b[0]
lat = np.arange(nlat)*b[5]+b[3]
# NOTE(review): these are cell-corner coordinates; strict CF cell centers
# would add half a pixel (b[1]/2, b[5]/2) — confirm against the source data.
basedate = dt.datetime(1858, 11, 17, 0, 0, 0)  # Modified Julian Date epoch
# --- Create NetCDF file ---------------------------------------------------
nco = netCDF4.Dataset('time_series.nc', 'w', clobber=True)
# chunking is optional, but can improve access a lot:
# (see: http://www.unidata.ucar.edu/blogs/developer/entry/chunking_data_choosing_shapes)
chunk_lon = 16
chunk_lat = 16
chunk_time = 12
# create dimensions, variables and attributes:
nco.createDimension('lon', nlon)
nco.createDimension('lat', nlat)
nco.createDimension('time', None)  # unlimited: grows as records are appended
timeo = nco.createVariable('time', 'f4', ('time',))
timeo.units = 'days since 1858-11-17 00:00:00'
timeo.standard_name = 'time'
lono = nco.createVariable('lon', 'f4', ('lon',))
lono.units = 'degrees_east'
lono.standard_name = 'longitude'
lato = nco.createVariable('lat', 'f4', ('lat',))
lato.units = 'degrees_north'
lato.standard_name = 'latitude'
# create container variable for CRS: lon/lat WGS84 datum
crso = nco.createVariable('crs', 'i4')
crso.long_name = 'Lon/Lat Coords in WGS84'  # fixed: was 'csro' (NameError)
crso.grid_mapping_name = 'latitude_longitude'
crso.longitude_of_prime_meridian = 0.0
crso.semi_major_axis = 6378137.0
crso.inverse_flattening = 298.257223563
# create short integer variable for temperature data, with chunking
tmno = nco.createVariable('tmn', 'i2', ('time', 'lat', 'lon'),
   zlib=True,chunksizes=[chunk_time,chunk_lat,chunk_lon],fill_value=-9999)
tmno.units = 'degC'
tmno.scale_factor = 0.01
tmno.add_offset = 0.00
tmno.long_name = 'minimum monthly temperature'
tmno.standard_name = 'air_temperature'
tmno.grid_mapping = 'crs'
# Write raw packed integers as-is; presumably the source grids are already
# stored as degC*100 — TODO confirm against the PRISM data documentation.
tmno.set_auto_maskandscale(False)
nco.Conventions = 'CF-1.6'
# write lon,lat
lono[:] = lon
lato[:] = lat
# raw string avoids the invalid-escape warning for '\.' in modern Python
pat = re.compile(r'us_tmin_[0-9]{4}\.[0-9]{2}')
itime = 0
# --- Step through data, writing time and data to NetCDF -------------------
for root, dirs, files in os.walk('/usgs/data0/prism/1890-1899/'):
    dirs.sort()
    files.sort()  # sorted names => monotonically increasing time axis
    for f in files:
        if pat.match(f):
            # parse year/month out of names like 'us_tmin_1895.01'
            year = int(f[8:12])
            mon = int(f[13:15])
            date = dt.datetime(year, mon, 1, 0, 0, 0)
            print(date)
            # days (possibly fractional) since the base date
            dtime = (date - basedate).total_seconds() / 86400.
            timeo[itime] = dtime
            # min temp
            tmn_path = os.path.join(root, f)
            print(tmn_path)
            tmn = gdal.Open(tmn_path)
            a = tmn.ReadAsArray()  # data
            tmno[itime, :, :] = a
            itime = itime + 1
nco.close()
GDAL and NetCDF4 Python can be a bit of a pain to build, but the good news is that they are part of most scientific python distributions (Python(x,y), Enthought Python Distribution, Anaconda, ...)
Update:
I haven't done polar stereographic in CF-compliant NetCDF yet, but it should look something like this. Here I've assumed that central_meridian
and latitude_of_origin
in GDAL are the same as straight_vertical_longitude_from_pole
and latitude_of_projection_origin
in CF:
#!/usr/bin/env python
'''
Convert a bunch of GDAL readable grids to a NetCDF Time Series.
Here we read a bunch of files that have names like:
/usgs/data0/prism/1890-1899/us_tmin_1895.01
/usgs/data0/prism/1890-1899/us_tmin_1895.02
...
/usgs/data0/prism/1890-1899/us_tmin_1895.12
'''
import numpy as np
import datetime as dt
import os
import gdal
import netCDF4
import re
# --- Discover grid geometry from one sample file -------------------------
ds = gdal.Open('/usgs/data0/prism/1890-1899/us_tmin_1895.01')
a = ds.ReadAsArray()
ny, nx = np.shape(a)
# GetGeoTransform() -> (x_origin, dx, rot, y_origin, rot, dy); dy is
# negative for north-up grids, so y decreases down the rows.
b = ds.GetGeoTransform()  # bbox, interval
x = np.arange(nx)*b[1]+b[0]
y = np.arange(ny)*b[5]+b[3]
# NOTE(review): these are cell-corner coordinates; strict CF cell centers
# would add half a pixel (b[1]/2, b[5]/2) — confirm against the source data.
basedate = dt.datetime(1858, 11, 17, 0, 0, 0)  # Modified Julian Date epoch
# --- Create NetCDF file ---------------------------------------------------
nco = netCDF4.Dataset('time_series.nc', 'w', clobber=True)
# chunking is optional, but can improve access a lot:
# (see: http://www.unidata.ucar.edu/blogs/developer/entry/chunking_data_choosing_shapes)
chunk_x = 16
chunk_y = 16
chunk_time = 12
# create dimensions, variables and attributes:
nco.createDimension('x', nx)
nco.createDimension('y', ny)
nco.createDimension('time', None)  # unlimited: grows as records are appended
timeo = nco.createVariable('time', 'f4', ('time',))
timeo.units = 'days since 1858-11-17 00:00:00'
timeo.standard_name = 'time'
xo = nco.createVariable('x', 'f4', ('x',))
xo.units = 'm'
xo.standard_name = 'projection_x_coordinate'
yo = nco.createVariable('y', 'f4', ('y',))
yo.units = 'm'
yo.standard_name = 'projection_y_coordinate'
# create container variable for CRS: polar stereographic, WGS84 datum.
# NOTE(review): assumes GDAL's central_meridian/latitude_of_origin map to
# CF's straight_vertical_longitude_from_pole/latitude_of_projection_origin
# — verify for your projection definition.
crso = nco.createVariable('crs', 'i4')
crso.grid_mapping_name = 'polar_stereographic'
crso.straight_vertical_longitude_from_pole = -45.
crso.latitude_of_projection_origin = 70.
crso.scale_factor_at_projection_origin = 1.0
crso.false_easting = 0.0
crso.false_northing = 0.0
crso.semi_major_axis = 6378137.0
crso.inverse_flattening = 298.257223563
# create short integer variable for temperature data, with chunking
tmno = nco.createVariable('tmn', 'i2', ('time', 'y', 'x'),
   zlib=True,chunksizes=[chunk_time,chunk_y,chunk_x],fill_value=-9999)
tmno.units = 'degC'
tmno.scale_factor = 0.01
tmno.add_offset = 0.00
tmno.long_name = 'minimum monthly temperature'
tmno.standard_name = 'air_temperature'
tmno.grid_mapping = 'crs'
# Write raw packed integers as-is; presumably the source grids are already
# stored as degC*100 — TODO confirm against the PRISM data documentation.
tmno.set_auto_maskandscale(False)
nco.Conventions = 'CF-1.6'
# write x,y
xo[:] = x
yo[:] = y
# raw string avoids the invalid-escape warning for '\.' in modern Python
pat = re.compile(r'us_tmin_[0-9]{4}\.[0-9]{2}')
itime = 0
# --- Step through data, writing time and data to NetCDF -------------------
for root, dirs, files in os.walk('/usgs/data0/prism/1890-1899/'):
    dirs.sort()
    files.sort()  # sorted names => monotonically increasing time axis
    for f in files:
        if pat.match(f):
            # parse year/month out of names like 'us_tmin_1895.01'
            year = int(f[8:12])
            mon = int(f[13:15])
            date = dt.datetime(year, mon, 1, 0, 0, 0)
            print(date)
            # days (possibly fractional) since the base date
            dtime = (date - basedate).total_seconds() / 86400.
            timeo[itime] = dtime
            # min temp
            tmn_path = os.path.join(root, f)
            print(tmn_path)
            tmn = gdal.Open(tmn_path)
            a = tmn.ReadAsArray()  # data
            tmno[itime, :, :] = a
            itime = itime + 1
nco.close()
Here's a world file for you (as per Gene's suggestion). Cut and paste the following into a text editor and name the file the same as your full-sized map but give it the extension '.pgw' instead of '.png':
1.0
0.0
0.0
-1.0
0.0
3231.0
I've made the following assumptions:
- Your map has a resolution (pixel size) of 1m square (i.e. each pixel is a reasonable stride length for a fantasy game humanoid character). If I am wrong, edit lines 1 and 4 accordingly.
- I have assumed that your map is not rotated.
- I have assumed that your LL corner is at 0,0 in your world. If your pixel size is bigger than 1m square then you will need to multiply your Y coordinate (3231.0) accordingly.
- Technically the UL coordinate (last two lines) is the center of your pixel. I have been lazy and set it to the corner. This in theory means that your coordinates would be consistently half a meter out but it's a fantasy world so nobody will know; it is consistent so won't screw up any distant calculations; and it saves a negative value as one of your origin coordinates. Adjust if you are sufficiently worried :)
I would then re-tile your map from the georeferenced full-sized one (this saves you making a world file for each tile)
Now, if you load your fantasy map into a GIS or similar system, you will probably be asked for the Spatial Reference System (SRS). The World file doesn't encapsulate that. It just locates and orients the image file in space relative to a nominal 0,0 origin. Since this is a fantasy map, all you need to do is pretend that it is in the real world somewhere and pick any coordinate system that gives you a flat projection with units in meters (or feet if you prefer, but you may have to adjust the values for resolution in the world file). A couple of suggestions for SRS could be:
- EPSG: 3857 (aka 900913)
- EPSG: 27700 (UK National Grid - the advantage of a national grid system UK or otherwise, is that you mostly stay clear of the poles and the international dateline and can hopefully avoid any weirdness which may result from such a location)
Your choice might not matter at all... on the other hand, your choice of SRS might be influenced by what you intend to do with your KML/CSV file (like you might be making a game with a post-apocalyptic theme but set in Quebec ...with zombies...). If you are using Google Earth or similar as a 'platform' for your game/fantasy world, so long as all your other data are in the same coordinate system, all should be well.
However, I do wonder if you may be better sticking with your PNG file as a text format like KML/CSV will be big. A PNG may be better for an internet-based game (reducing bandwidth). If you have a stand-alone scenario, most programming languages understand image formats well (especially if you call the GDAL libraries) and so I'm not convinced that a text-based format is even necessary here - but that's your choice of course.
Best Answer
Here is an example that does roughly what you ask for. The main parameters are the
geotransform
array that gdal uses to describe a raster location (position, pixel scale, and skew) and the epsg
code of the projection. With that, the following code should properly georeference the raster and specify its projection. I did not test this much, but it seemed to work for me. You would have to make sure the coordinates I put in are correct and probably change the projection and the pixel scale/size. Hope it helps.