when calculating maxResolution from maxExtent and tileSize, we don't want to use the actual tile size for single layers
This commit is contained in:
@@ -1010,7 +1010,7 @@ OpenLayers.Layer = OpenLayers.Class({
|
||||
this.maxExtent != null) {
|
||||
// maxResolution for default grid sets assumes that at zoom
|
||||
// level zero, the whole world fits on one tile.
|
||||
var tileSize = this.tileSize || this.map.getTileSize();
|
||||
var tileSize = this.map.getTileSize();
|
||||
maxResolution = Math.max(
|
||||
this.maxExtent.getWidth() / tileSize.w,
|
||||
this.maxExtent.getHeight() / tileSize.h
|
||||
|
||||
Reference in New Issue
Block a user