Merged r[280]:[358] from source:/sandbox/follower/b-singlefile/ (Enable building of Single File Library version of OpenLayers library).
git-svn-id: http://svn.openlayers.org/trunk/openlayers@359 dc9f47b5-9b13-0410-9fdd-eb0c1a62fdaf
build/library.cfg (Normal file, 9 lines added)
@@ -0,0 +1,9 @@
[first]
Prototype.js
Rico/Corner.js
_sfl_header.js
OpenLayers.js

[last]

[exclude]
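For reference, this configuration is consumed by tools/mergejs.py, added later in this commit. A minimal sketch of what its Config class yields for the nine lines above, assuming the script is importable as a module (in the commit it is only ever run as a script) and that tools/ is on the Python path:

# Hypothetical interactive check; Config is defined in tools/mergejs.py below.
from mergejs import Config

cfg = Config("build/library.cfg")   # path relative to the repository root (assumed)
print cfg.forceFirst   # ['Prototype.js', 'Rico/Corner.js', '_sfl_header.js', 'OpenLayers.js']
print cfg.forceLast    # []
print cfg.exclude      # []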
@@ -29,8 +29,23 @@ OpenLayers._getScriptLocation = function () {
    return scriptLocation;
}

try{new OpenLayers.Map();}
catch(e){
/*
  `_OPENLAYERS_SFL_` is a flag indicating this file is being included
  in a Single File Library build of the OpenLayers Library.

  When we are *not* part of a SFL build we dynamically include the
  OpenLayers library code.

  When we *are* part of a SFL build we do not dynamically include the
  OpenLayers library code as it will be appended at the end of this file.
*/
if (typeof(_OPENLAYERS_SFL_) == "undefined") {
    /*
      The original code appeared to use a try/catch block
      to avoid polluting the global namespace;
      we now use an anonymous function to achieve the same result.
    */
    (function() {
        var jsfiles=new Array(
            "Prototype.js",
            "Rico/Corner.js",
@@ -78,4 +93,5 @@ catch(e){
            allScriptTags += currentScriptTag;
        }
        document.write(allScriptTags);
    };
    })();
}
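Put together, the Single File Library would be produced by running the new merge tool (added under tools/ below) over the library sources with build/library.cfg, so that _sfl_header.js defines _OPENLAYERS_SFL_ before this loader executes and the dynamic document.write() branch above is skipped. A sketch of that invocation, where the output filename and the source directory ("lib") are assumptions for illustration; the tool's documented usage is mergejs.py [-c <config file>] <output.js> <directory>:

import os

# Illustrative build command; the output name and source directory are assumed,
# not taken from this commit.
os.system("python tools/mergejs.py -c build/library.cfg OpenLayers.js lib")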
lib/_sfl_header.js (Normal file, 2 lines added)
@@ -0,0 +1,2 @@
_OPENLAYERS_SFL_=true

tools/README.txt (Normal file, 1 line added)
@@ -0,0 +1 @@
This directory contains tools used in the packaging or deployment of OpenLayers.
tools/mergejs.py (Executable file, 226 lines added)
@@ -0,0 +1,226 @@
#!/usr/bin/python2.3
#
# Merge multiple JavaScript source code files into one.
#
# Usage:
# This script requires source files to have dependencies specified in them.
#
# Dependencies are specified with a comment of the form:
#
#     // @require: <file path>
#
# e.g.
#
#     // @require: Geo/DataSource.js
#
# This script should be executed like so:
#
#     mergejs.py <output.js> <directory> [...]
#
# e.g.
#
#     mergejs.py openlayers.js Geo/ CrossBrowser/
#
# This example will cause the script to walk the `Geo` and
# `CrossBrowser` directories--and subdirectories thereof--and import
# all `*.js` files encountered. The dependency declarations will be extracted
# and then the source code from imported files will be output to
# a file named `openlayers.js` in an order which fulfils the dependencies
# specified.
#
#
# Note: This is a very rough initial version of this code.
#
# -- Copyright 2005-2006 MetaCarta, Inc. / OpenLayers project --
#

# TODO: Allow files to be excluded. e.g. `Crossbrowser/DebugMode.js`?
# TODO: Report error when dependency can not be found rather than KeyError.

import re
import os
import sys

SUFFIX_JAVASCRIPT = ".js"

RE_REQUIRE = "@require: (.*)\n" # TODO: Ensure in comment?

class SourceFile:
    """
    Represents a Javascript source code file.
    """

    def __init__(self, filepath, source):
        """
        """
        self.filepath = filepath
        self.source = source

        self.requiredBy = []


    def _getRequirements(self):
        """
        Extracts the dependencies specified in the source code and returns
        a list of them.
        """
        # TODO: Cache?
        return re.findall(RE_REQUIRE, self.source)

    requires = property(fget=_getRequirements, doc="")



def usage(filename):
    """
    Displays a usage message.
    """
    print "%s [-c <config file>] <output.js> <directory> [...]" % filename


class Config:
    """
    Represents a parsed configuration file.

    A configuration file should be of the following form:

        [first]
        3rd/prototype.js
        core/application.js
        core/params.js

        [last]
        core/api.js

        [exclude]
        3rd/logger.js

    All headings are required.

    The files listed in the `first` section will be forced to load
    *before* all other files (in the order listed). The files in `last`
    section will be forced to load *after* all the other files (in the
    order listed).

    The files listed in the `exclude` section will not be imported.

    """

    def __init__(self, filename):
        """
        Parses the content of the named file and stores the values.
        """
        lines = [line[:-1] # Assumes end-of-line character is present
                 for line in open(filename)
                 if line != "\n"] # Skip blank lines

        self.forceFirst = \
            lines[lines.index("[first]") + 1:lines.index("[last]")]

        self.forceLast = \
            lines[lines.index("[last]") + 1:lines.index("[exclude]")]

        self.exclude = lines[lines.index("[exclude]") + 1:]


if __name__ == "__main__":
    import getopt

    options, args = getopt.getopt(sys.argv[1:], "-c:")

    try:
        outputFilename = args[0]
    except IndexError:
        usage(sys.argv[0])
        raise SystemExit
    else:
        sourceDirectory = args[1]
        if not sourceDirectory:
            usage(sys.argv[0])
            raise SystemExit

    cfg = None
    if options and options[0][0] == "-c":
        filename = options[0][1]
        print "Parsing configuration file: %s" % filename

        cfg = Config(filename)

    allFiles = []

    ## Find all the Javascript source files
    for root, dirs, files in os.walk(sourceDirectory):
        for filename in files:
            if filename.endswith(SUFFIX_JAVASCRIPT):
                filepath = os.path.join(root, filename)[len(sourceDirectory)+1:]
                if (not cfg) or (filepath not in cfg.exclude):
                    allFiles.append(filepath)

    ## Header inserted at the start of each file in the output
    HEADER = "/* " + "=" * 70 + "\n"\
             " %s\n" +\
             " " + "=" * 70 + " */\n\n"

    files = {}

    order = [] # List of filepaths to output, in a dependency satisfying order

    ## Import file source code
    ## TODO: Do import when we walk the directories above?
    for filepath in allFiles:
        print "Importing: %s" % filepath
        fullpath = os.path.join(sourceDirectory, filepath)
        content = open(fullpath, "U").read() # TODO: Ensure end of line @ EOF?
        files[filepath] = SourceFile(filepath, content) # TODO: Chop path?

    ## Resolve the dependencies
    print "\nResolving dependencies...\n"

    from toposort import toposort

    nodes = []
    routes = []

    for filepath, info in files.items():
        nodes.append(filepath)
        for neededFilePath in info.requires:
            routes.append((neededFilePath, filepath))

    for dependencyLevel in toposort(nodes, routes):
        for filepath in dependencyLevel:
            order.append(filepath)


    ## Move forced first and last files to the required position
    if cfg:
        print "Re-ordering files...\n"
        order = cfg.forceFirst + \
                [item
                 for item in order
                 if ((item not in cfg.forceFirst) and
                     (item not in cfg.forceLast))] + \
                cfg.forceLast

    ## Double check all dependencies have been met
    for fp in order:
        if max([order.index(rfp) for rfp in files[fp].requires] +
               [order.index(fp)]) != order.index(fp):
            print "Inconsistent!"
            raise SystemExit


    ## Output the files in the determined order
    result = []

    for fp in order:
        f = files[fp]
        print "Exporting: ", f.filepath
        result.append(HEADER % f.filepath)
        source = f.source
        result.append(source)
        if not source.endswith("\n"):
            result.append("\n")

    print "\nTotal files merged: %d " % len(allFiles)

    print "\nGenerating: %s" % (outputFilename)

    open(outputFilename, "w").write("".join(result))
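The dependency declarations documented in the header comment above are picked up by the RE_REQUIRE pattern. A minimal sketch of that extraction, reusing the file path from the header's own example (the surrounding JavaScript line is illustrative):

import re

RE_REQUIRE = "@require: (.*)\n"
source = "// @require: Geo/DataSource.js\nvar DataSource = {};\n"
print re.findall(RE_REQUIRE, source)   # ['Geo/DataSource.js']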
tools/toposort.py (Normal file, 260 lines added)
@@ -0,0 +1,260 @@
#
# According to <http://www.vrplumber.com/programming/> this file
# is licensed under a BSD-style license. We only use the section
# originally by Tim Peters.
#
# TODO: The use of this code needs to be okayed by someone.
#

class RecursionError( OverflowError, ValueError ):
    '''Unable to calculate result because of recursive structure'''


def sort(nodes, routes, noRecursion=1):
    '''Passed a list of node IDs and a list of source,dest ID routes
    attempt to create a list of stages where each sub list
    is one stage in a process.
    '''
    children, parents = _buildChildrenLists(routes)
    # first stage is those nodes
    # having no incoming routes...
    stage = []
    stages = [stage]
    taken = []
    for node in nodes:
        if (not parents.get(node)):
            stage.append (node)

    if nodes and not stage:
        # there is no element which does not depend on
        # some other element!!!
        stage.append( nodes[0])

    taken.extend( stage )
    nodes = filter ( lambda x, l=stage: x not in l, nodes )
    while nodes:
        previousStageChildren = []
        nodelen = len(nodes)
        # second stage are those nodes
        # which are direct children of the first stage
        for node in stage:
            for child in children.get (node, []):
                if child not in previousStageChildren and child not in taken:
                    previousStageChildren.append(child)
                elif child in taken and noRecursion:
                    raise RecursionError( (child, node) )

        # unless they are children of other direct children...
        # TODO, actually do that...
        stage = previousStageChildren
        removes = []
        for current in stage:
            currentParents = parents.get( current, [] )
            for parent in currentParents:
                if parent in stage and parent != current:
                    # might wind up removing current...
                    if not current in parents.get(parent, []):
                        # is not mutually dependent...
                        removes.append( current )

        for remove in removes:
            while remove in stage:
                stage.remove( remove )

        stages.append( stage)
        taken.extend( stage )
        nodes = filter ( lambda x, l=stage: x not in l, nodes )
        if nodelen == len(nodes):
            if noRecursion:
                raise RecursionError( nodes )
            else:
                stages.append( nodes[:] )
                nodes = []

    return stages

def _buildChildrenLists (routes):
    childrenTable = {}
    parentTable = {}
    for sourceID,destinationID in routes:
        currentChildren = childrenTable.get( sourceID, [])
        currentParents = parentTable.get( destinationID, [])
        if not destinationID in currentChildren:
            currentChildren.append ( destinationID)
        if not sourceID in currentParents:
            currentParents.append ( sourceID)
        childrenTable[sourceID] = currentChildren
        parentTable[destinationID] = currentParents
    return childrenTable, parentTable


def toposort (nodes, routes, noRecursion=1):
    '''Topological sort from Tim Peters, fairly efficient
    in comparison (it seems).'''
    #first calculate the recursion depth

    dependencies = {}
    inversedependencies = {}
    if not nodes:
        return []
    if not routes:
        return [nodes]
    for node in nodes:
        dependencies[ node ] = (0, node)
        inversedependencies[ node ] = []


    for depended, depends in routes:
        # is it a null rule
        try:
            newdependencylevel, object = dependencies.get ( depends, (0, depends))
        except TypeError:
            print depends
            raise
        dependencies[ depends ] = (newdependencylevel + 1, depends)
        # "dependency (existence) of depended-on"
        newdependencylevel,object = dependencies.get ( depended, (0, depended) )
        dependencies[ depended ] = (newdependencylevel, depended)
        # Inverse dependency set up
        dependencieslist = inversedependencies.get ( depended, [])
        dependencieslist.append (depends)
        inversedependencies[depended] = dependencieslist
    ### Now we do the actual sorting
    # The first task is to create the sortable
    # list of dependency-levels
    sortinglist = dependencies.values()
    sortinglist.sort ()
    output = []
    while sortinglist:
        deletelist = []
        generation = []
        output.append( generation)
        while sortinglist and sortinglist[0][0] == 0:
            number, object = sortinglist[0]
            generation.append ( object )
            deletelist.append( object )
            for inverse in inversedependencies.get(object, () ):
                try:
                    oldcount, inverse = dependencies [ inverse]
                    if oldcount > 0:
                        # will be dealt with on later pass
                        dependencies [ inverse] = (oldcount-1, inverse)
                    else:
                        # will be dealt with on this pass,
                        # so needs not to be in the sorting list next time
                        deletelist.append( inverse )
                        # just in case a loop comes through
                        inversedependencies[object] = []
                except KeyError:
                    # dealing with a recursion-breaking run...
                    pass
            del sortinglist [0]
        # if no elements could be deleted, then
        # there is something which depends upon itself
        if not deletelist:
            if noRecursion:
                raise RecursionError( sortinglist )
            else:
                # hack so that something gets deleted...
                ## import pdb
                ## pdb.set_trace()
                dependencies[sortinglist[0][1]] = (0,sortinglist[0][1])
        # delete the items that were dealt with
        for item in deletelist:
            try:
                del dependencies [ item ]
            except KeyError:
                pass
        # need to recreate the sortinglist
        sortinglist = dependencies.values()
        if not generation:
            output.remove( generation )
        sortinglist.sort ()
    return output




if __name__ == "__main__":

    nodes = ['a', 'b', 'c', 'd', 'e', 'f']
    route = [('a', 'b'), ('b', 'c'), ('b', 'd'), ('e','f')]

    for x in toposort( nodes, route):
        for a in x:
            print a

    raise SystemExit


    import pprint, traceback
    nodes= [ 0,1,2,3,4,5 ]
    testingValues = [
        [ (0,1),(1,2),(2,3),(3,4),(4,5)],
        [ (0,1),(0,2),(1,2),(3,4),(4,5)],
        [
            (0,1),
            (0,2),
            (0,2),
            (2,4),
            (2,5),
            (3,2),
            (0,3)],
        [
            (0,1), # 3-element cycle test, no orphan nodes
            (1,2),
            (2,0),
            (2,4),
            (2,5),
            (3,2),
            (0,3)],
        [
            (0,1),
            (1,1),
            (1,1),
            (1,4),
            (1,5),
            (1,2),
            (3,1),
            (2,1),
            (2,0)],
        [
            (0,1),
            (1,0),
            (0,2),
            (0,3),
        ],
        [
            (0,1),
            (1,0),
            (0,2),
            (3,1),
        ],
    ]
    print 'sort, no recursion allowed'
    for index in range(len(testingValues)):
        ## print ' %s -- %s'%( index, testingValues[index])
        try:
            print ' ', sort( nodes, testingValues[index] )
        except:
            print 'exception raised'
    print 'toposort, no recursion allowed'
    for index in range(len(testingValues)):
        ## print ' %s -- %s'%( index, testingValues[index])
        try:
            print ' ', toposort( nodes, testingValues[index] )
        except:
            print 'exception raised'
    print 'sort, recursion allowed'
    for index in range(len(testingValues)):
        ## print ' %s -- %s'%( index, testingValues[index])
        try:
            print ' ', sort( nodes, testingValues[index],0 )
        except:
            print 'exception raised'
    print 'toposort, recursion allowed'
    for index in range(len(testingValues)):
        ## print ' %s -- %s'%( index, testingValues[index])
        try:
            print ' ', toposort( nodes, testingValues[index],0 )
        except:
            print 'exception raised'
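As used by tools/mergejs.py above, toposort() is given one node per JavaScript source file and one (dependency, dependent) route per @require declaration, and returns the files grouped into dependency levels. A minimal sketch with illustrative file names echoing the mergejs.py header example:

from toposort import toposort

# Hypothetical inputs: Geo/Map.js is an invented name that would declare
# "// @require: Geo/DataSource.js".
nodes  = ["Geo/DataSource.js", "Geo/Map.js"]
routes = [("Geo/DataSource.js", "Geo/Map.js")]

for dependencyLevel in toposort(nodes, routes):
    for filepath in dependencyLevel:
        print filepath   # Geo/DataSource.js first, then Geo/Map.js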