Merge pull request #76 from twpayne/pure-python-build

Building on Windows still has a few problems, mainly thanks to Windows. However, this patch does eliminate as many build dependencies as possible (make, wget, sh, find, ruby).
Tom Payne
2012-11-09 02:53:33 -08:00
8 changed files with 602 additions and 276 deletions
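The new pake module (pake.py, added below) is what makes this possible: it replaces make with plain Python, so targets declare their dependencies and an action and are rebuilt only when they are out of date, while downloads (wget), file searches (find) and shell glue (sh, ruby) become methods on the target object. A rough, hypothetical sketch of the pattern build.py uses, with illustrative file names that are not part of this patch:

import pake

pake.variables.PYTHON = 'python'  # per-platform defaults, as set in build.py


# Hypothetical target: rebuild build/requires.js whenever the generator
# script changes. t.output() runs the command and writes its stdout to the
# target file, as the generate-exports targets in build.py do.
@pake.target('build/requires.js', 'bin/generate-requires.py')
def build_requires_js(t):
    t.output('%(PYTHON)s', 'bin/generate-requires.py')


if __name__ == '__main__':
    pake.main()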


@@ -3,8 +3,8 @@ before_install:
- "git clone https://github.com/jsdoc3/jsdoc"
before_script:
- "make plovr"
- "make serve-precommit &"
- "./build.py plovr"
- "./build.py serve-precommit &"
- "sleep 3"
script: "make JSDOC=jsdoc/jsdoc precommit"
script: "./build.py JSDOC=jsdoc/jsdoc precommit"


@@ -20,7 +20,7 @@ Linter](https://developers.google.com/closure/utilities/docs/linter_howto) to
check source files for potential syntax and coding style issues. To execute the
linter run the following after making your changes:
$ make lint
$ ./build.py lint
This command assumes that the `gjslint` command is on your PATH.
@@ -30,7 +30,7 @@ The OpenLayers 3 is compiled and type-checked using the [Closure
Compiler](https://developers.google.com/closure/compiler/). To
compile the code use:
$ make build-all
$ ./build.py build-all
### Documentation
@@ -38,7 +38,7 @@ We use [jsdoc3](https://github.com/jsdoc3/jsdoc) to generate the API
documentation. To ensure that your changes are properly documented (and don't
break API doc generation), run the following:
$ make doc
$ ./build.py doc
This command assumes that the `jsdoc` command is on your PATH.
@@ -48,7 +48,7 @@ Any modifications must not break existing tests. We use
[PhantomJS](http://phantomjs.org/) to run tests *headlessly*.
Use the following to run the tests:
$ make test
$ ./build.py test
This command assumes that the `phantomjs` command is on your PATH.
@@ -56,10 +56,10 @@ This command assumes that the `phantomjs` command is on your PATH.
The Makefile includes a `precommit` target for running all of the
above (`lint`, `build-all`, `doc`, and `test`). As the name of the
target suggests `make precommit` is the command to run before
target suggests `./build.py precommit` is the command to run before
committing:
$ make precommit
$ ./build.py precommit
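For reference, `precommit` itself is a virtual target declared in build.py (added below); it has no action of its own and simply forces its dependencies to be built, so the following line from build.py is the whole implementation:

import pake

# Verbatim declaration from build.py below: running './build.py precommit'
# builds each of these targets in dependency order.
pake.virtual('precommit', 'lint', 'build-all', 'test', 'doc', 'build', 'build-examples')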
### Commit messages


@@ -1,32 +0,0 @@
#!/usr/bin/env python

from optparse import OptionParser
import os
import os.path
import re
import sys


def main(argv):
    option_parser = OptionParser()
    option_parser.add_option('--require', action='append')
    options, args = option_parser.parse_args(argv[1:])
    requires = set(options.require or ())
    for arg in args:
        for dirpath, dirnames, filenames in os.walk(arg):
            for filename in filenames:
                if not filename.endswith('.js'):
                    continue
                for line in open(os.path.join(dirpath, filename)):
                    m = re.match(r'goog\.provide\(\'(.*)\'\);', line)
                    if m:
                        requires.add(m.group(1))
    for require in sorted(requires):
        sys.stdout.write('goog.require(\'%s\');\n' % (require,))


if __name__ == '__main__':
    sys.exit(main(sys.argv))


@@ -1,232 +0,0 @@
#!/usr/bin/env ruby
# git update-ghpages user/repo -b gh-pages -p manual/ -i
#
# elemoine
# Modified by me, for
# - https://github.com/rstacruz/git-update-ghpages/issues/1
# - not failing when there's nothing to commit

require 'fileutils'
require 'tmpdir'

module Params
  def extract(what) i = index(what) and slice!(i, 2)[1] end;
  def first_is(what) shift if what.include?(self.first); end
  def self.[](*what) what.extend Params; end
  def ===(argv) argv.first_is(self); end
end

# ============================================================================

ARGV.extend Params

class CLI
  # CLI options
  attr_reader :prefix    #=> "doc/"
  attr_reader :input     #=> "/home/me/projects/foo"
  attr_reader :message   #=> "Updated"
  attr_reader :repo      #=> "git@github.com:me/project.git"
  attr_reader :url       #=> "http://me.github.com/project"
  attr_reader :branch    #=> "gh-pages"

  def verbose?() @verbose; end
  def force?() @force; end
  def simulate?() @simulate; end

  def initialize
    # Switches
    @verbose  = !! (ARGV.extract('--verbose') || ARGV.delete('-v'))
    @simulate = !! (ARGV.extract('--simulate') || ARGV.delete('-s'))
    @force    = !! (ARGV.delete('--force') || ARGV.delete('-f'))

    # Stuff
    @prefix   = ARGV.extract('--prefix') || ARGV.extract('-p') || ''
    @input    = File.expand_path(ARGV.extract('--input') || ARGV.extract('-i') || '.')
    @message  = ARGV.extract('--message') || ARGV.extract('-m') || 'Update'

    # Github info
    branch    = ARGV.extract('--branch') || ARGV.extract('-b') || nil
    @repo, @url, @branch = get_github_info(ARGV.shift, branch)
  end

  def git_current_branch
    `git rev-parse --abbrev-ref HEAD`.strip
  end

  def git_deploy
    in_temp_path do |temppath|
      status "Cloning repository"
      system! "git clone #{repo} -b #{branch} #{temppath}"

      if git_current_branch != branch
        status "Warning: No #{branch} branch found in repo, creating one."
        return git_deploy_force
      end

      copy_files input, File.join(temppath, prefix)

      status "Committing files"
      # elemoine - git commit exit code is 1 if there's nothing to commit
      #system! "git add .; git add -u; git commit -m #{message.to_s.inspect}"
      system! "git add . && git add -u && git commit -m #{message.to_s.inspect}", true

      unless simulate?
        status "Updating repo"
        system! "git push origin #{branch}"
      end
      true
    end
  end

  def git_deploy_force
    in_temp_path do |temppath|
      status "Creating new repository"
      system! "git init ."
      system! "git checkout -b gh-pages"

      copy_files input, File.join(temppath, prefix)

      status "Committing files"
      # elemoine - git commit exit code is 1 if there's nothing to commit
      #system! "git add . && git commit -m #{message.to_s.inspect}"
      system! "git add . && git commit -m #{message.to_s.inspect}", true

      unless simulate?
        status "Updating repo"
        system! "git push #{repo} gh-pages:#{branch} --force"
      end
      true
    end
  end

  def get_github_info(repo, branch=nil, prefix=nil)
    if github_format?(repo)
      user, repo_name = repo.split('/')
      r = "git@github.com:#{repo}.git"

      # User page or project page?
      if repo_name =~ /\.github\.com/
        [r, "http://#{repo_name}/#{prefix}", branch || 'master']
      else
        [r, "http://#{user}.github.com/#{repo_name}/#{prefix}", branch || 'gh-pages']
      end
    else
      [repo, nil, branch]
    end
  end

  def run!
    unless repo
      print_help
      exit 128
    end

    status "Deploying to #{repo} (branch #{branch})"
    msg "NOTE: Running in simulation mode." if simulate?
    msg "WARNING: If the repository has gh-pages history, it will be overridden." if force? && !simulate?

    result = force? ? git_deploy_force : git_deploy

    if result
      puts ""
      status "Done."
      msg "See: #{url}" if url && !simulate?
    else
      tip "Failed."
      exit 1
    end
  end

  def status(str)
    puts "#{c('===>',34)} #{c(str, 32)}"
  end

  def msg(str)
    puts "     #{c(str, 32)}"
  end

  def c(str, color)
    "\033[#{color}m#{str}\033[0m"
  end

  def print_help
    tip \
    %{Usage: git update-ghpages username/repository [options]

    Flags:
      -f, --force      Force an update (WARNING: kills the history!)
      -s, --simulate   Creates the repository, but doesn't push.
      -v, --verbose    Verbose mode

    Options:
      -p PATH, --prefix    The prefix
      -i PATH, --input     Input (defaults to current directory)
      -b BRANCH, --branch  The branch to deploy to (defaults to gh-pages)
      -m MSG, --message    Commit message (defaults to 'Update')

    Examples:

    Update the repo 'coffee' of github user 'james' with the files from the
    current directory. The files will be in http://james.github.com/coffee.

      $ git update-ghpages james/coffee

    Same as above, but take the files from 'doc/'.

      $ git update-ghpages james/coffee -i doc

    Same as the first, but the files will instead be in
    http://james.github.com/coffee/manual.

      $ git update-ghpages james/coffee -i doc -p manual
    }.gsub(/^ {4}/, '')
  end

  private # Helpers

  def tip(msg)
    $stderr.write "#{msg}\n"
  end

  def github_format?(str)
    str =~ /^([A-Za-z0-9\-_]+)\/([A-Za-z0-9\-_\.]+)$/
  end

  # Performs actions inside a temp path.
  def in_temp_path(&blk)
    require 'tmpdir'
    Dir.mktmpdir do |dir|
      Dir.chdir(dir) { yield dir }
    end
  end

  # elemoine
  # ignoreerr added
  def system!(str, ignoreerr=false)
    puts `#{str} 2>&1`.strip.gsub(/^/, "  ")
    raise "Failed with exit code #{$?.to_i}" unless $?.to_i == 0 or ignoreerr == true
  end

  # Returns the current branch name
  def git_branch
    `git symbolic-ref HEAD`.strip.split('/').last
  end

  # Copy files from source folder to another
  def copy_files(from, to)
    status "Copying files #{from} => #{to}..." if verbose?
    Dir["#{from}/**/*"].each do |f|
      next unless File.file?(f)

      target = File.join(to, f.gsub(/^#{Regexp.escape from}/, ''))

      FileUtils.mkdir_p File.dirname(target)
      msg "%20s => %-20s" % [f, target] if verbose?
      FileUtils.cp f, target
    end
  end
end

CLI.new.run!

build.py (new executable file, 251 additions)

@@ -0,0 +1,251 @@
#!/usr/bin/env python

from cStringIO import StringIO
import glob
import gzip
import json
import os
import re
import shutil
import sys

import pake


if sys.platform == 'win32':
    pake.variables.GIT = 'C:/Program Files/Git/bin/git.exe'
    pake.variables.GJSLINT = 'gjslint'  # FIXME
    pake.variables.JAVA = 'C:/Program Files/Java/jre7/bin/java.exe'
    pake.variables.JSDOC = 'jsdoc'  # FIXME
    pake.variables.PHANTOMJS = 'phantomjs'  # FIXME
    pake.variables.PYTHON = 'C:/Python27/python.exe'
else:
    pake.variables.GIT = 'git'
    pake.variables.GJSLINT = 'gjslint'
    pake.variables.JAVA = 'java'
    pake.variables.JSDOC = 'jsdoc'
    pake.variables.PHANTOMJS = 'phantomjs'
    pake.variables.PYTHON = 'python'

pake.variables.BRANCH = pake.output('%(GIT)s', 'rev-parse', '--abbrev-ref', 'HEAD').strip()

EXPORTS = [path
           for path in pake.ifind('src')
           if path.endswith('.exports')
           if path != 'src/objectliterals.exports']

EXTERNAL_SRC = [
    'build/src/external/externs/types.js',
    'build/src/external/src/exports.js',
    'build/src/external/src/types.js']

EXAMPLES = [path
            for path in glob.glob('examples/*.html')
            if path != 'examples/example-list.html']

EXAMPLES_SRC = [path
                for path in pake.ifind('examples')
                if path.endswith('.js')
                if not path.endswith('.combined.js')
                if path != 'examples/Jugl.js'
                if path != 'examples/example-list.js']

INTERNAL_SRC = [
    'build/src/internal/src/requireall.js',
    'build/src/internal/src/types.js']

SPEC = [path
        for path in pake.ifind('test/spec')
        if path.endswith('.js')]

SRC = [path
       for path in pake.ifind('src/ol')
       if path.endswith('.js')]

PLOVR_JAR = 'bin/plovr-eba786b34df9.jar'
PLOVR_JAR_MD5 = '20eac8ccc4578676511cf7ccbfc65100'


def report_sizes(t):
    t.info('uncompressed: %d bytes', os.stat(t.name).st_size)
    stringio = StringIO()
    gzipfile = gzip.GzipFile(t.name, 'w', 9, stringio)
    with open(t.name) as f:
        shutil.copyfileobj(f, gzipfile)
    gzipfile.close()
    t.info('  compressed: %d bytes', len(stringio.getvalue()))


pake.virtual('all', 'build-all', 'build', 'examples')

pake.virtual('precommit', 'lint', 'build-all', 'test', 'doc', 'build', 'build-examples')

pake.virtual('build', 'build/ol.css', 'build/ol.js')


@pake.target('build/ol.css', 'build/ol.js')
def build_ol_css(t):
    t.touch()


@pake.target('build/ol.js', PLOVR_JAR, SRC, EXTERNAL_SRC, 'base.json', 'build/ol.json')
def build_ol_js(t):
    t.output('%(JAVA)s', '-jar', PLOVR_JAR, 'build', 'build/ol.json')
    report_sizes(t)


pake.virtual('build-all', 'build/ol-all.js')


@pake.target('build/ol-all.js', PLOVR_JAR, SRC, INTERNAL_SRC, 'base.json', 'build/ol-all.json')
def build_ol_all_js(t):
    t.output('%(JAVA)s', '-jar', PLOVR_JAR, 'build', 'build/ol-all.json')


@pake.target('build/src/external/externs/types.js', 'bin/generate-exports.py', 'src/objectliterals.exports')
def build_src_external_externs_types_js(t):
    t.output('%(PYTHON)s', 'bin/generate-exports.py', '--externs', 'src/objectliterals.exports')


@pake.target('build/src/external/src/exports.js', 'bin/generate-exports.py', 'src/objectliterals.exports', EXPORTS)
def build_src_external_src_exports_js(t):
    t.output('%(PYTHON)s', 'bin/generate-exports.py', '--exports', 'src/objectliterals.exports', EXPORTS)


@pake.target('build/src/external/src/types.js', 'bin/generate-exports.py', 'src/objectliterals.exports')
def build_src_external_src_types_js(t):
    t.output('%(PYTHON)s', 'bin/generate-exports.py', '--typedef', 'src/objectliterals.exports')


@pake.target('build/src/internal/src/requireall.js', SRC)
def build_src_internal_src_requireall_js(t):
    requires = set(('goog.dom',))
    for dependency in t.dependencies:
        for line in open(dependency):
            match = re.match(r'goog\.provide\(\'(.*)\'\);', line)
            if match:
                requires.add(match.group(1))
    with open(t.name, 'w') as f:
        for require in sorted(requires):
            f.write('goog.require(\'%s\');\n' % (require,))


@pake.target('build/src/internal/src/types.js', 'bin/generate-exports.py', 'src/objectliterals.exports')
def build_src_internal_types_js(t):
    t.output('%(PYTHON)s', 'bin/generate-exports.py', '--typedef', 'src/objectliterals.exports')


pake.virtual('build-examples', 'examples', (path.replace('.html', '.combined.js') for path in EXAMPLES))

pake.virtual('examples', 'examples/example-list.js', (path.replace('.html', '.json') for path in EXAMPLES))


@pake.target('examples/example-list.js', 'bin/exampleparser.py', EXAMPLES)
def examples_examples_list_js(t):
    t.run('%(PYTHON)s', 'bin/exampleparser.py', 'examples', 'examples')


@pake.rule(r'\Aexamples/(?P<id>.*).json\Z')
def examples_star_json(name, match):
    def action(t):
        content = json.dumps({
            'id': match.group('id'),
            'inherits': '../base.json',
            'inputs': [
                'examples/%(id)s.js' % match.groupdict(),
                'build/src/internal/src/types.js',
            ],
        })
        with open(t.name, 'w') as f:
            f.write(content)
    dependencies = [__file__, 'base.json']
    return pake.Target(name, action=action, dependencies=dependencies)


@pake.rule(r'\Aexamples/(?P<id>.*).combined.js\Z')
def examples_star_combined_js(name, match):
    def action(t):
        t.output('%(JAVA)s', '-jar', PLOVR_JAR, 'build', 'examples/%(id)s.json' % match.groupdict())
        report_sizes(t)
    dependencies = [PLOVR_JAR, SRC, INTERNAL_SRC, 'base.json', 'examples/%(id)s.js' % match.groupdict(), 'examples/%(id)s.json' % match.groupdict()]
    return pake.Target(name, action=action, dependencies=dependencies)


@pake.target('serve', PLOVR_JAR, INTERNAL_SRC, 'examples')
def serve(t):
    t.run('%(JAVA)s', '-jar', PLOVR_JAR, 'serve', glob.glob('build/*.json'), glob.glob('examples/*.json'))


@pake.target('serve-precommit', PLOVR_JAR, INTERNAL_SRC)
def serve_precommit(t):
    t.run('%(JAVA)s', '-jar', PLOVR_JAR, 'serve', 'build/ol-all.json')


pake.virtual('lint', 'build/lint-src-timestamp', 'build/lint-spec-timestamp')


@pake.target('build/lint-src-timestamp', SRC, INTERNAL_SRC, EXTERNAL_SRC, EXAMPLES_SRC)
def build_lint_src_timestamp(t):
    limited_doc_files = [path
                         for path in pake.ifind('externs', 'build/src/external/externs')
                         if path.endswith('.js')]
    t.run('%(GJSLINT)s', '--strict', '--limited_doc_files=%s' % (','.join(limited_doc_files),), SRC, INTERNAL_SRC, EXTERNAL_SRC, EXAMPLES_SRC)
    t.touch()


@pake.target('build/lint-spec-timestamp', SPEC)
def build_lint_spec_timestamp(t):
    t.run('%(GJSLINT)s', SPEC)
    t.touch()


pake.virtual('plovr', PLOVR_JAR)


@pake.target(PLOVR_JAR, clean=False)
def plovr_jar(t):
    t.download('https://plovr.googlecode.com/files/' + os.path.basename(PLOVR_JAR), md5=PLOVR_JAR_MD5)


@pake.target('gh-pages', 'hostexamples', 'doc', phony=True)
def gh_pages(t):
    with t.tempdir() as tempdir:
        t.run('%(GIT)s', 'clone', '--branch', 'gh-pages', 'git@github.com:openlayers/ol3.git', tempdir)
        with t.chdir(tempdir):
            t.rm_rf('%(BRANCH)s')
        t.cp_r('build/gh-pages/%(BRANCH)s', tempdir + '/%(BRANCH)s')
        with t.chdir(tempdir):
            t.run('%(GIT)s', 'add', '--all', '%(BRANCH)s')
            t.run('%(GIT)s', 'commit', '--message', 'Updated')
            t.run('%(GIT)s', 'push', 'origin', 'gh-pages')


pake.virtual('doc', 'build/jsdoc-%(BRANCH)s-timestamp' % vars(pake.variables))


@pake.target('build/jsdoc-%(BRANCH)s-timestamp' % vars(pake.variables), SRC, pake.ifind('doc/template'))
def jsdoc_BRANCH_timestamp(t):
    t.run('%(JSDOC)s', '-t', 'doc/template', '-r', 'src', '-d', 'build/gh-pages/%(BRANCH)s/apidoc')
    t.touch()


@pake.target('hostexamples', 'build', 'examples', phony=True)
def hostexamples(t):
    t.makedirs('build/gh-pages/%(BRANCH)s/examples')
    t.makedirs('build/gh-pages/%(BRANCH)s/build')
    t.cp(EXAMPLES, (path.replace('.html', '.js') for path in EXAMPLES), 'examples/style.css', 'build/gh-pages/%(BRANCH)s/examples/')
    t.cp('build/loader_hosted_examples.js', 'build/gh-pages/%(BRANCH)s/examples/loader.js')
    t.cp('build/ol.js', 'build/ol.css', 'build/gh-pages/%(BRANCH)s/build/')
    t.cp('examples/example-list.html', 'build/gh-pages/%(BRANCH)s/examples/index.html')
    t.cp('examples/example-list.js', 'examples/example-list.xml', 'examples/Jugl.js', 'build/gh-pages/%(BRANCH)s/examples/')


@pake.target('test', INTERNAL_SRC, phony=True)
def test(t):
    t.run('%(PHANTOMJS)s', 'test/phantom-jasmine/run_jasmine_test.coffee', 'test/ol.html')


if __name__ == '__main__':
    pake.main()

pake.py (new file, 339 additions)

@@ -0,0 +1,339 @@
#!/usr/bin/env python

import collections
import contextlib
import hashlib
import logging
import optparse
import os
import re
import shutil
import subprocess
import tempfile
import sys
import time
import urllib2


logger = logging.getLogger(__name__)


class PakeError(RuntimeError):
    pass


class AmbiguousRuleError(PakeError):

    def __init__(self, name):
        self.name = name

    def __str__(self):
        return '%r matches multiple rules' % (self.name,)


class BuildError(PakeError):

    def __init__(self, target, message):
        self.target = target
        self.message = message

    def __str__(self):
        return '%s: %s' % (self.target.name, self.message)


class DuplicateTargetError(PakeError):

    def __init__(self, target):
        self.target = target

    def __str__(self):
        return 'duplicate target %r' % (self.target.name,)


class Target(object):

    def __init__(self, name, action=None, clean=True, dependencies=(),
                 makedirs=True, phony=False, precious=False):
        self.name = name
        self.action = action
        self._clean = clean
        self.dependencies = list(flatten(dependencies))
        self._makedirs = makedirs
        self.phony = phony
        self.precious = precious
        self.logger = logging.getLogger(self.name)
        self.timestamp = None

    def build(self, dry_run=False):
        timestamp = 0
        for dependency in self.dependencies:
            target = targets.get(dependency)
            timestamp = max(timestamp, target.build(dry_run=dry_run))
        self.debug('build')
        if self.timestamp is None:
            if not self.phony and os.path.exists(self.name):
                self.timestamp = os.stat(self.name).st_mtime
            else:
                self.timestamp = -1
        if self.timestamp < timestamp:
            self.debug('action')
            if self._makedirs and not dry_run:
                self.makedirs(os.path.dirname(self.name))
            if self.action:
                if self.action.__doc__:
                    self.info(self.action.__doc__)
                if not dry_run:
                    self.action(self)
            self.timestamp = timestamp or time.time()
        return self.timestamp

    @contextlib.contextmanager
    def chdir(self, dir):
        cwd = os.getcwd()
        dir = dir % vars(variables)
        self.info('cd %s', dir)
        os.chdir(dir)
        try:
            yield dir
        finally:
            self.info('cd %s', cwd)
            os.chdir(cwd)

    def cp(self, *args):
        args = flatten_expand_list(args)
        dest = args.pop()
        for arg in args:
            self.info('cp %s %s', arg, dest)
            shutil.copy(arg, dest)

    def cp_r(self, *args):
        args = flatten_expand_list(args)
        dest = args.pop()
        for arg in args:
            self.info('cp -r %s %s', arg, dest)
            shutil.copytree(arg, dest)

    def clean(self, really=False, recurse=True):
        if (self._clean or really) and not self.precious:
            self.info('clean')
            try:
                os.remove(self.name)
            except OSError:
                pass
        if recurse:
            for dependency in self.dependencies:
                targets.get(dependency).clean(really=really, recurse=recurse)

    def debug(self, *args, **kwargs):
        self.logger.debug(*args, **kwargs)

    def download(self, url, md5=None):
        content = urllib2.urlopen(url).read()
        if md5 and hashlib.md5(content).hexdigest() != md5:
            raise BuildError(self, 'corrupt download')
        # FIXME Python on Windoze corrupts the content when writing it
        # FIXME probably something to do with encodings
        with open(self.name, 'w') as f:
            f.write(content)

    def error(self, message):
        raise BuildError(self, message)

    def graph(self, f, visited):
        if self in visited:
            return
        visited.add(self)
        for dependency in self.dependencies:
            target = targets.get(dependency)
            f.write('\t"%s" -> "%s";\n' % (self.name, target.name))
            target.graph(f, visited)

    def info(self, *args, **kwargs):
        self.logger.info(*args, **kwargs)

    def makedirs(self, path):
        path = path % vars(variables)
        if path and not os.path.exists(path):
            self.info('mkdir -p %s', path)
            os.makedirs(path)

    def output(self, *args, **kwargs):
        args = flatten_expand_list(args)
        self.info(' '.join(args))
        try:
            output = subprocess.check_output(args, **kwargs)
            with open(self.name, 'w') as f:
                f.write(output)
        except subprocess.CalledProcessError as e:
            self.clean(recurse=False)
            self.error(e)

    def rm_rf(self, *args):
        args = flatten_expand_list(args)
        for arg in args:
            self.info('rm -rf %s', arg)
            shutil.rmtree(arg, ignore_errors=True)

    def run(self, *args, **kwargs):
        args = flatten_expand_list(args)
        self.info(' '.join(args))
        try:
            subprocess.check_call(args, **kwargs)
        except subprocess.CalledProcessError as e:
            self.clean(recurse=False)
            self.error(e)

    @contextlib.contextmanager
    def tempdir(self):
        tempdir = tempfile.mkdtemp()
        self.info('mkdir -p %s', tempdir)
        try:
            yield tempdir
        finally:
            self.info('rm -rf %s', tempdir)
            shutil.rmtree(tempdir, ignore_errors=True)

    def touch(self):
        if os.path.exists(self.name):
            os.utime(self.name, None)
        else:
            with open(self.name, 'w'):
                pass


class TargetCollection(object):

    def __init__(self):
        self.default = None
        self.targets = {}

    def add(self, target):
        if target.name in self.targets:
            raise DuplicateTargetError(target)
        self.targets[target.name] = target
        if self.default is None:
            self.default = target

    def get(self, name):
        if name in self.targets:
            return self.targets[name]
        target = None
        for regexp, f in rules.iteritems():
            match = regexp.search(name)
            if not match:
                continue
            if target is not None:
                raise AmbiguousRuleError(name)
            target = f(name, match)
        if target is None:
            target = Target(name, precious=True)
        self.targets[name] = target
        return target


class VariableCollection(object):

    def __init__(self, **kwargs):
        for key, value in kwargs.iteritems():
            setattr(self, key, value)

    def __setattr__(self, key, value):
        if not hasattr(self, key):
            object.__setattr__(self, key, value)


targets = TargetCollection()
rules = {}
variables = VariableCollection(**os.environ)


def flatten(*args):
    for arg in args:
        if (isinstance(arg, collections.Iterable) and
                not isinstance(arg, basestring)):
            for element in flatten(*arg):
                yield element
        else:
            yield arg


def flatten_expand_list(*args):
    return list(arg % vars(variables) for arg in flatten(args))


def ifind(*paths):
    for path in paths:
        for dirpath, dirnames, names in os.walk(path):
            for name in names:
                yield os.path.join(dirpath, name)


def main(argv=sys.argv):
    option_parser = optparse.OptionParser()
    option_parser.add_option('-c', '--clean',
                             action='store_true')
    option_parser.add_option('-g', '--graph',
                             action='store_true')
    option_parser.add_option('-n', '--dry-run', '--just-print', '--recon',
                             action='store_true')
    option_parser.add_option('-r', '--really',
                             action='store_true')
    option_parser.add_option('-v', '--verbose',
                             action='count', dest='logging_level')
    option_parser.set_defaults(logging_level=0)
    options, args = option_parser.parse_args(argv[1:])
    logging.basicConfig(format='%(asctime)s %(name)s: %(message)s',
                        level=logging.INFO - 10 * options.logging_level)
    targets_ = []
    for arg in args:
        match = re.match(r'(?P<key>\w+)=(?P<value>.*)\Z', arg)
        if match:
            key, value = match.group('key', 'value')
            if not hasattr(variables, key):
                logger.error('%s is not a variable', key)
            logger.debug('%s=%r', key, value)
            object.__setattr__(variables, key, value)
            continue
        targets_.append(arg)
    if not targets_:
        targets_ = (targets.default.name,)
    try:
        for target in targets_:
            target = targets.get(target)
            if options.clean:
                target.clean(really=options.really, recurse=True)
            elif options.graph:
                sys.stdout.write('digraph "%s" {\n' % (target.name,))
                target.graph(sys.stdout, set())
                sys.stdout.write('}\n')
            else:
                target.build(dry_run=options.dry_run)
    except BuildError as e:
        logger.error(e)
        sys.exit(1)


def output(*args):
    args = flatten_expand_list(args)
    logger.debug(' '.join(args))
    return subprocess.check_output(args)


def rule(pattern):
    def f(targetmaker):
        rules[re.compile(pattern)] = targetmaker
    return f


def target(name, *dependencies, **kwargs):
    def f(action):
        target = Target(name, action=action, dependencies=dependencies,
                        **kwargs)
        targets.add(target)
    return f


def virtual(name, *dependencies, **kwargs):
    target = Target(name, dependencies=dependencies, clean=False, phony=True,
                    **kwargs)
    targets.add(target)
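build.py above is the real client of this module, but a minimal, self-contained pakefile shows the intended usage; the file names here are hypothetical and only the API calls (pake.virtual, @pake.target, Target.cp, pake.main) come from pake.py:

#!/usr/bin/env python
# Hypothetical pakefile sketch; assumes hello.in exists in the working directory.
import pake

pake.virtual('all', 'build/hello.txt')  # first target registered becomes the default


@pake.target('build/hello.txt', 'hello.in')  # rebuilt only when hello.in is newer
def build_hello_txt(t):
    t.cp('hello.in', t.name)  # Target.cp logs the copy and performs it


if __name__ == '__main__':
    pake.main()  # supports -n (dry run), -c (clean), -g (dependency graph)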


@@ -6,13 +6,13 @@
Run make:
$ make
$ ./build.py
## Run the examples in debug mode
Run the [Plovr](http://plovr.com/) web server with:
$ make serve
$ ./build.py serve
Then, either open one of the example html files from the `examples` directory directly in your browser, or start a simple webserver, for example:
@@ -38,7 +38,7 @@ First, install the [Closure
Linter](https://developers.google.com/closure/utilities/docs/linter_howto).
Then:
$ make lint
$ ./build.py lint
## Add examples