diff --git a/cli.js b/cli.js index a446c95af67..1428af0d62c 100755 --- a/cli.js +++ b/cli.js @@ -1,332 +1,61 @@ #!/usr/bin/env node require("babel-register"); -var web3 = require("web3"); -var path = require("path"); -var fs = require("fs"); -var chokidar = require('chokidar'); -var deasync = require("deasync"); -var colors = require('colors/safe'); -var Truffle = require('./index.js'); - -var ConfigurationError = require("./lib/errors/configurationerror"); +var Command = require("./lib/command"); +var Tasks = require("./lib/tasks"); +var TaskError = require("./lib/errors/taskerror"); var ExtendableError = require("./lib/errors/extendableerror"); -var argv = require('yargs').argv; - -var truffle_dir = process.env.TRUFFLE_NPM_LOCATION || argv.n || argv.npm_directory || __dirname; -var working_dir = process.env.TRUFFLE_WORKING_DIRECTORY || argv.w || argv.working_directory || process.cwd(); - -if (working_dir[working_dir.length - 1] != "/") { - working_dir += "/"; -} - -var pkg = JSON.parse(fs.readFileSync(path.join(truffle_dir, "package.json"), {encoding: "utf8"})); - -var tasks = {}; -var registerTask = function(name, description, fn) { - tasks[name] = { - name: name, - description: description, - fn: fn - }; -} - -var printSuccess = function() { - console.log(colors.green("Completed without errors on " + new Date().toString())); -}; - -var printFailure = function() { - console.log(colors.red("Completed with errors on " + new Date().toString())); -}; - -var runTask = function(name) { - try { - var fn = deasync(tasks[name].fn); - return fn() || 0; - } catch (e) { - if (e instanceof ExtendableError) { - console.log(e.message); - - if (argv.stack != null) { - console.log(e.stack); - } - +var command = new Command(Tasks); +command.run(process.argv.slice(2), function(err) { + if (err) { + if (err instanceof TaskError) { + command.run("list", function() {}); } else { - // Bubble up all other unexpected errors. - console.log(e.stack || e.toString()); - } - return 1; - } -}; - -registerTask('watch', "Watch filesystem for changes and rebuild the project automatically", function(done) { - var needs_rebuild = true; - var needs_redeploy = false; - - chokidar.watch(["app/**/*", "environments/*/contracts/**/*", "contracts/**/*", "truffle.json", "truffle.js"], { - ignored: /[\/\\]\./, // Ignore files prefixed with "." - cwd: working_dir, - ignoreInitial: true - }).on('all', function(event, filePath) { - // On changed/added/deleted - var display_path = path.join("./", filePath.replace(working_dir, "")); - console.log(colors.cyan(">> File " + display_path + " changed.")); - - needs_rebuild = true; - - if (display_path.indexOf("contracts/") == 0) { - needs_redeploy = true; - } else { - needs_rebuild = true; - } - }); - - var check_rebuild = function() { - if (needs_redeploy == true) { - needs_redeploy = false; - needs_rebuild = false; - console.log("Redeploying..."); - if (runTask("deploy") != 0) { - printFailure(); - } - } - - if (needs_rebuild == true) { - needs_rebuild = false; - console.log("Rebuilding..."); - if (runTask("build") != 0) { - printFailure(); + if (err instanceof ExtendableError) { + console.log(err.message); + } else { + // Bubble up all other unexpected errors. 
+ console.log(err.stack || err.toString()); } } - - setTimeout(check_rebuild, 200); - }; - - check_rebuild(); -}); - -registerTask('list', "List all available tasks", function(done) { - console.log("Truffle v" + pkg.version + " - a development framework for Ethereum"); - console.log(""); - console.log("Usage: truffle [command] [options]"); - console.log(""); - console.log("Commands:"); - console.log(""); - - var sorted = Object.keys(tasks).sort(); - - var longestTask = sorted.reduce(function(a, b) { - var first = typeof a == "string" ? a.length : a; - return Math.max(first, b.length); - }); - - for (var i = 0; i < sorted.length; i++) { - var task = tasks[sorted[i]]; - var heading = task.name; - while (heading.length < longestTask) { - heading += " "; - } - console.log(" " + heading + " => " + task.description) - } - - console.log(""); - done(); -}); - -registerTask('version', "Show version number and exit", function(done) { - console.log("Truffle v" + pkg.version); - done(); -}); - -registerTask('init', "Initialize new Ethereum project, including example contracts and tests", function(done) { - var config = Truffle.config.gather(truffle_dir, working_dir, argv); - Truffle.init.all(config, done); -}); - -registerTask('create:contract', "Create a basic contract", function(done) { - var config = Truffle.config.gather(truffle_dir, working_dir, argv); - - var name = argv.name; - - if (name == null && argv._.length > 1) { - name = argv._[1]; - } - - if (name == null) { - throw new ConfigurationError("Please specify a name. Example: truffle create:contract MyContract"); - } else { - Truffle.create.contract(config, name, done); - } -}); - -registerTask('create:test', "Create a basic test", function(done) { - var config = Truffle.config.gather(truffle_dir, working_dir, argv); - - var name = argv.name; - - if (name == null && argv._.length > 1) { - name = argv._[1]; - } - - if (name == null) { - throw new ConfigurationError("Please specify a name. Example: truffle create:test MyTest"); - } else { - Truffle.create.test(config, name, done); - } -}); - -registerTask('compile', "Compile contracts", function(done) { - var config = Truffle.config.gather(truffle_dir, working_dir, argv, "development"); - Truffle.contracts.compile(config, done); -}); - -registerTask('deploy', "Deploy contracts to the network, compiling if needed", function(done) { - var config = Truffle.config.gather(truffle_dir, working_dir, argv, "development"); - - console.log("Using environment " + config.environment + "."); - - var compile = true; - var link = true; - - if (argv.compile === false) { - compile = false; - } - - // Compile and deploy. 
- Truffle.contracts.deploy(config, compile, function(err) { - if (err != null) { - done(err); - } else { - // console.log("Rebuilding app with new contracts..."); - // runTask("build"); - done(); - } - }); -}); - -registerTask('build', "Build development version of app", function(done) { - var config = Truffle.config.gather(truffle_dir, working_dir, argv, "development"); - Truffle.build.build(config, function(err) { - done(err); - if (err == null) { - printSuccess(); - } - }); -}); - -registerTask('dist', "Create distributable version of app (minified)", function(done) { - var config = Truffle.config.gather(truffle_dir, working_dir, argv, "production"); - console.log("Using environment " + config.environment + "."); - Truffle.build.dist(config, function(err) { - done(err); - if (err == null) { - printSuccess(); - } - }); -}); - -registerTask('exec', "Execute a JS file within truffle environment. Script *must* call process.exit() when finished.", function(done) { - var config = Truffle.config.gather(truffle_dir, working_dir, argv, "development"); - - var file = argv.file; - - if (file == null && argv._.length > 1) { - file = argv._[1]; } - - if (file == null) { - console.log("Please specify a file, passing the path of the script you'd like the run. Note that all scripts *must* call process.exit() when finished."); - done(); - return; - } - - Truffle.exec.file(config, file, done); }); -// Supported options: -// --no-color: Disable color -// More to come. -registerTask('test', "Run tests", function(done) { - var config = Truffle.config.gather(truffle_dir, working_dir, argv, "test"); - - console.log("Using environment " + config.environment + "."); - - // Ensure we're quiet about deploys during tests. - config.argv.quietDeploy = true; - - var file = argv.file; - - if (file == null && argv._.length > 1) { - file = argv._[1]; - } - - if (file == null) { - Truffle.test.run(config, done); - } else { - Truffle.test.run(config, file, done); - } -}); - -registerTask('console', "Run a console with deployed contracts instantiated and available (REPL)", function(done) { - var config = Truffle.config.gather(truffle_dir, working_dir, argv, "development"); - Truffle.console.run(config, done); -}); - -registerTask('serve', "Serve app on localhost and rebuild changes as needed", function(done) { - var config = Truffle.config.gather(truffle_dir, working_dir, argv, "development"); - console.log("Using environment " + config.environment + "."); - Truffle.serve.start(config, argv.port || argv.p || "8080", function() { - runTask("watch"); - }); -}); - - - -// registerTask('watch:tests', "Watch filesystem for changes and rerun tests automatically", function(done) { +//var environment = argv.e || argv.environment || process.env.NODE_ENV || "default"; // -// gaze(["app/**/*", "config/**/*", "contracts/**/*", "test/**/*"], {cwd: working_dir, interval: 1000, debounceDelay: 500}, function() { -// // On changed/added/deleted -// this.on('all', function(event, filePath) { -// if (filePath.match(/\/config\/.*?\/.*?\.sol\.js$/)) { -// // ignore changes to /config/*/*.sol.js since these changes every time -// // tests are run (when contracts are compiled) -// return; -// } -// process.stdout.write("\u001b[2J\u001b[0;0H"); // clear screen -// var display_path = "./" + filePath.replace(working_dir, ""); -// console.log(colors.cyan(">> File " + display_path + " changed.")); -// run_tests(); -// }); -// }); +// if (working_dir[working_dir.length - 1] != "/") { +// working_dir += "/"; +// } // -// var run_tests = function() { 
-// console.log("Running tests..."); +// var printNetwork = function() { +// console.log("Using network " + environment + "."); +// }; // -// process.chdir(working_dir); -// var config = Truffle.config.gather(truffle_dir, working_dir, argv, "test"); -// config.argv.quietDeploy = true; // Ensure we're quiet about deploys during tests +// var printSuccess = function() { +// console.log(colors.green("Completed without errors on " + new Date().toString())); +// }; +// +// var printFailure = function() { +// console.log(colors.red("Completed with errors on " + new Date().toString())); +// }; // -// Test.run(config, function() { console.log("> test run complete; watching for changes..."); }); -// }; -// run_tests(); // run once immediately // -// }); - -// Default to listing available commands. -var current_task = argv._[0]; -if (current_task == null) { - current_task = "list"; -} -if (tasks[current_task] == null) { - console.log(colors.red("Unknown command: " + current_task)); - process.exit(1); -} -// Something is keeping the process open. I'm not sure what. -// Let's explicitly kill it until I can figure it out. -process.exit(runTask(current_task)); -//runTask(current_task); + // // Check to see if we're working on a dapp meant for 0.2.x or older + // if (fs.existsSync(path.join(working_dir, "config", "app.json"))) { + // console.log("Your dapp is meant for an older version of Truffle. Don't worry, there are two solutions!") + // console.log(""); + // console.log("1) Upgrade you're dapp using the followng instructions (it's easy):"); + // console.log(" https://github.com/ConsenSys/truffle/wiki/Migrating-from-v0.2.x-to-v0.3.0"); + // console.log(""); + // console.log(" ( OR )") + // console.log(""); + // console.log("2) Downgrade to Truffle 0.2.x"); + // console.log(""); + // console.log("Cheers! And file an issue if you run into trouble! 
https://github.com/ConsenSys/truffle/issues") + // process.exit(); + // } diff --git a/contracts/Example.sol b/contracts/Example.sol deleted file mode 100644 index e3c752092c2..00000000000 --- a/contracts/Example.sol +++ /dev/null @@ -1,5 +0,0 @@ -contract Example { - function Example() { - // constructor - } -} diff --git a/environments/test/config.json b/environments/test/config.json deleted file mode 100644 index 0967ef424bc..00000000000 --- a/environments/test/config.json +++ /dev/null @@ -1 +0,0 @@ -{} diff --git a/example/contracts/Migrations.sol b/example/contracts/Migrations.sol new file mode 100644 index 00000000000..132f325c10f --- /dev/null +++ b/example/contracts/Migrations.sol @@ -0,0 +1,21 @@ +contract Migrations { + address public owner; + uint public last_completed_migration; + + modifier restricted() { + if (msg.sender == owner) _ + } + + function Migrations() { + owner = msg.sender; + } + + function setCompleted(uint completed) restricted { + last_completed_migration = completed; + } + + function upgrade(address new_address) restricted { + Migrations upgraded = Migrations(new_address); + upgraded.setCompleted(last_completed_migration); + } +} diff --git a/example/environments/development/config.js b/example/environments/development/config.js deleted file mode 100644 index 4ba52ba2c8d..00000000000 --- a/example/environments/development/config.js +++ /dev/null @@ -1 +0,0 @@ -module.exports = {} diff --git a/example/environments/production/config.js b/example/environments/production/config.js deleted file mode 100644 index 4ba52ba2c8d..00000000000 --- a/example/environments/production/config.js +++ /dev/null @@ -1 +0,0 @@ -module.exports = {} diff --git a/example/environments/staging/config.js b/example/environments/staging/config.js deleted file mode 100644 index 4ba52ba2c8d..00000000000 --- a/example/environments/staging/config.js +++ /dev/null @@ -1 +0,0 @@ -module.exports = {} diff --git a/example/environments/test/config.js b/example/environments/test/config.js deleted file mode 100644 index 4ba52ba2c8d..00000000000 --- a/example/environments/test/config.js +++ /dev/null @@ -1 +0,0 @@ -module.exports = {} diff --git a/example/migrations/1_initial_migration.js b/example/migrations/1_initial_migration.js new file mode 100644 index 00000000000..5b309e062f2 --- /dev/null +++ b/example/migrations/1_initial_migration.js @@ -0,0 +1,3 @@ +module.exports = function(deployer) { + deployer.deploy(Migrations); +}; diff --git a/example/migrations/2_deploy_contracts.js b/example/migrations/2_deploy_contracts.js new file mode 100644 index 00000000000..ebb3a480d7e --- /dev/null +++ b/example/migrations/2_deploy_contracts.js @@ -0,0 +1,5 @@ +module.exports = function(deployer) { + deployer.deploy(ConvertLib); + deployer.autolink(); + deployer.deploy(MetaCoin); +}; diff --git a/example/truffle.js b/example/truffle.js index 395fb5b7c58..6e3970801fe 100644 --- a/example/truffle.js +++ b/example/truffle.js @@ -9,10 +9,6 @@ module.exports = { ], "images/": "images/" }, - deploy: [ - "MetaCoin", - "ConvertLib" - ], rpc: { host: "localhost", port: 8545 diff --git a/index.js b/index.js index 74c3762b442..ae625a8fa4d 100644 --- a/index.js +++ b/index.js @@ -1,11 +1,17 @@ +var pkg = require("./package.json"); + module.exports = { - init: require("./lib/init"), + build: require("./lib/build"), create: require("./lib/create"), + compiler: require("./lib/compiler"), config: require("./lib/config"), + console: require("./lib/repl"), contracts: require("./lib/contracts"), - build: 
require("./lib/build"), + require: require("./lib/require"), + init: require("./lib/init"), + migrate: require("./lib/migrate"), + profile: require("./lib/profiler"), + serve: require("./lib/serve"), test: require("./lib/test"), - exec: require("./lib/exec"), - console: require("./lib/repl"), - serve: require("./lib/serve") + version: pkg.version }; diff --git a/lib/build.js b/lib/build.js index 2f189b96909..686c1ff0368 100644 --- a/lib/build.js +++ b/lib/build.js @@ -1,14 +1,15 @@ var async = require("async"); var Promise = require("bluebird"); var mkdirp = Promise.promisify(require("mkdirp")); -var rimraf = Promise.promisify(require("rimraf")); +var del = require("del"); var fs = require("fs"); var DefaultBuilder = require("truffle-default-builder"); -var PuddingLoader = require("ether-pudding/loader"); +var Contracts = require("./contracts"); var BuildError = require("./errors/builderror"); var child_process = require("child_process"); var spawnargs = require("spawn-args"); var _ = require("lodash"); +var expect = require("./expect"); function CommandBuilder(command) { this.command = command; @@ -27,7 +28,7 @@ CommandBuilder.prototype.build = function(options, callback) { WORKING_DIRECTORY: options.working_directory, NODE_ENV: options.environment, BUILD_DESTINATION_DIRECTORY: options.destination_directory, - BUILD_CONTRACTS_DIRECTORY: options.contracts_directory, + BUILD_CONTRACTS_DIRECTORY: options.contracts_build_directory, WEB3_PROVIDER_LOCATION: "http://" + options.rpc.host + ":" + options.rpc.port }) }); @@ -50,54 +51,47 @@ CommandBuilder.prototype.build = function(options, callback) { }; var Build = { - clean: function(destination, callback) { + clean: function(options, callback) { + + var destination = options.build_directory; + var contracts_build_directory = options.contracts_build_directory; + // Clean first. - rimraf(destination + '/*').then(function() { + del([destination + '/*', "!" + contracts_build_directory]).then(function() { return mkdirp(destination); }).then(function() { callback(); }).catch(callback); }, - get_contract_data: function(config, callback) { - if (config.app.resolved.include_contracts === false) { - return callback(null, []); - } - - var warning = "Warning: No compiled contracts found. App will be built without compiled contracts."; - - if (fs.existsSync(config.contracts.build_directory) == false) { - console.log(warning); - callback(null, []); - } else { - PuddingLoader.contract_data(config.contracts.build_directory, function(err, contracts) { - if (err) return callback(err); - - if (contracts.length == 0) { - console.log(warning); - } - - callback(null, contracts); - }); - } - }, - // Note: key is a legacy parameter that will eventually be removed. // It's specific to the default builder and we should phase it out. - build: function(config, key, callback) { + build: function(options, key, callback) { var self = this; + expect.options(options, [ + "builder", + "build_directory", + "working_directory", + "contracts_build_directory", + "network", + "network_id", + "provider", + "rpc" + ]); + if (typeof key == "function") { callback = key; key = "build"; } - var builder = config.app.resolved.build; + var logger = options.logger || console; + var builder = options.builder; // No builder specified. Ignore the build then. if (typeof builder == "undefined") { - if (config.argv.quietDeploy == null) { - console.log("No build configuration specified. Not building."); + if (options.quiet != true) { + logger.log("No build configuration specified. 
Not building."); } return callback(); } @@ -109,7 +103,7 @@ var Build = { // a proper build function, then assume it's configuration // for the default builder. if (builder.hasOwnProperty("build") == false || typeof builder.build !== "function") { - builder = new DefaultBuilder(config.app.resolved.build, key, config.app.resolved.processors); + builder = new DefaultBuilder(builder, key, options.processors); } } else { // If they've only provided a build function, use that. @@ -124,22 +118,23 @@ var Build = { clean = builder.clean; } - clean(config.build.directory, function(err) { + clean(options, function(err) { if (err) return callback(err); - self.get_contract_data(config, function(err, contracts) { + Contracts.provision(options, function(err, contracts) { if (err) return callback(err); - var options = { - working_directory: config.working_dir, + var resolved_options = { + working_directory: options.working_directory, contracts: contracts, - contracts_directory: config.contracts.build_directory, - rpc: config.app.resolved.rpc, - environment: config.environment, - destination_directory: config.build.directory, + contracts_build_directory: options.contracts_build_directory, + destination_directory: options.build_directory, + rpc: options.rpc, + provider: options.provider, + network: options.network }; - builder.build(options, function(err) { + builder.build(resolved_options, function(err) { if (!err) return callback(); if (typeof err == "string") { diff --git a/lib/command.js b/lib/command.js new file mode 100644 index 00000000000..e7719f93e94 --- /dev/null +++ b/lib/command.js @@ -0,0 +1,54 @@ +var TaskError = require("./errors/taskerror"); +var yargs = require("yargs"); +var _ = require("lodash"); + +function Command(tasks) { + this.tasks = tasks; +}; + +Command.prototype.getTask = function(command) { + var argv = yargs.parse(command); + + if (argv._.length == 0) { + return null; + } + + var task_name = argv._[0]; + var task = this.tasks[task_name]; + + return task; +}; + +Command.prototype.run = function(command, options, callback) { + if (typeof options == "function") { + callback = options; + options = {}; + } + + var task = this.getTask(command); + + if (task == null) { + if (Array.isArray(command)) { + command = command.join(" ") + } + + return callback(new TaskError("Cannot find task for command: " + command)); + } + + var argv = yargs.parse(command); + // Remove the task name itself. + if (argv._) { + argv._.shift(); + } + + // We don't need this. + delete argv["$0"]; + + options = _.extend(_.clone(options), argv); + + task(options, function(err) { + callback(err); + }); +}; + +module.exports = Command; diff --git a/lib/compiler.js b/lib/compiler.js new file mode 100644 index 00000000000..82599783d8f --- /dev/null +++ b/lib/compiler.js @@ -0,0 +1,145 @@ +var solc = require("solc"); + +// Clean up after solc. +var listeners = process.listeners("uncaughtException"); +var solc_listener = listeners[listeners.length - 1]; +process.removeListener("uncaughtException", solc_listener); + +var path = require("path"); +var fs = require("fs"); +var async = require("async"); +var Profiler = require("./profiler"); +var CompileError = require("./errors/compileerror"); + +module.exports = { + // contracts_directory: String. Directory where .sol files can be found. + // quiet: Boolean. Suppress output. Defaults to false. + // strict: Boolean. Return compiler warnings as errors. Defaults to false. 
+ compile_all: function(options, callback) { + var self = this; + Profiler.all_contracts(options.contracts_directory, function(err, files) { + options.files = files; + self.compile_with_dependencies(options, callback); + }); + }, + + // contracts_directory: String. Directory where .sol files can be found. + // build_directory: String. Optional. Directory where .sol.js files can be found. Only required if `all` is false. + // all: Boolean. Compile all sources found. Defaults to true. If false, will compare sources against built files + // in the build directory to see what needs to be compiled. + // quiet: Boolean. Suppress output. Defaults to false. + // strict: Boolean. Return compiler warnings as errors. Defaults to false. + compile_necessary: function(options, callback) { + var self = this; + options.logger = options.logger || console; + + Profiler.updated(options, function(err, updated) { + if (err) return callback(err); + + if (updated.length == 0 && options.quiet != true) { + return callback(); + } + + options.files = updated; + self.compile_with_dependencies(options, callback); + }); + }, + + // { + // files: [...], + // includes: { + // "Foo.sol": "contract Foo {}" // Example + // }, + // contracts_directory: "..." // or process.cwd() + // strict: false, + // quiet: false + // logger: console + // } + compile: function(options, callback) { + var files = options.files || []; + var includes = options.includes || {}; + var logger = options.logger || console; + var contracts_directory = options.contracts_directory || process.cwd(); + + var sources = {}; + + async.each(files, function(file, finished) { + fs.readFile(file, "utf8", function(err, body) { + if (err) return finished(err); + sources[path.relative(contracts_directory, file)] = body; + finished(); + }); + }, function() { + Object.keys(includes).forEach(function(key) { + sources[key] = includes[key]; + }); + + // Add the listener back in, just in case I need it. + process.on("uncaughtException", solc_listener); + + var result = solc.compile({sources: sources}, 1); + + // Alright, now remove it. + process.removeListener("uncaughtException", solc_listener); + + var errors = result.errors || []; + var warnings = result.errors || []; + + if (options.strict == true) { + errors = errors.filter(function(error) { + return error.indexOf("Warning:") < 0; + }); + warnings = warnings.filter(function(error) { + return error.indexOf("Warning:") >= 0; + }); + + if (options.quiet != null) { + warnings.forEach(function(warning) { + logger.log(warning); + }); + } + } + + if (errors.length > 0) { + return callback(new CompileError(result.errors.join())); + } + + // Examine the sources, and ensure the contract we expected was defined + // TODO: This forces contract names to be the same as their filename. This should go. 
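      // For example (illustrative): a source registered as "MetaCoin.sol" is expected to
      // declare `contract MetaCoin`; if result.contracts["MetaCoin"] is missing, the whole
      // compilation is reported as a CompileError even when other sources compiled cleanly.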
+ var filenames = Object.keys(sources); + for (var i = 0; i < filenames.length; i++) { + var filename = filenames[i]; + var expected_contract = path.basename(filename, ".sol"); + + if (result.contracts[expected_contract] == null) { + return callback(new CompileError("Could not find expected contract or library in '" + filename + "': contract or library '" + expected_contract + "' not found.")); + } + } + + callback(null, result.contracts); + }) + }, + + compile_with_dependencies: function(options, callback) { + options.files = options.files || []; + options.includes = options.includes || {}; + options.logger = options.logger || console; + options.contracts_directory = options.contracts_directory || process.cwd(); + + var self = this; + Profiler.required_files(options.files, function(err, files) { + if (err) return callback(err); + + files.sort().forEach(function(file) { + if (options.quiet != true) { + var relative = path.relative(options.contracts_directory, file) + options.logger.log("Compiling " + relative + "..."); + } + }); + + options.files = files; + + self.compile(options, callback); + }); + } +}; diff --git a/lib/config.js b/lib/config.js index 8aefc769eb9..e0901f5dd46 100644 --- a/lib/config.js +++ b/lib/config.js @@ -1,310 +1,184 @@ var fs = require("fs"); -var dir = require("node-dir"); -var deasync = require("deasync"); -var filesSync = deasync(dir.files); -var subdirSync = deasync(dir.subdirs); var _ = require("lodash"); -var Web3 = require("web3"); -var loadconf = deasync(require("./loadconf")); var path = require("path"); -var Exec = require("./exec"); +var Provider = require("./provider"); var ConfigurationError = require('./errors/configurationerror'); -var Pudding = require("ether-pudding"); -var PuddingLoader = require("ether-pudding/loader"); - -var Config = { - gather: function(truffle_dir, working_dir, argv, desired_environment) { - var config = {}; - config = _.merge(config, { - argv: argv, - truffle_dir: truffle_dir, - working_dir: working_dir, - web3: new Web3(), - environments: { - directory: path.join(working_dir, "environments"), - configfilename: "config.js", - oldconfigfilename: "config.json", - available: {}, - current: {} - }, - app: { - configfile: path.join(working_dir, "truffle.js"), - oldconfigfile: path.join(working_dir, "truffle.json"), - directory: path.join(working_dir, "app"), - // Default config objects that'll be overwritten by working_dir config. - resolved: { - build: {}, - include_contracts: true, - deploy: [], - after_deploy: [], - rpc: {}, - processors: {}, - } - }, - example: { - directory: path.join(truffle_dir, "example") - }, - templates: { - test: { - filename: path.join(truffle_dir, "templates", "example.js"), - variable: "example" - }, - contract: { - filename: path.join(truffle_dir, "templates", "Example.sol"), - name: "Example", - variable: "example" - } - }, - contracts: { - classes: {}, - directory: path.join(working_dir, "contracts"), - build_directory: null - }, - tests: { - directory: path.join(working_dir, "test"), - filter: /.*\.(js|es|es6|jsx)$/ - }, - build: { - directory: null, - }, - dist: { - directory: null, - }, - rpc: { - defaults: { - gas: 3141592, - gasPrice: 100000000000, // 100 Shannon, - from: null - } - } - }); - - // Check to see if we're working on a dapp meant for 0.2.x or older - if (fs.existsSync(path.join(working_dir, "config", "app.json"))) { - console.log("Your dapp is meant for an older version of Truffle. 
Don't worry, there are two solutions!") - console.log(""); - console.log("1) Upgrade you're dapp using the followng instructions (it's easy):"); - console.log(" https://github.com/ConsenSys/truffle/wiki/Migrating-from-v0.2.x-to-v0.3.0"); - console.log(""); - console.log(" ( OR )") - console.log(""); - console.log("2) Downgrade to Truffle 0.2.x"); - console.log(""); - console.log("Cheers! And file an issue if you run into trouble! https://github.com/ConsenSys/truffle/issues") - process.exit(); - } - - config.requireNoCache = function(filePath) { - //console.log("Requring w/o cache: " + path.resolve(filePath)); - delete require.cache[path.resolve(filePath)]; - return require(filePath); - }; - - desired_environment = argv.e || argv.environment || process.env.NODE_ENV || desired_environment; - - if (desired_environment) { - // Try to find the desired environment, and fall back to development if we don't find it. - for (var environment of [desired_environment, "development"]) { - var environment_directory = path.join(config.environments.directory, environment); - if (!fs.existsSync(environment_directory)) { - continue; - } - - // I put this warning here but now I'm not sure if I want it. - if (environment != desired_environment && desired_environment != null) { - console.log("Warning: Couldn't find environment " + desired_environment + "."); - } - - config.environment = desired_environment; - config.environments.current.directory = environment_directory; - config.environments.current.filename = path.join(environment_directory, config.environments.configfilename); - config.environments.current.oldfilename = path.join(environment_directory, config.environments.oldconfigfilename); - - break; +var requireNoCache = require("./require-nocache"); +var findUp = require("find-up"); + +var DEFAULT_CONFIG_FILENAME = "truffle.js"; + +function Config(truffle_directory, working_directory, network) { + var self = this; + + this._values = { + truffle_directory: truffle_directory || path.resolve(path.join(__dirname, "../")), + working_directory: working_directory || process.cwd(), + network: network || "default", + verboseRpc: false + }; + + var props = { + // These are already set. + truffle_directory: function() {}, + working_directory: function() {}, + network: function() {}, + verboseRpc: function() {}, + + build_directory: function() { + return path.join(self.working_directory, "build"); + }, + contracts_directory: function() { + return path.join(self.working_directory, "contracts"); + }, + contracts_build_directory: function() { + return path.join(self.build_directory, "contracts"); + }, + migrations_directory: function() { + return path.join(self.working_directory, "migrations"); + }, + test_directory: function() { + return path.join(self.working_directory, "test"); + }, + test_file_extension_regexp: function() { + return /.*\.(js|es|es6|jsx)$/ + }, + networks: function() { + return { + // "default": {}, + "test": {} } - } - - // If we didn't find an environment, but asked for one, error. - if (config.environment == null && desired_environment != null) { - throw new ConfigurationError("Couldn't find any suitable environment. Check environment configuration."); - } - - // Get environments in working directory, if available. 
- if (fs.existsSync(config.environments.directory)) { - for (var directory of subdirSync(config.environments.directory)) { - name = directory.substring(directory.lastIndexOf("/") + 1) - config.environments.available[name] = directory; + }, + network_id: function() { + if (!self.network || !self.networks[self.network] || !self.networks[self.network].network_id) { + return "default"; } - } - - // Load the app config. - // For now, support both new and old config files. - if (fs.existsSync(config.app.configfile)) { - _.merge(config.app.resolved, config.requireNoCache(config.app.configfile)); - } else if (fs.existsSync(config.app.oldconfigfile)) { - config.app.resolved = loadconf(config.app.oldconfigfile, config.app.resolved); - } - - // Now merge default rpc details, only overwriting if not specified. - _.mergeWith(config.app.resolved.rpc, config.rpc.defaults, function(objValue, srcValue) { - return objValue != null ? objValue : srcValue; - }); - // Load environment config - if (fs.existsSync(config.environments.current.filename)) { - _.merge(config.app.resolved, config.requireNoCache(config.environments.current.filename)); - } else if (fs.existsSync(config.environments.current.oldfilename)) { - config.app.resolved = loadconf(config.environments.current.oldfilename, config.app.resolved); - } - - if (fs.existsSync(config.environments.current.directory)) { - // Overwrite build and dist directories - config.build.directory = path.join(config.environments.current.directory, "build"); - config.dist.directory = path.join(config.environments.current.directory, "dist"); - config.contracts.build_directory = path.join(config.environments.current.directory, "contracts"); - } + return self.networks[self.network].network_id; + }, + network_config: function() { + var conf = self.networks[self.network]; - // Allow for deprecated build configuration. - if (config.app.resolved.frontend != null) { - config.app.resolved.build = config.app.resolved.frontend; - } - - // Helper function for expecting paths to exist. 
- config.expect = function(expected_path, description, extra, callback) { - if (typeof description == "function") { - callback = description; - description = "file"; - extra = ""; + if (conf == null && self.network == "default") { + return {}; } - if (typeof extra == "function") { - callback = extra; - extra = ""; + return conf; + }, + example_project_directory: function() { + return path.join(self.truffle_directory, "example"); + }, + rpc: function() { + return { + host: "localhost", + port: "8545", + gas: 4712388, + gasPrice: 100000000000, // 100 Shannon, + from: null } + }, + provider: function() { + var options = self.getRPCConfig(); + options.verboseRpc = self.verboseRpc; + return Provider.create(options); + } + }; + + Object.keys(props).forEach(function(prop) { + self.addProp(prop, props[prop]); + }); +}; + +Config.prototype.addProp = function(key, default_getter) { + Object.defineProperty(this, key, { + get: function() { + return this._values[key] || default_getter(); + }, + set: function(val) { + this._values[key] = val; + }, + configurable: true, + enumerable: true + }); +}; + +Config.prototype.getRPCConfig = function() { + if (this.network_config == null) { + throw new ConfigurationError("Cannot find network '" + network_id + "'"); + } - if (description == null) { - description = "file"; - } - - if (extra == null) { - extra = ""; - } - - if (!fs.existsSync(expected_path)) { - var display_path = expected_path.replace(this.working_dir, "./"); - var error = new ConfigurationError("Couldn't find " + description + " at " + display_path + ". " + extra); - - if (callback != null) { - callback(error); - return false; - } else { - throw error; - } - } - return true; - }; - - config.test_connection = function(callback) { - config.web3.eth.getCoinbase(function(error, coinbase) { - if (error != null) { - error = new Error("Could not connect to your RPC client. Please check your RPC configuration."); - } + return _.merge(this.rpc, this.network_config) +}; - callback(error, coinbase) - }); - }; +Config.prototype.with = function(obj) { + return _.extend({}, this, obj); +}; - // DEPRECATED: Resolve paths for default builder's extra processors. - for (var extension in config.app.resolved.processors) { - var file = config.app.resolved.processors[extension]; - var full_path = path.join(working_dir, file); - config.app.resolved.processors[extension] = full_path; - } +Config.prototype.merge = function(obj) { + return _.extend(this, obj); +}; - var provider = new Web3.providers.HttpProvider("http://" + config.app.resolved.rpc.host + ":" + config.app.resolved.rpc.port); - config.web3.setProvider(provider); +// Helper function for expecting paths to exist. +Config.expect = function(expected_path, description, extra, callback) { + if (typeof description == "function") { + callback = description; + description = "file"; + extra = ""; + } - if (argv.verboseRpc != null) { - // // If you want to see what web3 is sending and receiving. - var oldAsync = config.web3.currentProvider.sendAsync; - config.web3.currentProvider.sendAsync = function(options, callback) { - console.log(" > " + JSON.stringify(options, null, 2).split("\n").join("\n > ")); - oldAsync.call(config.web3.currentProvider, options, function(error, result) { - if (error == null) { - console.log(" < " + JSON.stringify(result, null, 2).split("\n").join("\n < ")); - } - callback(error, result) - }); - }; - } + if (typeof extra == "function") { + callback = extra; + extra = ""; + } - // Get contracts in working directory, if available. 
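    // Each entry built below has this shape (illustrative values):
    //   config.contracts.classes["MetaCoin"] = {
    //     file: "/home/user/project/contracts/MetaCoin.sol",
    //     source: "./contracts/MetaCoin.sol",
    //     source_modified_time: 1457390000000,
    //     compiled_time: 0   // overwritten later if a compiled artifact is found
    //   };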
- if (fs.existsSync(config.contracts.directory)) { - for (file of filesSync(config.contracts.directory)) { + if (description == null) { + description = "file"; + } - // Ignore any files that aren't solidity files. - if (path.extname(file) != ".sol" || path.basename(file)[0] == ".") { - continue; - } + if (extra == null) { + extra = ""; + } - var name = path.basename(file, ".sol"); - var relative_path = file.replace(config.working_dir, "./"); - var stats = fs.statSync(file); + if (!fs.existsSync(expected_path)) { + var display_path = expected_path.replace(this.working_dir, "./"); + var error = new ConfigurationError("Couldn't find " + description + " at " + display_path + ". " + extra); - config.contracts.classes[name] = { - file: file, - source: relative_path, - source_modified_time: (stats.mtime || stats.ctime).getTime(), - compiled_time: 0 // This will be overwritten if we find a compiled file - } - } + if (callback != null) { + callback(error); + return false; + } else { + throw error; } + } + return true; +} - // Now merge those contracts with what's in the configuration, if any, using the loader. - Pudding.setWeb3(config.web3); +Config.default = function() { + return new Config(); +}; - // Functionalize this so we can make it synchronous. - function loadContracts(callback) { - if (fs.existsSync(config.contracts.build_directory) == false) { - return callback(); - } - - var contracts = {}; - PuddingLoader.load(config.contracts.build_directory, Pudding, contracts, function(err, names, data) { - if (err) return callback(err); +Config.detect = function(options, filename) { + if (filename == null) { + filename = DEFAULT_CONFIG_FILENAME; + } - data.forEach(function(item) { - var name = item.name; + var file = findUp.sync(filename); - // Don't load a contract that's been deleted. 
- if (!config.contracts.classes[name]) { - return; - } + if (file == null) { + throw new ConfigurationError("Could not find suitable configuration file."); + } - var stats; - try { - var stats = fs.statSync(item.file); - } catch (e) { - return callback(e); - } + return this.load(file, options); +}; - var contract = contracts[name]; - config.contracts.classes[name].abi = contract.abi; - config.contracts.classes[name].binary = contract.binary; - config.contracts.classes[name].unlinked_binary = contract.unlinked_binary || contract.binary; - config.contracts.classes[name].address = contract.address; - config.contracts.classes[name].compiled_time = (stats.mtime || stats.ctime).getTime(); - }); +Config.load = function(file, options) { + var config = new Config(); - callback(); - }); - }; + config.working_directory = path.dirname(path.resolve(file)); - var loader = deasync(loadContracts); - loader(); + var static_config = requireNoCache(file); - return config; - } -} + return _.merge(config, static_config, options); +}; module.exports = Config; diff --git a/lib/contracts.js b/lib/contracts.js index 11e4af729e7..3dc60beb27a 100644 --- a/lib/contracts.js +++ b/lib/contracts.js @@ -2,505 +2,94 @@ var async = require("async"); var fs = require("fs"); var mkdirp = require("mkdirp"); var path = require("path"); -var solc = require("solc"); -var path = require("path"); -var Exec = require("./exec"); +var Compiler = require("./compiler"); var Pudding = require("ether-pudding"); -var PuddingGenerator = require("ether-pudding/generator"); -var ConfigurationError = require("./errors/configurationerror"); -var CompileError = require("./errors/compileerror"); -var DeployError = require("./errors/deployerror"); -var graphlib = require("graphlib"); -var Graph = require("graphlib").Graph; -var isAcyclic = require("graphlib/lib/alg").isAcyclic; -var postOrder = require("graphlib/lib/alg").postorder; - +var Web3 = require("web3"); +var expect = require("./expect"); var Contracts = { - account: null, - - get_account: function(config, callback) { - var self = this; - - if (config.app.resolved.rpc.from != null) { - this.account = config.app.resolved.rpc.from; - } - - if (this.account != null) { - return callback(null, this.account); - } - - config.web3.eth.getAccounts(function(err, result) { - if (err != null) return callback(err); - - self.account = result[0]; - callback(null, self.account); - }); - }, - - update_sources: function(config, callback) { - var contract_names = Object.keys(config.contracts.classes); - async.each(contract_names, function(name, done) { - var contract = config.contracts.classes[name]; - fs.readFile(contract.file, {encoding: "utf8"}, function(err, body) { - if (err) return done(err); - - contract.body = body; - done(); - }); - }, callback); - }, - - compile_necessary: function(config, callback) { - var self = this; - this.update_sources(config, function() { - var contract_names = Object.keys(config.contracts.classes); - - var sources = {}; - var updated = {}; - var included = {}; - - for (var i = 0; i < contract_names.length; i++) { - var name = contract_names[i]; - var contract = config.contracts.classes[name]; - - if (contract.source_modified_time > contract.compiled_time || config.argv.compileAll === true) { - updated[name] = true; - } - } - - if (Object.keys(updated).length == 0 && config.argv.quietDeploy == null) { - console.log("No contracts updated; skipping compilation."); - return callback(); - } - - var dependsGraph = self.build_compile_dependency_graph(config, callback); - - if 
(dependsGraph == null) { - return; - } - - function is_updated(contract_name) { - return updated[contract_name] === true; - } - - function include_source_for(contract_name) { - var contract = config.contracts.classes[contract_name]; - var source_path = path.relative(config.contracts.directory, contract.source); - - if (sources[source_path] != null) { - return; - } - - var full_path = path.resolve(config.working_dir, contract.source) - sources[source_path] = fs.readFileSync(full_path, {encoding: "utf8"}); - - // For graph traversing - included[contract_name] = true; - } - - function walk_down(contract_name) { - if (included[contract_name] === true) { - return; - } - - include_source_for(contract_name); - - var dependencies = dependsGraph.successors(contract_name); - - // console.log("At: " + contract_name); - // console.log(" Dependencies: ", dependencies); - - if (dependencies.length > 0) { - dependencies.forEach(walk_down); - } - } - - function walk_from(contract_name) { - // if (included[contract_name] === true) { - // return; - // } - - var ancestors = dependsGraph.predecessors(contract_name); - var dependencies = dependsGraph.successors(contract_name); - - // console.log("At: " + contract_name); - // console.log(" Ancestors: ", ancestors); - // console.log(" Dependencies: ", dependencies); - - include_source_for(contract_name); - - if (ancestors.length > 0) { - ancestors.forEach(walk_from); - } - - if (dependencies.length > 0) { - dependencies.forEach(walk_down); - } - } - - Object.keys(updated).forEach(walk_from); - - Object.keys(sources).sort().forEach(function(file_path) { - if (config.argv.quietDeploy == null) { - console.log("Compiling " + file_path + "..."); - } - }); - - var result = solc.compile({sources: sources}, 1); - var errors = result.errors || []; - var warnings = result.errors || []; - - if (!config.argv.strict) { - errors = errors.filter(function(error) { - return error.indexOf("Warning:") < 0; - }); - warnings = warnings.filter(function(error) { - return error.indexOf("Warning:") >= 0; - }); - - if (config.argv.quietDeploy == null) { - warnings.forEach(function(warning) { - console.log(warning); - }); - } - } - - if (errors.length > 0) { - return callback(new CompileError(result.errors.join())); - } - - // Examine the sources, and ensure the contract we expected was defined - var filenames = Object.keys(sources); - for (var i = 0; i < filenames.length; i++) { - var filename = filenames[i]; - var expected_contract = path.basename(filename, ".sol"); - - if (result.contracts[expected_contract] == null) { - return callback(new CompileError("Could not find expected contract or library in '" + filename + "': contract or library '" + expected_contract + "' not found.")); - } - } - - for (var i = 0; i < contract_names.length; i++) { - var name = contract_names[i]; - var contract = config.contracts.classes[name]; - var compiled_contract = result.contracts[name]; - - // If we didn't compile this contract this run, continue. 
- if (compiled_contract == null) { - continue; - } - - contract.binary = compiled_contract.bytecode; - contract.unlinked_binary = compiled_contract.bytecode; - contract.abi = JSON.parse(compiled_contract.interface); - } - - callback(); - }); - }, - - compile: function(config, callback) { - var self = this; - async.series([ - function(c) { - self.compile_necessary(config, c); - }, - function(c) { - self.write_contracts(config, "contracts", c); - } - ], callback); - }, - - createContractAndWait: function(config, contract_name) { + provision: function(options, callback) { var self = this; - - var tx = { - from: this.account, - gas: config.app.resolved.rpc.gas || config.app.resolved.rpc.gasLimit, - gasPrice: config.app.resolved.rpc.gasPrice, // 100 Shannon - data: config.contracts.classes[contract_name].binary - }; - - return new Promise(function(accept, reject) { - config.web3.eth.sendTransaction(tx, function(err, hash) { - if (err != null) { - return reject(err); - } - - var interval = setInterval(function() { - config.web3.eth.getTransactionReceipt(hash, function(err, receipt) { - if (err != null) { - clearInterval(interval); - - return reject(err); - } - if (receipt != null) { - if (config.argv.quietDeploy == null) { - console.log("Deployed: " + contract_name + " to address: " + receipt.contractAddress); - } - - accept({ - name: contract_name, - address: receipt.contractAddress - }); - clearInterval(interval); - } + var logger = options.logger || console; + var web3 = new Web3(); + web3.setProvider(options.provider); + + Pudding.requireAll({ + source_directory: options.contracts_build_directory, + provider: options.provider + }, function(err, contracts) { + if (err) return callback(err); + + web3.eth.getAccounts(function(err, accounts) { + if (err) return callback(err); + + // Add contracts to context and prepare contracts. + contracts.forEach(function(contract) { + // Set defaults based on configuration. + contract.defaults({ + from: options.from || accounts[0], + gas: options.gas, + gasPrice: options.gasPrice }); - }, 500); - }) - }); - }, - - build_compile_dependency_graph: function(config, errorCallback) { - if (config.argv.quietDeploy == null) { - console.log("Checking sources..."); - } - // Iterate through all the contracts looking for libraries and building a dependency graph - var dependsGraph = new Graph(); - var contract_names = Object.keys(config.contracts.classes); - for (var i = 0; i < contract_names.length; i++) { - var name = contract_names[i] - var contract = config.contracts.classes[name]; - - if (contract == null) { - errorCallback(new CompileError("Could not find contract '" + name + "' for compiling. Check truffle.json.")); - return null; - } - - // Add the contract to the depend graph - dependsGraph.setNode(name); - - // Find import statements and resolve those import paths, adding them to the graph. 
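      // For instance (illustrative), a source line such as
      //   import "ConvertLib.sol";
      // matches the regex below with match[2] == "ConvertLib.sol", so an edge
      // MetaCoin -> ConvertLib is recorded when MetaCoin.sol contains that import.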
- contract.body.split(/;|\n/).filter(function(line) { - return line.indexOf("import") >= 0; - }).forEach(function(line) { - var regex = /import.*("|')([^"']+)("|')*/g; - var match = regex.exec(line); - - if (match == null) return; - - var file = match[2]; - var dependency_name = path.basename(file, ".sol"); - - if (!dependsGraph.hasEdge(name, dependency_name)) { - dependsGraph.setEdge(name, dependency_name); - } - }); - } - // Check for cycles in the graph, the dependency graph needs to be a tree otherwise there's an error - if (!isAcyclic(dependsGraph)) - { - console.log("ERROR: Cycles in dependency graph"); - dependsGraph.edges().forEach(function(o){ - console.log(o.v+" -- depends on --> "+o.w); - }); - errorCallback(new CompileError("Found cyclic dependencies. Adjust your import statements to remove cycles.")); - return null; - } - return dependsGraph; - }, - - build_deploy_dependency_graph: function(config, errorCallback) { - if (config.argv.quietDeploy == null) { - console.log("Collecting dependencies..."); - } - // Iterate through all the contracts looking for libraries and building a dependency graph - var dependsGraph = new Graph(); - for (var i = 0; i < config.app.resolved.deploy.length; i++) { - var key = config.app.resolved.deploy[i]; - var contract_class = config.contracts.classes[key]; - - if (contract_class == null) { - errorCallback(new DeployError("Could not find contract '" + key + "' for linking. Check truffle.json.")); - return null; - } - - if (contract_class.binary == null){ - errorCallback(new DeployError("Could not find compiled binary for contract '" + key + "'. Check truffle.json.")); - return null; - } - // Add the contract to the depend graph - dependsGraph.setNode(key); - - // Find references to any librarys - // Library references are embedded in the bytecode of contracts with the format - // "__Lib___________________________________" , where "Lib" is your library name and the whole - // string is 40 characters long. This is the placeholder for the Lib's address. - - var regex = /__([^_]*)_*/g; - var matches; - while ( (matches = regex.exec(contract_class.unlinked_binary)) !== null ) { - var lib = matches[1]; - if (!dependsGraph.hasEdge(key,lib)) { - dependsGraph.setEdge(key, lib); - } - } - } - // Check for cycles in the graph, the dependency graph needs to be a tree otherwise there's an error - if (!isAcyclic(dependsGraph)) - { - console.log("ERROR: Cycles in dependency graph"); - dependsGraph.edges().forEach(function(o){ - console.log(o.v+" -- depends on --> "+o.w); - }); - errorCallback(new DeployError("Error linker found cyclic dependencies. Adjust your import statements to remove cycles.")); - return null; - } - return dependsGraph; - }, - link_dependencies: function(config, contract_name) { - var self = this; - return function(dependency_addresses) { - var contract = config.contracts.classes[contract_name]; - - //All of the library dependencies to this contract have been deployed - //Inject the address of each lib into this contract and then deploy it. 
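      // Illustrative example of the substitution performed below: if ConvertLib was deployed
      // at 0x692a70d2e424a56d2c6c27aa97d1a86395877b3a, every placeholder of the form
      //   __ConvertLib____________________________
      // (40 characters in total) in MetaCoin's unlinked binary is replaced with
      // 692a70d2e424a56d2c6c27aa97d1a86395877b3a, the address without its "0x" prefix.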
- dependency_addresses.forEach(function(lib) { - if (config.argv.quietDeploy == null) { - console.log("Linking Library: " + lib.name + " to contract: " + contract_name + " at address: " + lib.address); - } + if (options.network_id) { + contract.setNetwork(options.network_id); + } + }); - var bin_address = lib.address.replace("0x", ""); - var re = new RegExp("__" + lib.name + "_*", "g"); - contract.binary = contract.unlinked_binary.replace(re, bin_address); + callback(null, contracts); }); - - return self.createContractAndWait(config, contract_name); - } + }); }, - deploy: function(config, compile, done_deploying) { + // contracts_directory: String. Directory where .sol files can be found. + // contracts_build_directory: String. Directory where .sol.js files can be found and written to. + // all: Boolean. Compile all sources found. Defaults to true. If false, will compare sources against built files + // in the build directory to see what needs to be compiled. + // network_id: network id to link saved contract artifacts. + // quiet: Boolean. Suppress output. Defaults to false. + // strict: Boolean. Return compiler warnings as errors. Defaults to false. + compile: function(options, callback) { var self = this; - if (typeof compile == "function") { - done_deploying = compile; - compile = true; - } - - if (compile == null) { - compile == true; - } - - async.series([ - function(c) { - self.get_account(config, c); - }, - function(c) { - if (compile == true) { - self.compile_necessary(config, c); - } else { - c(); - } - }, - function(c) { - Pudding.setWeb3(config.web3); - var dependsGraph = self.build_deploy_dependency_graph(config, c); - - if( dependsGraph == null) { - return; - } - - var dependsOrdering = postOrder(dependsGraph, dependsGraph.nodes()); - var deploy_promise = null; - var contract_name; // This is in global scope so that it can be used in the .catch below - - // Iterate over the dependency grpah in post order, deploy libraries first so we can - // capture their addresses and use them to deploy the contracts that depend on them - for(var i = 0; i < dependsOrdering.length; i++) { - contract_name = dependsOrdering[i]; - var contract_class = config.contracts.classes[contract_name]; - - if (contract_class == null) { - c(new DeployError("Could not find contract '" + contract_name + "' for deployment. 
Check truffle.json.")); - return; - } - - var dependencies = dependsGraph.successors(contract_name); - - // When we encounter a Library that is not dependant on other libraries, we can just - // deploy it as normal - if (dependencies.length == 0) { - deploy_promise = self.createContractAndWait(config, contract_name); + expect.options(options, [ + "contracts_directory", + "contracts_build_directory", + "network", + "network_id" + ]); - // Store the promise in the graph so we can fetch it later - dependsGraph.setNode(contract_name, deploy_promise); - } - // Contracts that have dependencies need to wait until those dependencies have been deployed - // so we can inject the address into their byte code - else - { - // Collect all the promises for the libraries this contract depends on into a list - // NOTE: since this loop is traversing in post-order, we can be assured that this list - // will contain ALL of the dependencies of this contract - var depends_promises = dependencies.map(dependsGraph.node, dependsGraph); + function finished(err, contracts) { + if (err) return callback(err); - // Wait for all the dependencies to be committed and then do the linking step - deploy_promise = Promise.all(depends_promises).then( - self.link_dependencies(config, contract_name) - ); - - // It's possible that this contract is a dependency of some other contract so we store - // it in the graph just in case - dependsGraph.setNode(contract_name,deploy_promise); - } - } - ///Now wait for all of the outstanding deployments to complete - Promise.all(dependsGraph.nodes().map(dependsGraph.node, dependsGraph)) - .then(function(deployed_contracts) { - deployed_contracts.forEach(function(a) { - config.contracts.classes[a.name].address = a.address; - }); - c(); - }).catch(function(err) { - c(new DeployError(err.message, contract_name)); - }); - }, - function(c) { - self.write_contracts(config, "built contract files", c); - }, - function(c) { - self.after_deploy(config, c); - } - ], function(err) { - if (err != null) { - done_deploying(err); - return; + if (contracts != null && Object.keys(contracts).length > 0) { + self.write_contracts(contracts, options, callback); } else { - done_deploying(); + callback(null, []); } - }); - }, + }; - after_deploy: function(config, done) { - async.eachSeries(config.app.resolved.after_deploy, function(file, iterator_callback) { - if (config.argv.quietDeploy == null) { - console.log("Running post deploy script " + file + "..."); - } - Exec.file(config, file, iterator_callback); - }, done); + if (options.all === true || options.compileAll === true) { + Compiler.compile_all(options, finished); + } else { + Compiler.compile_necessary(options, finished); + } }, - write_contracts: function(config, description, callback) { - var destination = config.contracts.build_directory; - - description = description || "contracts"; - - mkdirp(destination, function(err, result) { + write_contracts: function(contracts, options, callback) { + mkdirp(options.contracts_build_directory, function(err, result) { if (err != null) { callback(err); return; } - var display_directory = "." + path.sep + path.relative(config.working_dir, destination); // path.join("./", destination.replace(config.working_dir, "")); - if (config.argv.quietDeploy == null) { - console.log("Writing " + description + " to " + display_directory); + if (options.quiet != true && options.quietWrite != true) { + console.log("Writing artifacts to ." 
+ path.sep + path.relative(process.cwd(), options.contracts_build_directory)); } - PuddingGenerator.save(config.contracts.classes, destination, {removeExisting: true}); - - callback(); + Pudding.saveAll(contracts, options.contracts_build_directory, options).then(function() { + callback(null, contracts); + }).catch(callback); }); } } diff --git a/lib/create.js b/lib/create.js index 48a6fa0de9a..0132401be7a 100644 --- a/lib/create.js +++ b/lib/create.js @@ -2,14 +2,22 @@ var util = require("./util"); var file = require("./file"); var path = require("path"); -var Create = { - contract: function(config, name, callback) { - if (!config.expect(config.contracts.directory, "contracts directory", callback)) { - return; - } +var templates = { + test: { + filename: path.join(__dirname, "../", "templates", "example.js"), + variable: "example" + }, + contract: { + filename: path.join(__dirname, "../", "templates", "Example.sol"), + name: "Example", + variable: "example" + } +}; - var from = config.templates.contract.filename; - var to = path.join(config.contracts.directory, name + ".sol"); +var Create = { + contract: function(directory, name, callback) { + var from = templates.contract.filename; + var to = path.join(directory, name + ".sol"); file.duplicate(from, to, function(err) { if (err != null) { @@ -17,17 +25,14 @@ var Create = { return; } - file.replace(to, config.templates.contract.name, name, callback); + file.replace(to, templates.contract.name, name, callback); }); }, - test: function(config, name, callback) { - if (!config.expect(config.tests.directory, "tests directory", callback)) { - return; - } - + test: function(directory, name, callback) { var underscored = util.toUnderscoreFromCamel(name); - var from = config.templates.test.filename; - var to = path.join(config.tests.directory, underscored + ".js"); + underscored = underscored.replace(/\./g, "_"); + var from = templates.test.filename; + var to = path.join(directory, underscored + ".js"); file.duplicate(from, to, function(err) { if (err != null) { @@ -35,8 +40,8 @@ var Create = { return; } - file.replace(to, config.templates.contract.name, name, function() { - file.replace(to, config.templates.contract.variable, underscored, callback); + file.replace(to, templates.contract.name, name, function() { + file.replace(to, templates.contract.variable, underscored, callback); }); }); } diff --git a/lib/deferredchain.js b/lib/deferredchain.js new file mode 100644 index 00000000000..ba423ee9a3d --- /dev/null +++ b/lib/deferredchain.js @@ -0,0 +1,43 @@ +function DeferredChain() { + var self = this; + this.chain = new Promise(function(accept, reject) { + self._accept = accept; + self._reject = reject; + }); + + this.await = new Promise(function() { + self._done = arguments[0]; + }); + this.started = false; +}; + +DeferredChain.prototype.then = function(fn) { + var self = this; + this.chain = this.chain.then(function() { + var args = Array.prototype.slice.call(arguments); + + return fn.apply(null, args); + }); + + return this; +}; + +DeferredChain.prototype.catch = function(fn) { + var self = this; + this.chain = this.chain.catch(function() { + var args = Array.prototype.slice.call(arguments); + + return fn.apply(null, args); + }); + + return this; +}; + +DeferredChain.prototype.start = function() { + this.started = true; + this.chain = this.chain.then(this._done); + this._accept(); + return this.await; +}; + +module.exports = DeferredChain; diff --git a/lib/deployer.js b/lib/deployer.js new file mode 100644 index 00000000000..a5eca0d32bc --- 
/dev/null +++ b/lib/deployer.js @@ -0,0 +1,211 @@ +var Linker = require("./linker"); +var Require = require("./require"); +var expect = require("./expect"); +var path = require("path"); +var DeferredChain = require("./deferredchain"); + +var Actions = { + deployAndLink: function(contract, args, deployer) { + var self = this; + return function() { + // Autolink the contract at deploy time. + Linker.autolink(contract, deployer.known_contracts, deployer.logger); + + return self.deploy(contract, args, deployer)(); + } + }, + + deployAndLinkMany: function(arr, deployer) { + return function() { + // Perform all autolinking before deployment. + arr.forEach(function(args) { + var contract; + + if (Array.isArray(args)) { + contract = args[0]; + } else { + contract = args; + } + + // Autolink the contract at deploy time. + Linker.autolink(contract, deployer.known_contracts, deployer.logger); + }); + + var deployments = arr.map(function(args) { + var contract; + + if (Array.isArray(args)) { + contract = args.shift(); + } else { + contract = args; + args = []; + } + + return Actions.deploy(contract, args, deployer)(); + }); + + return Promise.all(deployments); + }; + }, + + deploy: function(contract, args, deployer) { + return function() { + var prefix = "Deploying "; + if (contract.address != null) { + prefix = "Replacing "; + } + + deployer.logger.log(prefix + contract.contract_name + "..."); + + // Evaluate any arguments if they're promises + return Promise.all(args).then(function(new_args) { + return contract.new.apply(contract, new_args); + }).then(function(instance) { + deployer.logger.log(contract.contract_name + ": " + instance.address); + contract.address = instance.address; + }); + }; + }, + + autolink: function(contract, deployer) { + return function() { + Linker.autolink(contract, deployer.known_contracts, deployer.logger); + }; + }, + + link: function(library, destinations, deployer) { + return function() { + Linker.link(library, destinations, deployer.logger); + }; + }, + + new: function(contract, args, deployer) { + return function() { + self.logger.log("Creating new instance of " + contract.contract_name); + // Evaluate any arguments if they're promises + return Promise.all(args).then(function(new_args) { + return contract.new.apply(contract, args) + }); + }; + }, + + exec: function(file, deployer) { + return function() { + if (path.isAbsolute(file) == false) { + file = path.resolve(path.join(deployer.basePath, file)); + } + + deployer.logger.log("Running " + file + "..."); + // Evaluate any arguments if they're promises + return new Promise(function(accept, reject) { + Require.exec({ + file: file, + contracts: Object.keys(deployer.known_contracts).map(function(key) { + return deployer.known_contracts[key]; + }), + network: deployer.network, + network_id: deployer.network_id, + provider: deployer.provider + }, function(err) { + if (err) return reject(err); + accept(); + }); + }); + }; + } +}; + +function Deployer(options) { + var self = this; + options = options || {}; + + expect.options(options, [ + "provider", + "network", + "network_id" + ]); + + this.chain = new DeferredChain(); + this.logger = options.logger || console; + if (options.quiet) { + this.logger = {log: function() {}}; + } + this.known_contracts = {}; + (options.contracts || []).forEach(function(contract) { + self.known_contracts[contract.contract_name] = contract; + }); + this.network = options.network; + this.network_id = options.network_id; + this.provider = options.provider; + this.basePath = options.basePath || 
process.cwd(); +}; + +// Note: In all code below we overwrite this.chain every time .then() is used +// in order to ensure proper error processing. + +Deployer.prototype.start = function() { + return this.chain.start(); +}; + +Deployer.prototype.autolink = function(contract) { + var self = this; + + // autolink all contracts available. + if (contract == null) { + Object.keys(this.known_contracts).forEach(function(contract_name) { + self.autolink(self.known_contracts[contract_name]); + }); + return; + } + + this.queueOrExec(Actions.autolink(contract, self)); +}; + +Deployer.prototype.link = function(library, destinations) { + return this.queueOrExec(Actions.link(library, destinations, this)); +}; + +Deployer.prototype.deploy = function() { + var args = Array.prototype.slice.call(arguments); + var contract = args.shift(); + + if (Array.isArray(contract)) { + return this.queueOrExec(Actions.deployAndLinkMany(contract, this)); + } else { + return this.queueOrExec(Actions.deployAndLink(contract, args, this)); + } +}; + +Deployer.prototype.new = function() { + var args = Array.prototype.slice.call(arguments); + var contract = args.shift(); + + return this.queueOrExec(Actions.new(contract, args, this)); +}; + +Deployer.prototype.exec = function(file) { + return this.queueOrExec(Actions.exec(file, this)); +}; + +Deployer.prototype.then = function(fn) { + var self = this; + + return this.queueOrExec(function() { + self.logger.log("Running step..."); + return fn(); + }); +}; + +Deployer.prototype.queueOrExec = function(fn) { + var self = this; + + if (this.chain.started == true) { + return new Promise(function(accept, reject) { + accept(); + }).then(fn); + } else { + return this.chain.then(fn); + } +}; + +module.exports = Deployer; diff --git a/lib/errors/taskerror.js b/lib/errors/taskerror.js new file mode 100644 index 00000000000..d2b85ec99c5 --- /dev/null +++ b/lib/errors/taskerror.js @@ -0,0 +1,10 @@ +var ExtendableError = require("./extendableerror"); +var inherits = require("util").inherits; + +inherits(TaskError, ExtendableError); + +function TaskError(message) { + TaskError.super_.call(this, message); +}; + +module.exports = TaskError; diff --git a/lib/exec.js b/lib/exec.js deleted file mode 100644 index f65786bb03e..00000000000 --- a/lib/exec.js +++ /dev/null @@ -1,90 +0,0 @@ -var fs = require("fs"); -var m = require("module"); -var path = require("path"); -var vm = require("vm"); - -var Pudding = require("ether-pudding"); -var PuddingLoader = require("ether-pudding/loader"); - -var _ = require("lodash"); - -var Exec = { - file: function(config, file, done) { - var self = this; - - if (path.isAbsolute(file) == false) { - file = path.join(config.working_dir, file); - } - - config.web3.eth.getAccounts(function(error, accounts) { - if (error) { - done(error); - return; - } - - Pudding.setWeb3(config.web3); - - Pudding.defaults({ - from: accounts[0], - gas: 3141592 - }); - - var sandbox = {}; - - var old_cwd = process.cwd(); - var old_dirname = __dirname; - - // Change current working directory to that of the project. 
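// Illustrative aside (not part of the patch): the Deployer above queues every
// action on its DeferredChain until start() is called; once started,
// queueOrExec() runs any further actions immediately. A minimal usage sketch,
// assuming `provider` is an initialized web3 provider and that ConvertLib and
// MetaCoin are already-provisioned Pudding contract abstractions:

var Deployer = require("./lib/deployer");

var deployer = new Deployer({
  provider: provider,
  network: "default",
  network_id: "default",
  contracts: [ConvertLib, MetaCoin]
});

deployer.deploy(ConvertLib);          // queued
deployer.link(ConvertLib, MetaCoin);  // queued; deploy() would also autolink
deployer.deploy(MetaCoin);            // queued

deployer.start().then(function() {
  console.log("MetaCoin deployed at " + MetaCoin.address);
});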
- process.chdir(config.working_dir); - __dirname = process.cwd(); - - var script_over = function(err) { - process.chdir(old_cwd); - __dirname = old_dirname; - done(err); - }; - - var new_process = _.merge({}, process); - new_process.exit = function(exit_code) { - if (exit_code != null && exit_code != 0) { - script_over(new Error("Script " + file + " exited with non-zero exit code: " + exit_code)); - } else { - script_over(); - } - }; - - // Create a sandbox that looks just like the global scope. - sandbox = _.merge(sandbox, global, { - web3: config.web3, - Pudding: Pudding, - process: new_process, - require: function(name) { - if (name.indexOf("./") == 0 || name.indexOf("../") == 0) { - return require(config.working_dir + name); - } else if (fs.existsSync(config.working_dir + "node_modules/" + name)) { - return require(config.working_dir + "node_modules/" + name) - } else { - return require(name); - } - }, - module: m, - __filename: file, - __dirname: __dirname - }); - - PuddingLoader.load(config.environments.current.directory, Pudding, sandbox, function(err) { - if (err != null) { - done(err); - return; - } - - var context = vm.createContext(sandbox); - var code = fs.readFileSync(file); - var script = new vm.Script(code, { filename: file}); - script.runInContext(context); - }); - }); - } -} - -module.exports = Exec diff --git a/lib/expect.js b/lib/expect.js new file mode 100644 index 00000000000..589ccc27675 --- /dev/null +++ b/lib/expect.js @@ -0,0 +1,11 @@ +var Expect = { + options: function(options, expected_keys) { + expected_keys.forEach(function(key) { + if (options[key] == null) { + throw new Error("Expected parameter '" + key + "' not passed to function."); + } + }); + } +} + +module.exports = Expect; diff --git a/lib/init.js b/lib/init.js index d0caa57dd9e..7deff13d7d1 100644 --- a/lib/init.js +++ b/lib/init.js @@ -1,10 +1,25 @@ var copy = require("./copy"); -var File = require("./file"); +var path = require("path"); +var temp = require("temp").track(); +var Config = require("./config"); -var Init = { - all: function(config, callback) { - copy(config.example.directory, config.working_dir, callback); - } +var Init = function(destination, callback) { + var example_directory = path.resolve(path.join(__dirname, "..", "example")); + copy(example_directory, destination, callback); } -module.exports = Init +Init.sandbox = function(callback) { + var self = this; + temp.mkdir("truffle-sandbox-", function(err, dirPath) { + if (err) return callback(err); + + Init(dirPath, function(err) { + if (err) return callback(err); + + var config = Config.load(path.join(dirPath, "truffle.js")); + callback(null, config); + }); + }); +}; + +module.exports = Init; diff --git a/lib/linker.js b/lib/linker.js new file mode 100644 index 00000000000..8faeacd874f --- /dev/null +++ b/lib/linker.js @@ -0,0 +1,67 @@ +module.exports = { + link: function(library, destinations, logger) { + var self = this; + + logger = logger || console; + + if (!Array.isArray(destinations)) { + destinations = [destinations]; + } + + var regex = new RegExp("__" + library.contract_name + "_*", "g"); + + if (library.contract_name == null) { + throw new Error("Cannot link a library with no name."); + } + + if (library.address == null) { + throw new Error("Cannot link library: " + library.contract_name + " has no address."); + } + + destinations.forEach(function(destination) { + logger.log("Linking " + library.contract_name + " to " + destination.contract_name); + destination.binary = destination.unlinked_binary.replace(regex, 
library.address.replace("0x", "")); + }); + }, + + autolink: function(contract, available_contracts, logger) { + // Abstract contract passed. + if (contract.binary == null) return; + + var self = this; + var regex = /__[^_]+_+/g; + + logger = logger || console; + + var unlinked_libraries = contract.unlinked_binary.match(regex); + + // Nothing to link. + if (unlinked_libraries == null) return; + + if (unlinked_libraries.length == 0) { + throw new Error("Cannot auto link " + contract.contract_name + "; " + contract.contract_name + " has no library dependencies.") + } + + unlinked_libraries = unlinked_libraries.map(function(name) { + // Remove underscores + return name.replace(/_/g, ""); + }).sort().filter(function(name, index, arr) { + // Remove duplicates + if (index + 1 >= arr.length) { + return true; + } + + return name != arr[index + 1]; + }); + + unlinked_libraries.forEach(function(name) { + var library = available_contracts[name]; + + if (library == null) { + throw new Error("Cannot auto link " + contract.contract_name + "; " + contract.contract_name + " unknown dependency " + name + ".") + } + + self.link(library, contract, logger); + }); + } +}; diff --git a/lib/migrate.js b/lib/migrate.js new file mode 100644 index 00000000000..be5b48df7ee --- /dev/null +++ b/lib/migrate.js @@ -0,0 +1,216 @@ + + +var fs = require("fs"); +var dir = require("node-dir"); +var path = require("path"); +var Contracts = require("./contracts"); +var Pudding = require("ether-pudding"); +var Deployer = require("./deployer"); +var Profiler = require("./profiler"); +var Provider = require("./provider"); +var Require = require("./require"); +var async = require("async"); +var Web3 = require("web3"); +var expect = require("./expect"); + +function Migration(file) { + this.file = path.resolve(file); + this.number = parseInt(path.basename(file)); +}; + +Migration.prototype.run = function(options, contracts, callback) { + var self = this; + var logger = options.logger || console; + + if (options.quiet) { + logger = { + log: function() {} + } + }; + + var web3 = new Web3(); + web3.setProvider(options.provider); + + logger.log("Running migration: " + path.relative(options.migrations_directory, this.file)); + + // Initial context. + var context = { + web3: web3 + }; + + // Add contracts to context and prepare contracts. + contracts.forEach(function(contract) { + context[contract.contract_name] = contract; + + // During migrations, we could be on a network that takes a long time to accept + // transactions (i.e., contract deployment close to block size). Because successful + // migration is more important than wait time in those cases, we'll synchronize "forever". 
+ contract.synchronization_timeout = 0; + }); + + var deployer = new Deployer({ + logger: { + log: function(msg) { + logger.log(" " + msg); + } + }, + contracts: contracts, + network: options.network, + network_id: options.network_id, + provider: options.provider, + basePath: path.dirname(this.file) + }); + + var finish = function(err) { + if (err) return callback(err); + deployer.start().then(function() { + if (options.save === false) return; + logger.log("Saving successful migration to network..."); + var Migrations = context["Migrations"]; + if (Migrations && Migrations.address) { + return Migrations.deployed().setCompleted(self.number); + } + }).then(function() { + if (options.save === false) return; + logger.log("Saving artifacts..."); + return Pudding.saveAll(contracts, options.contracts_build_directory, options); + }).then(function() { + callback(); + }).catch(function(e) { + logger.log("Error encountered, bailing. Network state unknown. Review successful transactions manually."); + callback(e); + }); + }; + + Require.file({ + file: self.file, + context: context, + args: [deployer] + }, function(err, fn) { + if (!fn || !fn.length || fn.length == 0) { + return callback(new Error("Migration " + self.file + " invalid or does not take any parameters")); + } + if (fn.length == 1 || fn.length == 2) { + fn(deployer, options.network); + finish(); + } else if (fn.length == 3) { + fn(deployer, options.network, finish); + } + }); +}; + +var Migrate = { + Migration: Migration, + + assemble: function(options, callback) { + dir.files(options.migrations_directory, function(err, files) { + if (err) return callback(err); + + var migrations = files.map(function(file) { + return new Migration(file, options.network); + }); + + // Make sure to sort the prefixes as numbers and not strings. + migrations = migrations.sort(function(a, b) { + if (a.number > b.number) { + return 1; + } else if (a.number < b.number) { + return -1; + } + return 0; + }); + + callback(null, migrations); + }); + }, + + run: function(options, callback) { + var self = this; + + expect.options(options, [ + "migrations_directory", + "contracts_build_directory", + "provider", + "network", + "network_id" + ]); + + if (options.reset == true) { + return this.runAll(options, callback); + } + + self.lastCompletedMigration(options, function(err, last_migration) { + if (err) return callback(err); + + // Don't rerun the last completed migration. 
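// Illustrative aside (not part of the patch): Migration.run() above loads each
// numbered file with Require.file() and calls its export with (deployer, network)
// or (deployer, network, callback) depending on arity; the numeric filename
// prefix is what Migration() parses for ordering and for setCompleted(). A
// hedged sketch of such a file, e.g. migrations/2_deploy_contracts.js
// (the contract names are assumptions, injected into the file's context):

module.exports = function(deployer, network) {
  deployer.deploy(ConvertLib);
  deployer.link(ConvertLib, MetaCoin);
  deployer.deploy(MetaCoin);

  if (network != "default") {
    // Network-specific steps could be queued here.
  }
};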
+ self.runFrom(last_migration + 1, options, callback); + }); + }, + + runFrom: function(number, options, callback) { + var self = this; + this.assemble(options, function(err, migrations) { + if (err) return callback(err); + + while (migrations.length > 0) { + if (migrations[0].number >= number) { + break; + } + + migrations.shift(); + } + + self.runMigrations(migrations, options, callback); + }); + }, + + runAll: function(options, callback) { + var self = this; + this.assemble(options, function(err, migrations) { + if (err) return callback(err); + + self.runMigrations(migrations, options, callback); + }); + }, + + runMigrations: function(migrations, options, callback) { + Contracts.provision(options, function(err, contracts) { + if (err) return callback(err); + + if (options.reset == true) { + contracts.forEach(function(contract) { + contract.address = null; + }) + } + + async.eachSeries(migrations, function(migration, finished) { + migration.run(options, contracts, function(err) { + if (err) return finished(err); + finished(); + }); + }, callback); + }); + }, + + lastCompletedMigration: function(options, callback) { + var migrations_contract = path.resolve(path.join(options.contracts_build_directory, "Migrations.sol.js")); + + Pudding.requireFile(migrations_contract, options, function(err, Migrations) { + if (err) return callback(new Error("Could not find built Migrations contract.")); + + if (Migrations.address == null) { + return callback(null, 0); + } + + Migrations.setProvider(options.provider); + + var migrations = Migrations.deployed(); + + migrations.last_completed_migration.call().then(function(completed_migration) { + callback(null, completed_migration.toNumber()); + }).catch(callback); + }); + } +}; + +module.exports = Migrate; diff --git a/lib/profiler.js b/lib/profiler.js new file mode 100644 index 00000000000..bb7f39384d6 --- /dev/null +++ b/lib/profiler.js @@ -0,0 +1,245 @@ +// Compares .sol files to their .sol.js counterparts, +// determines which .sol files have been updated. +var dir = require("node-dir"); +var path = require("path"); +var async = require("async"); +var fs = require("fs"); +var Pudding = require("ether-pudding"); +var SolidityParser = require("solidity-parser"); +var Graph = require("graphlib").Graph; +var isAcyclic = require("graphlib/lib/alg").isAcyclic; +var postOrder = require("graphlib/lib/alg").postorder; + +module.exports = { + all_contracts: function(directory, callback) { + dir.files(directory, function(err, files) { + if (err) return callback(err); + + files = files.filter(function(file) { + // Ignore any files that aren't solidity files. + return path.extname(file) == ".sol" && path.basename(file)[0] != "."; + }); + + callback(null, files); + }); + }, + + updated: function(options, callback) { + var contracts_directory = options.contracts_directory; + var build_directory = options.contracts_build_directory; + + this.all_contracts(contracts_directory, function(err, files) { + var expected_build_files = files.map(function(file) { + return path.join(build_directory, path.dirname(path.relative(contracts_directory, file)), path.basename(file) + ".js"); + }); + + async.map(files, fs.stat, function(err, file_stats) { + if (err) return callback(err); + + async.map(expected_build_files, function(build_file, finished) { + Pudding.requireFile(build_file, options, function(err, contract) { + // Ignore errors, i.e., if the file doesn't exist. 
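// Illustrative aside (not part of the patch): a hedged sketch of driving
// Migrate.run() directly. Option names mirror the expect.options() list above;
// the directories and provider are placeholders for whatever the caller has
// configured.

var Migrate = require("./lib/migrate");

Migrate.run({
  migrations_directory: "./migrations",
  contracts_build_directory: buildDirectory,  // placeholder path
  provider: provider,                         // placeholder web3 provider
  network: "default",
  network_id: "default",
  reset: false  // true would rerun every migration instead of resuming
}, function(err) {
  if (err) return console.error(err);
  console.log("Migrations complete.");
});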
+ finished(null, contract); + }); + }, function(err, contracts) { + if (err) return callback(err); + + var updated = []; + + for (var i = 0; i < contracts.length; i++) { + var file_stat = file_stats[i]; + var contract = contracts[i]; + + if (contract == null) { + updated.push(files[i]); + continue; + } + + var modified_time = (file_stat.mtime || file_stat.ctime).getTime(); + + // Note that the network is already set for is in Pudding.requireFile(). + var built_time = contract.updated_at || 0; + + if (modified_time > built_time) { + updated.push(files[i]); + } + } + + callback(null, updated); + }); + }); + }); + }, + + deployed_networks: function(options, callback) { + Pudding.requireAll(options.contracts_build_directory, function(err, contracts) { + if (err) return callback(err); + + var ids_to_names = {}; + var networks = {}; + + Object.keys(options.networks).forEach(function(network_name) { + var network = options.networks[network_name]; + + // Ignore the test network that's configured by default. + if (network_name == "test" && network.network_id == null) { + return; + } + + var network_id = network.network_id || "default"; + ids_to_names[network_id] = network_name; + networks[network_name] = {}; + }); + + contracts.forEach(function(contract) { + Object.keys(contract.all_networks).forEach(function(network_id) { + var network_name = ids_to_names[network_id] || network_id; + + if (networks[network_name] == null) { + networks[network_name] = {}; + } + + var address = contract.all_networks[network_id].address; + + if (address == null) return; + + networks[network_name][contract.contract_name] = address; + }); + }); + + callback(null, networks); + }); + }, + + imports: function(file, callback) { + fs.readFile(file, "utf8", function(err, body) { + if (err) callback(err); + + //console.log("Parsing " + path.basename(file) + "..."); + + var imports = SolidityParser.parse(body, "imports"); + + var dirname = path.dirname(file); + imports = imports.map(function(i) { + return path.resolve(path.join(dirname, i)); + }); + + callback(null, imports); + }); + }, + + required_files: function(files, callback) { + // Ensure full paths. + files = files.map(function(file) { + return path.resolve(file); + }); + + this.dependency_graph(files, function(err, dependsGraph) { + if (err) return callback(err); + + function is_updated(contract_name) { + return updated[contract_name] === true; + } + + var required = {}; + + function include(file) { + required[file] = true; + } + + function walk_down(file) { + if (required[file] === true) { + return; + } + + include(file); + + var dependencies = dependsGraph.successors(file); + + // console.log("At: " + contract_name); + // console.log(" Dependencies: ", dependencies); + + if (dependencies.length > 0) { + dependencies.forEach(walk_down); + } + } + + function walk_from(file) { + var ancestors = dependsGraph.predecessors(file); + var dependencies = dependsGraph.successors(file); + + // console.log("At: " + contract_name); + // console.log(" Ancestors: ", ancestors); + // console.log(" Dependencies: ", dependencies); + + include(file); + + if (ancestors.length > 0) { + ancestors.forEach(walk_from); + } + + if (dependencies.length > 0) { + dependencies.forEach(walk_down); + } + } + + files.forEach(walk_from); + + callback(null, Object.keys(required)); + }); + }, + + dependency_graph: function(files, callback) { + var self = this; + + // Ensure full paths. 
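// Illustrative aside (not part of the patch): required_files() above widens the
// set of changed sources through the import graph -- successors() walks down to
// everything a file imports, predecessors() walks up to everything that imports
// it. A minimal graphlib sketch with made-up file names:

var Graph = require("graphlib").Graph;

var g = new Graph();
g.setNode("A.sol");
g.setNode("B.sol");
g.setEdge("A.sol", "B.sol");           // A.sol imports B.sol

console.log(g.successors("A.sol"));    // [ 'B.sol' ]  -- A's dependency
console.log(g.predecessors("B.sol"));  // [ 'A.sol' ]  -- the file importing B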
+ files = files.map(function(file) { + return path.resolve(file); + }); + + // Iterate through all the contracts looking for libraries and building a dependency graph + var dependsGraph = new Graph(); + + var imports_cache = {}; + + function getImports(file, callback) { + if (imports_cache[file] != null) { + callback(null, imports_cache[file]); + } else { + self.imports(file, function(err, imports) { + if (err) return callback(err); + imports_cache[file] = imports; + callback(null, imports); + }); + } + }; + + async.each(files, function(file, finished) { + // Add the contract to the depend graph. + dependsGraph.setNode(file); + + getImports(file, function(err, imports) { + if (err) return callback(err); + + imports.forEach(function(import_path) { + if (!dependsGraph.hasEdge(file, import_path)) { + dependsGraph.setEdge(file, import_path); + } + }); + + finished(); + }) + }, + function() { + // Check for cycles in the graph, the dependency graph needs to be a tree otherwise there's an error + if (!isAcyclic(dependsGraph)) { + var errorMessage = "Found cyclic dependencies. Adjust your import statements to remove cycles.\n\n"; + dependsGraph.edges().forEach(function(o){ + errorMessage += o.v + " -- depends on --> " + o.w + "\n"; + }); + return callback(new CompileError(errorMessage)); + } + callback(null, dependsGraph) + }); + }, +}; diff --git a/lib/provider.js b/lib/provider.js new file mode 100644 index 00000000000..e9aa8a12f36 --- /dev/null +++ b/lib/provider.js @@ -0,0 +1,50 @@ +var Web3 = require("web3"); + +module.exports = { + wrap: function(provider, options) { + options = options || {}; + + if (options.verbose || options.verboseRpc) { + this.makeVerbose(provider, options.logger); + } + return provider; + }, + + makeVerbose: function(provider, logger) { + logger = logger || console; + + // // If you want to see what web3 is sending and receiving. + var oldAsync = provider.sendAsync; + + if (oldAsync.is_verbose) return; + + provider.sendAsync = function(options, callback) { + logger.log(" > " + JSON.stringify(options, null, 2).split("\n").join("\n > ")); + oldAsync.call(provider, options, function(error, result) { + if (error == null) { + logger.log(" < " + JSON.stringify(result, null, 2).split("\n").join("\n < ")); + } + callback(error, result) + }); + }; + + provider.sendAsync.is_verbose = true; + }, + + create: function(options) { + var provider = new Web3.providers.HttpProvider("http://" + options.host + ":" + options.port); + return this.wrap(provider, options); + }, + + test_connection: function(provider, callback) { + var web3 = new Web3(); + web3.setProvider(provider); + web3.eth.getCoinbase(function(error, coinbase) { + if (error != null) { + error = new Error("Could not connect to your RPC client. 
Please check your RPC configuration."); + } + + callback(error, coinbase) + }); + } +}; diff --git a/lib/repl.js b/lib/repl.js index ed11ea92a0b..942af18154a 100644 --- a/lib/repl.js +++ b/lib/repl.js @@ -1,24 +1,122 @@ var repl = require("repl"); +var Command = require("./command"); +var Contracts = require("./contracts"); +var Web3 = require("web3"); +var vm = require("vm"); +var expect = require("./expect"); +var _ = require("lodash"); -global.Pudding = require("ether-pudding"); -var PuddingLoader = require("ether-pudding/loader"); +function TruffleInterpreter(tasks, options) { + this.options = options; + this.contracts = []; + this.r = null; + this.command = new Command(tasks); +}; -var Repl = { - run: function(config, done) { - Pudding.setWeb3(config.web3); - global.web3 = config.web3; - PuddingLoader.load(config.environments.current.directory, Pudding, global, function() { - try { - var r = repl.start("truffle(" + config.environment + ")> "); - r.on("exit", function() { - process.exit(1); - }); - } catch(e) { - console.log(e.stack); +TruffleInterpreter.prototype.start = function() { + var self = this; + var options = this.options; + + var web3 = new Web3(); + web3.setProvider(options.provider); + + this.provision(function(err) { + if (err) return done(err); + + var prefix = "truffle(default)> "; + + if (options.network != null && options.network != "default") { + prefix = "truffle(" + options.network + ")> "; + } + + try { + self.r = repl.start({ + prompt: prefix, + eval: self.interpret.bind(self) + }); + + self.r.on("exit", function() { process.exit(1); - } + }); + + self.resetContracts(); + self.r.context.web3 = web3; + + } catch(e) { + console.log(e.stack); + process.exit(1); + } + }); +}; + +TruffleInterpreter.prototype.provision = function(callback) { + var self = this; + + Contracts.provision(this.options, function(err, contracts) { + if (err) return callback(err); + + self.contracts = contracts; + self.resetContracts(); + + callback(); + }); +}; + +TruffleInterpreter.prototype.resetContracts = function() { + var self = this; + + if (this.r != null) { + this.contracts.forEach(function(contract) { + self.r.context[contract.contract_name] = contract; }); } } +TruffleInterpreter.prototype.interpret = function(cmd, context, filename, callback) { + var self = this; + + if (this.command.getTask(cmd.trim()) != null) { + return this.command.run(cmd.trim(), this.options, function(err) { + if (err) return callback(err); + + // Reprovision after each command is it may change contracts. 
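// Illustrative aside (not part of the patch): Provider.wrap() above decorates a
// web3 provider so that every JSON-RPC request and response is logged when the
// verbose/verboseRpc option is set. A hedged sketch (host and port are
// placeholders):

var Provider = require("./lib/provider");

var provider = Provider.create({
  host: "localhost",
  port: 8545,
  verboseRpc: true   // wraps sendAsync() with the logging shim shown above
});

Provider.test_connection(provider, function(err, coinbase) {
  if (err) return console.error(err.message);
  console.log("Connected; coinbase is " + coinbase);
});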
+ self.provision(callback); + }); + } + + var result; + try { + result = vm.runInContext(cmd, context, { + displayErrors: false + }); + } catch (e) { + return callback(e); + } + callback(null, result); +} + +var Repl = { + TruffleInterpreter: TruffleInterpreter, + + run: function(tasks, options) { + var self = this; + + expect.options(options, [ + "working_directory", + "contracts_directory", + "contracts_build_directory", + "migrations_directory", + "network", + "network_id", + "provider", + "builder", + "build_directory", + "rpc" + ]); + + var interpreter = new TruffleInterpreter(tasks, options); + interpreter.start(); + } +} + module.exports = Repl; diff --git a/lib/require-nocache.js b/lib/require-nocache.js new file mode 100644 index 00000000000..04c5cfc7300 --- /dev/null +++ b/lib/require-nocache.js @@ -0,0 +1,6 @@ +var path = require("path"); + +module.exports = function(filePath) { + delete require.cache[path.resolve(filePath)]; + return require(filePath); +}; diff --git a/lib/require.js b/lib/require.js new file mode 100644 index 00000000000..3ad691e7ccb --- /dev/null +++ b/lib/require.js @@ -0,0 +1,134 @@ +var fs = require("fs"); +var path = require("path"); +var Module = require('module'); +var vm = require('vm'); +var expect = require("./expect"); +var Contracts = require("./contracts"); +var Web3 = require("web3"); + +// options.file: path to file to execute. Must be a module that exports a function. +// options.args: arguments passed to the exported function within file. If a callback +// is not included in args, exported function is treated as synchronous. +// options.context: Object containing any global variables you'd like set when this +// function is run. +var Require = { + file: function(options, done) { + var self = this; + var file = options.file; + + fs.readFile(options.file, {encoding: "utf8"}, function(err, source) { + if (err) return done(err); + + // Modified from here: https://gist.github.com/anatoliychakkaev/1599423 + var m = new Module(file); + + // Provide all the globals listed here: https://nodejs.org/api/globals.html + var context = { + Buffer: Buffer, + __dirname: path.dirname(file), + __filename: file, + clearImmediate: clearImmediate, + clearInterval: clearInterval, + clearTimeout: clearTimeout, + console: console, + exports: exports, + global: global, + module: m, + process: process, + require: function(pkgPath) { + // Ugh. Simulate a full require function for the file. + pkgPath = pkgPath.trim(); + + // If absolute, just require. + if (path.isAbsolute(pkgPath)) { + return require(pkgPath); + } + + // If relative, it's relative to the file. + if (pkgPath[0] == ".") { + return require(path.join(path.dirname(file), pkgPath)); + } else { + // Not absolute, not relative, must be a locally installed modules. + // Here we have to require from the node_modules directory directly. + var moduleDir = path.join(path.dirname(file), "node_modules"); + try { + return require(path.join(moduleDir, pkgPath)); + } catch (e) { + // Shave off path we added so the error message looks like normal. + e.message = e.message.replace(moduleDir + "/", ""); + throw e; + } + } + }, + setImmediate: setImmediate, + setInterval: setInterval, + setTimeout: setTimeout, + }; + + // Now add contract names. 
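// Illustrative aside (not part of the patch): Require.exec() below loads a
// script with Require.file() and invokes its export with a single callback,
// after placing web3 and every provisioned contract abstraction into the
// script's context. A hedged sketch of such a script (the MetaCoin name is an
// assumption), as it might be run via `truffle exec`:

// e.g. scripts/show_address.js
module.exports = function(done) {
  // `MetaCoin` and `web3` come from the injected context, not from require().
  console.log("MetaCoin is deployed at " + MetaCoin.address);
  done();
};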
+ Object.keys(options.context || {}).forEach(function(key) { + context[key] = options.context[key]; + }); + + var old_cwd = process.cwd(); + + process.chdir(path.dirname(file)); + + var script = vm.createScript(source, file); + script.runInNewContext(context); + + process.chdir(old_cwd); + + done(null, m.exports); + }); + }, + + exec: function(options, done) { + var self = this; + + expect.options(options, [ + "file", + "provider", + "network", + "network_id" + ]); + + var provision = function(callback) { + if (options.contracts != null) { + callback(null, options.contracts); + } else { + Contracts.provision({ + contracts_build_directory: options.contracts_build_directory, + provider: options.provider, + network: options.network, + network_id: options.network_id + }, callback); + } + }; + + provision(function(err, contracts) { + if (err) return done(err); + + var web3 = new Web3(); + web3.setProvider(options.provider); + + var context = { + web3: web3 + }; + + contracts.forEach(function(contract) { + context[contract.contract_name] = contract; + }); + + self.file({ + file: options.file, + context: context + }, function(err, fn) { + if (err) return done(err); + fn(done); + }); + }); + } +}; + +module.exports = Require; diff --git a/lib/serve.js b/lib/serve.js index 3ea30c7a612..50f74b2f76f 100644 --- a/lib/serve.js +++ b/lib/serve.js @@ -3,14 +3,16 @@ var finalhandler = require('finalhandler'); var serveStatic = require('serve-static'); var Serve = { - start: function(config, port, done) { - var serve = serveStatic(config.build.directory); + start: function(options, done) { + var serve = serveStatic(options.build_directory); var server = http.createServer(function(req, res) { var done = finalhandler(req, res); serve(req, res, done); }); + var port = options.port || options.p || 8080; + server.listen(port); console.log("Serving app on port " + port + "..."); done(); diff --git a/lib/tasks.js b/lib/tasks.js new file mode 100644 index 00000000000..4d22ad99605 --- /dev/null +++ b/lib/tasks.js @@ -0,0 +1,316 @@ +var Truffle = require('../index.js'); +var path = require("path"); +var dir = require("node-dir"); +var temp = require("temp").track(); +var fs = require("fs"); +var copy = require("./copy"); +var chokidar = require("chokidar"); +var colors = require("colors"); + +var Tasks = {}; + +function createTask(name, description, fn) { + Tasks[name] = function(options, done) { + if (typeof options == "function") { + done = options; + options = {}; + } + + options.logger = options.logger || console; + + fn(options, done); + }; + Tasks[name].description = description; + Tasks[name].task_name = name; +}; + +createTask('list', "List all available tasks", function(options, done) { + options.logger.log("Truffle v" + Truffle.version + " - a development framework for Ethereum"); + options.logger.log(""); + options.logger.log("Usage: truffle [command] [options]"); + options.logger.log(""); + options.logger.log("Commands:"); + options.logger.log(""); + + var sorted = Object.keys(Tasks).sort(); + + var longestTask = sorted.reduce(function(a, b) { + var first = typeof a == "string" ? 
a.length : a; + return Math.max(first, b.length); + }); + + for (var i = 0; i < sorted.length; i++) { + var task = Tasks[sorted[i]]; + var heading = task.task_name; + while (heading.length < longestTask) { + heading += " "; + } + options.logger.log(" " + heading + " => " + task.description) + } + + options.logger.log(""); + done(); +}); + +createTask('version', "Show version number and exit", function(options, done) { + options.logger.log("Truffle v" + Truffle.version); + done(); +}); + +createTask('init', "Initialize new Ethereum project, including example contracts and tests", function(options, done) { + var config = Truffle.config.default(); + Truffle.init(config.working_directory, done); +}); + +createTask('create:contract', "Create a basic contract", function(options, done) { + var config = Truffle.config.detect(options); + + var name = options.name; + + if (name == null && options._.length > 0) { + name = options._[0]; + } + + if (name == null) { + return done(new ConfigurationError("Please specify a name. Example: truffle create:contract MyContract")); + } else { + Truffle.create.contract(config.contracts_directory, name, done); + } +}); + +createTask('create:test', "Create a basic test", function(options, done) { + var config = Truffle.config.detect(options); + + var name = options.name; + + if (name == null && options._.length > 0) { + name = options._[0]; + } + + if (name == null) { + return done(new ConfigurationError("Please specify a name. Example: truffle create:test MyTest")); + } else { + Truffle.create.test(config.test_directory, name, done); + } +}); + +createTask('compile', "Compile contracts", function(options, done) { + var config = Truffle.config.detect(options); + Truffle.contracts.compile(config, done); +}); + +createTask('build', "Build development version of app", function(options, done) { + var config = Truffle.config.detect(options); + Truffle.build.build(config.with({ + builder: config.build, + processors: config.processors, // legacy option for default builder + rpc: config.getRPCConfig() + }), done); +}); + +createTask('migrate', "Run migrations", function(options, done) { + var config = Truffle.config.detect(options); + + Truffle.contracts.compile(config, function(err) { + if (err) return done(err); + Truffle.migrate.run(config, done); + }); +}); + +createTask('exec', "Execute a JS file within truffle environment", function(options, done) { + var config = Truffle.config.detect(options); + + var file = options.file; + + if (file == null && options._.length > 0) { + file = options._[0]; + } + + if (file == null) { + options.logger.log("Please specify a file, passing the path of the script you'd like the run. Note that all scripts *must* call process.exit() when finished."); + done(); + return; + } + + if (path.isAbsolute(file) == false) { + file = path.join(process.cwd(), file); + } + + Truffle.require.exec(config.with({ + file: file + }), done); +}); + +// Supported options: +// --no-color: Disable color +// More to come. 
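// Illustrative aside (not part of the patch): each task above is registered via
// createTask(name, description, fn) and receives (options, done), with
// options.logger defaulting to console. Registering an additional (purely
// hypothetical) task would follow the same shape:

createTask('hello', "Print the project directory (example only)", function(options, done) {
  var config = Truffle.config.detect(options);
  options.logger.log("Hello from " + config.working_directory);
  done();
});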
+createTask('test', "Run tests", function(options, done) { + var config = Truffle.config.detect(options); + config.network = "test"; + + var files = []; + + if (options.file) { + files = [options.file]; + } else if (options._.length > 0) { + Array.prototype.push.apply(files, options._); + } + + function getFiles(callback) { + if (files.length != 0) { + return callback(null, files); + } + + dir.files(config.test_directory, callback); + }; + + getFiles(function(err, files) { + files = files.filter(function(file) { + return file.match(config.test_file_extension_regexp) != null; + }); + + temp.mkdir('test-', function(err, temporaryDirectory) { + if (err) return done(err); + + function cleanup() { + var args = arguments; + // Ensure directory cleanup. + temp.cleanup(function(err) { + // Ignore cleanup errors. + done.apply(null, args); + }); + }; + + function run() { + Truffle.test.run(config.with({ + test_files: files, + contracts_build_directory: temporaryDirectory, + }), cleanup); + }; + + // Copy all the built files over to a temporary directory, because we + // don't want to save any tests artifacts. Only do this if the build directory + // exists. + fs.stat(config.contracts_build_directory, function(err, stat) { + if (err) return run(); + + copy(config.contracts_build_directory, temporaryDirectory, function(err) { + if (err) return done(err); + run(); + }); + }); + }); + }); +}); + +createTask('console', "Run a console with deployed contracts instantiated and available (REPL)", function(options, done) { + var config = Truffle.config.detect(options); + + var available_tasks = Object.keys(Tasks).filter(function(task_name) { + return task_name != "console" && task_name != "init" && task_name != "watch" && task_name != "serve"; + }); + + var tasks = {}; + available_tasks.forEach(function(task_name) { + tasks[task_name] = Tasks[task_name]; + }); + + Truffle.console.run(tasks, config.with({ + builder: config.build, + processors: config.processors, // legacy option for default builder + }), done); +}); + +createTask('serve', "Serve app on localhost and rebuild changes as needed", function(options, done) { + var self = this; + var config = Truffle.config.detect(options); + Truffle.serve.start(config, function() { + Tasks.watch(options, done); + }); +}); + +createTask('networks', "Show addresses for deployed contracts on each network", function(options, done) { + var config = Truffle.config.detect(options); + + Truffle.profile.deployed_networks(config, function(err, networks) { + if (err) return callback(err); + + Object.keys(networks).sort().forEach(function(network_name) { + + options.logger.log("") + + var output = Object.keys(networks[network_name]).sort().map(function(contract_name) { + var address = networks[network_name][contract_name]; + return contract_name + ": " + address; + }); + + if (output.length == 0) { + output = ["No contracts deployed."]; + } + + options.logger.log("Network: " + network_name); + options.logger.log(" " + output.join("\n ")) + }); + + options.logger.log(""); + + done(); + }); +}); + +createTask('watch', "Watch filesystem for changes and rebuild the project automatically", function(options, done) { + var config = Truffle.config.detect(options); + + var printSuccess = function() { + options.logger.log(colors.green("Completed without errors on " + new Date().toString())); + }; + + var printFailure = function() { + options.logger.log(colors.red("Completed with errors on " + new Date().toString())); + }; + + var needs_rebuild = true; + + var watchPaths = [ + 
path.join(config.working_directory, "app/**/*"), + path.join(config.contracts_build_directory, "/**/*"), + path.join(config.contracts_directory, "/**/*"), + path.join(config.working_directory, "truffle.json"), + path.join(config.working_directory, "truffle.js") + ]; + + chokidar.watch(watchPaths, { + ignored: /[\/\\]\./, // Ignore files prefixed with "." + cwd: config.working_directory, + ignoreInitial: true + }).on('all', function(event, filePath) { + // On changed/added/deleted + var display_path = path.join("./", filePath.replace(config.working_directory, "")); + options.logger.log(colors.cyan(">> File " + display_path + " changed.")); + + needs_rebuild = true; + }); + + var check_rebuild = function() { + if (needs_rebuild == true) { + needs_rebuild = false; + options.logger.log("Rebuilding..."); + + Tasks.build(options, function(err) { + if (err) { + printFailure(); + } else { + printSuccess(); + } + done(err); + }); + } + + setTimeout(check_rebuild, 200); + }; + + check_rebuild(); +}); + + +module.exports = Tasks; diff --git a/lib/test.js b/lib/test.js index ea325492aa4..b8f79243fdf 100644 --- a/lib/test.js +++ b/lib/test.js @@ -3,70 +3,53 @@ var chai = require("chai"); var dir = require("node-dir"); var path = require("path"); var fs = require("fs"); - +var Web3 = require("web3"); var Contracts = require("./contracts"); - +var Migrate = require('./migrate'); var Pudding = require("ether-pudding"); -var PuddingLoader = require("ether-pudding/loader"); -var loadconf = require("./loadconf"); var Promise = require("bluebird"); - var ExtendableError = require("./errors/extendableerror"); - var SolidityCoder = require("web3/lib/solidity/coder.js"); +var expect = require("./expect"); chai.use(require("./assertions")); -var rpc = function(method, arg, cb) { - var req = { - jsonrpc: "2.0", - method: method, - id: new Date().getTime() - }; - if (arguments.length == 3) { - req.params = arg; - } else { - cb = arg; - } - - var intermediary = function(err, result) { - if (err != null) { - cb(err); - return; - } - - if (result.error != null) { - cb(new Error("RPC Error: " + (result.error.message || result.error))); - return; - } +var BEFORE_TIMEOUT = 120000; +var TEST_TIMEOUT = 300000; + +function TestRunner(options) { + this.options = options; + this.logger = options.logger || console; + this.provider = options.provider; + this.can_shapshot = false; + this.initial_snapshot = null; + this.known_events = {}; + this.web3 = new Web3(); + this.web3.setProvider(options.provider); + this.contracts = []; + + // For each test + this.currentTestStartBlock = null; +}; - cb(null, result); - }; +TestRunner.prototype.initialize = function(callback) { + var self = this; - web3.currentProvider.sendAsync(req, intermediary); -}; + var afterStateReset = function(err) { + if (err) return callback(err); -// Deploy all configured contracts to the chain without recompiling -var redeploy_contracts = function(config, recompile, done) { - Contracts.deploy(config, recompile, function(err) { - if (err != null) { - // Format our error messages so they print better with mocha. - if (err instanceof ExtendableError) { - err.formatForMocha(); - } + Contracts.provision(self.options, function(err, contracts) { + if (err) return callback(err); - done(err); - return; - } + self.contracts = contracts; + self.known_events = {}; - Pudding.setWeb3(config.web3); - PuddingLoader.load(config.environments.current.directory, Pudding, global, function(err, contract_names) { // Go through all abis and record events we know about. 
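// Illustrative aside (not part of the patch): the loop below maps each event's
// canonical signature to its log topic so that failed tests can decode emitted
// events later. The topic is the sha3 hash of "Name(type1,type2,...)"; with a
// made-up event it would be computed like this (web3 here is a configured
// web3 0.x instance, whose sha3() returns the hash without a 0x prefix):

var signature = "Transfer(address,address,uint256)";
var topic = "0x" + web3.sha3(signature);
// `topic` now matches log.topics[0] for that event in eth_getLogs results.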
- for (var i = 0; i < contract_names.length; i++) { - var name = contract_names[i]; - Truffle.contracts[name] = global[name]; + self.contracts.forEach(function(contract) { + // make the contract globally available + global[contract.contract_name] = contract; - var abi = global[name].abi; + var abi = contract.abi; for (var j = 0; j < abi.length; j++) { var item = abi[j]; @@ -74,414 +57,266 @@ var redeploy_contracts = function(config, recompile, done) { if (item.type == "event") { var signature = item.name + "(" + item.inputs.map(function(param) {return param.type;}).join(",") + ")"; - Truffle.known_events["0x" + web3.sha3(signature)] = { + self.known_events["0x" + web3.sha3(signature)] = { signature: signature, abi_entry: item }; } } - } + }); - done(); + callback(); }); - }); -}; - -var Test = { - setup: function(config, callback) { - var BEFORE_TIMEOUT = 120000; - var TEST_TIMEOUT = 300000; - - // `accounts` will be populated before each contract() - // invocation and passed to it so tests don't have to call it themselves. - var accounts = []; - - global.web3 = config.web3; - - // Make Promise global so tests have access to it. - global.Promise = Promise; - - // Use custom assertions. - global.assert = chai.assert; - - global.Truffle = { - can_snapshot: false, - starting_snapshot_id: null, - contracts: {}, - known_events: {}, - - redeploy: function(recompile) { - return new Promise(function(resolve, reject) { - redeploy_contracts(config, recompile, function(err) { - if (err != null) { - reject(err); - } else { - resolve(); - } - }); - }); - }, + }; - handle_errs: function(done) { Promise.onPossiblyUnhandledRejection(done); }, + if (self.initial_snapshot == null) { + // Make the initial deployment (full migration). + self.deploy(function(err) { + if (err) return callback(err); - reset: function(cb) { - var self = this; - this.revert(this.starting_snapshot_id, function(err, result) { - if (err != null) { - cb(err); - return; - } + self.snapshot(function(err, initial_snapshot) { + if (err == null) { + self.can_snapshot = true; + self.initial_snapshot = initial_snapshot; + } + afterStateReset(); + }); + }); + } else { + self.resetState(afterStateReset); + } +}; - // Snapshot again, resetting the snapshot id. 
- self.snapshot(function(err, result) { - if (err != null) { - cb(err); - return; - } +TestRunner.prototype.deploy = function(callback) { + Migrate.run({ + migrations_directory: this.options.migrations_directory, + contracts_build_directory: this.options.contracts_build_directory, + network: this.options.network, + network_id: this.options.network_id, + provider: this.options.provider, + reset: true, + quiet: true + }, callback); +}; - Truffle.starting_snapshot_id = result.result; - cb(); - }); - }); - }, +TestRunner.prototype.resetState = function(callback) { + var self = this; + if (this.can_snapshot) { + this.revert(this.initial_snapshot, function(err) { + if (err) return callback(err); + self.snapshot(function(err, snapshot) { + if (err) return callback(err); + self.initial_snapshot = snapshot; + callback(); + }); + }); + } else { + this.deploy(callback); + } +}; - snapshot: function(cb) { - rpc("evm_snapshot", cb); - }, +TestRunner.prototype.startTest = function(mocha, callback) { + var self = this; + this.web3.eth.getBlockNumber(function(err, result) { + if (err) return callback(err); - revert: function(snapshot_id, cb) { - rpc("evm_revert", [snapshot_id], cb); - } - }; + result = web3.toBigNumber(result); - global.contract = function(name, opts, tests) { - if (typeof opts == "function") { - tests = opts; - opts = { - reset_state: false - }; - } + // Add one in base 10 + self.currentTestStartBlock = result.plus(1, 10); - if (opts.reset_state == null) { - opts.reset_state = false; - } - - describe("Contract: " + name, function() { - this.timeout(TEST_TIMEOUT); + callback(); + }); +}; - //var _original_contracts = {}; +TestRunner.prototype.endTest = function(mocha, callback) { + var self = this; - before("reset evm before each suite", function(done) { - this.timeout(BEFORE_TIMEOUT); + if (mocha.currentTest.state != "failed") { + return callback(); + } - if (Truffle.can_snapshot == false) { - return done(); - } + var logs = []; - // If we can snapshot, but haven't yet deployed, let's - // deploy for the first time. - if (Truffle.starting_snapshot_id == null) { - redeploy_contracts.call(this, config, false, function(err) { - if (err != null) { - done(err); - return; - } - - Truffle.snapshot(function(err, result) { - if (err != null) { - done(err); - return; - } - - Truffle.starting_snapshot_id = result.result; - done(); - }); - }); - } else { - Truffle.reset(done); - } - }); + // There's no API for eth_getLogs? + this.rpc("eth_getLogs", [{ + fromBlock: "0x" + this.currentTestStartBlock.toString(16) + }], function(err, result) { + if (err) return callback(err); - before("redeploy before each suite", function(done) { - this.timeout(BEFORE_TIMEOUT); + var logs = result.result; - // We don't need this step if we were able to reset. 
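// Illustrative aside (not part of the patch): the TestRunner above resets chain
// state between suites with the client-specific evm_snapshot / evm_revert RPC
// methods (supported by TestRPC), and falls back to a full redeploy when the
// client rejects them. The raw calls look roughly like this (`provider` is a
// placeholder for the configured web3 provider):

provider.sendAsync({
  jsonrpc: "2.0",
  method: "evm_snapshot",
  id: new Date().getTime()
}, function(err, result) {
  if (err) return console.error("Client cannot snapshot; redeploying instead.");
  var snapshotId = result.result;

  provider.sendAsync({
    jsonrpc: "2.0",
    method: "evm_revert",
    params: [snapshotId],
    id: new Date().getTime()
  }, function(err, result) {
    // Chain state is now back to the point where the snapshot was taken.
  });
});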
- if (Truffle.can_snapshot == true) { - return done(); - } + if (logs.length == 0) { + self.logger.log(" > No events were emitted"); + return callback(); + } - redeploy_contracts.call(this, config, false, function(err) { + self.logger.log("\n Events emitted during test:"); + self.logger.log( " ---------------------------"); + self.logger.log(""); - // Store address that was first deployed, in case we redeploy - // from within a test - // for (var name in config.contracts.classes) { - // var contract = global[name]; - // _original_contracts[name] = contract.address; - // } + logs.forEach(function(log) { + var event = self.known_events[log.topics[0]]; - done(err); - }); - }); - - // afterEach("restore contract address", function(done) { - // for (var name in _original_contracts) { - // global[name].address = _original_contracts[name]; - // } - // done(); - // }); + if (event == null) { + return; + } - var startingBlock; + var types = event.abi_entry.inputs.map(function(input) { + return input.indexed == true ? null : input.type; + }).filter(function(type) { + return type != null; + }); + var values = SolidityCoder.decodeParams(types, log.data.replace("0x", "")); + var index = 0; + + var line = " " + event.abi_entry.name + "("; + line += event.abi_entry.inputs.map(function(input) { + var value; + if (input.indexed == true) { + value = ""; + } else { + value = values[index]; + index += 1; + } - beforeEach("record block number of test start", function(done) { - web3.eth.getBlockNumber(function(err, result) { - if (err) return done(err); + return input.name + ": " + value.toString(); + }).join(", "); + line += ")"; + self.logger.log(line); + }); + self.logger.log( "\n ---------------------------"); + callback(); + }); +}; - result = web3.toBigNumber(result); +TestRunner.prototype.snapshot = function(callback) { + this.rpc("evm_snapshot", function(err, result) { + if (err) return callback(err); + callback(null, result.result); + }); +}, - // Add one in base 10 - startingBlock = result.plus(1, 10); +TestRunner.prototype.revert = function(snapshot_id, callback) { + this.rpc("evm_revert", [snapshot_id], callback); +} - done(); - }); - }); +TestRunner.prototype.rpc = function(method, arg, cb) { + var req = { + jsonrpc: "2.0", + method: method, + id: new Date().getTime() + }; + if (arguments.length == 3) { + req.params = arg; + } else { + cb = arg; + } - afterEach("check logs on failure", function(done) { - if (this.currentTest.state != "failed") { - return done(); - } - var logs = []; - - // There's no API for eth_getLogs? - rpc("eth_getLogs", [{ - fromBlock: "0x" + startingBlock.toString(16) - }], function(err, result) { - if (err) return done(err); - - var logs = result.result; - - if (logs.length == 0) { - console.log(" > No events were emitted"); - return done(); - } - - console.log("\n Events emitted during test:"); - console.log( " ---------------------------"); - console.log(""); - - // logs.sort(function(a, b) { - // var ret = a.blockNumber - b.blockNumber; - // if (ret == 0) { - // return a.logIndex - b.logIndex; - // } - // return ret; - // }); - - - logs.forEach(function(log) { - var event = Truffle.known_events[log.topics[0]]; - - if (event == null) { - return; - } - - var types = event.abi_entry.inputs.map(function(input) { - return input.indexed == true ? 
null : input.type; - }).filter(function(type) { - return type != null; - }); - var values = SolidityCoder.decodeParams(types, log.data.replace("0x", "")); - var index = 0; - - var line = " " + event.abi_entry.name + "("; - line += event.abi_entry.inputs.map(function(input) { - var value; - if (input.indexed == true) { - value = ""; - } else { - value = values[index]; - index += 1; - } - - return input.name + ": " + value.toString(); - }).join(", "); - line += ")"; - console.log(line); - }); - console.log( "\n ---------------------------"); - done(); - }); - }); + var intermediary = function(err, result) { + if (err != null) { + cb(err); + return; + } - if (opts.reset_state == true) { - var snapshot_id; - beforeEach("snapshot state before each test", function(done) { - if (!Truffle.can_snapshot) { - // can't snapshot/revert, redeploy instead - return redeploy_contracts(false, done); - } - Truffle.snapshot(function(err, result) { - snapshot_id = result.result; - done(); - }); - }); + if (result.error != null) { + cb(new Error("RPC Error: " + (result.error.message || result.error))); + return; + } - afterEach("revert state after each test", function(done) { - if (!Truffle.can_snapshot) { - return done(); - } - Truffle.revert(snapshot_id, function(err, ret) { - done(); - }); - }); - } + cb(null, result); + }; - tests(accounts); - }); - }; + this.provider.sendAsync(req, intermediary); +}; - (new Promise(function(accept, reject) { - // Get the accounts - web3.eth.getAccounts(function(error, accs) { - if (error != null) { - reject(error); +var Test = { + run: function(options, callback) { + expect.options(options, [ + "contracts_directory", + "contracts_build_directory", + "migrations_directory", + "test_files", + "network", + "network_id", + "provider" + ]); + + // Compile if needed. This will + Contracts.compile({ + all: options.compileAll === true, + contracts_directory: options.contracts_directory, + contracts_build_directory: options.contracts_build_directory, + network: options.network, + network_id: options.network_id, + quiet: false, + quietWrite: true, + strict: options.strict + }, function(err) { + if (err) return callback(err); + + // Override console.warn() because web3 outputs gross errors to it. + // e.g., https://github.com/ethereum/web3.js/blob/master/lib/web3/allevents.js#L61 + // Output looks like this during tests: https://gist.github.com/tcoulter/1988349d1ec65ce6b958 + var warn = console.warn; + console.warn = function(message) { + if (message == "cannot find event for log") { return; + } else { + warn.apply(console, arguments); } + }; - for (var account of accs) { - accounts.push(account); - } + // `accounts` will be populated before each contract() invocation + // and passed to it so tests don't have to call it themselves. + var web3 = new Web3(); + web3.setProvider(options.provider); - Pudding.defaults({ - from: accounts[0], - gas: 3141592 - }); + web3.eth.getAccounts(function(err, accounts) { + if (err) return callback(err); - accept(); - }); - })).then(function() { - return new Promise(function(accept, reject) { - // Compile if needed. + global.web3 = web3; + global.assert = chai.assert; - if (config.argv.compile === false) { - accept(); - return; - } + var runner = new TestRunner(options); - // Compile all the contracts and get the available accounts. - // We only need to do this once, and can get it outside of - // mocha. 
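// Illustrative aside (not part of the patch): the decoding above drops indexed
// inputs (those live in log.topics, not log.data) and hands the remaining types
// to web3's internal ABI coder. A self-contained sketch decoding a single
// uint256 value of 42 (the hex string stands in for a real log.data field):

var SolidityCoder = require("web3/lib/solidity/coder.js");

var exampleData = "0x" + new Array(63).join("0") + "2a";  // 32-byte word encoding 42
var values = SolidityCoder.decodeParams(["uint256"], exampleData.replace("0x", ""));
console.log(values[0].toString());  // "42"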
- console.log("Compiling contracts..."); - Contracts.compile(config, function(err) { - if (err != null) { - reject(err); - } else { - accept(); - } - }); - }); - }).then(function() { - return new Promise(function(accept, reject) { - // Check to see if the ethereum client can snapshot - Truffle.snapshot(function(err, result) { - if (err == null) { - Truffle.can_snapshot = true; + global.contract = function(name, tests) { + if (typeof opts == "function") { + tests = name; + name = ""; } - accept(); - }); - }); - }).then(callback).catch(callback); - }, - - run: function(config, file, callback) { - // Override console.warn() because web3 outputs gross errors to it. - // e.g., https://github.com/ethereum/web3.js/blob/master/lib/web3/allevents.js#L61 - // Output looks like this during tests: https://gist.github.com/tcoulter/1988349d1ec65ce6b958 - var warn = console.warn; - console.warn = function(message) { - if (message == "cannot find event for log") { - return; - } else { - warn.apply(console, arguments); - } - }; - - if (typeof file == "function") { - callback = file; - file = null; - config.expect(config.tests.directory, "tests directory"); - } - - if (file != null) { - if (path.isAbsolute(file) == false) { - file = path.resolve(config.working_dir, file); - } - - config.expect(file, "test file"); - } - - this.setup(config, function(err) { - if (err != null) { - callback(err); - return; - } - // Change current working directory to that of the project. - process.chdir(config.working_dir); - __dirname = process.cwd(); + describe("Contract: " + name, function() { + this.timeout(TEST_TIMEOUT); - // If errors aren't caught in Promises, make sure they're thrown - // and don't keep the process open. - Promise.onPossiblyUnhandledRejection(function(e, promise) { - throw e; - }); - - var mocha = new Mocha(config.app.resolved.mocha || { - useColors: true - }); + before("prepare suite", function(done) { + this.timeout(BEFORE_TIMEOUT); + runner.initialize(done); + }); - var runMocha = function() { - // TODO: Catch any errors here, and fail. - mocha.run(function(failures) { - callback(null, failures); - }); - }; + beforeEach("before test", function(done) { + runner.startTest(this, done); + }); - if (file != null) { - mocha.addFile(file); - runMocha(); - return; - } + afterEach("after test", function(done) { + runner.endTest(this, done); + }); - dir.files(config.tests.directory, function(err, files) { - if (err != null) { - callback(err); - return; - } + tests(accounts); + }); + }; - // if running via the 'watch:tests' task, we want to be able to run - // (require) our test files repeatedly, so this is a hack to make it - // work. we copy each test file to a temp filename and load that - // instead of the original to avoid getting cached. - // files = files.map(function(f) { - // var src = fs.readFileSync(f); - // f = temp.path({prefix: "truffle-", suffix: "-"+path.basename(f)}) - // fs.writeFileSync(f, src); - // return f; - // }); - - var mocha = new Mocha(config.app.resolved.mocha || { + var mocha = new Mocha(options.mocha || { useColors: true }); - for (var file of files.sort()) { - if (file.match(config.tests.filter)) { - mocha.addFile(file); - } - } + // // Change current working directory to that of the project. + // process.chdir(config.working_dir); + // __dirname = process.cwd(); - // Change current working directory to that of the project. 
-        process.chdir(config.working_dir);
-        __dirname = process.cwd();
+    options.test_files.forEach(function(file) {
+      mocha.addFile(file);
+    });

     // If errors aren't caught in Promises, make sure they're thrown
     // and don't keep the process open.
@@ -489,11 +324,7 @@ var Test = {
       throw e;
     });

-    // TODO: Catch any errors here, and fail.
     mocha.run(function(failures) {
-      // files.forEach(function(f) {
-      //   fs.unlinkSync(f); // cleanup our temp files
-      // });
       console.warn = warn;
       callback(null, failures);
     });
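
For reference, a test file as the new harness consumes it would look roughly like the sketch below. The `contract`, `assert`, and `web3` globals are the ones installed by Test.run above; `MetaCoin` is assumed to be one of the scaffold's example contracts, exposed as a Pudding abstraction, and an Ethereum test client is assumed to be running.

    // Illustrative sketch only; not part of the change itself.
    contract("MetaCoin", function(accounts) {
      it("puts 10000 MetaCoin in the first account", function(done) {
        var meta = MetaCoin.deployed(); // assumes MetaCoin is a global Pudding abstraction

        meta.getBalance.call(accounts[0]).then(function(balance) {
          assert.equal(balance.valueOf(), 10000, "10000 wasn't in the first account");
          done();
        }).catch(done);
      });
    });
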
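The event lines TestRunner logs above come from decoding each receipt log against its ABI entry: indexed inputs are carried in log.topics, so only the non-indexed types are handed to the coder together with log.data. A minimal standalone sketch of that step, assuming SolidityCoder is web3 0.x's bundled solidity coder and using a made-up Transfer event and log:

    var SolidityCoder = require("web3/lib/solidity/coder"); // assumed location of the coder

    // Hypothetical ABI entry and log, for illustration only.
    var abi_entry = {
      name: "Transfer",
      inputs: [
        { name: "from",  type: "address", indexed: true  },
        { name: "to",    type: "address", indexed: true  },
        { name: "value", type: "uint256", indexed: false }
      ]
    };
    var log = {
      data: "0x00000000000000000000000000000000000000000000000000000000000003e8"
    };

    // Only non-indexed inputs are ABI-encoded into log.data.
    var types = abi_entry.inputs.filter(function(input) {
      return !input.indexed;
    }).map(function(input) {
      return input.type;
    });

    var values = SolidityCoder.decodeParams(types, log.data.replace("0x", ""));
    console.log(values[0].toString()); // "1000"
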
diff --git a/package.json b/package.json
index 9c0b8627bb6..b0169444ff6 100644
--- a/package.json
+++ b/package.json
@@ -16,33 +16,35 @@
     "colors": "^1.1.2",
     "cpr": "^0.4.3",
     "deasync": "^0.1.3",
-    "ether-pudding": "2.0.6",
+    "del": "^2.2.0",
+    "ether-pudding": "^3.0.3",
     "finalhandler": "^0.4.0",
+    "find-up": "^1.1.2",
     "graphlib": "^2.0.0",
     "jsmin": "^1.0.1",
     "lodash": "^4.5.1",
     "mkdirp": "^0.5.1",
     "mocha": "^2.3.3",
     "node-dir": "^0.1.10",
-    "rimraf": "^2.4.3",
     "serve-static": "^1.10.0",
     "solc": "^0.3.1-1",
+    "solidity-parser": "^0.0.8",
     "spawn-args": "^0.1.0",
-    "truffle-default-builder": "0.0.9",
+    "temp": "^0.8.3",
+    "truffle-default-builder": "^1.0.1",
     "uglify-js": "^2.6.1",
     "web3": "^0.15.2",
     "yargs": "^3.27.0"
   },
   "devDependencies": {
-    "babel-cli": "^6.4.5",
-    "temp": "^0.8.3"
+    "babel-cli": "^6.4.5"
   },
   "bin": {
     "truffle": "./cli.js",
     "truffle-exec": "./exec.js"
   },
   "scripts": {
-    "test": "node ./cli.js test"
+    "test": "mocha"
   },
   "repository": {
     "type": "git",
diff --git a/test/compile.js b/test/compile.js
new file mode 100644
index 00000000000..ba197b0151c
--- /dev/null
+++ b/test/compile.js
@@ -0,0 +1,114 @@
+var assert = require("chai").assert;
+var Init = require("../lib/init");
+var Contracts = require("../lib/contracts");
+var Pudding = require("ether-pudding");
+var path = require("path");
+var fs = require("fs");
+
+describe("compile", function() {
+  var config;
+
+  before("Create a sandbox", function(done) {
+    this.timeout(5000);
+    Init.sandbox(function(err, result) {
+      if (err) return done(err);
+      config = result;
+      done();
+    });
+  });
+
+  before("edit config", function() {
+    config.networks = {
+      "default": {
+        "network_id": "default"
+      },
+      "secondary": {
+        "network_id": "12345"
+      }
+    }
+  });
+
+  it('compiles all initial contracts', function(done) {
+    this.timeout(10000);
+
+    Contracts.compile(config.with({
+      all: false,
+      quiet: true
+    }), function(err, contracts) {
+      if (err) return done(err);
+
+      assert.equal(Object.keys(contracts).length, 3, "Didn't compile the expected number of contracts");
+      done();
+    });
+  });
+
+  it('compiles no contracts after no updates', function(done) {
+    this.timeout(10000);
+
+    Contracts.compile(config.with({
+      all: false,
+      quiet: true
+    }), function(err, contracts) {
+      if (err) return done(err);
+
+      assert.equal(Object.keys(contracts).length, 0, "Compiled a contract even though we weren't expecting it");
+      done();
+    });
+  });
+
+  it('compiles contract and dependencies after an update', function(done) {
+    this.timeout(10000);
+
+    var file_to_update = path.resolve(path.join(config.contracts_directory, "MetaCoin.sol"));
+
+    // Update the modification time to simulate an edit.
+    var newTime = new Date().getTime();
+    fs.utimesSync(file_to_update, newTime, newTime);
+
+    Contracts.compile(config.with({
+      all: false,
+      quiet: true
+    }), function(err, contracts) {
+      if (err) return done(err);
+
+      assert.equal(Object.keys(contracts).length, 2, "Expected MetaCoin and ConvertLib to be compiled");
+      done();
+    });
+  });
+
+  it('contracts should only have one network', function(done) {
+    var file = path.resolve(path.join(config.contracts_build_directory, "MetaCoin.sol.js"));
+
+    Pudding.requireFile(file, function(err, contract) {
+      if (err) return done(err);
+      assert.equal(contract.networks().length, 1, "Expected the contract to only be managing one network");
+      done();
+    });
+  });
+
+  it('compiles all contracts after multiple changes after a change in network', function(done) {
+    this.timeout(10000);
+
+    config.network = "secondary";
+
+    Contracts.compile(config.with({
+      all: false,
+      quiet: true
+    }), function(err, contracts) {
+      if (err) return done(err);
+
+      assert.equal(Object.keys(contracts).length, 3, "Expected all contracts to be compiled on a second network");
+      done();
+    });
+  });
+
+  it('contracts should now have two networks', function(done) {
+    var file = path.resolve(path.join(config.contracts_build_directory, "MetaCoin.sol.js"));
+
+    Pudding.requireFile(file, function(err, contract) {
+      if (err) return done(err);
+      assert.equal(contract.networks().length, 2, "Expected the contract to be managing two networks");
+      done();
+    });
+  });
+});
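
The 'compiles contract and dependencies after an update' test above forces a rebuild by doing nothing more than bumping MetaCoin.sol's modification time so the profiler treats the file as changed. The same trick in isolation (the path is hypothetical; note that fs.utimesSync interprets numeric timestamps as seconds since the epoch):

    var fs = require("fs");

    // Mark a contract source as edited without touching its contents.
    function touch(file) {
      var now = Date.now() / 1000; // seconds, which is what utimesSync expects for numbers
      fs.utimesSync(file, now, now);
    }

    touch("./contracts/MetaCoin.sol"); // hypothetical path
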
diff --git a/test/create.es6 b/test/create.es6
deleted file mode 100644
index 48505c4c3e7..00000000000
--- a/test/create.es6
+++ /dev/null
@@ -1,62 +0,0 @@
-var temp = require("temp").track();
-var path = require("path");
-var fs = require("fs");
-var Config = require("../lib/config");
-var Init = require("../lib/init");
-var Create = require("../lib/create");
-
-describe('truffle:create', function() {
-  // Paths relative to app truffle directory.
-  var truffle_dir = path.resolve("./");
-  var temp_dir = temp.mkdirSync();
-
-  before("initialize environment", function(done) {
-    var config = Config.gather(truffle_dir, temp_dir, {}, null);
-    Init.all(config, done);
-  });
-
-  it('successfully creates a new contract', function(done) {
-    var argv = {
-      name: "MyNewContract"
-    };
-
-    var config = Config.gather(truffle_dir, temp_dir, argv, "development");
-    Create.contract(config, argv.name, function(err) {
-      if (err != null) {
-        return done(err);
-      }
-
-      var expected_file = path.join(temp_dir, "contracts", "MyNewContract.sol");
-      assert.isTrue(fs.existsSync(expected_file), `Contract to be created doesns't exist, ${expected_file}`);
-
-      var file_data = fs.readFileSync(expected_file, {encoding: "utf8"});
-      assert.isNotNull(file_data, "File's data is null");
-      assert.notEqual(file_data, "", "File's data is blank");
-
-      done();
-    });
-  }); // it
-
-  it('successfully creates a new test', function(done) {
-    var argv = {
-      name: "MyOtherNewContract"
-    };
-
-    var config = Config.gather(truffle_dir, temp_dir, argv, "development");
-    Create.test(config, argv.name, function(err) {
-      if (err != null) {
-        return done(err);
-      }
-
-      var expected_file = path.join(temp_dir, "test", "my_other_new_contract.js");
-      assert.isTrue(fs.existsSync(expected_file), `Test to be created doesns't exist, ${expected_file}`);
-
-      var file_data = fs.readFileSync(expected_file, {encoding: "utf8"});
-      assert.isNotNull(file_data, "File's data is null");
-      assert.notEqual(file_data, "", "File's data is blank");
-
-      done();
-    });
-  }); // it
-
-});
diff --git a/test/create.js b/test/create.js
new file mode 100644
index 00000000000..4a0e3f1bf0d
--- /dev/null
+++ b/test/create.js
@@ -0,0 +1,49 @@
+var assert = require("chai").assert;
+var path = require("path");
+var fs = require("fs");
+var Init = require("../lib/init");
+var Create = require("../lib/create");
+
+describe('create', function() {
+  var config;
+
+  before("Create a sandbox", function(done) {
+    this.timeout(5000);
+    Init.sandbox(function(err, result) {
+      if (err) return done(err);
+      config = result;
+      done();
+    });
+  });
+
+  it('creates a new contract', function(done) {
+    Create.contract(config.contracts_directory, "MyNewContract", function(err) {
+      if (err) return done(err);
+
+      var expected_file = path.join(config.contracts_directory, "MyNewContract.sol");
+      assert.isTrue(fs.existsSync(expected_file), `Contract to be created doesn't exist, ${expected_file}`);
+
+      var file_data = fs.readFileSync(expected_file, {encoding: "utf8"});
+      assert.isNotNull(file_data, "File's data is null");
+      assert.notEqual(file_data, "", "File's data is blank");
+
+      done();
+    });
+  }); // it
+
+  it('creates a new test', function(done) {
+    Create.test(config.test_directory, "MyNewTest", function(err) {
+      if (err) return done(err);
+
+      var expected_file = path.join(config.test_directory, "my_new_test.js");
+      assert.isTrue(fs.existsSync(expected_file), `Test to be created doesn't exist, ${expected_file}`);
+
+      var file_data = fs.readFileSync(expected_file, {encoding: "utf8"});
+      assert.isNotNull(file_data, "File's data is null");
+      assert.notEqual(file_data, "", "File's data is blank");
+
+      done();
+    });
+  }); // it
+
+});
diff --git a/test/init.es6 b/test/init.es6
deleted file mode 100644
index 29e2612c966..00000000000
--- a/test/init.es6
+++ /dev/null
@@ -1,28 +0,0 @@
-var temp = require("temp").track();
-var path = require("path");
-var fs = require("fs");
-var Config = require("../lib/config");
-var Init = require("../lib/init")
-
-describe('truffle:init', function() {
-  // Paths relative to app truffle directory.
-  var truffle_dir = path.resolve("./");
-
-  it('successfully copies example configuration', function(done) {
-    var temp_dir = temp.mkdirSync();
-    var config = Config.gather(truffle_dir, temp_dir, {}, null);
-
-    Init.all(config, function(err) {
-      if (err != null) {
-        return done(err);
-      }
-
-      assert.isTrue(fs.existsSync(path.join(temp_dir, "app")), "app directory not created successfully");
-      assert.isTrue(fs.existsSync(path.join(temp_dir, "environments")), "config directory not created successfully");
-      assert.isTrue(fs.existsSync(path.join(temp_dir, "contracts")), "contracts directory not created successfully");
-      assert.isTrue(fs.existsSync(path.join(temp_dir, "test")), "tests directory not created successfully");
-
-      done();
-    });
-  });
-});
diff --git a/test/init.js b/test/init.js
new file mode 100644
index 00000000000..82e5364716d
--- /dev/null
+++ b/test/init.js
@@ -0,0 +1,25 @@
+var assert = require("chai").assert;
+var Init = require("../lib/init");
+var fs = require("fs");
+var path = require('path');
+
+describe('init', function() {
+  var config;
+
+  before("Create a sandbox", function(done) {
+    this.timeout(5000);
+    Init.sandbox(function(err, result) {
+      if (err) return done(err);
+      config = result;
+      done();
+    });
+  });
+
+  it('copies example configuration', function(done) {
+    assert.isTrue(fs.existsSync(path.join(config.working_directory, "app")), "app directory not created successfully");
+    assert.isTrue(fs.existsSync(path.join(config.working_directory, "contracts")), "contracts directory not created successfully");
+    assert.isTrue(fs.existsSync(path.join(config.working_directory, "test")), "tests directory not created successfully");
+
+    done();
+  });
+});
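
The migrate tests below lean heavily on Profiler.deployed_networks. Judging only from the assertions they make, it appears to yield one entry per configured network, mapping each deployed contract name to its address, with an empty object meaning nothing has been deployed to that network. Roughly:

    // Shape inferred from the assertions below; the addresses are placeholders.
    var networks = {
      "default": {
        "Migrations": "0x...",
        "MetaCoin": "0x...",
        "ConvertLib": "0x..."
      },
      "secondary": {}
    };
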
diff --git a/test/migrate.js b/test/migrate.js
new file mode 100644
index 00000000000..39c202b4a3b
--- /dev/null
+++ b/test/migrate.js
@@ -0,0 +1,121 @@
+var assert = require("chai").assert;
+var Init = require("../lib/init");
+var Migrate = require("../lib/migrate");
+var Contracts = require("../lib/contracts");
+var Profiler = require("../lib/profiler");
+var Pudding = require("ether-pudding");
+var path = require("path");
+var fs = require("fs");
+
+describe("migrate", function() {
+  var config;
+
+  before("Create a sandbox", function(done) {
+    this.timeout(5000);
+    Init.sandbox(function(err, result) {
+      if (err) return done(err);
+      config = result;
+      done();
+    });
+  });
+
+  before("edit config", function() {
+    config.networks = {
+      "default": {
+        "network_id": "default"
+      },
+      "secondary": {
+        "network_id": "12345"
+      }
+    }
+  });
+
+  it('profiles a new project as not having any contracts deployed', function(done) {
+    Profiler.deployed_networks(config, function(err, networks) {
+      if (err) return done(err);
+
+      assert.equal(Object.keys(networks).length, 2, "Should have results for two networks from profiler");
+      assert.equal(Object.keys(networks["default"]).length, 0, "Default network should not have been deployed to");
+      assert.equal(Object.keys(networks["secondary"]).length, 0, "Secondary network should not have been deployed to");
+      done();
+    });
+  });
+
+  it('links libraries in initial project, and runs all migrations', function(done) {
+    this.timeout(10000);
+
+    Contracts.compile(config.with({
+      all: false,
+      quiet: true
+    }), function(err) {
+      if (err) return done(err);
+
+      Migrate.run(config.with({
+        quiet: true
+      }), function(err, contracts) {
+        if (err) return done(err);
+
+        Profiler.deployed_networks(config, function(err, networks) {
+          if (err) return done(err);
+
+          assert.equal(Object.keys(networks).length, 2, "Should have results for two networks from profiler");
+          assert.equal(Object.keys(networks["default"]).length, 3, "Default network should have three contracts deployed");
+          assert.isNotNull(networks["default"]["MetaCoin"], "MetaCoin contract should have an address");
+          assert.isNotNull(networks["default"]["ConvertLib"], "ConvertLib library should have an address");
+          assert.isNotNull(networks["default"]["Migrations"], "Migrations contract should have an address");
+          assert.equal(Object.keys(networks["secondary"]).length, 0, "Secondary network should not have been deployed to");
+          done();
+        });
+      });
+    });
+  });
+
+  it('should migrate secondary network without altering primary network', function(done) {
+    this.timeout(10000);
+
+    config.network = "secondary";
+
+    var currentAddresses = {};
+
+    Profiler.deployed_networks(config, function(err, networks) {
+      if (err) return done(err);
+
+      ["MetaCoin", "ConvertLib", "Migrations"].forEach(function(contract_name) {
+        currentAddresses[contract_name] = networks["default"][contract_name];
+      });
+
+      Contracts.compile(config.with({
+        quiet: true
+      }), function(err) {
+        if (err) return done(err);
+
+        Migrate.run(config.with({
+          quiet: true
+        }), function(err, contracts) {
+          if (err) return done(err);
+
+          Profiler.deployed_networks(config, function(err, networks) {
+            if (err) return done(err);
+
+            assert.equal(Object.keys(networks).length, 2, "Should have results for two networks from profiler");
+            assert.equal(Object.keys(networks["default"]).length, 3, "Default network should have three contracts deployed");
+            assert.equal(networks["default"]["MetaCoin"], currentAddresses["MetaCoin"], "MetaCoin contract updated on default network");
+            assert.equal(networks["default"]["ConvertLib"], currentAddresses["ConvertLib"], "ConvertLib library updated on default network");
+            assert.equal(networks["default"]["Migrations"], currentAddresses["Migrations"], "Migrations contract updated on default network");
+            assert.equal(Object.keys(networks["secondary"]).length, 3, "Secondary network should have three contracts deployed");
+            assert.isNotNull(networks["secondary"]["MetaCoin"], "MetaCoin contract should have an address on secondary network");
+            assert.isNotNull(networks["secondary"]["ConvertLib"], "ConvertLib library should have an address on secondary network");
+            assert.isNotNull(networks["secondary"]["Migrations"], "Migrations contract should have an address on secondary network");
+
+            Object.keys(networks["default"]).forEach(function(contract_name) {
+              assert.notEqual(networks["secondary"][contract_name], networks["default"][contract_name], "Contract " + contract_name + " has the same address on both networks");
+            });
+
+            done();
+          });
+        });
+      });
+
+    });
+  });
+});
diff --git a/truffle.js b/truffle.js
deleted file mode 100644
index dc2d3eaf98f..00000000000
--- a/truffle.js
+++ /dev/null
@@ -1,6 +0,0 @@
-module.exports = {
-  rpc: {
-    host: "localhost",
-    port: 8545
-  }
-};
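
TestRunner's snapshot and revert helpers above boil down to two client-specific JSON-RPC calls, evm_snapshot and evm_revert, which clients such as testrpc support. A minimal sketch of that round trip, assuming a client is listening on the localhost:8545 endpoint that the deleted truffle.js pointed at:

    var Web3 = require("web3");
    var provider = new Web3.providers.HttpProvider("http://localhost:8545"); // assumed endpoint

    function rpc(method, params, callback) {
      provider.sendAsync({
        jsonrpc: "2.0",
        method: method,
        params: params || [],
        id: new Date().getTime()
      }, callback);
    }

    // Take a snapshot, do some work, then roll the chain back to the snapshot.
    rpc("evm_snapshot", [], function(err, response) {
      if (err) return console.error(err);
      var snapshot_id = response.result;

      // ... send transactions here ...

      rpc("evm_revert", [snapshot_id], function(err) {
        if (err) return console.error(err);
        // State is back to where it was when the snapshot was taken.
      });
    });
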