Fix ceedling (#2949)

* fixed unit test with ceedling 1.0.0
Ha Thach 2025-01-15 14:53:13 +07:00 committed by GitHub
parent 2495563600
commit e889c0b51e
210 changed files with 391 additions and 22564 deletions

View File

@@ -1,102 +1,226 @@
# =========================================================================
# Ceedling - Test-Centered Build System for C
# ThrowTheSwitch.org
# Copyright (c) 2010-25 Mike Karlesky, Mark VanderVoord, & Greg Williams
# SPDX-License-Identifier: MIT
# =========================================================================
---
# Notes:
# Sample project C code is not presently written to produce a release artifact.
# As such, release build options are disabled.
# This sample, therefore, only demonstrates running a collection of unit tests.
:project:
:use_exceptions: TRUE
# how to use ceedling. If you're not sure, leave this as `gem` and `?`
:which_ceedling: gem
:ceedling_version: 1.0.0
:verbosity: 3
# optional features. If you don't need them, keep them turned off for performance
:use_mocks: TRUE
:use_test_preprocessor: TRUE
:use_auxiliary_dependencies: TRUE
:use_deep_dependencies: TRUE
:use_test_preprocessor: :mocks # options are :none, :mocks, :tests, or :all
:use_deep_dependencies: :all # options are :none, :mocks, :tests, or :all
:use_backtrace: :simple # options are :none, :simple, or :gdb
:use_decorators: :auto # decorate Ceedling's output text. options are :auto, :all, or :none
# tweak the way ceedling handles automatic tasks
:build_root: _build
# :release_build: TRUE
:test_file_prefix: test_
:which_ceedling: vendor/ceedling
:ceedling_version: 0.31.1
:default_tasks:
- test:all
#:test_build:
# :use_assembly: TRUE
# performance options. If your tools start giving mysterious errors, consider
# dropping this to 1 to force single-tasking
:test_threads: 8
:compile_threads: 8
#:release_build:
# :output: MyApp.out
# :use_assembly: FALSE
# enable release build (more details in release_build section below)
:release_build: FALSE
:environment:
# Specify where to find mixins and any that should be enabled automatically
:mixins:
:enabled: []
:load_paths: []
# further details to configure the way Ceedling handles test code
:test_build:
:use_assembly: FALSE
:test_runner:
# Insert additional #include statements in a generated runner
:includes:
- osal.h
# further details to configure the way Ceedling handles release code
:release_build:
:output: MyApp.out
:use_assembly: FALSE
:artifacts: []
# Plugins are optional Ceedling features which can be enabled. Ceedling supports
# a variety of plugins which may affect the way things are compiled, reported,
# or may provide new command options. Refer to the readme in each plugin for
# details on how to use it.
:plugins:
:load_paths: []
:enabled:
#- beep # beeps when finished, so you don't waste time waiting for ceedling
- module_generator # handy for quickly creating source, header, and test templates
#- gcov # test coverage using gcov. Requires gcc, gcov, and a coverage analyzer like gcovr
#- bullseye # test coverage using bullseye. Requires bullseye for your platform
#- command_hooks # write custom actions to be called at different points during the build process
#- compile_commands_json_db # generate a compile_commands.json file
#- dependencies # automatically fetch 3rd party libraries, etc.
#- subprojects # managing builds and tests for static libraries
#- fake_function_framework # use FFF instead of CMock
# Report options (You'll want to choose one stdout option, but may choose multiple stored options if desired)
#- report_build_warnings_log
#- report_tests_gtestlike_stdout
#- report_tests_ide_stdout
#- report_tests_log_factory
- report_tests_pretty_stdout
#- report_tests_raw_output_log
#- report_tests_teamcity_stdout
# Specify which reports you'd like from the log factory
:report_tests_log_factory:
:reports:
- json
- junit
- cppunit
- html
# override the default extensions for your system and toolchain
:extension:
#:header: .h
#:source: .c
#:assembly: .s
#:dependencies: .d
#:object: .o
:executable: .out
#:testpass: .pass
#:testfail: .fail
#:subprojects: .a
# This is where Ceedling should look for your source and test files.
# see documentation for the many options for specifying this.
:paths:
:test:
- +:test/**
- -:test/support
:source:
- ../../src/**
:include:
- ../../src/**
:support:
- test/support
:libraries: []
# You can even specify specific files to add or remove from your test
# and release collections. Usually it's better to use paths and let
# Ceedling do the work for you!
:files:
:test: []
:source: []
# Compilation symbols to be injected into builds
# See documentation for advanced options:
# - Test name matchers for different symbols per test executable build
# - Referencing symbols in multiple lists using advanced YAML
# - Specifying symbols used during test preprocessing
:defines:
# in order to add common defines:
# 1) remove the trailing [] from the :common: section
# 2) add entries to the :common: section (e.g. :test: has TEST defined)
:common: &common_defines []
:test:
- _UNITY_TEST_
#- *common_defines
:test_preprocess:
- _UNITY_TEST_
#- *common_defines
:release: []
# Enable to inject name of a test as a unique compilation symbol into its respective executable build.
:use_test_definition: FALSE
# Configure additional command line flags provided to tools used in each build step
# :flags:
# :release:
# :compile: # Add '-Wall' and '--O2' to compilation of all files in release target
# - -Wall
# - --O2
# :test:
# :compile:
# '(_|-)special': # Add '-pedantic' to compilation of all files in all test executables with '_special' or '-special' in their names
# - -pedantic
# '*': # Add '-foo' to compilation of all files in all test executables
# - -foo
# Configuration Options specific to CMock. See CMock docs for details
:cmock:
:mock_prefix: mock_
:when_no_prototypes: :warn
:enforce_strict_ordering: TRUE
:plugins:
# Core configuration
:plugins: # What plugins should be used by CMock?
- :ignore
- :ignore_arg
- :return_thru_ptr
- :callback
- :array
:treat_as:
:verbosity: 2 # the options being 0 errors only, 1 warnings and errors, 2 normal info, 3 verbose
:when_no_prototypes: :warn # the options being :ignore, :warn, or :error
# File configuration
:skeleton_path: '' # Subdirectory to store stubs when generated (default: '')
:mock_prefix: 'mock_' # Prefix to append to filenames for mocks
:mock_suffix: '' # Suffix to append to filenames for mocks
# Parser configuration
:strippables: ['(?:__attribute__\s*\([ (]*.*?[ )]*\)+)']
:attributes:
- __ramfunc
- __irq
- __fiq
- register
- extern
:c_calling_conventions:
- __stdcall
- __cdecl
- __fastcall
:treat_externs: :exclude # the options being :include or :exclude
:treat_inlines: :exclude # the options being :include or :exclude
# Type handling configuration
#:unity_helper_path: '' # specify a string of where to find a unity_helper.h file to discover custom type assertions
:treat_as: # optionally add additional types to map custom types
uint8: HEX8
uint16: HEX16
uint32: UINT32
int8: INT8
bool: UINT8
#:treat_as_array: {} # hint to cmock that these types are pointers to something
#:treat_as_void: [] # hint to cmock that these types are actually aliases of void
:memcmp_if_unknown: true # allow cmock to use the memory comparison assertions for unknown types
:when_ptr: :compare_data # hint to cmock how to handle pointers in general, the options being :compare_ptr, :compare_data, or :smart
# Add -gcov to the plugins list to make use of the gcov plugin
# You will need to have gcov and gcovr both installed to make it work.
# For more information on these options, see docs in plugins/gcov
:gcov:
:html_report: TRUE
:html_report_type: detailed
:html_medium_threshold: 75
:html_high_threshold: 90
:xml_report: FALSE
# Mock generation configuration
:weak: '' # Symbol to use to declare weak functions
:enforce_strict_ordering: true # Do we want cmock to enforce ordering of all function calls?
:fail_on_unexpected_calls: true # Do we want cmock to fail when it encounters a function call that wasn't expected?
:callback_include_count: true # Do we want cmock to include the number of calls to this callback, when using callbacks?
:callback_after_arg_check: false # Do we want cmock to enforce an argument check first when using a callback?
#:includes: [] # You can add additional includes here, or specify the location with the options below
#:includes_h_pre_orig_header: []
#:includes_h_post_orig_header: []
#:includes_c_pre_header: []
#:includes_c_post_header: []
#:array_size_type: [] # Specify a type or types that should be used for array lengths
#:array_size_name: 'size|len' # Specify a name or names that CMock might automatically recognize as the length of an array
:exclude_setjmp_h: false # Don't use setjmp when running CMock. Note that this might result in late reporting or out-of-order failures.
:tools:
:test_compiler:
:executable: gcc
:name: 'gcc compiler'
:arguments:
- -I"$": COLLECTION_PATHS_TEST_TOOLCHAIN_INCLUDE #expands to -I search paths
- -I"$": COLLECTION_PATHS_TEST_SUPPORT_SOURCE_INCLUDE_VENDOR #expands to -I search paths
- -D$: COLLECTION_DEFINES_TEST_AND_VENDOR #expands to all -D defined symbols
#- -fsanitize=address
- -c ${1} #source code input file (Ruby method call param list sub)
- -o ${2} #object file output (Ruby method call param list sub)
:test_linker:
:executable: gcc
:name: 'gcc linker'
:arguments:
#- -fsanitize=address
- ${1} #list of object files to link (Ruby method call param list sub)
- -o ${2} #executable file output (Ruby method call param list sub)
# Configuration options specific to Unity.
:unity:
:defines:
- UNITY_EXCLUDE_FLOAT
# You can optionally have ceedling create environment variables for you before
# performing the rest of its tasks.
:environment: []
# :environment:
# # List enforces order allowing later to reference earlier with inline Ruby substitution
# - :var1: value
# - :var2: another value
# - :path: # Special PATH handling with platform-specific path separators
# - #{ENV['PATH']} # Environment variables can use inline Ruby substitution
# - /another/path/to/include
# LIBRARIES
# These libraries are automatically injected into the build process. Those specified as
@@ -104,19 +228,210 @@
# tests or releases. These options are MERGED with the options in supplemental yaml files.
:libraries:
:placement: :end
:flag: "${1}" # or "-L ${1}" for example
:common: &common_libraries []
:test:
#- *common_libraries
:release:
#- *common_libraries
:flag: "-l${1}"
:path_flag: "-L ${1}"
:system: [] # for example, you might list 'm' to grab the math library
:test: []
:release: []
:plugins:
:load_paths:
- vendor/ceedling/plugins
:enabled:
- stdout_pretty_tests_report
- module_generator
- raw_output_report
- colour_report
################################################################
# PLUGIN CONFIGURATION
################################################################
# Add -gcov to the plugins list to make use of the gcov plugin
# You will need to have gcov and gcovr both installed to make it work.
# For more information on these options, see docs in plugins/gcov
:gcov:
:summaries: TRUE # Enable simple coverage summaries to console after tests
:report_task: FALSE # Disable dedicated report generation task (this enables automatic report generation)
:utilities:
- gcovr # Use gcovr to create the specified reports (default).
#- ReportGenerator # Use ReportGenerator to create the specified reports.
:reports: # Specify one or more reports to generate.
# Make an HTML summary report.
- HtmlBasic
# - HtmlDetailed
# - Text
# - Cobertura
# - SonarQube
# - JSON
# - HtmlInline
# - HtmlInlineAzure
# - HtmlInlineAzureDark
# - HtmlChart
# - MHtml
# - Badges
# - CsvSummary
# - Latex
# - LatexSummary
# - PngChart
# - TeamCitySummary
# - lcov
# - Xml
# - XmlSummary
:gcovr:
# :html_artifact_filename: TestCoverageReport.html
# :html_title: Test Coverage Report
:html_medium_threshold: 75
:html_high_threshold: 90
# :html_absolute_paths: TRUE
# :html_encoding: UTF-8
# :module_generator:
# :project_root: ./
# :source_root: source/
# :inc_root: includes/
# :test_root: tests/
# :naming: :snake #options: :bumpy, :camel, :caps, or :snake
# :includes:
# :tst: []
# :src: []
# :boilerplates:
# :src: ""
# :inc: ""
# :tst: ""
# :dependencies:
# :libraries:
# - :name: WolfSSL
# :source_path: third_party/wolfssl/source
# :build_path: third_party/wolfssl/build
# :artifact_path: third_party/wolfssl/install
# :fetch:
# :method: :zip
# :source: \\shared_drive\third_party_libs\wolfssl\wolfssl-4.2.0.zip
# :environment:
# - CFLAGS+=-DWOLFSSL_DTLS_ALLOW_FUTURE
# :build:
# - "autoreconf -i"
# - "./configure --enable-tls13 --enable-singlethreaded"
# - make
# - make install
# :artifacts:
# :static_libraries:
# - lib/wolfssl.a
# :dynamic_libraries:
# - lib/wolfssl.so
# :includes:
# - include/**
# :subprojects:
# :paths:
# - :name: libprojectA
# :source:
# - ./subprojectA/source
# :include:
# - ./subprojectA/include
# :build_root: ./subprojectA/build
# :defines: []
# :command_hooks:
# :pre_mock_preprocess:
# :post_mock_preprocess:
# :pre_test_preprocess:
# :post_test_preprocess:
# :pre_mock_generate:
# :post_mock_generate:
# :pre_runner_generate:
# :post_runner_generate:
# :pre_compile_execute:
# :post_compile_execute:
# :pre_link_execute:
# :post_link_execute:
# :pre_test_fixture_execute:
# :post_test_fixture_execute:
# :pre_test:
# :post_test:
# :pre_release:
# :post_release:
# :pre_build:
# :post_build:
# :post_error:
################################################################
# TOOLCHAIN CONFIGURATION
################################################################
#:tools:
# Ceedling defaults to using gcc for compiling, linking, etc.
# As [:tools] is blank, gcc will be used (so long as it's in your system path)
# See documentation to configure a given toolchain for use
#:tools:
# :test_compiler:
# :executable: gcc
# :name: 'gcc compiler'
# :arguments:
# - -I"$": COLLECTION_PATHS_TEST_TOOLCHAIN_INCLUDE #expands to -I search paths
# - -I"$": COLLECTION_PATHS_TEST_SUPPORT_SOURCE_INCLUDE_VENDOR #expands to -I search paths
# - -D$: COLLECTION_DEFINES_TEST_AND_VENDOR #expands to all -D defined symbols
# #- -fsanitize=address
# - -c ${1} #source code input file (Ruby method call param list sub)
# - -o ${2} #object file output (Ruby method call param list sub)
# :test_linker:
# :executable: gcc
# :name: 'gcc linker'
# :arguments:
# #- -fsanitize=address
# - ${1} #list of object files to link (Ruby method call param list sub)
# - -o ${2} #executable file output (Ruby method call param list sub)
# :test_compiler:
# :executable:
# :arguments: []
# :name:
# :optional: FALSE
# :test_linker:
# :executable:
# :arguments: []
# :name:
# :optional: FALSE
# :test_assembler:
# :executable:
# :arguments: []
# :name:
# :optional: FALSE
# :test_fixture:
# :executable:
# :arguments: []
# :name:
# :optional: FALSE
# :test_includes_preprocessor:
# :executable:
# :arguments: []
# :name:
# :optional: FALSE
# :test_file_preprocessor:
# :executable:
# :arguments: []
# :name:
# :optional: FALSE
# :test_file_preprocessor_directives:
# :executable:
# :arguments: []
# :name:
# :optional: FALSE
# :test_dependencies_generator:
# :executable:
# :arguments: []
# :name:
# :optional: FALSE
# :release_compiler:
# :executable:
# :arguments: []
# :name:
# :optional: FALSE
# :release_linker:
# :executable:
# :arguments: []
# :name:
# :optional: FALSE
# :release_assembler:
# :executable:
# :arguments: []
# :name:
# :optional: FALSE
# :release_dependencies_generator:
# :executable:
# :arguments: []
# :name:
# :optional: FALSE
...
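The :cmock: parser options above (:strippables:, :attributes:, :c_calling_conventions:) control how CMock sanitizes function prototypes before generating mocks. A minimal sketch, assuming a hypothetical header driver.h that is not part of this commit, of the kind of declarations those settings let CMock parse cleanly:

/* driver.h -- hypothetical header, shown only to illustrate the CMock parser
 * settings in project.yml. The :strippables: regex removes the
 * __attribute__((...)) decoration and the :attributes: list tells CMock to
 * ignore qualifiers such as __irq, so a mock_driver.h can still be generated
 * from these prototypes. */
#ifndef DRIVER_H
#define DRIVER_H

#ifndef __irq
#define __irq   /* no-op for plain gcc; a real interrupt qualifier on some embedded toolchains */
#endif

__attribute__((weak)) void driver_init(void);
__irq void driver_isr(void);

#endif /* DRIVER_H */

With :mock_prefix: 'mock_', a test that does #include "mock_driver.h" then gets CMock-generated expectations such as driver_init_Expect().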

View File

@@ -32,8 +32,8 @@
#include "tusb_fifo.h"
#include "tusb.h"
#include "usbd.h"
TEST_FILE("usbd_control.c")
TEST_FILE("msc_device.c")
TEST_SOURCE_FILE("usbd_control.c")
TEST_SOURCE_FILE("msc_device.c")
// Mock File
#include "mock_dcd.h"

View File

@@ -29,7 +29,7 @@
#include "tusb_fifo.h"
#include "tusb.h"
#include "usbd.h"
TEST_FILE("usbd_control.c")
TEST_SOURCE_FILE("usbd_control.c")
// Mock File
#include "mock_dcd.h"

View File

@@ -1,350 +0,0 @@
#!/usr/bin/env ruby
#these are always used
require 'rubygems'
require 'fileutils'
# Check for the main project file (either the one defined in the ENV or the default)
main_filepath = ENV['CEEDLING_MAIN_PROJECT_FILE']
project_found = (!main_filepath.nil? && File.exists?(main_filepath))
if (!project_found)
main_filepath = "project.yml"
project_found = File.exists?(main_filepath)
end
def is_windows?
return ((RbConfig::CONFIG['host_os'] =~ /mswin|mingw/) ? true : false) if defined?(RbConfig)
return ((Config::CONFIG['host_os'] =~ /mswin|mingw/) ? true : false)
end
unless (project_found)
#===================================== We Do Not Have A Project ================================================
puts "Welcome to Ceedling!"
require 'thor'
def here
File.dirname(__FILE__) + "/.."
end
class CeedlingTasks < Thor
include Thor::Actions
desc "new PROJECT_NAME", "create a new ceedling project"
method_option :docs, :type => :boolean, :default => false, :desc => "Add docs in project vendor directory"
method_option :local, :type => :boolean, :default => false, :desc => "Create a copy of Ceedling in the project vendor directory"
method_option :gitignore, :type => :boolean, :default => false, :desc => "Create a gitignore file for ignoring ceedling generated files"
method_option :no_configs, :type => :boolean, :default => false, :desc => "Don't install starter configuration files"
method_option :noconfigs, :type => :boolean, :default => false
#deprecated:
method_option :no_docs, :type => :boolean, :default => false
method_option :nodocs, :type => :boolean, :default => false
method_option :as_gem, :type => :boolean, :default => false
method_option :asgem, :type => :boolean, :default => false
method_option :with_ignore, :type => :boolean, :default => false
method_option :withignore, :type => :boolean, :default => false
def new(name, silent = false)
copy_assets_and_create_structure(name, silent, false, options)
end
desc "upgrade PROJECT_NAME", "upgrade ceedling for a project (not req'd if gem used)"
def upgrade(name, silent = false)
as_local = true
begin
require "yaml"
as_local = (YAML.load_file(File.join(name, "project.yml"))[:project][:which_ceedling] != 'gem')
rescue
raise "ERROR: Could not find valid project file '#{yaml_path}'"
end
found_docs = File.exists?( File.join(name, "docs", "CeedlingPacket.md") )
copy_assets_and_create_structure(name, silent, true, {:upgrade => true, :no_configs => true, :local => as_local, :docs => found_docs})
end
no_commands do
def copy_assets_and_create_structure(name, silent=false, force=false, options = {})
puts "WARNING: --no_docs deprecated. It is now the default. Specify -docs if you want docs installed." if (options[:no_docs] || options[:nodocs])
puts "WARNING: --as_gem deprecated. It is now the default. Specify -local if you want ceedling installed to this project." if (options[:as_gem] || options[:asgem])
puts "WARNING: --with_ignore deprecated. It is now called -gitignore" if (options[:with_ignore] || options[:with_ignore])
use_docs = options[:docs] || false
use_configs = !(options[:no_configs] || options[:noconfigs] || false)
use_gem = !(options[:local])
use_ignore = options[:gitignore] || false
is_upgrade = options[:upgrade] || false
ceedling_path = File.join(name, 'vendor', 'ceedling')
source_path = File.join(name, 'src')
test_path = File.join(name, 'test')
test_support_path = File.join(name, 'test/support')
# If it's not an upgrade, make sure we have the paths we expect
if (!is_upgrade)
[source_path, test_path, test_support_path].each do |d|
FileUtils.mkdir_p d
end
end
# Generate gitkeep in test support path
FileUtils.touch(File.join(test_support_path, '.gitkeep'))
# If documentation requested, create a place to dump them and do so
doc_path = ""
if use_docs
doc_path = use_gem ? File.join(name, 'docs') : File.join(ceedling_path, 'docs')
FileUtils.mkdir_p doc_path
in_doc_path = lambda {|f| File.join(doc_path, f)}
# Add documentation from main projects to list
doc_files = {}
['docs','vendor/unity/docs','vendor/cmock/docs','vendor/cexception/docs'].each do |p|
Dir[ File.expand_path(File.join(here, p, '*.md')) ].each do |f|
doc_files[ File.basename(f) ] = f unless(doc_files.include? f)
end
end
# Add documentation from plugins to list
Dir[ File.join(here, 'plugins/**/README.md') ].each do |plugin_path|
k = "plugin_" + plugin_path.split(/\\|\//)[-2] + ".md"
doc_files[ k ] = File.expand_path(plugin_path)
end
# Copy all documentation
doc_files.each_pair do |k, v|
copy_file(v, in_doc_path.call(k), :force => force)
end
end
# If installed locally to project, copy ceedling, unity, cmock, & supports to vendor
unless use_gem
FileUtils.mkdir_p ceedling_path
#copy full folders from ceedling gem into project
%w{plugins lib bin}.map do |f|
{:src => f, :dst => File.join(ceedling_path, f)}
end.each do |f|
directory(f[:src], f[:dst], :force => force)
end
# mark ceedling as an executable
File.chmod(0755, File.join(ceedling_path, 'bin', 'ceedling')) unless is_windows?
#copy necessary subcomponents from ceedling gem into project
sub_components = [
{:src => 'vendor/c_exception/lib/', :dst => 'vendor/c_exception/lib'},
{:src => 'vendor/cmock/config/', :dst => 'vendor/cmock/config'},
{:src => 'vendor/cmock/lib/', :dst => 'vendor/cmock/lib'},
{:src => 'vendor/cmock/src/', :dst => 'vendor/cmock/src'},
{:src => 'vendor/diy/lib', :dst => 'vendor/diy/lib'},
{:src => 'vendor/unity/auto/', :dst => 'vendor/unity/auto'},
{:src => 'vendor/unity/src/', :dst => 'vendor/unity/src'},
]
sub_components.each do |c|
directory(c[:src], File.join(ceedling_path, c[:dst]), :force => force)
end
end
# We're copying in a configuration file if we haven't said not to
if (use_configs)
dst_yaml = File.join(name, 'project.yml')
src_yaml = if use_gem
File.join(here, 'assets', 'project_as_gem.yml')
else
if is_windows?
copy_file(File.join('assets', 'ceedling.cmd'), File.join(name, 'ceedling.cmd'), :force => force)
else
copy_file(File.join('assets', 'ceedling'), File.join(name, 'ceedling'), :force => force)
File.chmod(0755, File.join(name, 'ceedling'))
end
File.join(here, 'assets', 'project_with_guts.yml')
end
# Perform the actual clone of the config file, while updating the version
File.open(dst_yaml,'w') do |dst|
require File.expand_path(File.join(File.dirname(__FILE__),"..","lib","ceedling","version.rb"))
dst << File.read(src_yaml).gsub(":ceedling_version: '?'",":ceedling_version: #{Ceedling::Version::CEEDLING}")
puts " create #{dst_yaml}"
end
end
# Copy the gitignore file if requested
if (use_ignore)
copy_file(File.join('assets', 'default_gitignore'), File.join(name, '.gitignore'), :force => force)
end
unless silent
puts "\n"
puts "Project '#{name}' #{force ? "upgraded" : "created"}!"
puts " - Tool documentation is located in #{doc_path}" if use_docs
puts " - Execute 'ceedling help' from #{name} to view available test & build tasks"
puts ''
end
end
end
desc "examples", "list available example projects"
def examples()
puts "Available sample projects:"
FileUtils.cd(File.join(here, "examples")) do
Dir["*"].each {|proj| puts " #{proj}"}
end
end
desc "example PROJ_NAME [DEST]", "new specified example project (in DEST, if specified)"
def example(proj_name, dest=nil)
if dest.nil? then dest = proj_name end
copy_assets_and_create_structure(dest, true, false, {:local=>true, :docs=>true})
dest_src = File.join(dest,'src')
dest_test = File.join(dest,'test')
dest_project = File.join(dest,'project.yml')
directory "examples/#{proj_name}/src", dest_src
directory "examples/#{proj_name}/test", dest_test
remove_file dest_project
copy_file "examples/#{proj_name}/project.yml", dest_project
puts "\n"
puts "Example project '#{proj_name}' created!"
puts " - Tool documentation is located in vendor/ceedling/docs"
puts " - Execute 'ceedling help' to view available test & build tasks"
puts ''
end
desc "version", "return the version of the tools installed"
def version()
require File.expand_path(File.join(File.dirname(__FILE__),"..","lib","ceedling","version.rb"))
puts " Ceedling:: #{Ceedling::Version::CEEDLING}"
puts " CMock:: #{Ceedling::Version::CMOCK}"
puts " Unity:: #{Ceedling::Version::UNITY}"
puts " CException:: #{Ceedling::Version::CEXCEPTION}"
end
end
if (ARGV[0] =~ /^\-T$/)
puts "\n(No Project Detected, Therefore Showing Options to Create Projects)"
CeedlingTasks.tasks.each_pair do |k,v|
puts v.usage.ljust(25,' ') + v.description
end
puts "\n"
else
CeedlingTasks.source_root here
CeedlingTasks.start
end
#===================================== We Have A Project Already ================================================
else
require 'yaml'
require 'rbconfig'
#determine platform
platform = begin
case(RbConfig::CONFIG['host_os'])
when /mswin|mingw|cygwin/i
:mswin
when /darwin/
:osx
else
:linux
end
rescue
:linux
end
#create our default meta-runner option set
options = {
:pretest => nil,
:args => [],
:add_path => [],
:path_connector => (platform == :mswin) ? ";" : ":",
:graceful_fail => false,
:which_ceedling => (Dir.exists?("vendor/ceedling") ? "vendor/ceedling" : 'gem'),
:default_tasks => [ 'test:all' ],
:list_tasks => false
}
#guess that we need a special script file first if it exists
if (platform == :mswin)
options[:pretest] = File.exists?("#{ platform.to_s }_setup.bat") ? "#{ platform.to_s }_setup.bat" : nil
else
options[:pretest] = File.exists?("#{ platform.to_s }_setup.sh") ? "source #{ platform.to_s }_setup.sh" : nil
end
#merge in project settings if they can be found here
yaml_options = YAML.load_file(main_filepath)
if (yaml_options[:paths])
options[:add_path] = yaml_options[:paths][:tools] || []
else
options[:add_path] = []
end
options[:graceful_fail] = yaml_options[:graceful_fail] if yaml_options[:graceful_fail]
options[:which_ceedling] = yaml_options[:project][:which_ceedling] if (yaml_options[:project] && yaml_options[:project][:which_ceedling])
options[:default_tasks] = yaml_options[:default_tasks] if yaml_options[:default_tasks]
#sort through command line options
ARGV.each do |v|
case(v)
when /^(?:new|examples?|templates?)$/
puts "\nOops. You called ceedling with argument '#{v}'.\n" +
" This is an operation that will create a new project... \n" +
" but it looks like you're already in a project. If you really \n" +
" want to do this, try moving to an empty folder.\n\n"
abort
when /^help$/
options[:list_tasks] = true
when /^-T$/
options[:list_tasks] = true
when /^--tasks$/
options[:list_tasks] = true
when /^project:(\w+)/
ENV['CEEDLING_USER_PROJECT_FILE'] = "#{$1}.yml"
else
options[:args].push(v)
end
end
#add to the path
if (options[:add_path] && !options[:add_path].empty?)
path = ENV["PATH"]
options[:add_path].each do |p|
f = File.expand_path(File.dirname(__FILE__),p)
path = (f + options[:path_connector] + path) unless path.include? f
end
ENV["PATH"] = path
end
# Load Ceedling (either through the rakefile OR directly)
if (File.exists?("rakefile.rb"))
load 'rakefile.rb'
else
if (options[:which_ceedling] == 'gem')
require 'ceedling'
else
load "#{options[:which_ceedling]}/lib/ceedling.rb"
end
Ceedling.load_project
end
Rake.application.standard_exception_handling do
if options[:list_tasks]
# Display helpful task list when requested. This required us to dig into Rake internals a bit
Rake.application.define_singleton_method(:name=) {|n| @name = n}
Rake.application.name = 'ceedling'
Rake.application.options.show_tasks = :tasks
Rake.application.options.show_task_pattern = /^(?!.*build).*$/
Rake.application.display_tasks_and_comments()
else
task :default => options[:default_tasks]
# Run our Tasks!
Rake.application.collect_command_line_tasks(options[:args])
Rake.application.top_level
end
end
true
#===================================================================================================================
end

View File

@@ -1,98 +0,0 @@
##
# This module defines the interface for interacting with and loading a project
# with Ceedling.
module Ceedling
##
# Returns the location where the gem is installed.
# === Return
# _String_ - The location where the gem lives.
def self.location
File.join( File.dirname(__FILE__), '..')
end
##
# Return the path to the "built-in" plugins.
# === Return
# _String_ - The path where the default plugins live.
def self.load_path
File.join( self.location, 'plugins')
end
##
# Return the path to the Ceedling Rakefile
# === Return
# _String_
def self.rakefile
File.join( self.location, 'lib', 'ceedling', 'rakefile.rb' )
end
##
# This method selects the project file that Ceedling will use by setting the
# CEEDLING_MAIN_PROJECT_FILE environment variable before loading the ceedling
# rakefile. A path supplied as an argument to this method will override the
# current value of the environment variable. If no path is supplied as an
# argument then the existing value of the environment variable is used. If
# the environment variable has not been set and no argument has been supplied
# then a default path of './project.yml' will be used.
#
# === Arguments
# +options+ _Hash_::
# A hash containing the options for ceedling. Currently the following
# options are supported:
# * +config+ - The path to the project YAML configuration file.
# * +root+ - The root of the project directory.
# * +prefix+ - A prefix to prepend to plugin names in order to determine the
# corresponding gem name.
# * +plugins+ - The list of ceedling plugins to load
def self.load_project(options = {})
# Make sure our path to the yaml file is setup
if options.has_key? :config
ENV['CEEDLING_MAIN_PROJECT_FILE'] = options[:config]
elsif ENV['CEEDLING_MAIN_PROJECT_FILE'].nil?
ENV['CEEDLING_MAIN_PROJECT_FILE'] = './project.yml'
end
# Register the plugins
if options.has_key? :plugins
options[:plugins].each do |plugin|
register_plugin( plugin, options[:prefix] )
end
end
# Define the root of the project if specified
Object.const_set('PROJECT_ROOT', options[:root]) if options.has_key? :root
# Load ceedling
load "#{self.rakefile}"
end
##
# Register a plugin for ceedling to use when a project is loaded. This method
# *must* be called prior to calling the _load_project_ method.
#
# This method is intended to be used for loading plugins distributed via the
# RubyGems mechanism. As such, the following gem structure is assumed for
# plugins.
#
# * The gem name must be prefixed with 'ceedling-' followed by the plugin
# name (ex. 'ceedling-bullseye')
#
# * The contents of the plugin must be installed into a subdirectory of
# the gem with the same name as the plugin (ex. 'bullseye/')
#
# === Arguments
# +name+ _String_:: The name of the plugin to load.
# +prefix+ _String_::
# (optional, default = nil) The prefix to use for the full gem name.
def self.register_plugin(name, prefix=nil)
# Figure out the full name of the gem and location
prefix ||= 'ceedling-'
gem_name = prefix + name
gem_dir = Gem::Specification.find_by_name(gem_name).gem_dir()
# Register the plugin with Ceedling
require 'ceedling/defaults'
DEFAULT_CEEDLING_CONFIG[:plugins][:enabled] << name
DEFAULT_CEEDLING_CONFIG[:plugins][:load_paths] << gem_dir
end
end

View File

@@ -1,39 +0,0 @@
require 'ceedling/constants'
##
# Utilities for raising and reporting errors during building.
class BuildInvokerUtils
constructor :configurator, :streaminator
##
# Processes exceptions and tries to display a useful message for the user.
#
# ==== Attributes
#
# * _exception_: The exception given by a rescue statement.
# * _context_: A symbol representing where in the build the exception
# occurs.
# * _test_build_: A bool to signify if the exception occurred while building
# from test or source.
#
def process_exception(exception, context, test_build=true)
if (exception.message =~ /Don't know how to build task '(.+)'/i)
error_header = "ERROR: Rake could not find file referenced in source"
error_header += " or test" if (test_build)
error_header += ": '#{$1}'. Possible stale dependency."
@streaminator.stderr_puts( error_header )
if (@configurator.project_use_deep_dependencies)
help_message = "Try fixing #include statements or adding missing file. Then run '#{REFRESH_TASK_ROOT}#{context.to_s}' task and try again."
@streaminator.stderr_puts( help_message )
end
raise ''
else
raise exception
end
end
end

View File

@@ -1,47 +0,0 @@
class Cacheinator
constructor :cacheinator_helper, :file_path_utils, :file_wrapper, :yaml_wrapper
def cache_test_config(hash)
@yaml_wrapper.dump( @file_path_utils.form_test_build_cache_path( INPUT_CONFIGURATION_CACHE_FILE), hash )
end
def cache_release_config(hash)
@yaml_wrapper.dump( @file_path_utils.form_release_build_cache_path( INPUT_CONFIGURATION_CACHE_FILE ), hash )
end
def diff_cached_test_file( filepath )
cached_filepath = @file_path_utils.form_test_build_cache_path( filepath )
if (@file_wrapper.exist?( cached_filepath ) and (!@file_wrapper.compare( filepath, cached_filepath )))
@file_wrapper.cp(filepath, cached_filepath, {:preserve => false})
return filepath
elsif (!@file_wrapper.exist?( cached_filepath ))
@file_wrapper.cp(filepath, cached_filepath, {:preserve => false})
return filepath
end
return cached_filepath
end
def diff_cached_test_config?(hash)
cached_filepath = @file_path_utils.form_test_build_cache_path(INPUT_CONFIGURATION_CACHE_FILE)
return @cacheinator_helper.diff_cached_config?( cached_filepath, hash )
end
def diff_cached_test_defines?(files)
cached_filepath = @file_path_utils.form_test_build_cache_path(DEFINES_DEPENDENCY_CACHE_FILE)
return @cacheinator_helper.diff_cached_defines?( cached_filepath, files )
end
def diff_cached_release_config?(hash)
cached_filepath = @file_path_utils.form_release_build_cache_path(INPUT_CONFIGURATION_CACHE_FILE)
return @cacheinator_helper.diff_cached_config?( cached_filepath, hash )
end
end

View File

@@ -1,35 +0,0 @@
class CacheinatorHelper
constructor :file_wrapper, :yaml_wrapper
def diff_cached_config?(cached_filepath, hash)
return false if ( not @file_wrapper.exist?(cached_filepath) )
return true if (@yaml_wrapper.load(cached_filepath) != hash)
return false
end
def diff_cached_defines?(cached_filepath, files)
changed_defines = false
current_defines = COLLECTION_DEFINES_TEST_AND_VENDOR.reject(&:empty?)
current_dependencies = Hash[files.collect { |source| [source, current_defines.dup] }]
if not @file_wrapper.exist?(cached_filepath)
@yaml_wrapper.dump(cached_filepath, current_dependencies)
return changed_defines
end
dependencies = @yaml_wrapper.load(cached_filepath)
common_dependencies = current_dependencies.select { |file, defines| dependencies.has_key?(file) }
if dependencies.values_at(*common_dependencies.keys) != common_dependencies.values
changed_defines = true
end
dependencies.merge!(current_dependencies)
@yaml_wrapper.dump(cached_filepath, dependencies)
return changed_defines
end
end

View File

@@ -1,15 +0,0 @@
require 'cmock'
class CmockBuilder
attr_accessor :cmock
def setup
@cmock = nil
end
def manufacture(cmock_config)
@cmock = CMock.new(cmock_config)
end
end

View File

@@ -1,381 +0,0 @@
require 'ceedling/defaults'
require 'ceedling/constants'
require 'ceedling/file_path_utils'
require 'deep_merge'
class Configurator
attr_reader :project_config_hash, :script_plugins, :rake_plugins
attr_accessor :project_logging, :project_debug, :project_verbosity, :sanity_checks
constructor(:configurator_setup, :configurator_builder, :configurator_plugins, :cmock_builder, :yaml_wrapper, :system_wrapper) do
@project_logging = false
@project_debug = false
@project_verbosity = Verbosity::NORMAL
@sanity_checks = TestResultsSanityChecks::NORMAL
end
def setup
# special copy of cmock config to provide to cmock for construction
@cmock_config_hash = {}
# note: project_config_hash is an instance variable so constants and accessors created
# in eval() statements in build() have something of proper scope and persistence to reference
@project_config_hash = {}
@project_config_hash_backup = {}
@script_plugins = []
@rake_plugins = []
end
def replace_flattened_config(config)
@project_config_hash.merge!(config)
@configurator_setup.build_constants_and_accessors(@project_config_hash, binding())
end
def store_config
@project_config_hash_backup = @project_config_hash.clone
end
def restore_config
@project_config_hash = @project_config_hash_backup
@configurator_setup.build_constants_and_accessors(@project_config_hash, binding())
end
def reset_defaults(config)
[:test_compiler,
:test_linker,
:test_fixture,
:test_includes_preprocessor,
:test_file_preprocessor,
:test_file_preprocessor_directives,
:test_dependencies_generator,
:release_compiler,
:release_assembler,
:release_linker,
:release_dependencies_generator].each do |tool|
config[:tools].delete(tool) if (not (config[:tools][tool].nil?))
end
end
# The default values defined in defaults.rb (e.g. DEFAULT_TOOLS_TEST) are populated
# into the given config hash
def populate_defaults(config)
new_config = DEFAULT_CEEDLING_CONFIG.deep_clone
new_config.deep_merge!(config)
config.replace(new_config)
@configurator_builder.populate_defaults( config, DEFAULT_TOOLS_TEST )
@configurator_builder.populate_defaults( config, DEFAULT_TOOLS_TEST_PREPROCESSORS ) if (config[:project][:use_test_preprocessor])
@configurator_builder.populate_defaults( config, DEFAULT_TOOLS_TEST_DEPENDENCIES ) if (config[:project][:use_deep_dependencies])
@configurator_builder.populate_defaults( config, DEFAULT_TOOLS_RELEASE ) if (config[:project][:release_build])
@configurator_builder.populate_defaults( config, DEFAULT_TOOLS_RELEASE_ASSEMBLER ) if (config[:project][:release_build] and config[:release_build][:use_assembly])
@configurator_builder.populate_defaults( config, DEFAULT_TOOLS_RELEASE_DEPENDENCIES ) if (config[:project][:release_build] and config[:project][:use_deep_dependencies])
end
def populate_unity_defaults(config)
unity = config[:unity] || {}
@runner_config = unity.merge(@runner_config || config[:test_runner] || {})
end
def populate_cmock_defaults(config)
# cmock has its own internal defaults handling, but we need to set these specific values
# so they're present for the build environment to access;
# note: these need to end up in the hash given to initialize cmock for this to be successful
cmock = config[:cmock] || {}
# yes, we're duplicating the default mock_prefix in cmock, but it's because we need CMOCK_MOCK_PREFIX always available in Ceedling's environment
cmock[:mock_prefix] = 'Mock' if (cmock[:mock_prefix].nil?)
# just because strict ordering is the way to go
cmock[:enforce_strict_ordering] = true if (cmock[:enforce_strict_ordering].nil?)
cmock[:mock_path] = File.join(config[:project][:build_root], TESTS_BASE_PATH, 'mocks') if (cmock[:mock_path].nil?)
cmock[:verbosity] = @project_verbosity if (cmock[:verbosity].nil?)
cmock[:plugins] = [] if (cmock[:plugins].nil?)
cmock[:plugins].map! { |plugin| plugin.to_sym }
cmock[:plugins] << (:cexception) if (!cmock[:plugins].include?(:cexception) and (config[:project][:use_exceptions]))
cmock[:plugins].uniq!
cmock[:unity_helper] = false if (cmock[:unity_helper].nil?)
if (cmock[:unity_helper])
cmock[:unity_helper] = [cmock[:unity_helper]] if cmock[:unity_helper].is_a? String
cmock[:includes] += cmock[:unity_helper].map{|helper| File.basename(helper) }
cmock[:includes].uniq!
end
@runner_config = cmock.merge(@runner_config || config[:test_runner] || {})
@cmock_builder.manufacture(cmock)
end
def get_runner_config
@runner_config
end
# grab tool names from yaml and insert into tool structures so available for error messages
# set up default values
def tools_setup(config)
config[:tools].each_key do |name|
tool = config[:tools][name]
# populate name if not given
tool[:name] = name.to_s if (tool[:name].nil?)
# handle inline ruby string substitution in executable
if (tool[:executable] =~ RUBY_STRING_REPLACEMENT_PATTERN)
tool[:executable].replace(@system_wrapper.module_eval(tool[:executable]))
end
# populate stderr redirect option
tool[:stderr_redirect] = StdErrRedirect::NONE if (tool[:stderr_redirect].nil?)
# populate background execution option
tool[:background_exec] = BackgroundExec::NONE if (tool[:background_exec].nil?)
# populate optional option to control verification of executable in search paths
tool[:optional] = false if (tool[:optional].nil?)
end
end
def tools_supplement_arguments(config)
tools_name_prefix = 'tools_'
config[:tools].each_key do |name|
tool = @project_config_hash[(tools_name_prefix + name.to_s).to_sym]
# smoosh in extra arguments if specified at top-level of config (useful for plugins & default gcc tools)
# arguments are squirted in at _end_ of list
top_level_tool = (tools_name_prefix + name.to_s).to_sym
if (not config[top_level_tool].nil?)
# adding and flattening is not a good idea: might over-flatten if there's array nesting in tool args
tool[:arguments].concat config[top_level_tool][:arguments]
end
end
end
def find_and_merge_plugins(config)
# plugins must be loaded before generic path evaluation & magic that happen later;
# perform path magic here as discrete step
config[:plugins][:load_paths].each do |path|
path.replace(@system_wrapper.module_eval(path)) if (path =~ RUBY_STRING_REPLACEMENT_PATTERN)
FilePathUtils::standardize(path)
end
config[:plugins][:load_paths] << FilePathUtils::standardize(Ceedling.load_path)
config[:plugins][:load_paths].uniq!
paths_hash = @configurator_plugins.add_load_paths(config)
@rake_plugins = @configurator_plugins.find_rake_plugins(config, paths_hash)
@script_plugins = @configurator_plugins.find_script_plugins(config, paths_hash)
config_plugins = @configurator_plugins.find_config_plugins(config, paths_hash)
plugin_yml_defaults = @configurator_plugins.find_plugin_yml_defaults(config, paths_hash)
plugin_hash_defaults = @configurator_plugins.find_plugin_hash_defaults(config, paths_hash)
config_plugins.each do |plugin|
plugin_config = @yaml_wrapper.load(plugin)
config.deep_merge(plugin_config)
end
plugin_yml_defaults.each do |defaults|
@configurator_builder.populate_defaults( config, @yaml_wrapper.load(defaults) )
end
plugin_hash_defaults.each do |defaults|
@configurator_builder.populate_defaults( config, defaults )
end
# special plugin setting for results printing
config[:plugins][:display_raw_test_results] = true if (config[:plugins][:display_raw_test_results].nil?)
paths_hash.each_pair { |name, path| config[:plugins][name] = path }
end
def merge_imports(config)
if config[:import]
if config[:import].is_a? Array
until config[:import].empty?
path = config[:import].shift
path = @system_wrapper.module_eval(path) if (path =~ RUBY_STRING_REPLACEMENT_PATTERN)
config.deep_merge!(@yaml_wrapper.load(path))
end
else
config[:import].each_value do |path|
if !path.nil?
path = @system_wrapper.module_eval(path) if (path =~ RUBY_STRING_REPLACEMENT_PATTERN)
config.deep_merge!(@yaml_wrapper.load(path))
end
end
end
end
config.delete(:import)
end
def eval_environment_variables(config)
config[:environment].each do |hash|
key = hash.keys[0]
value = hash[key]
items = []
interstitial = ((key == :path) ? File::PATH_SEPARATOR : '')
items = ((value.class == Array) ? hash[key] : [value])
items.each do |item|
if item.is_a? String and item =~ RUBY_STRING_REPLACEMENT_PATTERN
item.replace( @system_wrapper.module_eval( item ) )
end
end
hash[key] = items.join( interstitial )
@system_wrapper.env_set( key.to_s.upcase, hash[key] )
end
end
def eval_paths(config)
# [:plugins]:[load_paths] already handled
paths = [ # individual paths that don't follow convention processed below
config[:project][:build_root],
config[:release_build][:artifacts]]
eval_path_list( paths )
config[:paths].each_pair { |collection, paths| eval_path_list( paths ) }
config[:files].each_pair { |collection, files| eval_path_list( files ) }
# all other paths at secondary hash key level processed by convention:
# ex. [:toplevel][:foo_path] & [:toplevel][:bar_paths] are evaluated
config.each_pair { |parent, child| eval_path_list( collect_path_list( child ) ) }
end
def standardize_paths(config)
# [:plugins]:[load_paths] already handled
paths = [ # individual paths that don't follow convention processed below
config[:project][:build_root],
config[:release_build][:artifacts]] # cmock path in case it was explicitly set in config
paths.flatten.each { |path| FilePathUtils::standardize( path ) }
config[:paths].each_pair do |collection, paths|
# ensure that list is an array (i.e. handle case of list being a single string,
# or a multidimensional array)
config[:paths][collection] = [paths].flatten.map{|path| FilePathUtils::standardize( path )}
end
config[:files].each_pair { |collection, files| files.each{ |path| FilePathUtils::standardize( path ) } }
config[:tools].each_pair { |tool, config| FilePathUtils::standardize( config[:executable] ) if (config.include? :executable) }
# all other paths at secondary hash key level processed by convention:
# ex. [:toplevel][:foo_path] & [:toplevel][:bar_paths] are standardized
config.each_pair do |parent, child|
collect_path_list( child ).each { |path| FilePathUtils::standardize( path ) }
end
end
def validate(config)
# collect felonies and go straight to jail
raise if (not @configurator_setup.validate_required_sections( config ))
# collect all misdemeanors, everybody on probation
blotter = []
blotter << @configurator_setup.validate_required_section_values( config )
blotter << @configurator_setup.validate_paths( config )
blotter << @configurator_setup.validate_tools( config )
blotter << @configurator_setup.validate_plugins( config )
raise if (blotter.include?( false ))
end
# create constants and accessors (attached to this object) from given hash
def build(config, *keys)
# create flattened & expanded configuration hash
built_config = @configurator_setup.build_project_config( config, @configurator_builder.flattenify( config ) )
@project_config_hash = built_config.clone
store_config()
@configurator_setup.build_constants_and_accessors(built_config, binding())
# top-level keys disappear when we flatten, so create global constants & accessors to any specified keys
keys.each do |key|
hash = { key => config[key] }
@configurator_setup.build_constants_and_accessors(hash, binding())
end
end
# add to constants and accessors as post build step
def build_supplement(config_base, config_more)
# merge in our post-build additions to base configuration hash
config_base.deep_merge!( config_more )
# flatten our addition hash
config_more_flattened = @configurator_builder.flattenify( config_more )
# merge our flattened hash with built hash from previous build
@project_config_hash.deep_merge!( config_more_flattened )
store_config()
# create more constants and accessors
@configurator_setup.build_constants_and_accessors(config_more_flattened, binding())
# recreate constants & update accessors with new merged, base values
config_more.keys.each do |key|
hash = { key => config_base[key] }
@configurator_setup.build_constants_and_accessors(hash, binding())
end
end
def insert_rake_plugins(plugins)
plugins.each do |plugin|
@project_config_hash[:project_rakefile_component_files] << plugin
end
end
### private ###
private
def collect_path_list( container )
paths = []
container.each_key { |key| paths << container[key] if (key.to_s =~ /_path(s)?$/) } if (container.class == Hash)
return paths.flatten
end
def eval_path_list( paths )
if paths.kind_of?(Array)
paths = Array.new(paths)
end
paths.flatten.each do |path|
path.replace( @system_wrapper.module_eval( path ) ) if (path =~ RUBY_STRING_REPLACEMENT_PATTERN)
end
end
end

View File

@@ -1,475 +0,0 @@
require 'rubygems'
require 'rake' # for ext() method
require 'ceedling/file_path_utils' # for class methods
require 'ceedling/defaults'
require 'ceedling/constants' # for Verbosity constants class & base file paths
class ConfiguratorBuilder
constructor :file_system_utils, :file_wrapper, :system_wrapper
def build_global_constants(config)
config.each_pair do |key, value|
formatted_key = key.to_s.upcase
# undefine global constant if it already exists
Object.send(:remove_const, formatted_key.to_sym) if @system_wrapper.constants_include?(formatted_key)
# create global constant
Object.module_eval("#{formatted_key} = value")
end
end
def build_accessor_methods(config, context)
config.each_pair do |key, value|
# fill configurator object with accessor methods
eval("def #{key.to_s.downcase}() return @project_config_hash[:#{key.to_s}] end", context)
end
end
# create a flattened hash from the original configuration structure
def flattenify(config)
new_hash = {}
config.each_key do | parent |
# gracefully handle empty top-level entries
next if (config[parent].nil?)
case config[parent]
when Array
config[parent].each do |hash|
key = "#{parent.to_s.downcase}_#{hash.keys[0].to_s.downcase}".to_sym
new_hash[key] = hash[hash.keys[0]]
end
when Hash
config[parent].each_pair do | child, value |
key = "#{parent.to_s.downcase}_#{child.to_s.downcase}".to_sym
new_hash[key] = value
end
# handle entries with no children, only values
else
new_hash["#{parent.to_s.downcase}".to_sym] = config[parent]
end
end
return new_hash
end
def populate_defaults(config, defaults)
defaults.keys.sort.each do |section|
defaults[section].keys.sort.each do |entry|
config[section] = {} if config[section].nil?
config[section][entry] = defaults[section][entry].deep_clone if (config[section][entry].nil?)
end
end
end
def clean(in_hash)
# ensure that include files inserted into test runners have file extensions & proper ones at that
in_hash[:test_runner_includes].map!{|include| include.ext(in_hash[:extension_header])}
end
def set_build_paths(in_hash)
out_hash = {}
project_build_artifacts_root = File.join(in_hash[:project_build_root], 'artifacts')
project_build_tests_root = File.join(in_hash[:project_build_root], TESTS_BASE_PATH)
project_build_release_root = File.join(in_hash[:project_build_root], RELEASE_BASE_PATH)
paths = [
[:project_build_artifacts_root, project_build_artifacts_root, true ],
[:project_build_tests_root, project_build_tests_root, true ],
[:project_build_release_root, project_build_release_root, in_hash[:project_release_build] ],
[:project_test_artifacts_path, File.join(project_build_artifacts_root, TESTS_BASE_PATH), true ],
[:project_test_runners_path, File.join(project_build_tests_root, 'runners'), true ],
[:project_test_results_path, File.join(project_build_tests_root, 'results'), true ],
[:project_test_build_output_path, File.join(project_build_tests_root, 'out'), true ],
[:project_test_build_output_asm_path, File.join(project_build_tests_root, 'out', 'asm'), true ],
[:project_test_build_output_c_path, File.join(project_build_tests_root, 'out', 'c'), true ],
[:project_test_build_cache_path, File.join(project_build_tests_root, 'cache'), true ],
[:project_test_dependencies_path, File.join(project_build_tests_root, 'dependencies'), true ],
[:project_release_artifacts_path, File.join(project_build_artifacts_root, RELEASE_BASE_PATH), in_hash[:project_release_build] ],
[:project_release_build_cache_path, File.join(project_build_release_root, 'cache'), in_hash[:project_release_build] ],
[:project_release_build_output_path, File.join(project_build_release_root, 'out'), in_hash[:project_release_build] ],
[:project_release_build_output_asm_path, File.join(project_build_release_root, 'out', 'asm'), in_hash[:project_release_build] ],
[:project_release_build_output_c_path, File.join(project_build_release_root, 'out', 'c'), in_hash[:project_release_build] ],
[:project_release_dependencies_path, File.join(project_build_release_root, 'dependencies'), in_hash[:project_release_build] ],
[:project_log_path, File.join(in_hash[:project_build_root], 'logs'), true ],
[:project_temp_path, File.join(in_hash[:project_build_root], 'temp'), true ],
[:project_test_preprocess_includes_path, File.join(project_build_tests_root, 'preprocess/includes'), in_hash[:project_use_test_preprocessor] ],
[:project_test_preprocess_files_path, File.join(project_build_tests_root, 'preprocess/files'), in_hash[:project_use_test_preprocessor] ],
]
out_hash[:project_build_paths] = []
# fetch already set mock path
out_hash[:project_build_paths] << in_hash[:cmock_mock_path] if (in_hash[:project_use_mocks])
paths.each do |path|
build_path_name = path[0]
build_path = path[1]
build_path_add_condition = path[2]
# insert path into build paths if associated with true condition
out_hash[:project_build_paths] << build_path if build_path_add_condition
# set path symbol name and path for each entry in paths array
out_hash[build_path_name] = build_path
end
return out_hash
end
def set_force_build_filepaths(in_hash)
out_hash = {}
out_hash[:project_test_force_rebuild_filepath] = File.join( in_hash[:project_test_dependencies_path], 'force_build' )
out_hash[:project_release_force_rebuild_filepath] = File.join( in_hash[:project_release_dependencies_path], 'force_build' ) if (in_hash[:project_release_build])
return out_hash
end
def set_rakefile_components(in_hash)
out_hash = {
:project_rakefile_component_files =>
[File.join(CEEDLING_LIB, 'ceedling', 'tasks_base.rake'),
File.join(CEEDLING_LIB, 'ceedling', 'tasks_filesystem.rake'),
File.join(CEEDLING_LIB, 'ceedling', 'tasks_tests.rake'),
File.join(CEEDLING_LIB, 'ceedling', 'tasks_vendor.rake'),
File.join(CEEDLING_LIB, 'ceedling', 'rules_tests.rake')]}
out_hash[:project_rakefile_component_files] << File.join(CEEDLING_LIB, 'ceedling', 'rules_cmock.rake') if (in_hash[:project_use_mocks])
out_hash[:project_rakefile_component_files] << File.join(CEEDLING_LIB, 'ceedling', 'rules_preprocess.rake') if (in_hash[:project_use_test_preprocessor])
out_hash[:project_rakefile_component_files] << File.join(CEEDLING_LIB, 'ceedling', 'rules_tests_deep_dependencies.rake') if (in_hash[:project_use_deep_dependencies])
out_hash[:project_rakefile_component_files] << File.join(CEEDLING_LIB, 'ceedling', 'tasks_tests_deep_dependencies.rake') if (in_hash[:project_use_deep_dependencies])
out_hash[:project_rakefile_component_files] << File.join(CEEDLING_LIB, 'ceedling', 'rules_release_deep_dependencies.rake') if (in_hash[:project_release_build] and in_hash[:project_use_deep_dependencies])
out_hash[:project_rakefile_component_files] << File.join(CEEDLING_LIB, 'ceedling', 'rules_release.rake') if (in_hash[:project_release_build])
out_hash[:project_rakefile_component_files] << File.join(CEEDLING_LIB, 'ceedling', 'tasks_release_deep_dependencies.rake') if (in_hash[:project_release_build] and in_hash[:project_use_deep_dependencies])
out_hash[:project_rakefile_component_files] << File.join(CEEDLING_LIB, 'ceedling', 'tasks_release.rake') if (in_hash[:project_release_build])
return out_hash
end
def set_release_target(in_hash)
return {} if (not in_hash[:project_release_build])
release_target_file = ((in_hash[:release_build_output].nil?) ? (DEFAULT_RELEASE_TARGET_NAME.ext(in_hash[:extension_executable])) : in_hash[:release_build_output])
release_map_file = ((in_hash[:release_build_output].nil?) ? (DEFAULT_RELEASE_TARGET_NAME.ext(in_hash[:extension_map])) : in_hash[:release_build_output].ext(in_hash[:extension_map]))
return {
# tempted to make a helper method in file_path_utils? stop right there, pal. you'll introduce a cyclical dependency
:project_release_build_target => File.join(in_hash[:project_build_release_root], release_target_file),
:project_release_build_map => File.join(in_hash[:project_build_release_root], release_map_file)
}
end
def collect_project_options(in_hash)
options = []
in_hash[:project_options_paths].each do |path|
options << @file_wrapper.directory_listing( File.join(path, '*.yml') )
end
return {
:collection_project_options => options.flatten
}
end
def expand_all_path_globs(in_hash)
out_hash = {}
path_keys = []
in_hash.each_key do |key|
next if (not key.to_s[0..4] == 'paths')
path_keys << key
end
# sorted to provide assured order of traversal in test calls on mocks
path_keys.sort.each do |key|
out_hash["collection_#{key.to_s}".to_sym] = @file_system_utils.collect_paths( in_hash[key] )
end
return out_hash
end
def collect_source_and_include_paths(in_hash)
return {
:collection_paths_source_and_include =>
( in_hash[:collection_paths_source] +
in_hash[:collection_paths_include] ).select {|x| File.directory?(x)}
}
end
def collect_source_include_vendor_paths(in_hash)
extra_paths = []
extra_paths << File.join(in_hash[:cexception_vendor_path], CEXCEPTION_LIB_PATH) if (in_hash[:project_use_exceptions])
return {
:collection_paths_source_include_vendor =>
in_hash[:collection_paths_source_and_include] +
extra_paths
}
end
def collect_test_support_source_include_paths(in_hash)
return {
:collection_paths_test_support_source_include =>
(in_hash[:collection_paths_test] +
in_hash[:collection_paths_support] +
in_hash[:collection_paths_source] +
in_hash[:collection_paths_include] ).select {|x| File.directory?(x)}
}
end
def collect_vendor_paths(in_hash)
return {:collection_paths_vendor => get_vendor_paths(in_hash)}
end
def collect_test_support_source_include_vendor_paths(in_hash)
return {
:collection_paths_test_support_source_include_vendor =>
get_vendor_paths(in_hash) +
in_hash[:collection_paths_test_support_source_include]
}
end
def collect_tests(in_hash)
all_tests = @file_wrapper.instantiate_file_list
in_hash[:collection_paths_test].each do |path|
all_tests.include( File.join(path, "#{in_hash[:project_test_file_prefix]}*#{in_hash[:extension_source]}") )
end
@file_system_utils.revise_file_list( all_tests, in_hash[:files_test] )
return {:collection_all_tests => all_tests}
end
def collect_assembly(in_hash)
all_assembly = @file_wrapper.instantiate_file_list
return {:collection_all_assembly => all_assembly} if ((not in_hash[:release_build_use_assembly]) && (not in_hash[:test_build_use_assembly]))
# Sprinkle in all assembly files we can find in the source folders
in_hash[:collection_paths_source].each do |path|
all_assembly.include( File.join(path, "*#{in_hash[:extension_assembly]}") )
end
# Also add all assembly files we can find in the support folders
in_hash[:collection_paths_support].each do |path|
all_assembly.include( File.join(path, "*#{in_hash[:extension_assembly]}") )
end
# Also add files that we are explicitly adding via :files:assembly: section
@file_system_utils.revise_file_list( all_assembly, in_hash[:files_assembly] )
return {:collection_all_assembly => all_assembly}
end
def collect_source(in_hash)
all_source = @file_wrapper.instantiate_file_list
in_hash[:collection_paths_source].each do |path|
if File.exist?(path) and not File.directory?(path)
all_source.include( path )
else
all_source.include( File.join(path, "*#{in_hash[:extension_source]}") )
end
end
@file_system_utils.revise_file_list( all_source, in_hash[:files_source] )
return {:collection_all_source => all_source}
end
def collect_headers(in_hash)
all_headers = @file_wrapper.instantiate_file_list
paths =
in_hash[:collection_paths_test] +
in_hash[:collection_paths_support] +
in_hash[:collection_paths_source] +
in_hash[:collection_paths_include]
paths.each do |path|
all_headers.include( File.join(path, "*#{in_hash[:extension_header]}") )
end
@file_system_utils.revise_file_list( all_headers, in_hash[:files_include] )
return {:collection_all_headers => all_headers}
end
def collect_release_existing_compilation_input(in_hash)
release_input = @file_wrapper.instantiate_file_list
paths =
in_hash[:collection_paths_source] +
in_hash[:collection_paths_include]
paths << File.join(in_hash[:cexception_vendor_path], CEXCEPTION_LIB_PATH) if (in_hash[:project_use_exceptions])
paths.each do |path|
release_input.include( File.join(path, "*#{in_hash[:extension_header]}") )
if File.exist?(path) and not File.directory?(path)
release_input.include( path )
else
release_input.include( File.join(path, "*#{in_hash[:extension_source]}") )
end
end
@file_system_utils.revise_file_list( release_input, in_hash[:files_source] )
@file_system_utils.revise_file_list( release_input, in_hash[:files_include] )
# finding assembly files handled explicitly through other means
return {:collection_release_existing_compilation_input => release_input}
end
def collect_all_existing_compilation_input(in_hash)
all_input = @file_wrapper.instantiate_file_list
paths =
in_hash[:collection_paths_test] +
in_hash[:collection_paths_support] +
in_hash[:collection_paths_source] +
in_hash[:collection_paths_include] +
[File.join(in_hash[:unity_vendor_path], UNITY_LIB_PATH)]
paths << File.join(in_hash[:cexception_vendor_path], CEXCEPTION_LIB_PATH) if (in_hash[:project_use_exceptions])
paths << File.join(in_hash[:cmock_vendor_path], CMOCK_LIB_PATH) if (in_hash[:project_use_mocks])
paths.each do |path|
all_input.include( File.join(path, "*#{in_hash[:extension_header]}") )
if File.exist?(path) and not File.directory?(path)
all_input.include( path )
else
all_input.include( File.join(path, "*#{in_hash[:extension_source]}") )
all_input.include( File.join(path, "*#{in_hash[:extension_assembly]}") ) if (defined?(TEST_BUILD_USE_ASSEMBLY) && TEST_BUILD_USE_ASSEMBLY)
end
end
@file_system_utils.revise_file_list( all_input, in_hash[:files_test] )
@file_system_utils.revise_file_list( all_input, in_hash[:files_support] )
@file_system_utils.revise_file_list( all_input, in_hash[:files_source] )
@file_system_utils.revise_file_list( all_input, in_hash[:files_include] )
# finding assembly files handled explicitly through other means
return {:collection_all_existing_compilation_input => all_input}
end
def get_vendor_defines(in_hash)
defines = in_hash[:unity_defines].clone
defines.concat(in_hash[:cmock_defines]) if (in_hash[:project_use_mocks])
defines.concat(in_hash[:cexception_defines]) if (in_hash[:project_use_exceptions])
return defines
end
def collect_vendor_defines(in_hash)
return {:collection_defines_vendor => get_vendor_defines(in_hash)}
end
def collect_test_and_vendor_defines(in_hash)
defines = in_hash[:defines_test].clone
vendor_defines = get_vendor_defines(in_hash)
defines.concat(vendor_defines) if vendor_defines
return {:collection_defines_test_and_vendor => defines}
end
def collect_release_and_vendor_defines(in_hash)
release_defines = in_hash[:defines_release].clone
release_defines.concat(in_hash[:cexception_defines]) if (in_hash[:project_use_exceptions])
return {:collection_defines_release_and_vendor => release_defines}
end
def collect_release_artifact_extra_link_objects(in_hash)
objects = []
# no build paths here so plugins can remap if necessary (i.e. path mapping happens at runtime)
objects << CEXCEPTION_C_FILE.ext( in_hash[:extension_object] ) if (in_hash[:project_use_exceptions])
return {:collection_release_artifact_extra_link_objects => objects}
end
def collect_test_fixture_extra_link_objects(in_hash)
# Note: Symbols passed to compiler at command line can change Unity and CException behavior / configuration;
# we also handle those dependencies elsewhere in compilation dependencies
sources = [UNITY_C_FILE]
in_hash[:files_support].each { |file| sources << file }
# we don't include paths here because use of plugins or mixing different compilers may require different build paths
sources << CEXCEPTION_C_FILE if (in_hash[:project_use_exceptions])
sources << CMOCK_C_FILE if (in_hash[:project_use_mocks])
# if we're using mocks & a unity helper is defined & that unity helper includes a source file component (not only a header of macros),
# then link in the unity_helper object file too
if ( in_hash[:project_use_mocks] and in_hash[:cmock_unity_helper] )
in_hash[:cmock_unity_helper].each do |helper|
if @file_wrapper.exist?(helper.ext(in_hash[:extension_source]))
sources << helper
end
end
end
# create object files from all the sources
objects = sources.map { |file| File.basename(file) }
# no build paths here so plugins can remap if necessary (i.e. path mapping happens at runtime)
objects.map! { |object| object.ext(in_hash[:extension_object]) }
return { :collection_all_support => sources,
:collection_test_fixture_extra_link_objects => objects
}
end
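# Illustrative sketch, not part of the original file: with mocks and exceptions enabled and a
# hypothetical support file 'support/board_stub.c', the method above would yield roughly
#   :collection_all_support => ['unity.c', 'support/board_stub.c', 'CException.c', 'cmock.c']
#   :collection_test_fixture_extra_link_objects => ['unity.o', 'board_stub.o', 'CException.o', 'cmock.o']
# (object extension taken from :extension_object, '.o' by default)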
private
def get_vendor_paths(in_hash)
vendor_paths = []
vendor_paths << File.join(in_hash[:unity_vendor_path], UNITY_LIB_PATH)
vendor_paths << File.join(in_hash[:cexception_vendor_path], CEXCEPTION_LIB_PATH) if (in_hash[:project_use_exceptions])
vendor_paths << File.join(in_hash[:cmock_vendor_path], CMOCK_LIB_PATH) if (in_hash[:project_use_mocks])
vendor_paths << in_hash[:cmock_mock_path] if (in_hash[:project_use_mocks])
return vendor_paths
end
end

@ -1,131 +0,0 @@
require 'ceedling/constants'
class ConfiguratorPlugins
constructor :stream_wrapper, :file_wrapper, :system_wrapper
attr_reader :rake_plugins, :script_plugins
def setup
@rake_plugins = []
@script_plugins = []
end
def add_load_paths(config)
plugin_paths = {}
config[:plugins][:enabled].each do |plugin|
config[:plugins][:load_paths].each do |root|
path = File.join(root, plugin)
is_script_plugin = ( not @file_wrapper.directory_listing( File.join( path, 'lib', '*.rb' ) ).empty? )
is_rake_plugin = ( not @file_wrapper.directory_listing( File.join( path, '*.rake' ) ).empty? )
if is_script_plugin or is_rake_plugin
plugin_paths[(plugin + '_path').to_sym] = path
if is_script_plugin
@system_wrapper.add_load_path( File.join( path, 'lib') )
@system_wrapper.add_load_path( File.join( path, 'config') )
end
break
end
end
end
return plugin_paths
end
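# Illustrative sketch, not part of the original file: with :plugins:enabled: [module_generator]
# and :plugins:load_paths: ['vendor/ceedling/plugins'] (a hypothetical layout), add_load_paths
# would return { :module_generator_path => 'vendor/ceedling/plugins/module_generator' },
# provided that directory contains lib/*.rb or *.rake files.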
# gather up and return .rake filepaths that exist on-disk
def find_rake_plugins(config, plugin_paths)
@rake_plugins = []
plugins_with_path = []
config[:plugins][:enabled].each do |plugin|
if path = plugin_paths[(plugin + '_path').to_sym]
rake_plugin_path = File.join(path, "#{plugin}.rake")
if (@file_wrapper.exist?(rake_plugin_path))
plugins_with_path << rake_plugin_path
@rake_plugins << plugin
end
end
end
return plugins_with_path
end
# gather up and return just names of .rb classes that exist on-disk
def find_script_plugins(config, plugin_paths)
@script_plugins = []
config[:plugins][:enabled].each do |plugin|
if path = plugin_paths[(plugin + '_path').to_sym]
script_plugin_path = File.join(path, "lib", "#{plugin}.rb")
if @file_wrapper.exist?(script_plugin_path)
@script_plugins << plugin
end
end
end
return @script_plugins
end
# gather up and return configuration .yml filepaths that exist on-disk
def find_config_plugins(config, plugin_paths)
plugins_with_path = []
config[:plugins][:enabled].each do |plugin|
if path = plugin_paths[(plugin + '_path').to_sym]
config_plugin_path = File.join(path, "config", "#{plugin}.yml")
if @file_wrapper.exist?(config_plugin_path)
plugins_with_path << config_plugin_path
end
end
end
return plugins_with_path
end
# gather up and return default .yml filepaths that exist on-disk
def find_plugin_yml_defaults(config, plugin_paths)
defaults_with_path = []
config[:plugins][:enabled].each do |plugin|
if path = plugin_paths[(plugin + '_path').to_sym]
default_path = File.join(path, 'config', 'defaults.yml')
if @file_wrapper.exist?(default_path)
defaults_with_path << default_path
end
end
end
return defaults_with_path
end
# gather up and return
def find_plugin_hash_defaults(config, plugin_paths)
defaults_hash = []
config[:plugins][:enabled].each do |plugin|
if path = plugin_paths[(plugin + '_path').to_sym]
default_path = File.join(path, "config", "defaults_#{plugin}.rb")
if @file_wrapper.exist?(default_path)
@system_wrapper.require_file( "defaults_#{plugin}.rb")
object = get_default_config()
defaults_hash << object
end
end
end
return defaults_hash
end
end

@ -1,128 +0,0 @@
# add sort-ability to symbol so we can order keys array in hash for test-ability
class Symbol
include Comparable
def <=>(other)
self.to_s <=> other.to_s
end
end
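# Illustrative sketch, not part of the original file: with the Comparable patch above,
# arrays of symbol keys sort deterministically by their string form, e.g.
#   [:paths_source, :paths_include, :paths_test].sort
#   # => [:paths_include, :paths_source, :paths_test]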
class ConfiguratorSetup
constructor :configurator_builder, :configurator_validator, :configurator_plugins, :stream_wrapper
def build_project_config(config, flattened_config)
### flesh out config
@configurator_builder.clean(flattened_config)
### add to hash values we build up from configuration & file system contents
flattened_config.merge!(@configurator_builder.set_build_paths(flattened_config))
flattened_config.merge!(@configurator_builder.set_force_build_filepaths(flattened_config))
flattened_config.merge!(@configurator_builder.set_rakefile_components(flattened_config))
flattened_config.merge!(@configurator_builder.set_release_target(flattened_config))
flattened_config.merge!(@configurator_builder.collect_project_options(flattened_config))
### iterate through all entries in paths section and expand any & all globs to actual paths
flattened_config.merge!(@configurator_builder.expand_all_path_globs(flattened_config))
flattened_config.merge!(@configurator_builder.collect_vendor_paths(flattened_config))
flattened_config.merge!(@configurator_builder.collect_source_and_include_paths(flattened_config))
flattened_config.merge!(@configurator_builder.collect_source_include_vendor_paths(flattened_config))
flattened_config.merge!(@configurator_builder.collect_test_support_source_include_paths(flattened_config))
flattened_config.merge!(@configurator_builder.collect_test_support_source_include_vendor_paths(flattened_config))
flattened_config.merge!(@configurator_builder.collect_tests(flattened_config))
flattened_config.merge!(@configurator_builder.collect_assembly(flattened_config))
flattened_config.merge!(@configurator_builder.collect_source(flattened_config))
flattened_config.merge!(@configurator_builder.collect_headers(flattened_config))
flattened_config.merge!(@configurator_builder.collect_release_existing_compilation_input(flattened_config))
flattened_config.merge!(@configurator_builder.collect_all_existing_compilation_input(flattened_config))
flattened_config.merge!(@configurator_builder.collect_vendor_defines(flattened_config))
flattened_config.merge!(@configurator_builder.collect_test_and_vendor_defines(flattened_config))
flattened_config.merge!(@configurator_builder.collect_release_and_vendor_defines(flattened_config))
flattened_config.merge!(@configurator_builder.collect_release_artifact_extra_link_objects(flattened_config))
flattened_config.merge!(@configurator_builder.collect_test_fixture_extra_link_objects(flattened_config))
return flattened_config
end
def build_constants_and_accessors(config, context)
@configurator_builder.build_global_constants(config)
@configurator_builder.build_accessor_methods(config, context)
end
def validate_required_sections(config)
validation = []
validation << @configurator_validator.exists?(config, :project)
validation << @configurator_validator.exists?(config, :paths)
return false if (validation.include?(false))
return true
end
def validate_required_section_values(config)
validation = []
validation << @configurator_validator.exists?(config, :project, :build_root)
validation << @configurator_validator.exists?(config, :paths, :test)
validation << @configurator_validator.exists?(config, :paths, :source)
return false if (validation.include?(false))
return true
end
def validate_paths(config)
validation = []
if config[:cmock][:unity_helper]
config[:cmock][:unity_helper].each do |path|
validation << @configurator_validator.validate_filepath_simple( path, :cmock, :unity_helper )
end
end
config[:project][:options_paths].each do |path|
validation << @configurator_validator.validate_filepath_simple( path, :project, :options_paths )
end
config[:plugins][:load_paths].each do |path|
validation << @configurator_validator.validate_filepath_simple( path, :plugins, :load_paths )
end
config[:paths].keys.sort.each do |key|
validation << @configurator_validator.validate_path_list(config, :paths, key)
end
return false if (validation.include?(false))
return true
end
def validate_tools(config)
validation = []
config[:tools].keys.sort.each do |key|
validation << @configurator_validator.exists?(config, :tools, key, :executable)
validation << @configurator_validator.validate_executable_filepath(config, :tools, key, :executable) if (not config[:tools][key][:optional])
validation << @configurator_validator.validate_tool_stderr_redirect(config, :tools, key)
end
return false if (validation.include?(false))
return true
end
def validate_plugins(config)
missing_plugins =
Set.new( config[:plugins][:enabled] ) -
Set.new( @configurator_plugins.rake_plugins ) -
Set.new( @configurator_plugins.script_plugins )
missing_plugins.each do |plugin|
@stream_wrapper.stderr_puts("ERROR: Ceedling plugin '#{plugin}' contains no rake or ruby class entry point. (Misspelled or missing files?)")
end
return ( (missing_plugins.size > 0) ? false : true )
end
end

@ -1,193 +0,0 @@
require 'rubygems'
require 'rake' # for ext()
require 'ceedling/constants'
require 'ceedling/tool_executor' # for argument replacement pattern
require 'ceedling/file_path_utils' # for glob handling class methods
class ConfiguratorValidator
constructor :file_wrapper, :stream_wrapper, :system_wrapper
# walk into config hash verify existence of data at key depth
def exists?(config, *keys)
hash = retrieve_value(config, keys)
exist = !hash[:value].nil?
if (not exist)
# no verbosity checking since this is lowest level anyhow & verbosity checking depends on configurator
@stream_wrapper.stderr_puts("ERROR: Required config file entry #{format_key_sequence(keys, hash[:depth])} does not exist.")
end
return exist
end
# walk into config hash. verify directory path(s) at given key depth
def validate_path_list(config, *keys)
hash = retrieve_value(config, keys)
list = hash[:value]
# return early if we couldn't walk into hash and find a value
return false if (list.nil?)
path_list = []
exist = true
case list
when String then path_list << list
when Array then path_list = list
end
path_list.each do |path|
base_path = FilePathUtils::extract_path(path) # lop off add/subtract notation & glob specifiers
if (not @file_wrapper.exist?(base_path))
# no verbosity checking since this is lowest level anyhow & verbosity checking depends on configurator
@stream_wrapper.stderr_puts("ERROR: Config path #{format_key_sequence(keys, hash[:depth])}['#{base_path}'] does not exist on disk.")
exist = false
end
end
return exist
end
# simple path verification
def validate_filepath_simple(path, *keys)
validate_path = path
if (not @file_wrapper.exist?(validate_path))
# no verbosity checking since this is lowest level anyhow & verbosity checking depends on configurator
@stream_wrapper.stderr_puts("ERROR: Config path '#{validate_path}' associated with #{format_key_sequence(keys, keys.size)} does not exist on disk.")
return false
end
return true
end
# walk into config hash. verify specified file exists.
def validate_filepath(config, *keys)
hash = retrieve_value(config, keys)
filepath = hash[:value]
# return early if we couldn't walk into hash and find a value
return false if (filepath.nil?)
# skip everything if we've got an argument replacement pattern
return true if (filepath =~ TOOL_EXECUTOR_ARGUMENT_REPLACEMENT_PATTERN)
if (not @file_wrapper.exist?(filepath))
# See if we can deal with it internally.
if GENERATED_DIR_PATH.include?(filepath)
# we already made this directory before let's make it again.
FileUtils.mkdir_p File.join(File.dirname(__FILE__), filepath)
@stream_wrapper.stderr_puts("WARNING: Generated filepath #{format_key_sequence(keys, hash[:depth])}['#{filepath}'] does not exist on disk. Recreating")
else
# no verbosity checking since this is lowest level anyhow & verbosity checking depends on configurator
@stream_wrapper.stderr_puts("ERROR: Config filepath #{format_key_sequence(keys, hash[:depth])}['#{filepath}'] does not exist on disk.")
return false
end
end
return true
end
# walk into config hash. verify specified file exists.
def validate_executable_filepath(config, *keys)
exe_extension = config[:extension][:executable]
hash = retrieve_value(config, keys)
filepath = hash[:value]
# return early if we couldn't walk into hash and find a value
return false if (filepath.nil?)
# skip everything if we've got an argument replacement pattern
return true if (filepath =~ TOOL_EXECUTOR_ARGUMENT_REPLACEMENT_PATTERN)
# if there's no path included, verify file exists somewhere in system search paths
if (not filepath.include?('/'))
exists = false
@system_wrapper.search_paths.each do |path|
if (@file_wrapper.exist?( File.join(path, filepath)) )
exists = true
break
end
if (@file_wrapper.exist?( (File.join(path, filepath)).ext( exe_extension ) ))
exists = true
break
elsif (@system_wrapper.windows? and @file_wrapper.exist?( (File.join(path, filepath)).ext( EXTENSION_WIN_EXE ) ))
exists = true
break
end
end
if (not exists)
# no verbosity checking since this is lowest level anyhow & verbosity checking depends on configurator
@stream_wrapper.stderr_puts("ERROR: Config filepath #{format_key_sequence(keys, hash[:depth])}['#{filepath}'] does not exist in system search paths.")
return false
end
# if there is a path included, check that explicit filepath exists
else
if (not @file_wrapper.exist?(filepath))
# no verbosity checking since this is lowest level anyhow & verbosity checking depends on configurator
@stream_wrapper.stderr_puts("ERROR: Config filepath #{format_key_sequence(keys, hash[:depth])}['#{filepath}'] does not exist on disk.")
return false
end
end
return true
end
def validate_tool_stderr_redirect(config, tools, tool)
redirect = config[tools][tool][:stderr_redirect]
if (redirect.class == Symbol)
# map constants and force to array of strings for runtime universality across ruby versions
if (not StdErrRedirect.constants.map{|constant| constant.to_s}.include?(redirect.to_s.upcase))
error = "ERROR: [:#{tools}][:#{tool}][:stderr_redirect][:#{redirect}] is not a recognized option " +
"{#{StdErrRedirect.constants.map{|constant| ':' + constant.to_s.downcase}.join(', ')}}."
@stream_wrapper.stderr_puts(error)
return false
end
end
return true
end
private #########################################
def retrieve_value(config, keys)
value = nil
hash = config
depth = 0
# walk into hash & extract value at requested key sequence
keys.each do |symbol|
depth += 1
if (not hash[symbol].nil?)
hash = hash[symbol]
value = hash
else
value = nil
break
end
end
return {:value => value, :depth => depth}
end
def format_key_sequence(keys, depth)
walked_keys = keys.slice(0, depth)
formatted_keys = walked_keys.map{|key| "[:#{key.to_s}]"}
return formatted_keys.join
end
end
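# Illustrative sketch, not part of the original file: retrieve_value() walks the config hash
# key by key and reports how deep it got, which format_key_sequence() then renders, e.g.
#   retrieve_value({:project => {:build_root => '_build'}}, [:project, :build_root])
#   # => {:value => '_build', :depth => 2}
#   format_key_sequence([:project, :build_root], 2)   # => "[:project][:build_root]"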

@ -1,99 +0,0 @@
class Verbosity
SILENT = 0 # as silent as possible (though there are some messages that must be spit out)
ERRORS = 1 # only errors
COMPLAIN = 2 # spit out errors and warnings/notices
NORMAL = 3 # errors, warnings/notices, standard status messages
OBNOXIOUS = 4 # all messages including extra verbose output (used for lite debugging / verification)
DEBUG = 5 # special extra verbose output for hardcore debugging
end
class TestResultsSanityChecks
NONE = 0 # no sanity checking of test results
NORMAL = 1 # perform non-problematic checks
THOROUGH = 2 # perform checks that require inside knowledge of system workings
end
class StdErrRedirect
NONE = :none
AUTO = :auto
WIN = :win
UNIX = :unix
TCSH = :tcsh
end
class BackgroundExec
NONE = :none
AUTO = :auto
WIN = :win
UNIX = :unix
end
unless defined?(PROJECT_ROOT)
PROJECT_ROOT = Dir.pwd()
end
GENERATED_DIR_PATH = [['vendor', 'ceedling'], 'src', 'test', ['test', 'support'], 'build'].map{|p| File.join(*p)}
EXTENSION_WIN_EXE = '.exe'
EXTENSION_NONWIN_EXE = '.out'
CEXCEPTION_ROOT_PATH = 'c_exception'
CEXCEPTION_LIB_PATH = "#{CEXCEPTION_ROOT_PATH}/lib"
CEXCEPTION_C_FILE = 'CException.c'
CEXCEPTION_H_FILE = 'CException.h'
UNITY_ROOT_PATH = 'unity'
UNITY_LIB_PATH = "#{UNITY_ROOT_PATH}/src"
UNITY_C_FILE = 'unity.c'
UNITY_H_FILE = 'unity.h'
UNITY_INTERNALS_H_FILE = 'unity_internals.h'
CMOCK_ROOT_PATH = 'cmock'
CMOCK_LIB_PATH = "#{CMOCK_ROOT_PATH}/src"
CMOCK_C_FILE = 'cmock.c'
CMOCK_H_FILE = 'cmock.h'
DEFAULT_CEEDLING_MAIN_PROJECT_FILE = 'project.yml' unless defined?(DEFAULT_CEEDLING_MAIN_PROJECT_FILE) # main project file
DEFAULT_CEEDLING_USER_PROJECT_FILE = 'user.yml' unless defined?(DEFAULT_CEEDLING_USER_PROJECT_FILE) # supplemental user config file
INPUT_CONFIGURATION_CACHE_FILE = 'input.yml' unless defined?(INPUT_CONFIGURATION_CACHE_FILE) # input configuration file dump
DEFINES_DEPENDENCY_CACHE_FILE = 'defines_dependency.yml' unless defined?(DEFINES_DEPENDENCY_CACHE_FILE) # preprocessor definitions for files
TEST_ROOT_NAME = 'test' unless defined?(TEST_ROOT_NAME)
TEST_TASK_ROOT = TEST_ROOT_NAME + ':' unless defined?(TEST_TASK_ROOT)
TEST_SYM = TEST_ROOT_NAME.to_sym unless defined?(TEST_SYM)
RELEASE_ROOT_NAME = 'release' unless defined?(RELEASE_ROOT_NAME)
RELEASE_TASK_ROOT = RELEASE_ROOT_NAME + ':' unless defined?(RELEASE_TASK_ROOT)
RELEASE_SYM = RELEASE_ROOT_NAME.to_sym unless defined?(RELEASE_SYM)
REFRESH_ROOT_NAME = 'refresh' unless defined?(REFRESH_ROOT_NAME)
REFRESH_TASK_ROOT = REFRESH_ROOT_NAME + ':' unless defined?(REFRESH_TASK_ROOT)
REFRESH_SYM = REFRESH_ROOT_NAME.to_sym unless defined?(REFRESH_SYM)
UTILS_ROOT_NAME = 'utils' unless defined?(UTILS_ROOT_NAME)
UTILS_TASK_ROOT = UTILS_ROOT_NAME + ':' unless defined?(UTILS_TASK_ROOT)
UTILS_SYM = UTILS_ROOT_NAME.to_sym unless defined?(UTILS_SYM)
OPERATION_COMPILE_SYM = :compile unless defined?(OPERATION_COMPILE_SYM)
OPERATION_ASSEMBLE_SYM = :assemble unless defined?(OPERATION_ASSEMBLE_SYM)
OPERATION_LINK_SYM = :link unless defined?(OPERATION_LINK_SYM)
RUBY_STRING_REPLACEMENT_PATTERN = /#\{.+\}/
RUBY_EVAL_REPLACEMENT_PATTERN = /^\{(.+)\}$/
TOOL_EXECUTOR_ARGUMENT_REPLACEMENT_PATTERN = /(\$\{(\d+)\})/
TEST_STDOUT_STATISTICS_PATTERN = /\n-+\s*(\d+)\s+Tests\s+(\d+)\s+Failures\s+(\d+)\s+Ignored\s+(OK|FAIL)\s*/i
NULL_FILE_PATH = '/dev/null'
TESTS_BASE_PATH = TEST_ROOT_NAME
RELEASE_BASE_PATH = RELEASE_ROOT_NAME
VENDORS_FILES = %w(unity UnityHelper cmock CException).freeze
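# Illustrative sketch, not part of the original file: the argument replacement pattern above
# captures the numbered placeholders used throughout the default tool definitions, e.g.
#   '-o "${2}"'.scan(TOOL_EXECUTOR_ARGUMENT_REPLACEMENT_PATTERN)   # => [["${2}", "2"]]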

@ -1,471 +0,0 @@
require 'ceedling/constants'
require 'ceedling/system_wrapper'
require 'ceedling/file_path_utils'
#this should be defined already, but not always during system specs
CEEDLING_VENDOR = File.expand_path(File.dirname(__FILE__) + '/../../vendor') unless defined? CEEDLING_VENDOR
CEEDLING_PLUGINS = [] unless defined? CEEDLING_PLUGINS
DEFAULT_TEST_COMPILER_TOOL = {
:executable => ENV['CC'].nil? ? FilePathUtils.os_executable_ext('gcc').freeze : ENV['CC'].split[0],
:name => 'default_test_compiler'.freeze,
:stderr_redirect => StdErrRedirect::NONE.freeze,
:background_exec => BackgroundExec::NONE.freeze,
:optional => false.freeze,
:arguments => [
ENV['CC'].nil? ? "" : ENV['CC'].split[1..-1],
ENV['CPPFLAGS'].nil? ? "" : ENV['CPPFLAGS'].split,
{"-I\"$\"" => 'COLLECTION_PATHS_TEST_SUPPORT_SOURCE_INCLUDE_VENDOR'}.freeze,
{"-I\"$\"" => 'COLLECTION_PATHS_TEST_TOOLCHAIN_INCLUDE'}.freeze,
{"-D$" => 'COLLECTION_DEFINES_TEST_AND_VENDOR'}.freeze,
"-DGNU_COMPILER".freeze,
"-g".freeze,
ENV['CFLAGS'].nil? ? "" : ENV['CFLAGS'].split,
"-c \"${1}\"".freeze,
"-o \"${2}\"".freeze,
# gcc's list file output options are complex; no use of ${3} parameter in default config
"-MMD".freeze,
"-MF \"${4}\"".freeze,
].freeze
}
DEFAULT_TEST_LINKER_TOOL = {
:executable => ENV['CCLD'].nil? ? FilePathUtils.os_executable_ext('gcc').freeze : ENV['CCLD'].split[0],
:name => 'default_test_linker'.freeze,
:stderr_redirect => StdErrRedirect::NONE.freeze,
:background_exec => BackgroundExec::NONE.freeze,
:optional => false.freeze,
:arguments => [
ENV['CCLD'].nil? ? "" : ENV['CCLD'].split[1..-1],
ENV['CFLAGS'].nil? ? "" : ENV['CFLAGS'].split,
ENV['LDFLAGS'].nil? ? "" : ENV['LDFLAGS'].split,
"\"${1}\"".freeze,
"${5}".freeze,
"-o \"${2}\"".freeze,
"".freeze,
"${4}".freeze,
ENV['LDLIBS'].nil? ? "" : ENV['LDLIBS'].split
].freeze
}
DEFAULT_TEST_FIXTURE_TOOL = {
:executable => '${1}'.freeze,
:name => 'default_test_fixture'.freeze,
:stderr_redirect => StdErrRedirect::AUTO.freeze,
:background_exec => BackgroundExec::NONE.freeze,
:optional => false.freeze,
:arguments => [].freeze
}
DEFAULT_TEST_INCLUDES_PREPROCESSOR_TOOL = {
:executable => ENV['CC'].nil? ? FilePathUtils.os_executable_ext('gcc').freeze : ENV['CC'].split[0],
:name => 'default_test_includes_preprocessor'.freeze,
:stderr_redirect => StdErrRedirect::NONE.freeze,
:background_exec => BackgroundExec::NONE.freeze,
:optional => false.freeze,
:arguments => [
ENV['CC'].nil? ? "" : ENV['CC'].split[1..-1],
ENV['CPPFLAGS'].nil? ? "" : ENV['CPPFLAGS'].split,
'-E'.freeze, # OSX clang
'-MM'.freeze,
'-MG'.freeze,
# avoid some possibility of deep system lib header file complications by omitting vendor paths
# if cpp is run on *nix system, escape spaces in paths; if cpp on windows just use the paths collection as is
# {"-I\"$\"" => "{SystemWrapper.windows? ? COLLECTION_PATHS_TEST_SUPPORT_SOURCE_INCLUDE : COLLECTION_PATHS_TEST_SUPPORT_SOURCE_INCLUDE.map{|path| path.gsub(\/ \/, \'\\\\ \') }}"}.freeze,
{"-I\"$\"" => 'COLLECTION_PATHS_TEST_SUPPORT_SOURCE_INCLUDE_VENDOR'}.freeze,
{"-I\"$\"" => 'COLLECTION_PATHS_TEST_TOOLCHAIN_INCLUDE'}.freeze,
{"-D$" => 'COLLECTION_DEFINES_TEST_AND_VENDOR'}.freeze,
{"-D$" => 'DEFINES_TEST_PREPROCESS'}.freeze,
"-DGNU_COMPILER".freeze, # OSX clang
# '-nostdinc'.freeze, # disabled temporarily due to stdio access violations on OSX
"\"${1}\"".freeze
].freeze
}
DEFAULT_TEST_FILE_PREPROCESSOR_TOOL = {
:executable => ENV['CC'].nil? ? FilePathUtils.os_executable_ext('gcc').freeze : ENV['CC'].split[0],
:name => 'default_test_file_preprocessor'.freeze,
:stderr_redirect => StdErrRedirect::NONE.freeze,
:background_exec => BackgroundExec::NONE.freeze,
:optional => false.freeze,
:arguments => [
ENV['CC'].nil? ? "" : ENV['CC'].split[1..-1],
ENV['CPPFLAGS'].nil? ? "" : ENV['CPPFLAGS'].split,
'-E'.freeze,
{"-I\"$\"" => 'COLLECTION_PATHS_TEST_SUPPORT_SOURCE_INCLUDE_VENDOR'}.freeze,
{"-I\"$\"" => 'COLLECTION_PATHS_TEST_TOOLCHAIN_INCLUDE'}.freeze,
{"-D$" => 'COLLECTION_DEFINES_TEST_AND_VENDOR'}.freeze,
{"-D$" => 'DEFINES_TEST_PREPROCESS'}.freeze,
"-DGNU_COMPILER".freeze,
# '-nostdinc'.freeze, # disabled temporarily due to stdio access violations on OSX
"\"${1}\"".freeze,
"-o \"${2}\"".freeze
].freeze
}
DEFAULT_TEST_FILE_PREPROCESSOR_DIRECTIVES_TOOL = {
:executable => FilePathUtils.os_executable_ext('gcc').freeze,
:name => 'default_test_file_preprocessor_directives'.freeze,
:stderr_redirect => StdErrRedirect::NONE.freeze,
:background_exec => BackgroundExec::NONE.freeze,
:optional => false.freeze,
:arguments => [
'-E'.freeze,
{"-I\"$\"" => 'COLLECTION_PATHS_TEST_SUPPORT_SOURCE_INCLUDE_VENDOR'}.freeze,
{"-I\"$\"" => 'COLLECTION_PATHS_TEST_TOOLCHAIN_INCLUDE'}.freeze,
{"-D$" => 'COLLECTION_DEFINES_TEST_AND_VENDOR'}.freeze,
{"-D$" => 'DEFINES_TEST_PREPROCESS'}.freeze,
"-DGNU_COMPILER".freeze,
'-fdirectives-only'.freeze,
# '-nostdinc'.freeze, # disabled temporarily due to stdio access violations on OSX
"\"${1}\"".freeze,
"-o \"${2}\"".freeze
].freeze
}
# Disable the -MD flag for OSX LLVM Clang, since unsupported
if RUBY_PLATFORM =~ /darwin/ && `gcc --version 2> /dev/null` =~ /Apple LLVM version .* \(clang/m # OSX w/LLVM Clang
MD_FLAG = '' # Clang doesn't support the -MD flag
else
MD_FLAG = '-MD'
end
DEFAULT_TEST_DEPENDENCIES_GENERATOR_TOOL = {
:executable => ENV['CC'].nil? ? FilePathUtils.os_executable_ext('gcc').freeze : ENV['CC'].split[0],
:name => 'default_test_dependencies_generator'.freeze,
:stderr_redirect => StdErrRedirect::NONE.freeze,
:background_exec => BackgroundExec::NONE.freeze,
:optional => false.freeze,
:arguments => [
ENV['CC'].nil? ? "" : ENV['CC'].split[1..-1],
ENV['CPPFLAGS'].nil? ? "" : ENV['CPPFLAGS'].split,
'-E'.freeze,
{"-I\"$\"" => 'COLLECTION_PATHS_TEST_SUPPORT_SOURCE_INCLUDE_VENDOR'}.freeze,
{"-I\"$\"" => 'COLLECTION_PATHS_TEST_TOOLCHAIN_INCLUDE'}.freeze,
{"-D$" => 'COLLECTION_DEFINES_TEST_AND_VENDOR'}.freeze,
{"-D$" => 'DEFINES_TEST_PREPROCESS'}.freeze,
"-DGNU_COMPILER".freeze,
"-MT \"${3}\"".freeze,
'-MM'.freeze,
MD_FLAG.freeze,
'-MG'.freeze,
"-MF \"${2}\"".freeze,
"-c \"${1}\"".freeze,
# '-nostdinc'.freeze,
].freeze
}
DEFAULT_RELEASE_DEPENDENCIES_GENERATOR_TOOL = {
:executable => ENV['CC'].nil? ? FilePathUtils.os_executable_ext('gcc').freeze : ENV['CC'].split[0],
:name => 'default_release_dependencies_generator'.freeze,
:stderr_redirect => StdErrRedirect::NONE.freeze,
:background_exec => BackgroundExec::NONE.freeze,
:optional => false.freeze,
:arguments => [
ENV['CC'].nil? ? "" : ENV['CC'].split[1..-1],
ENV['CPPFLAGS'].nil? ? "" : ENV['CPPFLAGS'].split,
'-E'.freeze,
{"-I\"$\"" => 'COLLECTION_PATHS_SOURCE_INCLUDE_VENDOR'}.freeze,
{"-I\"$\"" => 'COLLECTION_PATHS_RELEASE_TOOLCHAIN_INCLUDE'}.freeze,
{"-D$" => 'COLLECTION_DEFINES_RELEASE_AND_VENDOR'}.freeze,
{"-D$" => 'DEFINES_RELEASE_PREPROCESS'}.freeze,
"-DGNU_COMPILER".freeze,
"-MT \"${3}\"".freeze,
'-MM'.freeze,
MD_FLAG.freeze,
'-MG'.freeze,
"-MF \"${2}\"".freeze,
"-c \"${1}\"".freeze,
# '-nostdinc'.freeze,
].freeze
}
DEFAULT_RELEASE_COMPILER_TOOL = {
:executable => ENV['CC'].nil? ? FilePathUtils.os_executable_ext('gcc').freeze : ENV['CC'].split[0],
:name => 'default_release_compiler'.freeze,
:stderr_redirect => StdErrRedirect::NONE.freeze,
:background_exec => BackgroundExec::NONE.freeze,
:optional => false.freeze,
:arguments => [
ENV['CC'].nil? ? "" : ENV['CC'].split[1..-1],
ENV['CPPFLAGS'].nil? ? "" : ENV['CPPFLAGS'].split,
{"-I\"$\"" => 'COLLECTION_PATHS_SOURCE_INCLUDE_VENDOR'}.freeze,
{"-I\"$\"" => 'COLLECTION_PATHS_RELEASE_TOOLCHAIN_INCLUDE'}.freeze,
{"-D$" => 'COLLECTION_DEFINES_RELEASE_AND_VENDOR'}.freeze,
"-DGNU_COMPILER".freeze,
ENV['CFLAGS'].nil? ? "" : ENV['CFLAGS'].split,
"-c \"${1}\"".freeze,
"-o \"${2}\"".freeze,
# gcc's list file output options are complex; no use of ${3} parameter in default config
"-MMD".freeze,
"-MF \"${4}\"".freeze,
].freeze
}
DEFAULT_RELEASE_ASSEMBLER_TOOL = {
:executable => ENV['AS'].nil? ? FilePathUtils.os_executable_ext('as').freeze : ENV['AS'].split[0],
:name => 'default_release_assembler'.freeze,
:stderr_redirect => StdErrRedirect::NONE.freeze,
:background_exec => BackgroundExec::NONE.freeze,
:optional => false.freeze,
:arguments => [
ENV['AS'].nil? ? "" : ENV['AS'].split[1..-1],
ENV['ASFLAGS'].nil? ? "" : ENV['ASFLAGS'].split,
{"-I\"$\"" => 'COLLECTION_PATHS_SOURCE_AND_INCLUDE'}.freeze,
"\"${1}\"".freeze,
"-o \"${2}\"".freeze,
].freeze
}
DEFAULT_RELEASE_LINKER_TOOL = {
:executable => ENV['CCLD'].nil? ? FilePathUtils.os_executable_ext('gcc').freeze : ENV['CCLD'].split[0],
:name => 'default_release_linker'.freeze,
:stderr_redirect => StdErrRedirect::NONE.freeze,
:background_exec => BackgroundExec::NONE.freeze,
:optional => false.freeze,
:arguments => [
ENV['CCLD'].nil? ? "" : ENV['CCLD'].split[1..-1],
ENV['CFLAGS'].nil? ? "" : ENV['CFLAGS'].split,
ENV['LDFLAGS'].nil? ? "" : ENV['LDFLAGS'].split,
"\"${1}\"".freeze,
"${5}".freeze,
"-o \"${2}\"".freeze,
"".freeze,
"${4}".freeze,
ENV['LDLIBS'].nil? ? "" : ENV['LDLIBS'].split
].freeze
}
DEFAULT_TOOLS_TEST = {
:tools => {
:test_compiler => DEFAULT_TEST_COMPILER_TOOL,
:test_linker => DEFAULT_TEST_LINKER_TOOL,
:test_fixture => DEFAULT_TEST_FIXTURE_TOOL,
}
}
DEFAULT_TOOLS_TEST_PREPROCESSORS = {
:tools => {
:test_includes_preprocessor => DEFAULT_TEST_INCLUDES_PREPROCESSOR_TOOL,
:test_file_preprocessor => DEFAULT_TEST_FILE_PREPROCESSOR_TOOL,
:test_file_preprocessor_directives => DEFAULT_TEST_FILE_PREPROCESSOR_DIRECTIVES_TOOL,
}
}
DEFAULT_TOOLS_TEST_DEPENDENCIES = {
:tools => {
:test_dependencies_generator => DEFAULT_TEST_DEPENDENCIES_GENERATOR_TOOL,
}
}
DEFAULT_TOOLS_RELEASE = {
:tools => {
:release_compiler => DEFAULT_RELEASE_COMPILER_TOOL,
:release_linker => DEFAULT_RELEASE_LINKER_TOOL,
}
}
DEFAULT_TOOLS_RELEASE_ASSEMBLER = {
:tools => {
:release_assembler => DEFAULT_RELEASE_ASSEMBLER_TOOL,
}
}
DEFAULT_TOOLS_RELEASE_DEPENDENCIES = {
:tools => {
:release_dependencies_generator => DEFAULT_RELEASE_DEPENDENCIES_GENERATOR_TOOL,
}
}
DEFAULT_RELEASE_TARGET_NAME = 'project'
DEFAULT_CEEDLING_CONFIG = {
:project => {
# :build_root must be set by user
:use_exceptions => true,
:use_mocks => true,
:compile_threads => 1,
:test_threads => 1,
:use_test_preprocessor => false,
:use_preprocessor_directives => false,
:use_deep_dependencies => false,
:generate_deep_dependencies => true, # only applicable if use_deep_dependencies is true
:auto_link_deep_dependencies => false,
:test_file_prefix => 'test_',
:options_paths => [],
:release_build => false,
},
:release_build => {
# :output is set while building configuration -- allows smart default system-dependent file extension handling
:use_assembly => false,
:artifacts => [],
},
:paths => {
:test => [], # must be populated by user
:source => [], # must be populated by user
:support => [],
:include => [],
:libraries => [],
:test_toolchain_include => [],
:release_toolchain_include => [],
},
:files => {
:test => [],
:source => [],
:assembly => [],
:support => [],
:include => [],
},
# unlike other top-level entries, environment's value is an array to preserve order
:environment => [
# when evaluated, this provides wider text field for rake task comments
{:rake_columns => '120'},
],
:defines => {
:test => [],
:test_preprocess => [],
:release => [],
:release_preprocess => [],
:use_test_definition => false,
},
:libraries => {
:flag => '-l${1}',
:path_flag => '-L ${1}',
:test => [],
:test_preprocess => [],
:release => [],
:release_preprocess => [],
},
:flags => {},
:extension => {
:header => '.h',
:source => '.c',
:assembly => '.s',
:object => '.o',
:libraries => ['.a','.so'],
:executable => ( SystemWrapper.windows? ? EXTENSION_WIN_EXE : EXTENSION_NONWIN_EXE ),
:map => '.map',
:list => '.lst',
:testpass => '.pass',
:testfail => '.fail',
:dependencies => '.d',
},
:unity => {
:vendor_path => CEEDLING_VENDOR,
:defines => []
},
:cmock => {
:vendor_path => CEEDLING_VENDOR,
:defines => [],
:includes => []
},
:cexception => {
:vendor_path => CEEDLING_VENDOR,
:defines => []
},
:test_runner => {
:includes => [],
:file_suffix => '_runner',
},
# all tools populated while building up config structure
:tools => {},
# empty argument lists for default tools
# (these can be overridden in project file to add arguments to tools without totally redefining tools)
:test_compiler => { :arguments => [] },
:test_linker => { :arguments => [] },
:test_fixture => {
:arguments => [],
:link_objects => [], # compiled object files to always be linked in (e.g. cmock.o if using mocks)
},
:test_includes_preprocessor => { :arguments => [] },
:test_file_preprocessor => { :arguments => [] },
:test_file_preprocessor_directives => { :arguments => [] },
:test_dependencies_generator => { :arguments => [] },
:release_compiler => { :arguments => [] },
:release_linker => { :arguments => [] },
:release_assembler => { :arguments => [] },
:release_dependencies_generator => { :arguments => [] },
:plugins => {
:load_paths => CEEDLING_PLUGINS,
:enabled => [],
}
}.freeze
DEFAULT_TESTS_RESULTS_REPORT_TEMPLATE = %q{
% ignored = hash[:results][:counts][:ignored]
% failed = hash[:results][:counts][:failed]
% stdout_count = hash[:results][:counts][:stdout]
% header_prepend = ((hash[:header].length > 0) ? "#{hash[:header]}: " : '')
% banner_width = 25 + header_prepend.length # widest message
% if (stdout_count > 0)
<%=@ceedling[:plugin_reportinator].generate_banner(header_prepend + 'TEST OUTPUT')%>
% hash[:results][:stdout].each do |string|
% string[:collection].each do |item|
<%=string[:source][:path]%><%=File::SEPARATOR%><%=string[:source][:file]%>: "<%=item%>"
% end
% end
% end
% if (ignored > 0)
<%=@ceedling[:plugin_reportinator].generate_banner(header_prepend + 'IGNORED TEST SUMMARY')%>
% hash[:results][:ignores].each do |ignore|
% ignore[:collection].each do |item|
<%=ignore[:source][:path]%><%=File::SEPARATOR%><%=ignore[:source][:file]%>:<%=item[:line]%>:<%=item[:test]%>
% if (item[:message].length > 0)
: "<%=item[:message]%>"
% else
<%="\n"%>
% end
% end
% end
% end
% if (failed > 0)
<%=@ceedling[:plugin_reportinator].generate_banner(header_prepend + 'FAILED TEST SUMMARY')%>
% hash[:results][:failures].each do |failure|
% failure[:collection].each do |item|
<%=failure[:source][:path]%><%=File::SEPARATOR%><%=failure[:source][:file]%>:<%=item[:line]%>:<%=item[:test]%>
% if (item[:message].length > 0)
: "<%=item[:message]%>"
% else
<%="\n"%>
% end
% end
% end
% end
% total_string = hash[:results][:counts][:total].to_s
% format_string = "%#{total_string.length}i"
<%=@ceedling[:plugin_reportinator].generate_banner(header_prepend + 'OVERALL TEST SUMMARY')%>
% if (hash[:results][:counts][:total] > 0)
TESTED: <%=hash[:results][:counts][:total].to_s%>
PASSED: <%=sprintf(format_string, hash[:results][:counts][:passed])%>
FAILED: <%=sprintf(format_string, failed)%>
IGNORED: <%=sprintf(format_string, ignored)%>
% else
No tests executed.
% end
}

@ -1,97 +0,0 @@
class Dependinator
constructor :configurator, :project_config_manager, :test_includes_extractor, :file_path_utils, :rake_wrapper, :file_wrapper
def touch_force_rebuild_files
@file_wrapper.touch( @configurator.project_test_force_rebuild_filepath )
@file_wrapper.touch( @configurator.project_release_force_rebuild_filepath ) if (@configurator.project_release_build)
end
def load_release_object_deep_dependencies(dependencies_list)
dependencies_list.each do |dependencies_file|
if File.exist?(dependencies_file)
@rake_wrapper.load_dependencies( dependencies_file )
end
end
end
def enhance_release_file_dependencies(files)
files.each do |filepath|
@rake_wrapper[filepath].enhance( [@configurator.project_release_force_rebuild_filepath] ) if (@project_config_manager.release_config_changed)
end
end
def load_test_object_deep_dependencies(files_list)
dependencies_list = @file_path_utils.form_test_dependencies_filelist(files_list)
dependencies_list.each do |dependencies_file|
if File.exist?(dependencies_file)
@rake_wrapper.load_dependencies(dependencies_file)
end
end
end
def enhance_runner_dependencies(runner_filepath)
@rake_wrapper[runner_filepath].enhance( [@configurator.project_test_force_rebuild_filepath] ) if (@project_config_manager.test_config_changed ||
@project_config_manager.test_defines_changed)
end
def enhance_shallow_include_lists_dependencies(include_lists)
include_lists.each do |include_list_filepath|
@rake_wrapper[include_list_filepath].enhance( [@configurator.project_test_force_rebuild_filepath] ) if (@project_config_manager.test_config_changed ||
@project_config_manager.test_defines_changed)
end
end
def enhance_preprocesed_file_dependencies(files)
files.each do |filepath|
@rake_wrapper[filepath].enhance( [@configurator.project_test_force_rebuild_filepath] ) if (@project_config_manager.test_config_changed ||
@project_config_manager.test_defines_changed)
end
end
def enhance_mock_dependencies(mocks_list)
# if input configuration or ceedling changes, make sure these guys get rebuilt
mocks_list.each do |mock_filepath|
@rake_wrapper[mock_filepath].enhance( [@configurator.project_test_force_rebuild_filepath] ) if (@project_config_manager.test_config_changed ||
@project_config_manager.test_defines_changed)
@rake_wrapper[mock_filepath].enhance( @configurator.cmock_unity_helper ) if (@configurator.cmock_unity_helper)
end
end
def enhance_dependencies_dependencies(dependencies)
dependencies.each do |dependencies_filepath|
@rake_wrapper[dependencies_filepath].enhance( [@configurator.project_test_force_rebuild_filepath] ) if (@project_config_manager.test_config_changed ||
@project_config_manager.test_defines_changed)
end
end
def enhance_test_build_object_dependencies(objects)
objects.each do |object_filepath|
@rake_wrapper[object_filepath].enhance( [@configurator.project_test_force_rebuild_filepath] ) if (@project_config_manager.test_config_changed ||
@project_config_manager.test_defines_changed)
end
end
def enhance_results_dependencies(result_filepath)
@rake_wrapper[result_filepath].enhance( [@configurator.project_test_force_rebuild_filepath] ) if @project_config_manager.test_config_changed
end
def enhance_test_executable_dependencies(test, objects)
@rake_wrapper[ @file_path_utils.form_test_executable_filepath(test) ].enhance( objects )
end
end

@ -1,9 +0,0 @@
require 'erb'
class ErbWrapper
def generate_file(template, data, output_file)
File.open(output_file, "w") do |f|
f << ERB.new(template, trim_mode: "<>").result(binding)
end
end
end
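# Illustrative usage, not part of the original file (file name and template are hypothetical):
#   ErbWrapper.new.generate_file("Hello <%= data[:name] %>", {:name => 'world'}, 'greeting.txt')
#   # writes "Hello world" to greeting.txt; the template sees the passed-in hash as `data`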

@ -1,148 +0,0 @@
require 'rubygems'
require 'rake' # for adding ext() method to string
require 'thread'
class FileFinder
SEMAPHORE = Mutex.new
constructor :configurator, :file_finder_helper, :cacheinator, :file_path_utils, :file_wrapper, :yaml_wrapper
def prepare_search_sources
@all_test_source_and_header_file_collection =
@configurator.collection_all_tests +
@configurator.collection_all_source +
@configurator.collection_all_headers
end
def find_header_file(mock_file)
header = File.basename(mock_file).sub(/#{@configurator.cmock_mock_prefix}/, '').ext(@configurator.extension_header)
found_path = @file_finder_helper.find_file_in_collection(header, @configurator.collection_all_headers, :error)
return found_path
end
def find_header_input_for_mock_file(mock_file)
found_path = find_header_file(mock_file)
mock_input = found_path
if (@configurator.project_use_test_preprocessor)
mock_input = @cacheinator.diff_cached_test_file( @file_path_utils.form_preprocessed_file_filepath( found_path ) )
end
return mock_input
end
def find_source_from_test(test, complain)
test_prefix = @configurator.project_test_file_prefix
source_paths = @configurator.collection_all_source
source = File.basename(test).sub(/#{test_prefix}/, '')
# we don't blow up if a test file has no corresponding source file
return @file_finder_helper.find_file_in_collection(source, source_paths, complain)
end
def find_test_from_runner_path(runner_path)
extension_source = @configurator.extension_source
test_file = File.basename(runner_path).sub(/#{@configurator.test_runner_file_suffix}#{'\\'+extension_source}/, extension_source)
found_path = @file_finder_helper.find_file_in_collection(test_file, @configurator.collection_all_tests, :error)
return found_path
end
def find_test_input_for_runner_file(runner_path)
found_path = find_test_from_runner_path(runner_path)
runner_input = found_path
if (@configurator.project_use_test_preprocessor)
runner_input = @cacheinator.diff_cached_test_file( @file_path_utils.form_preprocessed_file_filepath( found_path ) )
end
return runner_input
end
def find_test_from_file_path(file_path)
test_file = File.basename(file_path).ext(@configurator.extension_source)
found_path = @file_finder_helper.find_file_in_collection(test_file, @configurator.collection_all_tests, :error)
return found_path
end
def find_test_or_source_or_header_file(file_path)
file = File.basename(file_path)
return @file_finder_helper.find_file_in_collection(file, @all_test_source_and_header_file_collection, :error)
end
def find_compilation_input_file(file_path, complain=:error, release=false)
found_file = nil
source_file = File.basename(file_path).ext(@configurator.extension_source)
# We only collect files that already exist when we start up.
# FileLists can produce undesired results for dynamically generated files depending on when they're accessed.
# So collect mocks and runners separately and right now.
SEMAPHORE.synchronize {
if (source_file =~ /#{@configurator.test_runner_file_suffix}/)
found_file =
@file_finder_helper.find_file_in_collection(
source_file,
@file_wrapper.directory_listing( File.join(@configurator.project_test_runners_path, '*') ),
complain)
elsif (@configurator.project_use_mocks and (source_file =~ /#{@configurator.cmock_mock_prefix}/))
found_file =
@file_finder_helper.find_file_in_collection(
source_file,
@file_wrapper.directory_listing( File.join(@configurator.cmock_mock_path, '*') ),
complain)
elsif release
found_file =
@file_finder_helper.find_file_in_collection(
source_file,
@configurator.collection_release_existing_compilation_input,
complain)
else
temp_complain = (defined?(TEST_BUILD_USE_ASSEMBLY) && TEST_BUILD_USE_ASSEMBLY) ? :ignore : complain
found_file =
@file_finder_helper.find_file_in_collection(
source_file,
@configurator.collection_all_existing_compilation_input,
temp_complain)
found_file ||= find_assembly_file(file_path, false) if (defined?(TEST_BUILD_USE_ASSEMBLY) && TEST_BUILD_USE_ASSEMBLY)
end
}
return found_file
end
def find_source_file(file_path, complain)
source_file = File.basename(file_path).ext(@configurator.extension_source)
return @file_finder_helper.find_file_in_collection(source_file, @configurator.collection_all_source, complain)
end
def find_assembly_file(file_path, complain = :error)
assembly_file = File.basename(file_path).ext(@configurator.extension_assembly)
return @file_finder_helper.find_file_in_collection(assembly_file, @configurator.collection_all_assembly, complain)
end
def find_file_from_list(file_path, file_list, complain)
return @file_finder_helper.find_file_in_collection(file_path, file_list, complain)
end
end
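# Illustrative sketch, not part of the original file (test file name is hypothetical and the
# default 'test_' prefix is assumed): find_source_from_test() strips the prefix and searches
# the source collection by basename, e.g.
#   find_source_from_test('test/test_usbd.c', :error)   # looks for a source file named 'usbd.c'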

@ -1,54 +0,0 @@
require 'fileutils'
require 'ceedling/constants' # for Verbosity enumeration
class FileFinderHelper
constructor :streaminator
def find_file_in_collection(file_name, file_list, complain, extra_message="")
file_to_find = nil
file_list.each do |item|
base_file = File.basename(item)
# case insensitive comparison
if (base_file.casecmp(file_name) == 0)
# case sensitive check
if (base_file == file_name)
file_to_find = item
break
else
blow_up(file_name, "However, a filename having different capitalization was found: '#{item}'.")
end
end
end
if file_to_find.nil?
case (complain)
when :error then blow_up(file_name, extra_message)
when :warn then gripe(file_name, extra_message)
#when :ignore then
end
end
return file_to_find
end
private
def blow_up(file_name, extra_message="")
error = "ERROR: Found no file '#{file_name}' in search paths."
error += ' ' if (extra_message.length > 0)
@streaminator.stderr_puts(error + extra_message, Verbosity::ERRORS)
raise
end
def gripe(file_name, extra_message="")
warning = "WARNING: Found no file '#{file_name}' in search paths."
warning += ' ' if (extra_message.length > 0)
@streaminator.stderr_puts(warning + extra_message, Verbosity::COMPLAIN)
end
end

@ -1,202 +0,0 @@
require 'rubygems'
require 'rake' # for ext()
require 'fileutils'
require 'ceedling/system_wrapper'
# global utility methods (for plugins, project files, etc.)
def ceedling_form_filepath(destination_path, original_filepath, new_extension=nil)
filename = File.basename(original_filepath)
filename.replace(filename.ext(new_extension)) if (!new_extension.nil?)
return File.join( destination_path.gsub(/\\/, '/'), filename )
end
class FilePathUtils
GLOB_MATCHER = /[\*\?\{\}\[\]]/
constructor :configurator, :file_wrapper
######### class methods ##########
# standardize path to use '/' path separator & have no trailing path separator
def self.standardize(path)
if path.is_a? String
path.strip!
path.gsub!(/\\/, '/')
path.chomp!('/')
end
return path
end
def self.os_executable_ext(executable)
return executable.ext('.exe') if SystemWrapper.windows?
return executable
end
# extract directory path from between optional add/subtract aggregation modifiers and up to glob specifiers
# note: slightly different than File.dirname in that /files/foo remains /files/foo and does not become /files
def self.extract_path(path)
path = path.sub(/^(\+|-):/, '')
# find first occurrence of path separator followed by directory glob specifier: *, ?, {, }, [, ]
find_index = (path =~ GLOB_MATCHER)
# no changes needed (lop off final path separator)
return path.chomp('/') if (find_index.nil?)
# extract up to first glob specifier
path = path[0..(find_index-1)]
# lop off everything up to and including final path separator
find_index = path.rindex('/')
return path[0..(find_index-1)] if (not find_index.nil?)
# return string up to first glob specifier if no path separator found
return path
end
# return whether the given path is to be aggregated (no aggregation modifier defaults to same as +:)
def self.add_path?(path)
return (path =~ /^-:/).nil?
end
# get path (and glob) lopping off optional +: / -: prefixed aggregation modifiers
def self.extract_path_no_aggregation_operators(path)
return path.sub(/^(\+|-):/, '')
end
# all the globs that may be in a path string work fine with one exception;
# to recurse through all subdirectories, the glob is dir/**/** but our paths use
# convention of only dir/**
def self.reform_glob(path)
return path if (path =~ /\/\*\*$/).nil?
return path + '/**'
end
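# Illustrative sketch, not part of the original file: how the class methods above treat the
# +:/-: aggregation prefixes and glob suffixes (paths are hypothetical):
#   FilePathUtils.extract_path('+:src/portable/**')                   # => 'src/portable'
#   FilePathUtils.add_path?('-:build/**')                             # => false
#   FilePathUtils.extract_path_no_aggregation_operators('-:build/**') # => 'build/**'
#   FilePathUtils.reform_glob('src/**')                               # => 'src/**/**'
#   FilePathUtils.reform_glob('src/*')                                # => 'src/*' (unchanged)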
######### instance methods ##########
def form_temp_path(filepath, prefix='')
return File.join( @configurator.project_temp_path, prefix + File.basename(filepath) )
end
### release ###
def form_release_build_cache_path(filepath)
return File.join( @configurator.project_release_build_cache_path, File.basename(filepath) )
end
def form_release_dependencies_filepath(filepath)
return File.join( @configurator.project_release_dependencies_path, File.basename(filepath).ext(@configurator.extension_dependencies) )
end
def form_release_build_c_object_filepath(filepath)
return File.join( @configurator.project_release_build_output_c_path, File.basename(filepath).ext(@configurator.extension_object) )
end
def form_release_build_asm_object_filepath(filepath)
return File.join( @configurator.project_release_build_output_asm_path, File.basename(filepath).ext(@configurator.extension_object) )
end
def form_release_build_c_objects_filelist(files)
return (@file_wrapper.instantiate_file_list(files)).pathmap("#{@configurator.project_release_build_output_c_path}/%n#{@configurator.extension_object}")
end
def form_release_build_asm_objects_filelist(files)
return (@file_wrapper.instantiate_file_list(files)).pathmap("#{@configurator.project_release_build_output_asm_path}/%n#{@configurator.extension_object}")
end
def form_release_build_c_list_filepath(filepath)
return File.join( @configurator.project_release_build_output_c_path, File.basename(filepath).ext(@configurator.extension_list) )
end
def form_release_dependencies_filelist(files)
return (@file_wrapper.instantiate_file_list(files)).pathmap("#{@configurator.project_release_dependencies_path}/%n#{@configurator.extension_dependencies}")
end
### tests ###
def form_test_build_cache_path(filepath)
return File.join( @configurator.project_test_build_cache_path, File.basename(filepath) )
end
def form_test_dependencies_filepath(filepath)
return File.join( @configurator.project_test_dependencies_path, File.basename(filepath).ext(@configurator.extension_dependencies) )
end
def form_pass_results_filepath(filepath)
return File.join( @configurator.project_test_results_path, File.basename(filepath).ext(@configurator.extension_testpass) )
end
def form_fail_results_filepath(filepath)
return File.join( @configurator.project_test_results_path, File.basename(filepath).ext(@configurator.extension_testfail) )
end
def form_runner_filepath_from_test(filepath)
return File.join( @configurator.project_test_runners_path, File.basename(filepath, @configurator.extension_source)) + @configurator.test_runner_file_suffix + @configurator.extension_source
end
def form_test_filepath_from_runner(filepath)
return filepath.sub(/#{TEST_RUNNER_FILE_SUFFIX}/, '')
end
def form_runner_object_filepath_from_test(filepath)
return (form_test_build_c_object_filepath(filepath)).sub(/(#{@configurator.extension_object})$/, "#{@configurator.test_runner_file_suffix}\\1")
end
def form_test_build_c_object_filepath(filepath)
return File.join( @configurator.project_test_build_output_c_path, File.basename(filepath).ext(@configurator.extension_object) )
end
def form_test_build_asm_object_filepath(filepath)
return File.join( @configurator.project_test_build_output_asm_path, File.basename(filepath).ext(@configurator.extension_object) )
end
def form_test_executable_filepath(filepath)
return File.join( @configurator.project_test_build_output_path, File.basename(filepath).ext(@configurator.extension_executable) )
end
def form_test_build_map_filepath(filepath)
return File.join( @configurator.project_test_build_output_path, File.basename(filepath).ext(@configurator.extension_map) )
end
def form_test_build_list_filepath(filepath)
return File.join( @configurator.project_test_build_output_path, File.basename(filepath).ext(@configurator.extension_list) )
end
def form_preprocessed_file_filepath(filepath)
return File.join( @configurator.project_test_preprocess_files_path, File.basename(filepath) )
end
def form_preprocessed_includes_list_filepath(filepath)
return File.join( @configurator.project_test_preprocess_includes_path, File.basename(filepath) )
end
def form_test_build_objects_filelist(sources)
return (@file_wrapper.instantiate_file_list(sources)).pathmap("#{@configurator.project_test_build_output_c_path}/%n#{@configurator.extension_object}")
end
def form_preprocessed_mockable_headers_filelist(mocks)
list = @file_wrapper.instantiate_file_list(mocks)
headers = list.map do |file|
module_name = File.basename(file).sub(/^#{@configurator.cmock_mock_prefix}/, '').sub(/\.[a-zA-Z]+$/,'')
"#{@configurator.project_test_preprocess_files_path}/#{module_name}#{@configurator.extension_header}"
end
return headers
end
def form_mocks_source_filelist(mocks)
list = (@file_wrapper.instantiate_file_list(mocks))
sources = list.map{|file| "#{@configurator.cmock_mock_path}/#{file}#{@configurator.extension_source}"}
return sources
end
def form_test_dependencies_filelist(files)
list = @file_wrapper.instantiate_file_list(files)
return list.pathmap("#{@configurator.project_test_dependencies_path}/%n#{@configurator.extension_dependencies}")
end
def form_pass_results_filelist(path, files)
list = @file_wrapper.instantiate_file_list(files)
return list.pathmap("#{path}/%n#{@configurator.extension_testpass}")
end
end

@ -1,69 +0,0 @@
require 'rubygems'
require 'rake'
require 'set'
require 'fileutils'
require 'ceedling/file_path_utils'
class FileSystemUtils
constructor :file_wrapper
# build up path list from input of one or more strings or arrays of (+/-) paths & globs
def collect_paths(*paths)
raw = [] # all paths and globs
plus = Set.new # all paths to expand and add
minus = Set.new # all paths to remove from plus set
# assemble all globs and simple paths, reforming our glob notation to ruby globs
paths.each do |paths_container|
case (paths_container)
when String then raw << (FilePathUtils::reform_glob(paths_container))
when Array then paths_container.each {|path| raw << (FilePathUtils::reform_glob(path))}
else raise "Don't know how to handle #{paths_container.class}"
end
end
# iterate through each path and glob
raw.each do |path|
dirs = [] # container for only (expanded) paths
# if a glob, expand it and slurp up all non-file paths
if path.include?('*')
# grab base directory only if globs are snug up to final path separator
if (path =~ /\/\*+$/)
dirs << FilePathUtils.extract_path(path)
end
# grab expanded sub-directory globs
expanded = @file_wrapper.directory_listing( FilePathUtils.extract_path_no_aggregation_operators(path) )
expanded.each do |entry|
dirs << entry if @file_wrapper.directory?(entry)
end
# else just grab simple path
# note: we could just run this through glob expansion but such an
# approach doesn't handle a path not yet on disk
else
dirs << FilePathUtils.extract_path_no_aggregation_operators(path)
end
# add dirs to the appropriate set based on path aggregation modifier if present
FilePathUtils.add_path?(path) ? plus.merge(dirs) : minus.merge(dirs)
end
return (plus - minus).to_a.uniq
end
# given a file list, add to it or remove from it
def revise_file_list(list, revisions)
revisions.each do |revision|
# include or exclude file or glob to file list
file = FilePathUtils.extract_path_no_aggregation_operators( revision )
FilePathUtils.add_path?(revision) ? list.include(file) : list.exclude(file)
end
end
end
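# Illustrative sketch, not part of the original file: mimics the +:/-: path notation that
# collect_paths() understands, in plain Ruby so it runs standalone; directory names are
# invented. The real method additionally expands the globs against the file system.
require 'set'
raw   = ['+:src/**', '-:src/experimental']
plus  = Set.new
minus = Set.new
raw.each do |entry|
  target = entry.start_with?('-:') ? minus : plus
  target << entry.sub(/^[+-]:/, '')   # strip the aggregation operator
end
(plus - minus).to_a   # => ["src/**"] -- after expansion, every directory under src/ except src/experimental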

View File

@ -1,10 +0,0 @@
class FileSystemWrapper
def cd(path)
FileUtils.cd path do
yield
end
end
end

View File

@ -1,83 +0,0 @@
require 'rubygems'
require 'rake' # for FileList
require 'fileutils'
require 'ceedling/constants'
class FileWrapper
def get_expanded_path(path)
return File.expand_path(path)
end
def basename(path, extension=nil)
return File.basename(path, extension) if extension
return File.basename(path)
end
def exist?(filepath)
return true if (filepath == NULL_FILE_PATH)
return File.exist?(filepath)
end
def directory?(path)
return File.directory?(path)
end
def dirname(path)
return File.dirname(path)
end
def directory_listing(glob)
return Dir.glob(glob, File::FNM_PATHNAME)
end
def rm_f(filepath, options={})
FileUtils.rm_f(filepath, **options)
end
def rm_r(filepath, options={})
FileUtils.rm_r(filepath, **options)
end
def cp(source, destination, options={})
FileUtils.cp(source, destination, **options)
end
def compare(from, to)
return FileUtils.compare_file(from, to)
end
def open(filepath, flags)
File.open(filepath, flags) do |file|
yield(file)
end
end
def read(filepath)
return File.read(filepath)
end
def touch(filepath, options={})
FileUtils.touch(filepath, **options)
end
def write(filepath, contents, flags='w')
File.open(filepath, flags) do |file|
file.write(contents)
end
end
def readlines(filepath)
return File.readlines(filepath)
end
def instantiate_file_list(files=[])
return FileList.new(files)
end
def mkdir(folder)
return FileUtils.mkdir_p(folder)
end
end

View File

@ -1,74 +0,0 @@
require 'rubygems'
require 'rake' # for ext()
require 'fileutils'
require 'ceedling/constants'
# :flags:
#   :release:
#     :compile:
#       :'test_.+'
#         - -pedantic        # add '-pedantic' to every test file
#       :*:                  # add '-foo' to compilation of all files not main.c
#         - -foo
#       :main:               # add '-Wall' to compilation of main.c
#         - -Wall
#   :test:
#     :link:
#       :test_main:          # add '--bar --baz' to linking of test_main.exe
#         - --bar
#         - --baz
def partition(hash, &predicate)
hash.partition(&predicate).map(&:to_h)
end
class Flaginator
constructor :configurator
def get_flag(hash, file_name)
file_key = file_name.to_sym
# 1. try literals
literals, magic = partition(hash) { |k, v| k.to_s =~ /^\w+$/ }
return literals[file_key] if literals.include?(file_key)
any, regex = partition(magic) { |k, v| (k == :'*') || (k == :'.*') } # glob or regex wild card
# 2. try regexes
find_res = regex.find { |k, v| file_name =~ /^#{k.to_s}$/ }
return find_res[1] if find_res
# 3. try anything
find_res = any.find { |k, v| file_name =~ /.*/ }
return find_res[1] if find_res
# 4. well, we've tried
return []
end
def flag_down( operation, context, file )
# create configurator accessor method
accessor = ('flags_' + context.to_s).to_sym
# create simple filename key from whatever filename provided
file_name = File.basename( file ).ext('')
file_key = File.basename( file ).ext('').to_sym
# if no entry in configuration for flags for this context, bail out
return [] if not @configurator.respond_to?( accessor )
# get flags sub hash associated with this context
flags = @configurator.send( accessor )
# if operation not represented in flags hash, bail out
return [] if not flags.include?( operation )
# redefine flags to sub hash associated with the operation
flags = flags[operation]
return get_flag(flags, file_name)
end
end
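# Illustrative sketch, not part of the original file: the lookup order flag_down()/get_flag()
# apply to a :flags: sub-hash -- a literal filename key wins, then a regex-style key, then the
# :* wildcard. The flag values are invented.
flags = {
  :main      => ['-Wall'],       # literal key: applies only to main.c
  :'test_.+' => ['-pedantic'],   # regex key: applies to any file matching /^test_.+$/
  :'*'       => ['-foo']         # wildcard: applies to everything else
}
# get_flag(flags, 'main')      # => ['-Wall']
# get_flag(flags, 'test_fifo') # => ['-pedantic']
# get_flag(flags, 'utils')     # => ['-foo']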

View File

@ -1,186 +0,0 @@
require 'ceedling/constants'
class Generator
constructor :configurator,
:generator_helper,
:preprocessinator,
:cmock_builder,
:generator_test_runner,
:generator_test_results,
:flaginator,
:test_includes_extractor,
:tool_executor,
:file_finder,
:file_path_utils,
:streaminator,
:plugin_manager,
:file_wrapper
def generate_shallow_includes_list(context, file)
@streaminator.stdout_puts("Generating include list for #{File.basename(file)}...", Verbosity::NORMAL)
@preprocessinator.preprocess_shallow_includes(file)
end
def generate_preprocessed_file(context, file)
@streaminator.stdout_puts("Preprocessing #{File.basename(file)}...", Verbosity::NORMAL)
@preprocessinator.preprocess_file(file)
end
def generate_dependencies_file(tool, context, source, object, dependencies)
@streaminator.stdout_puts("Generating dependencies for #{File.basename(source)}...", Verbosity::NORMAL)
command =
@tool_executor.build_command_line(
tool,
[], # extra per-file command line parameters
source,
dependencies,
object)
@tool_executor.exec( command[:line], command[:options] )
end
def generate_mock(context, header_filepath)
arg_hash = {:header_file => header_filepath, :context => context}
@plugin_manager.pre_mock_generate( arg_hash )
begin
@cmock_builder.cmock.setup_mocks( arg_hash[:header_file] )
rescue
raise
ensure
@plugin_manager.post_mock_generate( arg_hash )
end
end
# test_filepath may be either preprocessed test file or original test file
def generate_test_runner(context, test_filepath, runner_filepath)
arg_hash = {:context => context, :test_file => test_filepath, :runner_file => runner_filepath}
@plugin_manager.pre_runner_generate(arg_hash)
# collect info we need
module_name = File.basename(arg_hash[:test_file])
test_cases = @generator_test_runner.find_test_cases( @file_finder.find_test_from_runner_path(runner_filepath) )
mock_list = @test_includes_extractor.lookup_raw_mock_list(arg_hash[:test_file])
@streaminator.stdout_puts("Generating runner for #{module_name}...", Verbosity::NORMAL)
test_file_includes = [] # Empty list for now, since apparently unused
# build runner file
begin
@generator_test_runner.generate(module_name, runner_filepath, test_cases, mock_list, test_file_includes)
rescue
raise
ensure
@plugin_manager.post_runner_generate(arg_hash)
end
end
def generate_object_file(tool, operation, context, source, object, list='', dependencies='')
shell_result = {}
arg_hash = {:tool => tool, :operation => operation, :context => context, :source => source, :object => object, :list => list, :dependencies => dependencies}
@plugin_manager.pre_compile_execute(arg_hash)
@streaminator.stdout_puts("Compiling #{File.basename(arg_hash[:source])}...", Verbosity::NORMAL)
command =
@tool_executor.build_command_line( arg_hash[:tool],
@flaginator.flag_down( operation, context, source ),
arg_hash[:source],
arg_hash[:object],
arg_hash[:list],
arg_hash[:dependencies])
@streaminator.stdout_puts("Command: #{command}", Verbosity::DEBUG)
begin
shell_result = @tool_executor.exec( command[:line], command[:options] )
rescue ShellExecutionException => ex
shell_result = ex.shell_result
raise ex
ensure
arg_hash[:shell_command] = command[:line]
arg_hash[:shell_result] = shell_result
@plugin_manager.post_compile_execute(arg_hash)
end
end
def generate_executable_file(tool, context, objects, executable, map='', libraries=[], libpaths=[])
shell_result = {}
arg_hash = { :tool => tool,
:context => context,
:objects => objects,
:executable => executable,
:map => map,
:libraries => libraries,
:libpaths => libpaths
}
@plugin_manager.pre_link_execute(arg_hash)
@streaminator.stdout_puts("Linking #{File.basename(arg_hash[:executable])}...", Verbosity::NORMAL)
command =
@tool_executor.build_command_line( arg_hash[:tool],
@flaginator.flag_down( OPERATION_LINK_SYM, context, executable ),
arg_hash[:objects],
arg_hash[:executable],
arg_hash[:map],
arg_hash[:libraries],
arg_hash[:libpaths]
)
@streaminator.stdout_puts("Command: #{command}", Verbosity::DEBUG)
begin
shell_result = @tool_executor.exec( command[:line], command[:options] )
rescue ShellExecutionException => ex
notice = "\n" +
"NOTICE: If the linker reports missing symbols, the following may be to blame:\n" +
" 1. Test lacks #include statements corresponding to needed source files.\n" +
" 2. Project search paths do not contain source files corresponding to #include statements in the test.\n"
if (@configurator.project_use_mocks)
notice += " 3. Test does not #include needed mocks.\n\n"
else
notice += "\n"
end
@streaminator.stderr_puts(notice, Verbosity::COMPLAIN)
shell_result = ex.shell_result
raise ''
ensure
arg_hash[:shell_result] = shell_result
@plugin_manager.post_link_execute(arg_hash)
end
end
def generate_test_results(tool, context, executable, result)
arg_hash = {:tool => tool, :context => context, :executable => executable, :result_file => result}
@plugin_manager.pre_test_fixture_execute(arg_hash)
@streaminator.stdout_puts("Running #{File.basename(arg_hash[:executable])}...", Verbosity::NORMAL)
# Unity's exit code is equivalent to the number of failed tests, so we tell @tool_executor not to fail out if there are failures
# so that we can run all tests and collect all results
command = @tool_executor.build_command_line(arg_hash[:tool], [], arg_hash[:executable])
@streaminator.stdout_puts("Command: #{command}", Verbosity::DEBUG)
command[:options][:boom] = false
shell_result = @tool_executor.exec( command[:line], command[:options] )
#Don't Let The Failure Count Make Us Believe Things Aren't Working
shell_result[:exit_code] = 0
@generator_helper.test_results_error_handler(executable, shell_result)
processed = @generator_test_results.process_and_write_results( shell_result,
arg_hash[:result_file],
@file_finder.find_test_from_file_path(arg_hash[:executable]) )
arg_hash[:result_file] = processed[:result_file]
arg_hash[:results] = processed[:results]
arg_hash[:shell_result] = shell_result # for raw output display if no plugins for formatted display
@plugin_manager.post_test_fixture_execute(arg_hash)
end
end

View File

@ -1,40 +0,0 @@
require 'ceedling/constants'
class GeneratorHelper
constructor :streaminator
def test_results_error_handler(executable, shell_result)
notice = ''
error = false
if (shell_result[:output].nil? or shell_result[:output].strip.empty?)
error = true
# mirror style of generic tool_executor failure output
notice = "\n" +
"ERROR: Test executable \"#{File.basename(executable)}\" failed.\n" +
"> Produced no output to $stdout.\n"
elsif ((shell_result[:output] =~ TEST_STDOUT_STATISTICS_PATTERN).nil?)
error = true
# mirror style of generic tool_executor failure output
notice = "\n" +
"ERROR: Test executable \"#{File.basename(executable)}\" failed.\n" +
"> Produced no final test result counts in $stdout:\n" +
"#{shell_result[:output].strip}\n"
end
if (error)
# since we told the tool executor to ignore the exit code, handle it explicitly here
notice += "> And exited with status: [#{shell_result[:exit_code]}] (count of failed tests).\n" if (shell_result[:exit_code] != nil)
notice += "> And then likely crashed.\n" if (shell_result[:exit_code] == nil)
notice += "> This is often a symptom of a bad memory access in source or test code.\n\n"
@streaminator.stderr_puts(notice, Verbosity::COMPLAIN)
raise
end
end
end

View File

@ -1,100 +0,0 @@
require 'rubygems'
require 'rake' # for .ext()
require 'ceedling/constants'
class GeneratorTestResults
constructor :configurator, :generator_test_results_sanity_checker, :yaml_wrapper
def process_and_write_results(unity_shell_result, results_file, test_file)
output_file = results_file
results = get_results_structure
results[:source][:path] = File.dirname(test_file)
results[:source][:file] = File.basename(test_file)
results[:time] = unity_shell_result[:time] unless unity_shell_result[:time].nil?
# process test statistics
if (unity_shell_result[:output] =~ TEST_STDOUT_STATISTICS_PATTERN)
results[:counts][:total] = $1.to_i
results[:counts][:failed] = $2.to_i
results[:counts][:ignored] = $3.to_i
results[:counts][:passed] = (results[:counts][:total] - results[:counts][:failed] - results[:counts][:ignored])
end
# remove test statistics lines
output_string = unity_shell_result[:output].sub(TEST_STDOUT_STATISTICS_PATTERN, '')
output_string.each_line do |line|
# process unity output
case line
when /(:IGNORE)/
elements = extract_line_elements(line, results[:source][:file])
results[:ignores] << elements[0]
results[:stdout] << elements[1] if (!elements[1].nil?)
when /(:PASS$)/
elements = extract_line_elements(line, results[:source][:file])
results[:successes] << elements[0]
results[:stdout] << elements[1] if (!elements[1].nil?)
when /(:PASS \(.* ms\)$)/
elements = extract_line_elements(line, results[:source][:file])
results[:successes] << elements[0]
results[:stdout] << elements[1] if (!elements[1].nil?)
when /(:FAIL)/
elements = extract_line_elements(line, results[:source][:file])
results[:failures] << elements[0]
results[:stdout] << elements[1] if (!elements[1].nil?)
else # collect up all other
results[:stdout] << line.chomp
end
end
@generator_test_results_sanity_checker.verify(results, unity_shell_result[:exit_code])
output_file = results_file.ext(@configurator.extension_testfail) if (results[:counts][:failed] > 0)
@yaml_wrapper.dump(output_file, results)
return { :result_file => output_file, :result => results }
end
private
def get_results_structure
return {
:source => {:path => '', :file => ''},
:successes => [],
:failures => [],
:ignores => [],
:counts => {:total => 0, :passed => 0, :failed => 0, :ignored => 0},
:stdout => [],
:time => 0.0
}
end
def extract_line_elements(line, filename)
# handle anything preceding filename in line as extra output to be collected
stdout = nil
stdout_regex = /(.+)#{Regexp.escape(filename)}.+/i
unity_test_time = 0
if (line =~ stdout_regex)
stdout = $1.clone
line.sub!(/#{Regexp.escape(stdout)}/, '')
end
# collect up test results minus any extra output
elements = (line.strip.split(':'))[1..-1]
# find timestamp if available
if (elements[-1] =~ / \((\d*(?:\.\d*)?) ms\)/)
unity_test_time = $1.to_f / 1000
elements[-1].sub!(/ \((\d*(?:\.\d*)?) ms\)/, '')
end
return {:test => elements[1], :line => elements[0].to_i, :message => (elements[3..-1].join(':')).strip, :unity_test_time => unity_test_time}, stdout if elements.size >= 3
return {:test => '???', :line => -1, :message => nil, :unity_test_time => unity_test_time} #fallback safe option. TODO better handling
end
end
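# Illustrative sketch, not part of the original file: a typical Unity result line and the
# slices extract_line_elements() pulls out of it. File and test names are invented.
line = 'test_fifo.c:42:test_fifo_overflow:FAIL:Expected 1 Was 0'
elements = line.strip.split(':')[1..-1]
# => ["42", "test_fifo_overflow", "FAIL", "Expected 1 Was 0"]
# which becomes {:test => "test_fifo_overflow", :line => 42, :message => "Expected 1 Was 0"}
# and is filed under results[:failures] by process_and_write_results().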

View File

@ -1,65 +0,0 @@
require 'rubygems'
require 'rake' # for ext() method
require 'ceedling/constants'
class GeneratorTestResultsSanityChecker
constructor :configurator, :streaminator
def verify(results, unity_exit_code)
# do no sanity checking if it's disabled
return if (@configurator.sanity_checks == TestResultsSanityChecks::NONE)
raise "results nil or empty" if results.nil? || results.empty?
ceedling_ignores_count = results[:ignores].size
ceedling_failures_count = results[:failures].size
ceedling_tests_summation = (ceedling_ignores_count + ceedling_failures_count + results[:successes].size)
# Exit code handling is not a sanity check that can always be performed because
# command line simulators may or may not pass through Unity's exit code
if (@configurator.sanity_checks >= TestResultsSanityChecks::THOROUGH)
# many platforms limit exit codes to a maximum of 255
if ((ceedling_failures_count != unity_exit_code) and (unity_exit_code < 255))
sanity_check_warning(results[:source][:file], "Unity's exit code (#{unity_exit_code}) does not match Ceedling's summation of failed test cases (#{ceedling_failures_count}).")
end
if ((ceedling_failures_count < 255) and (unity_exit_code == 255))
sanity_check_warning(results[:source][:file], "Ceedling's summation of failed test cases (#{ceedling_failures_count}) is less than Unity's exit code (255 or more).")
end
end
if (ceedling_ignores_count != results[:counts][:ignored])
sanity_check_warning(results[:source][:file], "Unity's final ignore count (#{results[:counts][:ignored]}) does not match Ceedling's summation of ignored test cases (#{ceedling_ignores_count}).")
end
if (ceedling_failures_count != results[:counts][:failed])
sanity_check_warning(results[:source][:file], "Unity's final fail count (#{results[:counts][:failed]}) does not match Ceedling's summation of failed test cases (#{ceedling_failures_count}).")
end
if (ceedling_tests_summation != results[:counts][:total])
sanity_check_warning(results[:source][:file], "Unity's final test count (#{results[:counts][:total]}) does not match Ceedling's summation of all test cases (#{ceedling_tests_summation}).")
end
end
private
def sanity_check_warning(file, message)
unless defined?(CEEDLING_IGNORE_SANITY_CHECK)
notice = "\n" +
"ERROR: Internal sanity check for test fixture '#{file.ext(@configurator.extension_executable)}' finds that #{message}\n" +
" Possible causes:\n" +
" 1. Your test + source dereferenced a null pointer.\n" +
" 2. Your test + source indexed past the end of a buffer.\n" +
" 3. Your test + source committed a memory access violation.\n" +
" 4. Your test fixture produced an exit code of 0 despite execution ending prematurely.\n" +
" Sanity check failures of test results are usually a symptom of interrupted test execution.\n\n"
@streaminator.stderr_puts( notice )
raise
end
end
end

View File

@ -1,58 +0,0 @@
class GeneratorTestRunner
constructor :configurator, :file_path_utils, :file_wrapper
def find_test_cases(test_file)
#Pull in Unity's Test Runner Generator
require 'generate_test_runner.rb'
@test_runner_generator ||= UnityTestRunnerGenerator.new( @configurator.get_runner_config )
if (@configurator.project_use_test_preprocessor)
#redirect to use the preprocessor file if we're doing that sort of thing
pre_test_file = @file_path_utils.form_preprocessed_file_filepath(test_file)
#actually look for the tests using Unity's test runner generator
contents = @file_wrapper.read(pre_test_file)
tests_and_line_numbers = @test_runner_generator.find_tests(contents)
@test_runner_generator.find_setup_and_teardown(contents)
#look up the line numbers in the original file
source_lines = @file_wrapper.read(test_file).split("\n")
source_index = 0
tests_and_line_numbers.size.times do |i|
source_lines[source_index..-1].each_with_index do |line, index|
if (line =~ /#{tests_and_line_numbers[i][:test]}/)
source_index += index
tests_and_line_numbers[i][:line_number] = source_index + 1
break
end
end
end
else
#Just look for the tests using Unity's test runner generator
contents = @file_wrapper.read(test_file)
tests_and_line_numbers = @test_runner_generator.find_tests(contents)
@test_runner_generator.find_setup_and_teardown(contents)
end
return tests_and_line_numbers
end
def generate(module_name, runner_filepath, test_cases, mock_list, test_file_includes=[])
require 'generate_test_runner.rb'
header_extension = @configurator.extension_header
#actually build the test runner using Unity's test runner generator
#(there is no need to use preprocessor here because we've already looked up test cases and are passing them in here)
@test_runner_generator ||= UnityTestRunnerGenerator.new( @configurator.get_runner_config )
@test_runner_generator.generate( module_name,
runner_filepath,
test_cases,
mock_list.map{|f| File.basename(f,'.*')+header_extension},
test_file_includes.map{|f| File.basename(f,'.*')+header_extension})
end
end

View File

@ -1,31 +0,0 @@
class Loginator
constructor :configurator, :project_file_loader, :project_config_manager, :file_wrapper, :system_wrapper
def setup_log_filepath
config_files = []
config_files << @project_file_loader.main_file
config_files << @project_file_loader.user_file
config_files.concat( @project_config_manager.options_files )
config_files.compact!
config_files.map! { |file| file.ext('') }
log_name = config_files.join( '_' )
@project_log_filepath = File.join( @configurator.project_log_path, log_name.ext('.log') )
end
def log(string, heading=nil)
return if (not @configurator.project_logging)
output = "\n[#{@system_wrapper.time_now}]"
output += " :: #{heading}" if (not heading.nil?)
output += "\n#{string.strip}\n"
@file_wrapper.write(@project_log_filepath, output, 'a')
end
end

View File

@ -1,46 +0,0 @@
# modified version of Rake's provided make-style dependency loader
# customizations:
# (1) handles windows drives in paths -- colons don't confuse task demarcation
# (2) handles spaces in directory paths
module Rake
# Makefile loader to be used with the import file loader.
class MakefileLoader
# Load the makefile dependencies in +fn+.
def load(fn)
open(fn) do |mf|
lines = mf.read
lines.gsub!(/#[^\n]*\n/m, "") # remove comments
lines.gsub!(/\\\n/, ' ') # string together line continuations into single line
lines.split("\n").each do |line|
process_line(line)
end
end
end
private
# Process one logical line of makefile data.
def process_line(line)
# split on presence of task demarcator followed by space (i.e. don't get confused by a colon in a win path)
file_tasks, args = line.split(/:\s/)
return if args.nil?
# split at non-escaped space boundary between files (i.e. escaped spaces in paths are left alone)
dependents = args.split(/\b\s+/)
# replace escaped spaces and clean up any extra whitespace
dependents.map! { |path| path.gsub(/\\ /, ' ').strip }
file_tasks.strip.split.each do |file_task|
file file_task => dependents
end
end
end
# Install the handler
Rake.application.add_loader('mf', MakefileLoader.new)
end
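# Illustrative sketch, not part of the original file: the kind of make-style dependency line
# process_line() is written to survive -- a Windows drive colon plus an escaped space inside
# a path. Paths are invented.
line = 'c:/build/test_fifo.o: c:/src/fifo.c c:/inc/my\ headers/fifo.h'
file_tasks, args = line.split(/:\s/)
dependents = args.split(/\b\s+/).map { |path| path.gsub(/\\ /, ' ').strip }
# file_tasks => "c:/build/test_fifo.o"   (the "c:" drive colon did not break the split)
# dependents => ["c:/src/fifo.c", "c:/inc/my headers/fifo.h"]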

View File

@ -1,313 +0,0 @@
file_wrapper:
file_system_wrapper:
stream_wrapper:
rake_wrapper:
yaml_wrapper:
system_wrapper:
cmock_builder:
reportinator:
rake_utils:
compose:
- rake_wrapper
system_utils:
compose:
- system_wrapper
file_path_utils:
compose:
- configurator
- file_wrapper
file_system_utils:
compose: file_wrapper
project_file_loader:
compose:
- yaml_wrapper
- stream_wrapper
- system_wrapper
- file_wrapper
project_config_manager:
compose:
- cacheinator
- configurator
- yaml_wrapper
- file_wrapper
cacheinator:
compose:
- cacheinator_helper
- file_path_utils
- file_wrapper
- yaml_wrapper
cacheinator_helper:
compose:
- file_wrapper
- yaml_wrapper
tool_executor:
compose:
- configurator
- tool_executor_helper
- streaminator
- system_wrapper
tool_executor_helper:
compose:
- streaminator
- system_utils
- system_wrapper
configurator:
compose:
- configurator_setup
- configurator_plugins
- configurator_builder
- cmock_builder
- yaml_wrapper
- system_wrapper
configurator_setup:
compose:
- configurator_builder
- configurator_validator
- configurator_plugins
- stream_wrapper
configurator_plugins:
compose:
- stream_wrapper
- file_wrapper
- system_wrapper
configurator_validator:
compose:
- file_wrapper
- stream_wrapper
- system_wrapper
configurator_builder:
compose:
- file_system_utils
- file_wrapper
- system_wrapper
loginator:
compose:
- configurator
- project_file_loader
- project_config_manager
- file_wrapper
- system_wrapper
streaminator:
compose:
- streaminator_helper
- verbosinator
- loginator
- stream_wrapper
streaminator_helper:
setupinator:
plugin_builder:
plugin_manager:
compose:
- configurator
- plugin_manager_helper
- streaminator
- reportinator
- system_wrapper
plugin_manager_helper:
plugin_reportinator:
compose:
- plugin_reportinator_helper
- plugin_manager
- reportinator
plugin_reportinator_helper:
compose:
- configurator
- streaminator
- yaml_wrapper
- file_wrapper
verbosinator:
compose: configurator
file_finder:
compose:
- configurator
- file_finder_helper
- cacheinator
- file_path_utils
- file_wrapper
- yaml_wrapper
file_finder_helper:
compose: streaminator
test_includes_extractor:
compose:
- configurator
- yaml_wrapper
- file_wrapper
task_invoker:
compose:
- dependinator
- rake_utils
- rake_wrapper
- project_config_manager
flaginator:
compose:
- configurator
generator:
compose:
- configurator
- generator_helper
- preprocessinator
- cmock_builder
- generator_test_runner
- generator_test_results
- flaginator
- test_includes_extractor
- tool_executor
- file_finder
- file_path_utils
- streaminator
- plugin_manager
- file_wrapper
generator_helper:
compose:
- streaminator
generator_test_results:
compose:
- configurator
- generator_test_results_sanity_checker
- yaml_wrapper
generator_test_results_sanity_checker:
compose:
- configurator
- streaminator
generator_test_runner:
compose:
- configurator
- file_path_utils
- file_wrapper
dependinator:
compose:
- configurator
- project_config_manager
- test_includes_extractor
- file_path_utils
- rake_wrapper
- file_wrapper
preprocessinator:
compose:
- preprocessinator_helper
- preprocessinator_includes_handler
- preprocessinator_file_handler
- task_invoker
- file_path_utils
- yaml_wrapper
- project_config_manager
- configurator
preprocessinator_helper:
compose:
- configurator
- test_includes_extractor
- task_invoker
- file_finder
- file_path_utils
preprocessinator_includes_handler:
compose:
- configurator
- tool_executor
- task_invoker
- file_path_utils
- yaml_wrapper
- file_wrapper
- file_finder
preprocessinator_file_handler:
compose:
- preprocessinator_extractor
- configurator
- tool_executor
- file_path_utils
- file_wrapper
preprocessinator_extractor:
test_invoker:
compose:
- configurator
- test_invoker_helper
- plugin_manager
- streaminator
- preprocessinator
- task_invoker
- dependinator
- project_config_manager
- build_invoker_utils
- file_path_utils
- file_wrapper
test_invoker_helper:
compose:
- configurator
- task_invoker
- test_includes_extractor
- file_finder
- file_path_utils
- file_wrapper
release_invoker:
compose:
- configurator
- release_invoker_helper
- build_invoker_utils
- dependinator
- task_invoker
- file_path_utils
- file_wrapper
release_invoker_helper:
compose:
- configurator
- dependinator
- task_invoker
build_invoker_utils:
compose:
- configurator
- streaminator
erb_wrapper:
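# Illustrative sketch, not part of the original file: the map above is consumed by the DIY gem
# (see the rakefile further below) -- every key becomes a lazily built object and each 'compose'
# list names the collaborators injected through its constructor. The relative path assumes the
# working directory contains objects.yml.
require 'diy'
context = DIY::Context.from_yaml( File.read('objects.yml') )
context.build_everything
generator = context[:generator]   # a Generator wired up with all of its collaborators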

View File

@ -1,18 +0,0 @@
def par_map(n, things, &block)
queue = Queue.new
things.each { |thing| queue << thing }
threads = (1..n).collect do
Thread.new do
begin
while true
yield queue.pop(true)
end
rescue ThreadError
end
end
end
threads.each { |t| t.join }
end
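# Illustrative usage sketch, not part of the original file: par_map() fans work out to n
# threads that drain a shared Queue until Queue#pop(true) raises ThreadError on empty.
# The work done here is invented.
results = Queue.new
par_map(4, (1..10).to_a) { |n| results << n * n }
# results now holds the squares of 1..10, in whatever order the threads finished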

View File

@ -1,80 +0,0 @@
class String
# reformat a multiline string to have given number of whitespace columns;
# helpful for formatting heredocs
def left_margin(margin=0)
non_whitespace_column = 0
new_lines = []
# find first line with non-whitespace and count left columns of whitespace
self.each_line do |line|
if (line =~ /^\s*\S/)
non_whitespace_column = $&.length - 1
break
end
end
# iterate through each line, chopping off leftmost whitespace columns and add back the desired whitespace margin
self.each_line do |line|
columns = []
margin.times{columns << ' '}
# handle special case of line being narrower than width to be lopped off
if (non_whitespace_column < line.length)
new_lines << "#{columns.join}#{line[non_whitespace_column..-1]}"
else
new_lines << "\n"
end
end
return new_lines.join
end
end
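# Illustrative sketch, not part of the original file: left_margin() re-indents multiline text
# (e.g. a heredoc template) to a uniform margin.
text = "    line one\n      line two\n"
text.left_margin(2)   # => "  line one\n    line two\n"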
class Plugin
attr_reader :name, :environment
attr_accessor :plugin_objects
def initialize(system_objects, name)
@environment = []
@ceedling = system_objects
@name = name
self.setup
end
def setup; end
# mock generation
def pre_mock_generate(arg_hash); end
def post_mock_generate(arg_hash); end
# test runner generation
def pre_runner_generate(arg_hash); end
def post_runner_generate(arg_hash); end
# compilation (test or source)
def pre_compile_execute(arg_hash); end
def post_compile_execute(arg_hash); end
# linking (test or source)
def pre_link_execute(arg_hash); end
def post_link_execute(arg_hash); end
# test fixture execution
def pre_test_fixture_execute(arg_hash); end
def post_test_fixture_execute(arg_hash); end
# test task
def pre_test(test); end
def post_test(test); end
# release task
def pre_release; end
def post_release; end
# whole shebang (any use of Ceedling)
def pre_build; end
def post_build; end
def summary; end
end
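# Illustrative sketch, not part of the original file: build plugins subclass Plugin above and
# override whichever hooks they need. The hook names are fixed by the base class; the plugin
# class below is invented.
class BuildTimerPlugin < Plugin
  def setup
    @started = nil
  end
  def pre_build
    @started = Time.now
  end
  def post_build
    puts "Build took #{'%.1f' % (Time.now - @started)} s" if @started
  end
end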

View File

@ -1,53 +0,0 @@
require 'ceedling/plugin'
class PluginBuilder
attr_accessor :plugin_objects
def construct_plugin(plugin_name, object_map_yaml, system_objects)
# @streaminator.stdout_puts("Constructing plugin #{plugin_name}...", Verbosity::OBNOXIOUS)
object_map = {}
@plugin_objects = {}
@system_objects = system_objects
if object_map_yaml
@object_map = YAML.load(object_map_yaml)
@object_map.each_key do |obj|
construct_object(obj)
end
else
raise "Invalid object map for plugin #{plugin_name}!"
end
return @plugin_objects
end
private
def camelize(underscored_name)
return underscored_name.gsub(/(_|^)([a-z0-9])/) {$2.upcase}
end
def construct_object(obj)
if @plugin_objects[obj].nil?
if @object_map[obj] && @object_map[obj]['compose']
@object_map[obj]['compose'].each do |dep|
construct_object(dep)
end
end
build_object(obj)
end
end
def build_object(new_object)
if @plugin_objects[new_object.to_sym].nil?
# @streaminator.stdout_puts("Building plugin object #{new_object}", Verbosity::OBNOXIOUS)
require new_object
class_name = camelize(new_object)
new_instance = eval("#{class_name}.new(@system_objects, class_name.to_s)")
new_instance.plugin_objects = @plugin_objects
@plugin_objects[new_object.to_sym] = new_instance
end
end
end

View File

@ -1,107 +0,0 @@
require 'ceedling/constants'
class PluginManager
constructor :configurator, :plugin_manager_helper, :streaminator, :reportinator, :system_wrapper
def setup
@build_fail_registry = []
@plugin_objects = [] # so we can preserve order
end
def load_plugin_scripts(script_plugins, system_objects)
environment = []
script_plugins.each do |plugin|
# protect against instantiating object multiple times due to processing config multiple times (option files, etc)
next if (@plugin_manager_helper.include?(@plugin_objects, plugin))
begin
@system_wrapper.require_file( "#{plugin}.rb" )
object = @plugin_manager_helper.instantiate_plugin_script( camelize(plugin), system_objects, plugin )
@plugin_objects << object
environment += object.environment
# add plugins to hash of all system objects
system_objects[plugin.downcase.to_sym] = object
rescue
puts "Exception raised while trying to load plugin: #{plugin}"
raise
end
end
yield( { :environment => environment } ) if (environment.size > 0)
end
def plugins_failed?
return (@build_fail_registry.size > 0)
end
def print_plugin_failures
if (@build_fail_registry.size > 0)
report = @reportinator.generate_banner('BUILD FAILURE SUMMARY')
@build_fail_registry.each do |failure|
report += "#{' - ' if (@build_fail_registry.size > 1)}#{failure}\n"
end
report += "\n"
@streaminator.stderr_puts(report, Verbosity::ERRORS)
end
end
def register_build_failure(message)
@build_fail_registry << message if (message and not message.empty?)
end
#### execute all plugin methods ####
def pre_mock_generate(arg_hash); execute_plugins(:pre_mock_generate, arg_hash); end
def post_mock_generate(arg_hash); execute_plugins(:post_mock_generate, arg_hash); end
def pre_runner_generate(arg_hash); execute_plugins(:pre_runner_generate, arg_hash); end
def post_runner_generate(arg_hash); execute_plugins(:post_runner_generate, arg_hash); end
def pre_compile_execute(arg_hash); execute_plugins(:pre_compile_execute, arg_hash); end
def post_compile_execute(arg_hash); execute_plugins(:post_compile_execute, arg_hash); end
def pre_link_execute(arg_hash); execute_plugins(:pre_link_execute, arg_hash); end
def post_link_execute(arg_hash); execute_plugins(:post_link_execute, arg_hash); end
def pre_test_fixture_execute(arg_hash); execute_plugins(:pre_test_fixture_execute, arg_hash); end
def post_test_fixture_execute(arg_hash)
# special arbitration: raw test results are printed or taken over by plugins handling the job
@streaminator.stdout_puts(arg_hash[:shell_result][:output]) if (@configurator.plugins_display_raw_test_results)
execute_plugins(:post_test_fixture_execute, arg_hash)
end
def pre_test(test); execute_plugins(:pre_test, test); end
def post_test(test); execute_plugins(:post_test, test); end
def pre_release; execute_plugins(:pre_release); end
def post_release; execute_plugins(:post_release); end
def pre_build; execute_plugins(:pre_build); end
def post_build; execute_plugins(:post_build); end
def post_error; execute_plugins(:post_error); end
def summary; execute_plugins(:summary); end
private ####################################
def camelize(underscored_name)
return underscored_name.gsub(/(_|^)([a-z0-9])/) {$2.upcase}
end
def execute_plugins(method, *args)
@plugin_objects.each do |plugin|
begin
plugin.send(method, *args) if plugin.respond_to?(method)
rescue
puts "Exception raised in plugin: #{plugin.name}, in method #{method}"
raise
end
end
end
end

View File

@ -1,19 +0,0 @@
class PluginManagerHelper
def include?(plugins, name)
include = false
plugins.each do |plugin|
if (plugin.name == name)
include = true
break
end
end
return include
end
def instantiate_plugin_script(plugin, system_objects, name)
return eval("#{plugin}.new(system_objects, name)")
end
end

View File

@ -1,76 +0,0 @@
require 'ceedling/constants'
require 'ceedling/defaults'
class PluginReportinator
constructor :plugin_reportinator_helper, :plugin_manager, :reportinator
def setup
@test_results_template = nil
end
def set_system_objects(system_objects)
@plugin_reportinator_helper.ceedling = system_objects
end
def fetch_results(results_path, test, options={:boom => false})
return @plugin_reportinator_helper.fetch_results( File.join(results_path, test), options )
end
def generate_banner(message)
return @reportinator.generate_banner(message)
end
def assemble_test_results(results_list, options={:boom => false})
aggregated_results = get_results_structure
results_list.each do |result_path|
results = @plugin_reportinator_helper.fetch_results( result_path, options )
@plugin_reportinator_helper.process_results(aggregated_results, results)
end
return aggregated_results
end
def register_test_results_template(template)
@test_results_template = template if (@test_results_template.nil?)
end
def run_test_results_report(hash, verbosity=Verbosity::NORMAL, &block)
run_report( $stdout,
((@test_results_template.nil?) ? DEFAULT_TESTS_RESULTS_REPORT_TEMPLATE : @test_results_template),
hash,
verbosity,
&block )
end
def run_report(stream, template, hash=nil, verbosity=Verbosity::NORMAL)
failure = nil
failure = yield() if block_given?
@plugin_manager.register_build_failure( failure )
@plugin_reportinator_helper.run_report( stream, template, hash, verbosity )
end
private ###############################
def get_results_structure
return {
:successes => [],
:failures => [],
:ignores => [],
:stdout => [],
:counts => {:total => 0, :passed => 0, :failed => 0, :ignored => 0, :stdout => 0},
:time => 0.0
}
end
end

View File

@ -1,51 +0,0 @@
require 'erb'
require 'rubygems'
require 'rake' # for ext()
require 'ceedling/constants'
class PluginReportinatorHelper
attr_writer :ceedling
constructor :configurator, :streaminator, :yaml_wrapper, :file_wrapper
def fetch_results(results_path, options)
pass_path = File.join(results_path.ext( @configurator.extension_testpass ))
fail_path = File.join(results_path.ext( @configurator.extension_testfail ))
if (@file_wrapper.exist?(fail_path))
return @yaml_wrapper.load(fail_path)
elsif (@file_wrapper.exist?(pass_path))
return @yaml_wrapper.load(pass_path)
else
if (options[:boom])
@streaminator.stderr_puts("Could find no test results for '#{File.basename(results_path).ext(@configurator.extension_source)}'", Verbosity::ERRORS)
raise
end
end
return {}
end
def process_results(aggregate_results, results)
return if (results.empty?)
aggregate_results[:successes] << { :source => results[:source].clone, :collection => results[:successes].clone } if (results[:successes].size > 0)
aggregate_results[:failures] << { :source => results[:source].clone, :collection => results[:failures].clone } if (results[:failures].size > 0)
aggregate_results[:ignores] << { :source => results[:source].clone, :collection => results[:ignores].clone } if (results[:ignores].size > 0)
aggregate_results[:stdout] << { :source => results[:source].clone, :collection => results[:stdout].clone } if (results[:stdout].size > 0)
aggregate_results[:counts][:total] += results[:counts][:total]
aggregate_results[:counts][:passed] += results[:counts][:passed]
aggregate_results[:counts][:failed] += results[:counts][:failed]
aggregate_results[:counts][:ignored] += results[:counts][:ignored]
aggregate_results[:counts][:stdout] += results[:stdout].size
aggregate_results[:time] += results[:time]
end
def run_report(stream, template, hash, verbosity)
output = ERB.new(template, 0, "%<>")
@streaminator.stream_puts(stream, output.result(binding()), verbosity)
end
end

View File

@ -1,56 +0,0 @@
class Preprocessinator
constructor :preprocessinator_helper, :preprocessinator_includes_handler, :preprocessinator_file_handler, :task_invoker, :file_path_utils, :yaml_wrapper, :project_config_manager, :configurator
def setup
# fashion ourselves callbacks @preprocessinator_helper can use
@preprocess_includes_proc = Proc.new { |filepath| self.preprocess_shallow_includes(filepath) }
@preprocess_mock_file_proc = Proc.new { |filepath| self.preprocess_file(filepath) }
@preprocess_test_file_directives_proc = Proc.new { |filepath| self.preprocess_file_directives(filepath) }
@preprocess_test_file_proc = Proc.new { |filepath| self.preprocess_file(filepath) }
end
def preprocess_shallow_source_includes(test)
@preprocessinator_helper.preprocess_source_includes(test)
end
def preprocess_test_and_invoke_test_mocks(test)
@preprocessinator_helper.preprocess_includes(test, @preprocess_includes_proc)
mocks_list = @preprocessinator_helper.assemble_mocks_list(test)
@project_config_manager.process_test_defines_change(mocks_list)
@preprocessinator_helper.preprocess_mockable_headers(mocks_list, @preprocess_mock_file_proc)
@task_invoker.invoke_test_mocks(mocks_list)
if (@configurator.project_use_preprocessor_directives)
@preprocessinator_helper.preprocess_test_file(test, @preprocess_test_file_directives_proc)
else
@preprocessinator_helper.preprocess_test_file(test, @preprocess_test_file_proc)
end
return mocks_list
end
def preprocess_shallow_includes(filepath)
includes = @preprocessinator_includes_handler.extract_includes(filepath)
@preprocessinator_includes_handler.write_shallow_includes_list(
@file_path_utils.form_preprocessed_includes_list_filepath(filepath), includes)
end
def preprocess_file(filepath)
@preprocessinator_includes_handler.invoke_shallow_includes_list(filepath)
@preprocessinator_file_handler.preprocess_file( filepath, @yaml_wrapper.load(@file_path_utils.form_preprocessed_includes_list_filepath(filepath)) )
end
def preprocess_file_directives(filepath)
@preprocessinator_includes_handler.invoke_shallow_includes_list( filepath )
@preprocessinator_file_handler.preprocess_file_directives( filepath,
@yaml_wrapper.load( @file_path_utils.form_preprocessed_includes_list_filepath( filepath ) ) )
end
end

View File

@ -1,55 +0,0 @@
class PreprocessinatorExtractor
def extract_base_file_from_preprocessed_expansion(filepath)
# preprocessing by way of toolchain preprocessor expands macros, eliminates
# comments, strips out #ifdef code, etc. however, it also expands in place
# each #include'd file. so, we must extract only the lines of the file
# that belong to the file originally preprocessed
# iterate through all lines and alternate between extract and ignore modes
# all lines between a '#'line containing file name of our filepath and the
# next '#'line should be extracted
base_name = File.basename(filepath)
not_pragma = /^#(?!pragma\b)/ # preprocessor directive that's not a #pragma
pattern = /^#.*(\s|\/|\\|\")#{Regexp.escape(base_name)}/
found_file = false # have we found the file we care about?
lines = []
File.readlines(filepath).each do |line|
line.encode!('UTF-8', 'binary', invalid: :replace, undef: :replace, replace: '')
if found_file and not line =~ not_pragma
lines << line
else
found_file = false
end
found_file = true if line =~ pattern
end
return lines
end
def extract_base_file_from_preprocessed_directives(filepath)
# preprocessing by way of the toolchain preprocessor eliminates only directives
# like #ifdefs and leaves the other code intact
# iterate through all lines and keep only the last chunk of the file, i.e. everything
# after the last '#' line marker containing the file name of our filepath
base_name = File.basename(filepath)
pattern = /^#.*(\s|\/|\\|\")#{Regexp.escape(base_name)}/
found_file = false # have we found the file we care about?
lines = []
File.readlines(filepath).each do |line|
line.encode!('UTF-8', 'binary', invalid: :replace, undef: :replace, replace: '')
lines << line
if line =~ pattern
lines = []
end
end
return lines
end
end
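# Illustrative sketch, not part of the original file: the shape of preprocessor output that
# extract_base_file_from_preprocessed_expansion() walks. '#' line markers name the file each
# chunk came from, and only the chunk following the marker for our own file (here
# "test_fifo.c") is kept. Contents are invented.
preprocessed = [
  '# 1 "inc/fifo.h"',
  'void fifo_init(void);',
  '# 5 "test_fifo.c"',
  'void test_fifo_init(void) { }',
  '# 1 "inc/board.h"',
  'int board_id(void);'
]
# extraction result for "test_fifo.c":
#   ['void test_fifo_init(void) { }']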

View File

@ -1,34 +0,0 @@
class PreprocessinatorFileHandler
constructor :preprocessinator_extractor, :configurator, :tool_executor, :file_path_utils, :file_wrapper
def preprocess_file(filepath, includes)
preprocessed_filepath = @file_path_utils.form_preprocessed_file_filepath(filepath)
command = @tool_executor.build_command_line(@configurator.tools_test_file_preprocessor, [], filepath, preprocessed_filepath)
@tool_executor.exec(command[:line], command[:options])
contents = @preprocessinator_extractor.extract_base_file_from_preprocessed_expansion(preprocessed_filepath)
includes.each{|include| contents.unshift("#include \"#{include}\"")}
@file_wrapper.write(preprocessed_filepath, contents.join("\n"))
end
def preprocess_file_directives(filepath, includes)
preprocessed_filepath = @file_path_utils.form_preprocessed_file_filepath(filepath)
command = @tool_executor.build_command_line(@configurator.tools_test_file_preprocessor_directives, [], filepath, preprocessed_filepath)
@tool_executor.exec(command[:line], command[:options])
contents = @preprocessinator_extractor.extract_base_file_from_preprocessed_directives(preprocessed_filepath)
includes.each{|include| contents.unshift("#include \"#{include}\"")}
@file_wrapper.write(preprocessed_filepath, contents.join("\n"))
end
end

View File

@ -1,50 +0,0 @@
class PreprocessinatorHelper
constructor :configurator, :test_includes_extractor, :task_invoker, :file_finder, :file_path_utils
def preprocess_includes(test, preprocess_includes_proc)
if (@configurator.project_use_test_preprocessor)
preprocessed_includes_list = @file_path_utils.form_preprocessed_includes_list_filepath(test)
preprocess_includes_proc.call( @file_finder.find_test_from_file_path(preprocessed_includes_list) )
@test_includes_extractor.parse_includes_list(preprocessed_includes_list)
else
@test_includes_extractor.parse_test_file(test)
end
end
def preprocess_source_includes(test)
@test_includes_extractor.parse_test_file_source_include(test)
end
def assemble_mocks_list(test)
return @file_path_utils.form_mocks_source_filelist( @test_includes_extractor.lookup_raw_mock_list(test) )
end
def preprocess_mockable_headers(mock_list, preprocess_file_proc)
if (@configurator.project_use_test_preprocessor)
preprocess_files_smartly(
@file_path_utils.form_preprocessed_mockable_headers_filelist(mock_list),
preprocess_file_proc ) { |file| @file_finder.find_header_file(file) }
end
end
def preprocess_test_file(test, preprocess_file_proc)
return if (!@configurator.project_use_test_preprocessor)
preprocess_file_proc.call(test)
end
private ############################
def preprocess_files_smartly(file_list, preprocess_file_proc)
if (@configurator.project_use_deep_dependencies)
@task_invoker.invoke_test_preprocessed_files(file_list)
else
file_list.each { |file| preprocess_file_proc.call( yield(file) ) }
end
end
end

View File

@ -1,189 +0,0 @@
class PreprocessinatorIncludesHandler
constructor :configurator, :tool_executor, :task_invoker, :file_path_utils, :yaml_wrapper, :file_wrapper, :file_finder
@@makefile_cache = {}
# shallow includes: only those headers a source file explicitly includes
def invoke_shallow_includes_list(filepath)
@task_invoker.invoke_test_shallow_include_lists( [@file_path_utils.form_preprocessed_includes_list_filepath(filepath)] )
end
##
# Ask the preprocessor for a make-style dependency rule of only the headers
# the source file immediately includes.
#
# === Arguments
# +filepath+ _String_:: Path to the test file to process.
#
# === Return
# _String_:: The text of the dependency rule generated by the preprocessor.
def form_shallow_dependencies_rule(filepath)
if @@makefile_cache.has_key?(filepath)
return @@makefile_cache[filepath]
end
# change filename (prefix of '_') to prevent preprocessor from finding
# include files in temp directory containing file it's scanning
temp_filepath = @file_path_utils.form_temp_path(filepath, '_')
# read the file and replace all include statements with a decorated version
# (decorating the names creates file names that don't exist, thus preventing
# the preprocessor from snaking out and discovering the entire include path
# that winds through the code). The decorated filenames indicate files that
# are included directly by the test file.
contents = @file_wrapper.read(filepath)
if !contents.valid_encoding?
contents = contents.encode("UTF-16be", :invalid=>:replace, :replace=>"?").encode('UTF-8')
end
contents.gsub!( /^\s*#include\s+[\"<]\s*(\S+)\s*[\">]/, "#include \"\\1\"\n#include \"@@@@\\1\"" )
contents.gsub!( /^\s*TEST_FILE\(\s*\"\s*(\S+)\s*\"\s*\)/, "#include \"\\1\"\n#include \"@@@@\\1\"")
@file_wrapper.write( temp_filepath, contents )
# extract the make-style dependency rule telling the preprocessor to
# ignore the fact that it can't find the included files
command = @tool_executor.build_command_line(@configurator.tools_test_includes_preprocessor, [], temp_filepath)
shell_result = @tool_executor.exec(command[:line], command[:options])
@@makefile_cache[filepath] = shell_result[:output]
return shell_result[:output]
end
##
# Extract the headers that are directly included by a source file using the
# provided, annotated Make dependency rule.
#
# === Arguments
# +filepath+ _String_:: C source or header file to extract includes for.
#
# === Return
# _Array_ of _String_:: Array of the direct dependencies for the source file.
def extract_includes(filepath)
to_process = [filepath]
ignore_list = []
list = []
all_mocks = []
include_paths = @configurator.project_config_hash[:collection_paths_include]
include_paths = [] if include_paths.nil?
include_paths.map! {|path| File.expand_path(path)}
while to_process.length > 0
target = to_process.shift()
ignore_list << target
new_deps, new_to_process, all_mocks = extract_includes_helper(target, include_paths, ignore_list, all_mocks)
list += new_deps
to_process += new_to_process
if !@configurator.project_config_hash[:project_auto_link_deep_dependencies]
break
else
list = list.uniq()
to_process = to_process.uniq()
end
end
return list
end
def extract_includes_helper(filepath, include_paths, ignore_list, mocks)
# Extract the dependencies from the make rule
make_rule = self.form_shallow_dependencies_rule(filepath)
target_file = make_rule.split[0].gsub(':', '').gsub('\\','/')
base = File.basename(target_file, File.extname(target_file))
make_rule_dependencies = make_rule.gsub(/.*\b#{Regexp.escape(base)}\S*/, '').gsub(/\\$/, '')
# Extract the headers dependencies from the make rule
hdr_ext = @configurator.extension_header
headers_dependencies = make_rule_dependencies.split.find_all {|path| path.end_with?(hdr_ext) }.uniq
headers_dependencies.map! {|hdr| hdr.gsub('\\','/') }
full_path_headers_dependencies = extract_full_path_dependencies(headers_dependencies)
# Extract the sources dependencies from the make rule
src_ext = @configurator.extension_source
sources_dependencies = make_rule_dependencies.split.find_all {|path| path.end_with?(src_ext) }.uniq
sources_dependencies.map! {|src| src.gsub('\\','/') }
full_path_sources_dependencies = extract_full_path_dependencies(sources_dependencies)
list = full_path_headers_dependencies + full_path_sources_dependencies
mock_prefix = @configurator.project_config_hash[:cmock_mock_prefix]
# Creating list of mocks
mocks += full_path_headers_dependencies.find_all do |header|
File.basename(header) =~ /^#{mock_prefix}.*$/
end.compact
# ignore real file when both mock and real file exist
mocks.each do |mock|
list.each do |filename|
if File.basename(filename) == File.basename(mock).sub(mock_prefix, '')
ignore_list << filename
end
end
end.compact
# Filtering list of final includes to only include mocks and anything that is NOT in the ignore_list
list = list.select do |item|
mocks.include? item or !(ignore_list.any? { |ignore_item| !item.match(/^(.*\/)?#{Regexp.escape(ignore_item)}$/).nil? })
end
to_process = []
if @configurator.project_config_hash[:project_auto_link_deep_dependencies]
# Creating list of headers that should be recursively pre-processed
# Skipping mocks and vendor headers
headers_to_deep_link = full_path_headers_dependencies.select do |hdr|
!(mocks.include? hdr) and (hdr.match(/^(.*\/)(#{VENDORS_FILES.join('|')}) + #{Regexp.escape(hdr_ext)}$/).nil?)
end
headers_to_deep_link.map! {|hdr| File.expand_path(hdr) }
headers_to_deep_link.compact!
headers_to_deep_link.each do |hdr|
if (ignore_list.none? {|ignore_header| hdr.match(/^(.*\/)?#{Regexp.escape(ignore_header)}$/)} and
include_paths.none? {|include_path| hdr =~ /^#{include_path}\.*/})
if File.exist?(hdr)
to_process << hdr
src = @file_finder.find_compilation_input_file(hdr, :ignore)
to_process << src if src
end
end
end
end
return list, to_process, mocks
end
def write_shallow_includes_list(filepath, list)
@yaml_wrapper.dump(filepath, list)
end
private
def extract_full_path_dependencies(dependencies)
# Separate the real files from the annotated ones and remove the '@@@@'
annotated_files, real_files = dependencies.partition {|file| file =~ /^@@@@/}
annotated_files.map! {|file| file.gsub('@@@@','') }
# Matching annotated_files values against real_files to ensure that
# annotated_files contain full path entries (as returned by make rule)
annotated_files.map! {|file| real_files.find {|real| !real.match(/^(.*\/)?#{Regexp.escape(file)}$/).nil?}}
annotated_files = annotated_files.compact
# Find which of our annotated files are "real" dependencies. This is
# intended to weed out dependencies that have been removed due to build
# options defined in the project yaml and/or in the files themselves.
return annotated_files.find_all do |annotated_file|
# find the index of the "real" file that matches the annotated one.
idx = real_files.find_index do |real_file|
real_file =~ /^(.*\/)?#{Regexp.escape(annotated_file)}$/
end
# If we found a real file, delete it from the array and return it,
# otherwise return nil. Since nil is falsy this has the effect of making
# find_all return only the annotated files for which a real file was
# found/deleted
idx ? real_files.delete_at(idx) : nil
end.compact
end
end
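# Illustrative sketch, not part of the original file: the include decoration that
# form_shallow_dependencies_rule() applies before asking the preprocessor for a make rule.
# The header name is invented.
src = '#include "board.h"'
src.gsub( /^\s*#include\s+[\"<]\s*(\S+)\s*[\">]/, "#include \"\\1\"\n#include \"@@@@\\1\"" )
# => "#include \"board.h\"\n#include \"@@@@board.h\""
# The nonexistent "@@@@board.h" keeps the preprocessor from expanding the real include chain,
# so the generated rule names only direct dependencies, which extract_full_path_dependencies()
# later matches back ('@@@@' stripped) against the real paths in the rule.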

View File

@ -1,52 +0,0 @@
require 'ceedling/constants'
class ProjectConfigManager
attr_reader :options_files, :release_config_changed, :test_config_changed, :test_defines_changed
attr_accessor :config_hash
constructor :cacheinator, :configurator, :yaml_wrapper, :file_wrapper
def setup
@options_files = []
@release_config_changed = false
@test_config_changed = false
@test_defines_changed = false
end
def merge_options(config_hash, option_filepath)
@options_files << File.basename( option_filepath )
config_hash.deep_merge!( @yaml_wrapper.load( option_filepath ) )
end
def filter_internal_sources(sources)
filtered_sources = sources.clone
filtered_sources.delete_if { |item| item =~ /#{CMOCK_MOCK_PREFIX}.+#{Regexp.escape(EXTENSION_SOURCE)}$/ }
filtered_sources.delete_if { |item| item =~ /#{VENDORS_FILES.map{|source| '\b' + Regexp.escape(source.ext(EXTENSION_SOURCE)) + '\b'}.join('|')}$/ }
return filtered_sources
end
def process_release_config_change
# has project configuration changed since last release build
@release_config_changed = @cacheinator.diff_cached_release_config?( @config_hash )
end
def process_test_config_change
# has project configuration changed since last test build
@test_config_changed = @cacheinator.diff_cached_test_config?( @config_hash )
end
def process_test_defines_change(files)
# have test defines changed since last test build
@test_defines_changed = @cacheinator.diff_cached_test_defines?( files )
if @test_defines_changed
# update timestamp for rake task prerequisites
@file_wrapper.touch( @configurator.project_test_force_rebuild_filepath, :mtime => Time.now + 10 )
end
end
end

View File

@ -1,99 +0,0 @@
require 'ceedling/constants'
class ProjectFileLoader
attr_reader :main_file, :user_file
constructor :yaml_wrapper, :stream_wrapper, :system_wrapper, :file_wrapper
def setup
@main_file = nil
@mixin_files = []
@user_file = nil
@main_project_filepath = ''
@mixin_project_filepaths = []
@user_project_filepath = ''
end
def find_project_files
# first go hunting for optional user project file by looking for environment variable and then default location on disk
user_filepath = @system_wrapper.env_get('CEEDLING_USER_PROJECT_FILE')
if ( not user_filepath.nil? and @file_wrapper.exist?(user_filepath) )
@user_project_filepath = user_filepath
elsif (@file_wrapper.exist?(DEFAULT_CEEDLING_USER_PROJECT_FILE))
@user_project_filepath = DEFAULT_CEEDLING_USER_PROJECT_FILE
end
# next check for mixin project files by looking for environment variable
mixin_filepaths = @system_wrapper.env_get('CEEDLING_MIXIN_PROJECT_FILES')
if ( not mixin_filepaths.nil? )
mixin_filepaths.split(File::PATH_SEPARATOR).each do |filepath|
if ( @file_wrapper.exist?(filepath) )
@mixin_project_filepaths.push(filepath)
end
end
end
# next check for main project file by looking for environment variable and then default location on disk;
# blow up if we don't find this guy -- like, he's so totally important
main_filepath = @system_wrapper.env_get('CEEDLING_MAIN_PROJECT_FILE')
if ( not main_filepath.nil? and @file_wrapper.exist?(main_filepath) )
@main_project_filepath = main_filepath
elsif (@file_wrapper.exist?(DEFAULT_CEEDLING_MAIN_PROJECT_FILE))
@main_project_filepath = DEFAULT_CEEDLING_MAIN_PROJECT_FILE
else
# no verbosity checking since this is lowest level reporting anyhow &
# verbosity checking depends on configurator which in turn needs this class (circular dependency)
@stream_wrapper.stderr_puts('Found no Ceedling project file (*.yml)')
raise
end
@main_file = File.basename( @main_project_filepath )
@mixin_project_filepaths.each do |filepath|
@mixin_files.push(File.basename( filepath ))
end
@user_file = File.basename( @user_project_filepath ) if ( not @user_project_filepath.empty? )
end
def yaml_merger(y1, y2)
o1 = y1
y2.each_pair do |k,v|
if o1[k].nil?
o1[k] = v
else
if (o1[k].instance_of? Hash)
o1[k] = yaml_merger(o1[k], v)
elsif (o1[k].instance_of? Array)
o1[k] += v
else
o1[k] = v
end
end
end
return o1
end
def load_project_config
config_hash = @yaml_wrapper.load(@main_project_filepath)
# if there are mixin project files, then use them
@mixin_project_filepaths.each do |filepath|
mixin = @yaml_wrapper.load(filepath)
config_hash = yaml_merger( config_hash, mixin )
end
# if there's a user project file, then use it
if ( not @user_project_filepath.empty? )
user_hash = @yaml_wrapper.load(@user_project_filepath)
config_hash = yaml_merger( config_hash, user_hash )
end
return config_hash
end
end
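# Illustrative sketch, not part of the original file: yaml_merger() deep-merges hashes,
# concatenates arrays, and lets the later file win for scalar values. Config keys and values
# here are invented.
base = { :project => { :test_threads => 1 }, :plugins => { :enabled => ['gcov'] } }
user = { :project => { :test_threads => 8 }, :plugins => { :enabled => ['beep'] } }
# yaml_merger(base, user)
# => { :project => { :test_threads => 8 }, :plugins => { :enabled => ['gcov', 'beep'] } }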

View File

@ -1,17 +0,0 @@
class RakeUtils
constructor :rake_wrapper
def task_invoked?(task_regex)
task_invoked = false
@rake_wrapper.task_list.each do |task|
if ((task.already_invoked) and (task.to_s =~ task_regex))
task_invoked = true
break
end
end
return task_invoked
end
end

View File

@ -1,33 +0,0 @@
require 'rubygems'
require 'rake'
require 'ceedling/makefile' # our replacement for rake's make-style dependency loader
include Rake::DSL if defined?(Rake::DSL)
class Rake::Task
attr_reader :already_invoked
end
class RakeWrapper
def initialize
@makefile_loader = Rake::MakefileLoader.new # use our custom replacement noted above
end
def [](task)
return Rake::Task[task]
end
def task_list
return Rake::Task.tasks
end
def create_file_task(file_task, dependencies)
file(file_task => dependencies)
end
def load_dependencies(dependencies_path)
@makefile_loader.load(dependencies_path)
end
end

View File

@ -1,85 +0,0 @@
require 'fileutils'
# get directory containing this here file, back up two directories, and expand to full path
CEEDLING_ROOT = File.expand_path(File.dirname(__FILE__) + '/../..')
CEEDLING_LIB = File.join(CEEDLING_ROOT, 'lib')
CEEDLING_VENDOR = File.join(CEEDLING_ROOT, 'vendor')
CEEDLING_RELEASE = File.join(CEEDLING_ROOT, 'release')
$LOAD_PATH.unshift( CEEDLING_LIB )
$LOAD_PATH.unshift( File.join(CEEDLING_VENDOR, 'unity/auto') )
$LOAD_PATH.unshift( File.join(CEEDLING_VENDOR, 'diy/lib') )
$LOAD_PATH.unshift( File.join(CEEDLING_VENDOR, 'cmock/lib') )
require 'rake'
#Let's make sure we remember the task descriptions in case we need them
Rake::TaskManager.record_task_metadata = true
require 'diy'
require 'constructor'
require 'ceedling/constants'
require 'ceedling/target_loader'
# construct all our objects
# ensure load path contains all libraries needed first
lib_ceedling_load_path_temp = File.join(CEEDLING_LIB, 'ceedling')
$LOAD_PATH.unshift( lib_ceedling_load_path_temp )
@ceedling = DIY::Context.from_yaml( File.read( File.join(lib_ceedling_load_path_temp, 'objects.yml') ) )
@ceedling.build_everything
# now that all objects are built, delete 'lib/ceedling' from load path
$LOAD_PATH.delete(lib_ceedling_load_path_temp)
# one-stop shopping for all our setup and such after construction
@ceedling[:setupinator].ceedling = @ceedling
project_config =
begin
cfg = @ceedling[:setupinator].load_project_files
TargetLoader.inspect(cfg, ENV['TARGET'])
rescue TargetLoader::NoTargets
cfg
rescue TargetLoader::RequestReload
@ceedling[:setupinator].load_project_files
end
@ceedling[:setupinator].do_setup( project_config )
# tell all our plugins we're about to do something
@ceedling[:plugin_manager].pre_build
# load rakefile component files (*.rake)
PROJECT_RAKEFILE_COMPONENT_FILES.each { |component| load(component) }
# tell rake to shut up by default (overridden in verbosity / debug tasks as appropriate)
verbose(false)
# end block always executed following rake run
END {
$stdout.flush unless $stdout.nil?
$stderr.flush unless $stderr.nil?
# cache our input configurations to use in comparison upon next execution
@ceedling[:cacheinator].cache_test_config( @ceedling[:setupinator].config_hash ) if (@ceedling[:task_invoker].test_invoked?)
@ceedling[:cacheinator].cache_release_config( @ceedling[:setupinator].config_hash ) if (@ceedling[:task_invoker].release_invoked?)
# delete all temp files unless we're in debug mode
if (not @ceedling[:configurator].project_debug)
@ceedling[:file_wrapper].rm_f( @ceedling[:file_wrapper].directory_listing( File.join(@ceedling[:configurator].project_temp_path, '*') ))
end
# only perform these final steps if we got here without runtime exceptions or errors
if (@ceedling[:system_wrapper].ruby_success)
# tell all our plugins the build is done and process results
@ceedling[:plugin_manager].post_build
@ceedling[:plugin_manager].print_plugin_failures
exit(1) if (@ceedling[:plugin_manager].plugins_failed? && !@ceedling[:setupinator].config_hash[:graceful_fail])
else
puts "ERROR: Ceedling Failed"
@ceedling[:plugin_manager].post_error
end
}

View File

@ -1,98 +0,0 @@
require 'ceedling/constants'
class ReleaseInvoker
constructor :configurator, :release_invoker_helper, :build_invoker_utils, :dependinator, :task_invoker, :file_path_utils, :file_wrapper
def setup_and_invoke_c_objects( c_files )
objects = @file_path_utils.form_release_build_c_objects_filelist( c_files )
begin
@release_invoker_helper.process_deep_dependencies( @file_path_utils.form_release_dependencies_filelist( c_files ) )
@dependinator.enhance_release_file_dependencies( objects )
@task_invoker.invoke_release_objects( objects )
rescue => e
@build_invoker_utils.process_exception( e, RELEASE_SYM, false )
end
return objects
end
def setup_and_invoke_asm_objects( asm_files )
objects = @file_path_utils.form_release_build_asm_objects_filelist( asm_files )
begin
@dependinator.enhance_release_file_dependencies( objects )
@task_invoker.invoke_release_objects( objects )
rescue => e
@build_invoker_utils.process_exception( e, RELEASE_SYM, false )
end
return objects
end
def refresh_c_deep_dependencies
return if (not @configurator.project_use_deep_dependencies)
@file_wrapper.rm_f(
@file_wrapper.directory_listing(
File.join( @configurator.project_release_dependencies_path, '*' + @configurator.extension_dependencies ) ) )
@release_invoker_helper.process_deep_dependencies(
@file_path_utils.form_release_dependencies_filelist(
@configurator.collection_all_source ) )
end
def artifactinate( *files )
files.flatten.each do |file|
@file_wrapper.cp( file, @configurator.project_release_artifacts_path ) if @file_wrapper.exist?( file )
end
end
def convert_libraries_to_arguments(libraries)
args = ((libraries || []) + ((defined? LIBRARIES_SYSTEM) ? LIBRARIES_SYSTEM : [])).flatten
if (defined? LIBRARIES_FLAG)
args.map! {|v| LIBRARIES_FLAG.gsub(/\$\{1\}/, v) }
end
return args
end
def get_library_paths_to_arguments()
paths = (defined? PATHS_LIBRARIES) ? (PATHS_LIBRARIES || []).clone : []
if (defined? LIBRARIES_PATH_FLAG)
paths.map! {|v| LIBRARIES_PATH_FLAG.gsub(/\$\{1\}/, v) }
end
return paths
end
def sort_objects_and_libraries(both)
extension = if ((defined? EXTENSION_SUBPROJECTS) && (defined? EXTENSION_LIBRARIES))
extension_libraries = if (EXTENSION_LIBRARIES.class == Array)
EXTENSION_LIBRARIES.join(")|(?:\\")
else
EXTENSION_LIBRARIES
end
"(?:\\#{EXTENSION_SUBPROJECTS})|(?:\\#{extension_libraries})"
elsif (defined? EXTENSION_SUBPROJECTS)
"\\#{EXTENSION_SUBPROJECTS}"
elsif (defined? EXTENSION_LIBRARIES)
if (EXTENSION_LIBRARIES.class == Array)
"(?:\\#{EXTENSION_LIBRARIES.join(")|(?:\\")})"
else
"\\#{EXTENSION_LIBRARIES}"
end
else
"\\.LIBRARY"
end
sorted_objects = both.group_by {|v| v.match(/.+#{extension}$/) ? :libraries : :objects }
libraries = sorted_objects[:libraries] || []
objects = sorted_objects[:objects] || []
return objects, libraries
end
end
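A rough sketch of how convert_libraries_to_arguments above turns library names into linker arguments via the ${1} placeholder; LIBRARIES_FLAG and the library names below are hypothetical values a project file might define.
# Sketch: expanding a per-library flag template the same way the gsub above does.
LIBRARIES_FLAG = '-l${1}'            # hypothetical flag template
libraries      = ['m', 'pthread']    # hypothetical library list

args = libraries.map { |lib| LIBRARIES_FLAG.gsub(/\$\{1\}/, lib) }
puts args.join(' ')                  # => -lm -lpthread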

View File

@ -1,19 +0,0 @@
class ReleaseInvokerHelper
constructor :configurator, :dependinator, :task_invoker
def process_deep_dependencies(dependencies_list)
return if (not @configurator.project_use_deep_dependencies)
if @configurator.project_generate_deep_dependencies
@dependinator.enhance_release_file_dependencies( dependencies_list )
@task_invoker.invoke_release_dependencies_files( dependencies_list )
end
@dependinator.load_release_object_deep_dependencies( dependencies_list )
end
end

View File

@ -1,26 +0,0 @@
##
# Prettifies reports
class Reportinator
##
# Generates a banner for a message based on the length of the message or a
# given width.
# ==== Attributes
#
# * _message_: The message to print.
# * _width_: The width of the message. If nil the size of the banner is
# determined by the length of the message.
#
# ==== Examples
#
# rp = Reportinator.new
# rp.generate_banner("Hello world!") => "------------\nHello world!\n------------\n"
# rp.generate_banner("Hello world!", 3) => "---\nHello world!\n---\n"
#
#
def generate_banner(message, width=nil)
dash_count = ((width.nil?) ? message.strip.length : width)
return "#{'-' * dash_count}\n#{message}\n#{'-' * dash_count}\n"
end
end

View File

@ -1,9 +0,0 @@
rule(/#{CMOCK_MOCK_PREFIX}[^\/\\]+#{'\\'+EXTENSION_SOURCE}$/ => [
proc do |task_name|
@ceedling[:file_finder].find_header_input_for_mock_file(task_name)
end
]) do |mock|
@ceedling[:generator].generate_mock(TEST_SYM, mock.source)
end

View File

@ -1,25 +0,0 @@
# invocations against this rule should only happen when enhanced dependencies are enabled;
# otherwise, dependency tracking will be too shallow and preprocessed files could intermittently
# fail to be updated when they actually need to be.
rule(/#{PROJECT_TEST_PREPROCESS_FILES_PATH}\/.+/ => [
proc do |task_name|
@ceedling[:file_finder].find_test_or_source_or_header_file(task_name)
end
]) do |file|
if (not @ceedling[:configurator].project_use_deep_dependencies)
raise 'ERROR: Ceedling preprocessing rule invoked though necessary auxiliary dependency support not enabled.'
end
@ceedling[:generator].generate_preprocessed_file(TEST_SYM, file.source)
end
# invocations against this rule can always happen as there are no deeper dependencies to consider
rule(/#{PROJECT_TEST_PREPROCESS_INCLUDES_PATH}\/.+/ => [
proc do |task_name|
@ceedling[:file_finder].find_test_or_source_or_header_file(task_name)
end
]) do |file|
@ceedling[:generator].generate_shallow_includes_list(TEST_SYM, file.source)
end

View File

@ -1,98 +0,0 @@
RELEASE_COMPILE_TASK_ROOT = RELEASE_TASK_ROOT + 'compile:' unless defined?(RELEASE_COMPILE_TASK_ROOT)
RELEASE_ASSEMBLE_TASK_ROOT = RELEASE_TASK_ROOT + 'assemble:' unless defined?(RELEASE_ASSEMBLE_TASK_ROOT)
# If GCC and Releasing a Library, Update Tools to Automatically Have Necessary Tags
if (TOOLS_RELEASE_COMPILER[:executable] == DEFAULT_RELEASE_COMPILER_TOOL[:executable])
if (File.extname(PROJECT_RELEASE_BUILD_TARGET) == '.so')
TOOLS_RELEASE_COMPILER[:arguments] << "-fPIC" unless TOOLS_RELEASE_COMPILER[:arguments].include?("-fPIC")
TOOLS_RELEASE_LINKER[:arguments] << "-shared" unless TOOLS_RELEASE_LINKER[:arguments].include?("-shared")
elsif (File.extname(PROJECT_RELEASE_BUILD_TARGET) == '.a')
TOOLS_RELEASE_COMPILER[:arguments] << "-fPIC" unless TOOLS_RELEASE_COMPILER[:arguments].include?("-fPIC")
TOOLS_RELEASE_LINKER[:executable] = 'ar'
TOOLS_RELEASE_LINKER[:arguments] = ['rcs', '${2}', '${1}'].compact
end
end
if (RELEASE_BUILD_USE_ASSEMBLY)
rule(/#{PROJECT_RELEASE_BUILD_OUTPUT_ASM_PATH}\/#{'.+\\'+EXTENSION_OBJECT}$/ => [
proc do |task_name|
@ceedling[:file_finder].find_assembly_file(task_name)
end
]) do |object|
@ceedling[:generator].generate_object_file(
TOOLS_RELEASE_ASSEMBLER,
OPERATION_ASSEMBLE_SYM,
RELEASE_SYM,
object.source,
object.name )
end
end
rule(/#{PROJECT_RELEASE_BUILD_OUTPUT_C_PATH}\/#{'.+\\'+EXTENSION_OBJECT}$/ => [
proc do |task_name|
@ceedling[:file_finder].find_compilation_input_file(task_name, :error, true)
end
]) do |object|
@ceedling[:generator].generate_object_file(
TOOLS_RELEASE_COMPILER,
OPERATION_COMPILE_SYM,
RELEASE_SYM,
object.source,
object.name,
@ceedling[:file_path_utils].form_release_build_c_list_filepath( object.name ),
@ceedling[:file_path_utils].form_release_dependencies_filepath( object.name ) )
end
rule(/#{PROJECT_RELEASE_BUILD_TARGET}/) do |bin_file|
objects, libraries = @ceedling[:release_invoker].sort_objects_and_libraries(bin_file.prerequisites)
tool = TOOLS_RELEASE_LINKER.clone
lib_args = @ceedling[:release_invoker].convert_libraries_to_arguments(libraries)
lib_paths = @ceedling[:release_invoker].get_library_paths_to_arguments()
map_file = @ceedling[:configurator].project_release_build_map
@ceedling[:generator].generate_executable_file(
tool,
RELEASE_SYM,
objects,
bin_file.name,
map_file,
lib_args,
lib_paths )
@ceedling[:release_invoker].artifactinate( bin_file.name, map_file, @ceedling[:configurator].release_build_artifacts )
end
namespace RELEASE_SYM do
# use rules to increase efficiency for large projects (instead of iterating through all sources and creating defined tasks)
namespace :compile do
rule(/^#{RELEASE_COMPILE_TASK_ROOT}\S+#{'\\'+EXTENSION_SOURCE}$/ => [ # compile task names by regex
proc do |task_name|
source = task_name.sub(/#{RELEASE_COMPILE_TASK_ROOT}/, '')
@ceedling[:file_finder].find_source_file(source, :error)
end
]) do |compile|
@ceedling[:rake_wrapper][:directories].invoke
@ceedling[:project_config_manager].process_release_config_change
@ceedling[:release_invoker].setup_and_invoke_c_objects( [compile.source] )
end
end
if (RELEASE_BUILD_USE_ASSEMBLY)
namespace :assemble do
rule(/^#{RELEASE_ASSEMBLE_TASK_ROOT}\S+#{'\\'+EXTENSION_ASSEMBLY}$/ => [ # assemble task names by regex
proc do |task_name|
source = task_name.sub(/#{RELEASE_ASSEMBLE_TASK_ROOT}/, '')
@ceedling[:file_finder].find_assembly_file(source)
end
]) do |assemble|
@ceedling[:rake_wrapper][:directories].invoke
@ceedling[:project_config_manager].process_release_config_change
@ceedling[:release_invoker].setup_and_invoke_asm_objects( [assemble.source] )
end
end
end
end

View File

@ -1,14 +0,0 @@
rule(/#{PROJECT_RELEASE_DEPENDENCIES_PATH}\/#{'.+\\'+EXTENSION_DEPENDENCIES}$/ => [
proc do |task_name|
@ceedling[:file_finder].find_compilation_input_file(task_name, :error, true)
end
]) do |dep|
@ceedling[:generator].generate_dependencies_file(
TOOLS_RELEASE_DEPENDENCIES_GENERATOR,
RELEASE_SYM,
dep.source,
@ceedling[:file_path_utils].form_release_build_c_object_filepath(dep.source),
dep.name)
end

View File

@ -1,72 +0,0 @@
rule(/#{PROJECT_TEST_FILE_PREFIX}#{'.+'+TEST_RUNNER_FILE_SUFFIX}#{'\\'+EXTENSION_SOURCE}$/ => [
proc do |task_name|
@ceedling[:file_finder].find_test_input_for_runner_file(task_name)
end
]) do |runner|
@ceedling[:generator].generate_test_runner(TEST_SYM, runner.source, runner.name)
end
rule(/#{PROJECT_TEST_BUILD_OUTPUT_C_PATH}\/#{'.+\\'+EXTENSION_OBJECT}$/ => [
proc do |task_name|
@ceedling[:file_finder].find_compilation_input_file(task_name)
end
]) do |object|
if (File.basename(object.source) =~ /#{EXTENSION_SOURCE}$/)
@ceedling[:generator].generate_object_file(
TOOLS_TEST_COMPILER,
OPERATION_COMPILE_SYM,
TEST_SYM,
object.source,
object.name,
@ceedling[:file_path_utils].form_test_build_list_filepath( object.name ),
@ceedling[:file_path_utils].form_test_dependencies_filepath( object.name ))
elsif (defined?(TEST_BUILD_USE_ASSEMBLY) && TEST_BUILD_USE_ASSEMBLY)
@ceedling[:generator].generate_object_file(
TOOLS_TEST_ASSEMBLER,
OPERATION_ASSEMBLE_SYM,
TEST_SYM,
object.source,
object.name )
end
end
rule(/#{PROJECT_TEST_BUILD_OUTPUT_PATH}\/#{'.+\\'+EXTENSION_EXECUTABLE}$/) do |bin_file|
lib_args = @ceedling[:test_invoker].convert_libraries_to_arguments()
lib_paths = @ceedling[:test_invoker].get_library_paths_to_arguments()
@ceedling[:generator].generate_executable_file(
TOOLS_TEST_LINKER,
TEST_SYM,
bin_file.prerequisites,
bin_file.name,
@ceedling[:file_path_utils].form_test_build_map_filepath( bin_file.name ),
lib_args,
lib_paths )
end
rule(/#{PROJECT_TEST_RESULTS_PATH}\/#{'.+\\'+EXTENSION_TESTPASS}$/ => [
proc do |task_name|
@ceedling[:file_path_utils].form_test_executable_filepath(task_name)
end
]) do |test_result|
@ceedling[:generator].generate_test_results(TOOLS_TEST_FIXTURE, TEST_SYM, test_result.source, test_result.name)
end
namespace TEST_SYM do
# use rules to increase efficiency for large projects (instead of iterating through all sources and creating defined tasks)
rule(/^#{TEST_TASK_ROOT}\S+$/ => [ # test task names by regex
proc do |task_name|
test = task_name.sub(/#{TEST_TASK_ROOT}/, '')
test = "#{PROJECT_TEST_FILE_PREFIX}#{test}" if not (test.start_with?(PROJECT_TEST_FILE_PREFIX))
@ceedling[:file_finder].find_test_from_file_path(test)
end
]) do |test|
@ceedling[:rake_wrapper][:test_deps].invoke
@ceedling[:test_invoker].setup_and_invoke([test.source])
end
end

View File

@ -1,14 +0,0 @@
rule(/#{PROJECT_TEST_DEPENDENCIES_PATH}\/#{'.+\\'+EXTENSION_DEPENDENCIES}$/ => [
proc do |task_name|
@ceedling[:file_finder].find_compilation_input_file(task_name)
end
]) do |dep|
@ceedling[:generator].generate_dependencies_file(
TOOLS_TEST_DEPENDENCIES_GENERATOR,
TEST_SYM,
dep.source,
@ceedling[:file_path_utils].form_test_build_c_object_filepath(dep.source),
dep.name)
end

View File

@ -1,53 +0,0 @@
class Setupinator
attr_reader :config_hash
attr_writer :ceedling
def setup
@ceedling = {}
@config_hash = {}
end
def load_project_files
@ceedling[:project_file_loader].find_project_files
return @ceedling[:project_file_loader].load_project_config
end
def do_setup(config_hash)
@config_hash = config_hash
# load up all the constants and accessors our rake files, objects, & external scripts will need;
# note: configurator modifies the cmock section of the hash with a couple of defaults to tie
# the project together - the modified hash is used to build the CMock object
@ceedling[:configurator].populate_defaults( config_hash )
@ceedling[:configurator].populate_unity_defaults( config_hash )
@ceedling[:configurator].populate_cmock_defaults( config_hash )
@ceedling[:configurator].find_and_merge_plugins( config_hash )
@ceedling[:configurator].merge_imports( config_hash )
@ceedling[:configurator].eval_environment_variables( config_hash )
@ceedling[:configurator].tools_setup( config_hash )
@ceedling[:configurator].eval_paths( config_hash )
@ceedling[:configurator].standardize_paths( config_hash )
@ceedling[:configurator].validate( config_hash )
@ceedling[:configurator].build( config_hash, :environment )
@ceedling[:configurator].insert_rake_plugins( @ceedling[:configurator].rake_plugins )
@ceedling[:configurator].tools_supplement_arguments( config_hash )
# merge in any environment variables plugins specify, after the main build
@ceedling[:plugin_manager].load_plugin_scripts( @ceedling[:configurator].script_plugins, @ceedling ) do |env|
@ceedling[:configurator].eval_environment_variables( env )
@ceedling[:configurator].build_supplement( config_hash, env )
end
@ceedling[:plugin_reportinator].set_system_objects( @ceedling )
@ceedling[:file_finder].prepare_search_sources
@ceedling[:loginator].setup_log_filepath
@ceedling[:project_config_manager].config_hash = config_hash
end
def reset_defaults(config_hash)
@ceedling[:configurator].reset_defaults( config_hash )
end
end

View File

@ -1,28 +0,0 @@
class StreamWrapper
def stdout_override(&fnc)
@stdout_overide_fnc = fnc
end
def stdout_puts(string)
if @stdout_overide_fnc
@stdout_overide_fnc.call(string)
else
$stdout.puts(string)
end
end
def stdout_flush
$stdout.flush
end
def stderr_puts(string)
$stderr.puts(string)
end
def stderr_flush
$stderr.flush
end
end
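A small usage sketch of the override hook above, assuming the class has been loaded; a plugin or test can redirect stdout_puts into a block instead of $stdout.
# Sketch: capture output through the override instead of writing to $stdout.
captured = []
stream = StreamWrapper.new
stream.stdout_override { |line| captured << line }
stream.stdout_puts('hello from ceedling')
puts captured.inspect   # => ["hello from ceedling"]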

View File

@ -1,40 +0,0 @@
require 'ceedling/constants'
class Streaminator
constructor :streaminator_helper, :verbosinator, :loginator, :stream_wrapper
# for those objects for which the configurator has already been instantiated,
# Streaminator is a convenience object for handling verbosity and writing to the std streams
def stdout_puts(string, verbosity=Verbosity::NORMAL)
if (@verbosinator.should_output?(verbosity))
@stream_wrapper.stdout_puts(string)
@stream_wrapper.stdout_flush
end
# write to log as though Verbosity::OBNOXIOUS
@loginator.log( string, @streaminator_helper.extract_name($stdout) )
end
def stderr_puts(string, verbosity=Verbosity::NORMAL)
if (@verbosinator.should_output?(verbosity))
@stream_wrapper.stderr_puts(string)
@stream_wrapper.stderr_flush
end
# write to log as though Verbosity::OBNOXIOUS
@loginator.log( string, @streaminator_helper.extract_name($stderr) )
end
def stream_puts(stream, string, verbosity=Verbosity::NORMAL)
if (@verbosinator.should_output?(verbosity))
stream.puts(string)
stream.flush
end
# write to log as though Verbosity::OBNOXIOUS
@loginator.log( string, @streaminator_helper.extract_name(stream) )
end
end

View File

@ -1,15 +0,0 @@
class StreaminatorHelper
def extract_name(stream)
name = case (stream.fileno)
when 0 then '#<IO:$stdin>'
when 1 then '#<IO:$stdout>'
when 2 then '#<IO:$stderr>'
else stream.inspect
end
return name
end
end

View File

@ -1,37 +0,0 @@
class Object
def deep_clone
Marshal::load(Marshal.dump(self))
end
end
##
# Class containing system utility functions.
class SystemUtils
constructor :system_wrapper
##
# Sets up the class.
def setup
@tcsh_shell = nil
end
##
# Checks the system shell to see if it a tcsh shell.
def tcsh_shell?
# once run a single time, return state determined at that execution
return @tcsh_shell if not @tcsh_shell.nil?
result = @system_wrapper.shell_backticks('echo $version')
if ((result[:exit_code] == 0) and (result[:output].strip =~ /^tcsh/))
@tcsh_shell = true
else
@tcsh_shell = false
end
return @tcsh_shell
end
end

View File

@ -1,80 +0,0 @@
require 'rbconfig'
class SystemWrapper
# static method for use in defaults
def self.windows?
return ((RbConfig::CONFIG['host_os'] =~ /mswin|mingw/) ? true : false) if defined?(RbConfig)
return ((Config::CONFIG['host_os'] =~ /mswin|mingw/) ? true : false)
end
# instance method wrapping the class method so it can be mocked in tests
def windows?
return SystemWrapper.windows?
end
def module_eval(string)
return Object.module_eval("\"" + string + "\"")
end
def eval(string)
return eval(string)
end
def search_paths
return ENV['PATH'].split(File::PATH_SEPARATOR)
end
def cmdline_args
return ARGV
end
def env_set(name, value)
ENV[name] = value
end
def env_get(name)
return ENV[name]
end
def time_now
return Time.now.asctime
end
def shell_backticks(command, boom = true)
retval = `#{command}`.freeze
$exit_code = ($?.exitstatus).freeze if boom
return {
:output => retval.freeze,
:exit_code => ($?.exitstatus).freeze
}
end
def shell_system(command, boom = true)
system( command )
$exit_code = ($?.exitstatus).freeze if boom
return {
:output => "".freeze,
:exit_code => ($?.exitstatus).freeze
}
end
def add_load_path(path)
$LOAD_PATH.unshift(path)
end
def require_file(path)
require(path)
end
def ruby_success
# We are successful if we've never had an exit code that went boom (either because it's empty or it was 0)
return ($exit_code.nil? || ($exit_code == 0)) && ($!.nil? || $!.is_a?(SystemExit) && $!.success?)
end
def constants_include?(item)
# forcing to strings provides consistency across Ruby versions
return Object.constants.map{|constant| constant.to_s}.include?(item.to_s)
end
end
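A usage sketch of the shell helpers above, assuming the class has been loaded; both return a hash with :output and :exit_code, and record $exit_code when boom is true.
# Sketch: shell out and inspect the result hash returned by shell_backticks.
sys    = SystemWrapper.new
result = sys.shell_backticks('echo hello')
puts result[:output]      # => hello
puts result[:exit_code]   # => 0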

View File

@ -1,38 +0,0 @@
module TargetLoader
class NoTargets < Exception; end
class NoDirectory < Exception; end
class NoDefault < Exception; end
class NoSuchTarget < Exception; end
class RequestReload < Exception; end
def self.inspect(config, target_name=nil)
unless config[:targets]
raise NoTargets
end
targets = config[:targets]
unless targets[:targets_directory]
raise NoDirectory.new("No targets directory specified.")
end
unless targets[:default_target]
raise NoDefault.new("No default target specified.")
end
target_path = lambda {|name| File.join(targets[:targets_directory], name + ".yml")}
target = if target_name
target_path.call(target_name)
else
target_path.call(targets[:default_target])
end
unless File.exists? target
raise NoSuchTarget.new("No such target: #{target}")
end
ENV['CEEDLING_MAIN_PROJECT_FILE'] = target
raise RequestReload
end
end
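To make the lookup above concrete, a hedged sketch of the :targets data it expects and the target file path it derives; the directory and target names are hypothetical.
# Sketch: how a :targets block maps to the target project file that gets loaded.
config = {
  :targets => {
    :targets_directory => 'targets',   # hypothetical directory of per-target .yml files
    :default_target    => 'host'       # hypothetical default target name
  }
}

targets = config[:targets]
name    = ENV['TARGET'] || targets[:default_target]
# TargetLoader.inspect points CEEDLING_MAIN_PROJECT_FILE at this path and raises RequestReload.
puts File.join(targets[:targets_directory], name + '.yml')   # => targets/host.yml (when TARGET is unset)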

View File

@ -1,122 +0,0 @@
require 'ceedling/par_map'
class TaskInvoker
attr_accessor :first_run
constructor :dependinator, :rake_utils, :rake_wrapper, :project_config_manager
def setup
@test_regexs = [/^#{TEST_ROOT_NAME}:/]
@release_regexs = [/^#{RELEASE_ROOT_NAME}(:|$)/]
@first_run = true
end
def add_test_task_regex(regex)
@test_regexs << regex
end
def add_release_task_regex(regex)
@release_regexs << regex
end
def test_invoked?
invoked = false
@test_regexs.each do |regex|
invoked = true if (@rake_utils.task_invoked?(regex))
break if invoked
end
return invoked
end
def release_invoked?
invoked = false
@release_regexs.each do |regex|
invoked = true if (@rake_utils.task_invoked?(regex))
break if invoked
end
return invoked
end
def invoked?(regex)
return @rake_utils.task_invoked?(regex)
end
def reset_rake_task_for_changed_defines(file)
if !(file =~ /#{VENDORS_FILES.map{|ignore| '\b' + ignore.ext(File.extname(file)) + '\b'}.join('|')}$/)
@rake_wrapper[file].clear_actions if @first_run == false && @project_config_manager.test_defines_changed
@rake_wrapper[file].reenable if @first_run == false && @project_config_manager.test_defines_changed
end
end
def invoke_test_mocks(mocks)
@dependinator.enhance_mock_dependencies( mocks )
mocks.each { |mock|
reset_rake_task_for_changed_defines( mock )
@rake_wrapper[mock].invoke
}
end
def invoke_test_runner(runner)
@dependinator.enhance_runner_dependencies( runner )
reset_rake_task_for_changed_defines( runner )
@rake_wrapper[runner].invoke
end
def invoke_test_shallow_include_lists(files)
@dependinator.enhance_shallow_include_lists_dependencies( files )
par_map(PROJECT_COMPILE_THREADS, files) do |file|
reset_rake_task_for_changed_defines( file )
@rake_wrapper[file].invoke
end
end
def invoke_test_preprocessed_files(files)
@dependinator.enhance_preprocesed_file_dependencies( files )
par_map(PROJECT_COMPILE_THREADS, files) do |file|
reset_rake_task_for_changed_defines( file )
@rake_wrapper[file].invoke
end
end
def invoke_test_dependencies_files(files)
@dependinator.enhance_dependencies_dependencies( files )
par_map(PROJECT_COMPILE_THREADS, files) do |file|
reset_rake_task_for_changed_defines( file )
@rake_wrapper[file].invoke
end
end
def invoke_test_objects(objects)
par_map(PROJECT_COMPILE_THREADS, objects) do |object|
reset_rake_task_for_changed_defines( object )
@rake_wrapper[object].invoke
end
end
def invoke_test_executable(file)
@rake_wrapper[file].invoke
end
def invoke_test_results(result)
@dependinator.enhance_results_dependencies( result )
@rake_wrapper[result].invoke
end
def invoke_release_dependencies_files(files)
par_map(PROJECT_COMPILE_THREADS, files) do |file|
@rake_wrapper[file].invoke
end
end
def invoke_release_objects(objects)
par_map(PROJECT_COMPILE_THREADS, objects) do |object|
@rake_wrapper[object].invoke
end
end
end
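The par_map helper required at the top of this file is what fans these rake invocations out across PROJECT_COMPILE_THREADS; a minimal thread-based sketch of that shape (not the actual implementation) might look like this.
# Minimal sketch of a par_map-style helper: run the block over the collection
# with at most n_threads items in flight, the way object builds above are parallelized.
def par_map_sketch(n_threads, items, &block)
  items.each_slice(n_threads) do |slice|
    slice.map { |item| Thread.new(item, &block) }.each(&:join)
  end
end

par_map_sketch(4, %w[a.o b.o c.o]) { |obj| puts "building #{obj}" }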

View File

@ -1,115 +0,0 @@
require 'ceedling/constants'
require 'ceedling/file_path_utils'
require 'ceedling/version'
desc "Display build environment version info."
task :version do
puts " Ceedling:: #{Ceedling::Version::CEEDLING}"
puts " Unity:: #{Ceedling::Version::UNITY}"
puts " CMock:: #{Ceedling::Version::CMOCK}"
puts " CException:: #{Ceedling::Version::CEXCEPTION}"
end
desc "Set verbose output (silent:[#{Verbosity::SILENT}] - obnoxious:[#{Verbosity::OBNOXIOUS}])."
task :verbosity, :level do |t, args|
verbosity_level = args.level.to_i
if (PROJECT_USE_MOCKS)
# don't store verbosity level in setupinator's config hash, use a copy;
# otherwise, the input configuration will change and trigger entire project rebuilds
hash = @ceedling[:setupinator].config_hash[:cmock].clone
hash[:verbosity] = verbosity_level
@ceedling[:cmock_builder].manufacture( hash )
end
@ceedling[:configurator].project_verbosity = verbosity_level
# control rake's verbosity with new setting
verbose( ((verbosity_level >= Verbosity::OBNOXIOUS) ? true : false) )
end
desc "Enable logging"
task :logging do
@ceedling[:configurator].project_logging = true
end
# non advertised debug task
task :debug do
Rake::Task[:verbosity].invoke(Verbosity::DEBUG)
Rake.application.options.trace = true
@ceedling[:configurator].project_debug = true
end
# non advertised sanity checking task
task :sanity_checks, :level do |t, args|
check_level = args.level.to_i
@ceedling[:configurator].sanity_checks = check_level
end
# non advertised catch for calling upgrade in the wrong place
task :upgrade do
puts "WARNING: You're currently IN your project directory. Take a step out and try"
puts "again if you'd like to perform an upgrade."
end
# list expanded environment variables
if (not ENVIRONMENT.empty?)
desc "List all configured environment variables."
task :environment do
env_list = []
ENVIRONMENT.each do |env|
env.each_key do |key|
name = key.to_s.upcase
env_list.push(" - #{name}: \"#{env[key]}\"")
end
end
env_list.sort.each do |env_line|
puts env_line
end
end
end
namespace :options do
COLLECTION_PROJECT_OPTIONS.each do |option_path|
option = File.basename(option_path, '.yml')
desc "Merge #{option} project options."
task option.to_sym do
hash = @ceedling[:project_config_manager].merge_options( @ceedling[:setupinator].config_hash, option_path )
@ceedling[:setupinator].do_setup( hash )
if @ceedling[:configurator].project_release_build
load(File.join(CEEDLING_LIB, 'ceedling', 'rules_release.rake'))
end
end
end
# Give a helpful error when a nonexistent option name is typed
rule /^options:.*/ do |t, args|
filename = t.to_s.split(':')[-1] + '.yml'
filelist = COLLECTION_PROJECT_OPTIONS.map{|s| File.basename(s) }
@ceedling[:file_finder].find_file_from_list(filename, filelist, :error)
end
# This will output the fully-merged tools options to their own project.yml file
desc "Export tools options to a new project file"
task :export, :filename do |t, args|
outfile = args.filename || 'tools.yml'
toolcfg = {}
@ceedling[:configurator].project_config_hash.each_pair do |k,v|
toolcfg[k] = v if (k.to_s[0..5] == 'tools_')
end
File.open(outfile,'w') {|f| f << toolcfg.to_yaml({:indentation => 2})}
end
end
# do not present the task if there are no plugins
if (not PLUGINS_ENABLED.empty?)
desc "Execute plugin result summaries (no build triggering)."
task :summary do
@ceedling[:plugin_manager].summary
puts "\nNOTE: Summaries may be out of date with project sources.\n\n"
end
end

View File

@ -1,111 +0,0 @@
# rather than require 'rake/clean' & try to override, we replicate for finer control
CLEAN = Rake::FileList["**/*~", "**/*.bak"]
CLOBBER = Rake::FileList.new
CLEAN.clear_exclude.exclude { |fn| fn.pathmap("%f") == 'core' && File.directory?(fn) }
CLEAN.include(File.join(PROJECT_TEST_BUILD_OUTPUT_PATH, '*'))
CLEAN.include(File.join(PROJECT_TEST_RESULTS_PATH, '*'))
CLEAN.include(File.join(PROJECT_TEST_DEPENDENCIES_PATH, '*'))
CLEAN.include(File.join(PROJECT_BUILD_RELEASE_ROOT, '*.*'))
CLEAN.include(File.join(PROJECT_RELEASE_BUILD_OUTPUT_PATH, '*'))
CLEAN.include(File.join(PROJECT_RELEASE_DEPENDENCIES_PATH, '*'))
CLOBBER.include(File.join(PROJECT_BUILD_ARTIFACTS_ROOT, '**/*'))
CLOBBER.include(File.join(PROJECT_BUILD_TESTS_ROOT, '**/*'))
CLOBBER.include(File.join(PROJECT_BUILD_RELEASE_ROOT, '**/*'))
CLOBBER.include(File.join(PROJECT_LOG_PATH, '**/*'))
CLOBBER.include(File.join(PROJECT_TEMP_PATH, '**/*'))
# just in case they're using git, make sure we allow them to preserve the build directory if desired.
CLOBBER.exclude(File.join(TESTS_BASE_PATH), '**/.gitkeep')
# because of cmock config, mock path can optionally exist apart from standard test build paths
CLOBBER.include(File.join(CMOCK_MOCK_PATH, '*'))
REMOVE_FILE_PROC = Proc.new { |fn| rm_r fn rescue nil }
# redefine clean so we can override how it advertises itself
desc "Delete all build artifacts and temporary products."
task(:clean) do
# because :clean is a prerequisite for :clobber, intelligently display the progress message
if (not @ceedling[:task_invoker].invoked?(/^clobber$/))
@ceedling[:streaminator].stdout_puts("\nCleaning build artifacts...\n(For large projects, this task may take a long time to complete)\n\n")
end
begin
CLEAN.each { |fn| REMOVE_FILE_PROC.call(fn) }
rescue
end
end
# redefine clobber so we can override how it advertises itself
desc "Delete all generated files (and build artifacts)."
task(:clobber => [:clean]) do
@ceedling[:streaminator].stdout_puts("\nClobbering all generated files...\n(For large projects, this task may take a long time to complete)\n\n")
begin
CLOBBER.each { |fn| REMOVE_FILE_PROC.call(fn) }
@ceedling[:rake_wrapper][:directories].invoke
@ceedling[:dependinator].touch_force_rebuild_files
rescue
end
end
# create a directory task for each of the paths, so we know how to build them
PROJECT_BUILD_PATHS.each { |path| directory(path) }
# create a single directory task which verifies all the others get built
task :directories => PROJECT_BUILD_PATHS
# when the force file doesn't exist, it probably means we clobbered or are on a fresh
# install. In either case, stuff was deleted, so assume we want to rebuild it all
file @ceedling[:configurator].project_test_force_rebuild_filepath do
unless File.exists?(@ceedling[:configurator].project_test_force_rebuild_filepath)
@ceedling[:dependinator].touch_force_rebuild_files
end
end
# list paths discovered at load time
namespace :paths do
standard_paths = ['test','source','include']
paths = @ceedling[:setupinator].config_hash[:paths].keys.map{|n| n.to_s.downcase}
paths = (paths + standard_paths).uniq
paths.each do |name|
path_list = Object.const_get("COLLECTION_PATHS_#{name.upcase}")
if (path_list.size != 0) || (standard_paths.include?(name))
desc "List all collected #{name} paths."
task(name.to_sym) { puts "#{name} paths:"; path_list.sort.each {|path| puts " - #{path}" } }
end
end
end
# list files & file counts discovered at load time
namespace :files do
categories = [
['test', COLLECTION_ALL_TESTS],
['source', COLLECTION_ALL_SOURCE],
['include', COLLECTION_ALL_HEADERS],
['support', COLLECTION_ALL_SUPPORT]
]
using_assembly = (defined?(TEST_BUILD_USE_ASSEMBLY) && TEST_BUILD_USE_ASSEMBLY) ||
(defined?(RELEASE_BUILD_USE_ASSEMBLY) && RELEASE_BUILD_USE_ASSEMBLY)
categories << ['assembly', COLLECTION_ALL_ASSEMBLY] if using_assembly
categories.each do |category|
name = category[0]
collection = category[1]
desc "List all collected #{name} files."
task(name.to_sym) do
puts "#{name} files:"
collection.sort.each { |filepath| puts " - #{filepath}" }
puts "file count: #{collection.size}"
end
end
end

View File

@ -1,29 +0,0 @@
require 'ceedling/constants'
require 'ceedling/file_path_utils'
desc "Build release target."
task RELEASE_SYM => [:directories] do
header = "Release build '#{File.basename(PROJECT_RELEASE_BUILD_TARGET)}'"
@ceedling[:streaminator].stdout_puts("\n\n#{header}\n#{'-' * header.length}")
begin
@ceedling[:plugin_manager].pre_release
core_objects = []
extra_objects = @ceedling[:file_path_utils].form_release_build_c_objects_filelist( COLLECTION_RELEASE_ARTIFACT_EXTRA_LINK_OBJECTS )
@ceedling[:project_config_manager].process_release_config_change
core_objects.concat( @ceedling[:release_invoker].setup_and_invoke_c_objects( COLLECTION_ALL_SOURCE ) )
# if assembler use isn't enabled, COLLECTION_ALL_ASSEMBLY is empty array & nothing happens
core_objects.concat( @ceedling[:release_invoker].setup_and_invoke_asm_objects( COLLECTION_ALL_ASSEMBLY ) )
# if we're using libraries, we need to add those to our collection as well
library_objects = (defined? LIBRARIES_RELEASE && !LIBRARIES_RELEASE.empty?) ? LIBRARIES_RELEASE.flatten.compact : []
file( PROJECT_RELEASE_BUILD_TARGET => (core_objects + extra_objects + library_objects) )
Rake::Task[PROJECT_RELEASE_BUILD_TARGET].invoke
ensure
@ceedling[:plugin_manager].post_release
end
end

View File

@ -1,9 +0,0 @@
require 'ceedling/constants'
namespace REFRESH_SYM do
task RELEASE_SYM do
@ceedling[:release_invoker].refresh_c_deep_dependencies
end
end

View File

@ -1,61 +0,0 @@
require 'ceedling/constants'
task :test_deps => [:directories]
task :test => [:test_deps] do
Rake.application['test:all'].invoke
end
namespace TEST_SYM do
desc "Run all unit tests (also just 'test' works)."
task :all => [:test_deps] do
@ceedling[:test_invoker].setup_and_invoke(COLLECTION_ALL_TESTS)
end
desc "Run single test ([*] real test or source file name, no path)."
task :* do
message = "\nOops! '#{TEST_ROOT_NAME}:*' isn't a real task. " +
"Use a real test or source file name (no path) in place of the wildcard.\n" +
"Example: rake #{TEST_ROOT_NAME}:foo.c\n\n"
@ceedling[:streaminator].stdout_puts( message )
end
desc "Run tests for changed files."
task :delta => [:test_deps] do
@ceedling[:test_invoker].setup_and_invoke(COLLECTION_ALL_TESTS, TEST_SYM, {:force_run => false})
end
desc "Just build tests without running."
task :build_only => [:test_deps] do
@ceedling[:test_invoker].setup_and_invoke(COLLECTION_ALL_TESTS, TEST_SYM, {:build_only => true})
end
desc "Run tests by matching regular expression pattern."
task :pattern, [:regex] => [:test_deps] do |t, args|
matches = []
COLLECTION_ALL_TESTS.each { |test| matches << test if (test =~ /#{args.regex}/) }
if (matches.size > 0)
@ceedling[:test_invoker].setup_and_invoke(matches, TEST_SYM, {:force_run => false})
else
@ceedling[:streaminator].stdout_puts("\nFound no tests matching pattern /#{args.regex}/.")
end
end
desc "Run tests whose test path contains [dir] or [dir] substring."
task :path, [:dir] => [:test_deps] do |t, args|
matches = []
COLLECTION_ALL_TESTS.each { |test| matches << test if File.dirname(test).include?(args.dir.gsub(/\\/, '/')) }
if (matches.size > 0)
@ceedling[:test_invoker].setup_and_invoke(matches, TEST_SYM, {:force_run => false})
else
@ceedling[:streaminator].stdout_puts("\nFound no tests including the given path or path component.")
end
end
end

View File

@ -1,9 +0,0 @@
require 'ceedling/constants'
namespace REFRESH_SYM do
task TEST_SYM do
@ceedling[:test_invoker].refresh_deep_dependencies
end
end

View File

@ -1,35 +0,0 @@
require 'ceedling/constants'
require 'ceedling/file_path_utils'
# create file dependencies to ensure C-based components of vendor tools are recompiled when they are updated with new versions
# forming these explicitly rather than depending on auxiliary dependencies so all scenarios are covered
file( @ceedling[:file_path_utils].form_test_build_c_object_filepath( UNITY_C_FILE ) => [
File.join( UNITY_VENDOR_PATH, UNITY_LIB_PATH, UNITY_C_FILE ),
File.join( UNITY_VENDOR_PATH, UNITY_LIB_PATH, UNITY_H_FILE ),
File.join( UNITY_VENDOR_PATH, UNITY_LIB_PATH, UNITY_INTERNALS_H_FILE ) ]
)
if (PROJECT_USE_MOCKS)
file( @ceedling[:file_path_utils].form_test_build_c_object_filepath( CMOCK_C_FILE ) => [
File.join( CMOCK_VENDOR_PATH, CMOCK_LIB_PATH, CMOCK_C_FILE ),
File.join( CMOCK_VENDOR_PATH, CMOCK_LIB_PATH, CMOCK_H_FILE ) ]
)
end
if (PROJECT_USE_EXCEPTIONS)
file( @ceedling[:file_path_utils].form_test_build_c_object_filepath( CEXCEPTION_C_FILE ) => [
File.join( CEXCEPTION_VENDOR_PATH, CEXCEPTION_LIB_PATH, CEXCEPTION_C_FILE ),
File.join( CEXCEPTION_VENDOR_PATH, CEXCEPTION_LIB_PATH, CEXCEPTION_H_FILE ) ]
)
end
if (PROJECT_USE_EXCEPTIONS and PROJECT_RELEASE_BUILD)
file( @ceedling[:file_path_utils].form_release_build_c_object_filepath( CEXCEPTION_C_FILE ) => [
File.join( CEXCEPTION_VENDOR_PATH, CEXCEPTION_LIB_PATH, CEXCEPTION_C_FILE ),
File.join( CEXCEPTION_VENDOR_PATH, CEXCEPTION_LIB_PATH, CEXCEPTION_H_FILE ) ]
)
end

View File

@ -1,111 +0,0 @@
class TestIncludesExtractor
constructor :configurator, :yaml_wrapper, :file_wrapper
def setup
@includes = {}
@mocks = {}
end
# for includes_list file, slurp up array from yaml file and sort & store includes
def parse_includes_list(includes_list)
gather_and_store_includes( includes_list, @yaml_wrapper.load(includes_list) )
end
# open, scan for, and sort & store includes of test file
def parse_test_file(test)
gather_and_store_includes( test, extract_from_file(test) )
end
# open, scan for, and return the source-file includes of a test file
def parse_test_file_source_include(test)
return extract_source_include_from_file(test)
end
# mocks with no file extension
def lookup_raw_mock_list(test)
file_key = form_file_key(test)
return [] if @mocks[file_key].nil?
return @mocks[file_key]
end
# includes with file extension
def lookup_includes_list(file)
file_key = form_file_key(file)
return [] if (@includes[file_key]).nil?
return @includes[file_key]
end
private #################################
def form_file_key(filepath)
return File.basename(filepath).to_sym
end
def extract_from_file(file)
includes = []
header_extension = @configurator.extension_header
contents = @file_wrapper.read(file)
# remove line comments
contents = contents.gsub(/\/\/.*$/, '')
# remove block comments
contents = contents.gsub(/\/\*.*?\*\//m, '')
contents.split("\n").each do |line|
# look for include statement
scan_results = line.scan(/#include\s+\"\s*(.+#{'\\'+header_extension})\s*\"/)
includes << scan_results[0][0] if (scan_results.size > 0)
# look for TEST_FILE statement
scan_results = line.scan(/TEST_FILE\(\s*\"\s*(.+\.\w+)\s*\"\s*\)/)
includes << scan_results[0][0] if (scan_results.size > 0)
end
return includes.uniq
end
def extract_source_include_from_file(file)
source_includes = []
source_extension = @configurator.extension_source
contents = @file_wrapper.read(file)
# remove line comments
contents = contents.gsub(/\/\/.*$/, '')
# remove block comments
contents = contents.gsub(/\/\*.*?\*\//m, '')
contents.split("\n").each do |line|
# look for include statement
scan_results = line.scan(/#include\s+\"\s*(.+#{'\\'+source_extension})\s*\"/)
source_includes << scan_results[0][0] if (scan_results.size > 0)
end
return source_includes.uniq
end
def gather_and_store_includes(file, includes)
mock_prefix = @configurator.cmock_mock_prefix
header_extension = @configurator.extension_header
file_key = form_file_key(file)
@mocks[file_key] = []
# add includes to lookup hash
@includes[file_key] = includes
includes.each do |include_file|
# check if include is a mock
scan_results = include_file.scan(/(#{mock_prefix}.+)#{'\\'+header_extension}/)
# add mock to lookup hash
@mocks[file_key] << scan_results[0][0] if (scan_results.size > 0)
end
end
end
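A rough sketch of what the scans above extract from a test file, with the usual defaults (mock_ prefix, .h headers) inlined; the test source itself is hypothetical.
# Sketch: the #include / TEST_FILE / mock extraction performed above, defaults inlined.
contents = <<~TEST_SRC
  #include "unity.h"
  #include "mock_spi.h"
  #include "widget.h"
  TEST_FILE("widget_helper.c")
TEST_SRC

includes = []
contents.split("\n").each do |line|
  includes << $1 if line =~ /#include\s+"\s*(.+\.h)\s*"/
  includes << $1 if line =~ /TEST_FILE\(\s*"\s*(.+\.\w+)\s*"\s*\)/
end
mocks = includes.map { |inc| inc[/\A(mock_.+)\.h\z/, 1] }.compact

puts includes.inspect   # => ["unity.h", "mock_spi.h", "widget.h", "widget_helper.c"]
puts mocks.inspect      # => ["mock_spi"]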

View File

@ -1,165 +0,0 @@
require 'ceedling/constants'
class TestInvoker
attr_reader :sources, :tests, :mocks
constructor :configurator,
:test_invoker_helper,
:plugin_manager,
:streaminator,
:preprocessinator,
:task_invoker,
:dependinator,
:project_config_manager,
:build_invoker_utils,
:file_path_utils,
:file_wrapper
def setup
@sources = []
@tests = []
@mocks = []
end
# Convert the libraries configuration from YAML
# into arguments that can be passed to the linker.
def convert_libraries_to_arguments()
args = ((@configurator.project_config_hash[:libraries_test] || []) + ((defined? LIBRARIES_SYSTEM) ? LIBRARIES_SYSTEM : [])).flatten
if (defined? LIBRARIES_FLAG)
args.map! {|v| LIBRARIES_FLAG.gsub(/\$\{1\}/, v) }
end
return args
end
def get_library_paths_to_arguments()
paths = (defined? PATHS_LIBRARIES) ? (PATHS_LIBRARIES || []).clone : []
if (defined? LIBRARIES_PATH_FLAG)
paths.map! {|v| LIBRARIES_PATH_FLAG.gsub(/\$\{1\}/, v) }
end
return paths
end
def setup_and_invoke(tests, context=TEST_SYM, options={:force_run => true, :build_only => false})
@tests = tests
@project_config_manager.process_test_config_change
@tests.each do |test|
# announce beginning of test run
header = "Test '#{File.basename(test)}'"
@streaminator.stdout_puts("\n\n#{header}\n#{'-' * header.length}")
begin
@plugin_manager.pre_test( test )
test_name ="#{File.basename(test)}".chomp('.c')
def_test_key="defines_#{test_name.downcase}"
if @configurator.project_config_hash.has_key?(def_test_key.to_sym) || @configurator.defines_use_test_definition
defs_bkp = Array.new(COLLECTION_DEFINES_TEST_AND_VENDOR)
tst_defs_cfg = Array.new(defs_bkp)
if @configurator.project_config_hash.has_key?(def_test_key.to_sym)
tst_defs_cfg.replace(@configurator.project_config_hash[def_test_key.to_sym])
tst_defs_cfg .concat(COLLECTION_DEFINES_VENDOR) if COLLECTION_DEFINES_VENDOR
end
if @configurator.defines_use_test_definition
tst_defs_cfg << File.basename(test, ".*").strip.upcase.sub(/@.*$/, "")
end
COLLECTION_DEFINES_TEST_AND_VENDOR.replace(tst_defs_cfg)
end
# redefine the project out path and preprocessor defines
if @configurator.project_config_hash.has_key?(def_test_key.to_sym)
@streaminator.stdout_puts("Updating test definitions for #{test_name}", Verbosity::NORMAL)
orig_path = @configurator.project_test_build_output_path
@configurator.project_config_hash[:project_test_build_output_path] = File.join(@configurator.project_test_build_output_path, test_name)
@file_wrapper.mkdir(@configurator.project_test_build_output_path)
end
# collect up test fixture pieces & parts
runner = @file_path_utils.form_runner_filepath_from_test( test )
mock_list = @preprocessinator.preprocess_test_and_invoke_test_mocks( test )
sources = @test_invoker_helper.extract_sources( test )
extras = @configurator.collection_test_fixture_extra_link_objects
core = [test] + mock_list + sources
objects = @file_path_utils.form_test_build_objects_filelist( [runner] + core + extras ).uniq
results_pass = @file_path_utils.form_pass_results_filepath( test )
results_fail = @file_path_utils.form_fail_results_filepath( test )
# identify all the objects that shall not be linked and then remove them from the objects list.
no_link_objects = @file_path_utils.form_test_build_objects_filelist(@preprocessinator.preprocess_shallow_source_includes( test ))
objects = objects.uniq - no_link_objects
@project_config_manager.process_test_defines_change(@project_config_manager.filter_internal_sources(sources))
# clean results files so we have a missing file with which to kick off rake's dependency rules
@test_invoker_helper.clean_results( {:pass => results_pass, :fail => results_fail}, options )
# load up auxiliary dependencies so deep changes cause rebuilding appropriately
@test_invoker_helper.process_deep_dependencies( core ) do |dependencies_list|
@dependinator.load_test_object_deep_dependencies( dependencies_list )
end
# tell rake to create test runner if needed
@task_invoker.invoke_test_runner( runner )
# enhance object file dependencies to capture externalities influencing regeneration
@dependinator.enhance_test_build_object_dependencies( objects )
# associate object files with executable
@dependinator.enhance_test_executable_dependencies( test, objects )
# build test objects
@task_invoker.invoke_test_objects( objects )
# if the option build_only has been specified, build only the executable
# but don't run the test
if (options[:build_only])
executable = @file_path_utils.form_test_executable_filepath( test )
@task_invoker.invoke_test_executable( executable )
else
# 3, 2, 1... launch
@task_invoker.invoke_test_results( results_pass )
end
rescue => e
@build_invoker_utils.process_exception( e, context )
ensure
@plugin_manager.post_test( test )
# restore the project test defines
if @configurator.project_config_hash.has_key?(def_test_key.to_sym) || @configurator.defines_use_test_definition
COLLECTION_DEFINES_TEST_AND_VENDOR.replace(defs_bkp)
if @configurator.project_config_hash.has_key?(def_test_key.to_sym)
@configurator.project_config_hash[:project_test_build_output_path] = orig_path
@streaminator.stdout_puts("Restored defines and build path to standard", Verbosity::NORMAL)
end
end
end
# store away what's been processed
@mocks.concat( mock_list )
@sources.concat( sources )
@task_invoker.first_run = false
end
# post-process collected mock list
@mocks.uniq!
# post-process collected sources list
@sources.uniq!
end
def refresh_deep_dependencies
@file_wrapper.rm_f(
@file_wrapper.directory_listing(
File.join( @configurator.project_test_dependencies_path, '*' + @configurator.extension_dependencies ) ) )
@test_invoker_helper.process_deep_dependencies(
@configurator.collection_all_tests + @configurator.collection_all_source )
end
end
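A hedged sketch of the per-test defines lookup above: the key searched in the flattened config hash is derived from the test file's base name, so a hypothetical test/test_widget.c maps to :defines_test_widget.
# Sketch: deriving the per-test defines key the same way the code above does.
test         = 'test/test_widget.c'              # hypothetical test file
test_name    = File.basename(test).chomp('.c')   # => "test_widget"
def_test_key = "defines_#{test_name.downcase}"   # => "defines_test_widget"

# If the flattened project config contains that key, its value temporarily replaces
# the test-and-vendor defines (and the build output path gains a per-test subdirectory).
config  = { :defines_test_widget => ['WIDGET_SIMULATION', 'LOG_LEVEL=2'] }   # hypothetical
defines = config[def_test_key.to_sym] || []
puts defines.inspect   # => ["WIDGET_SIMULATION", "LOG_LEVEL=2"]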

View File

@ -1,32 +0,0 @@
class TestInvokerHelper
constructor :configurator, :task_invoker, :test_includes_extractor, :file_finder, :file_path_utils, :file_wrapper
def clean_results(results, options)
@file_wrapper.rm_f( results[:fail] )
@file_wrapper.rm_f( results[:pass] ) if (options[:force_run])
end
def process_deep_dependencies(files)
return if (not @configurator.project_use_deep_dependencies)
dependencies_list = @file_path_utils.form_test_dependencies_filelist( files ).uniq
if @configurator.project_generate_deep_dependencies
@task_invoker.invoke_test_dependencies_files( dependencies_list )
end
yield( dependencies_list ) if block_given?
end
def extract_sources(test)
sources = []
includes = @test_includes_extractor.lookup_includes_list(test)
includes.each { |include| sources << @file_finder.find_compilation_input_file(include, :ignore) }
return sources.compact
end
end

View File

@ -1,229 +0,0 @@
require 'ceedling/constants'
require 'benchmark'
class ShellExecutionException < RuntimeError
attr_reader :shell_result
def initialize(shell_result)
@shell_result = shell_result
end
end
class ToolExecutor
constructor :configurator, :tool_executor_helper, :streaminator, :system_wrapper
def setup
@tool_name = ''
@executable = ''
end
# build up a command line from yaml provided config
# @param extra_params is an array of parameters to append to executable
def build_command_line(tool_config, extra_params, *args)
@tool_name = tool_config[:name]
@executable = tool_config[:executable]
command = {}
# basic premise is to iterate top to bottom through arguments using '$' as
# a string replacement indicator to expand globals or inline yaml arrays
# into command line arguments via substitution strings
# executable must be quoted if it includes spaces (common on windows)
executable = @tool_executor_helper.osify_path_separators( expandify_element(@executable, *args) )
executable = "\"#{executable}\"" if executable.include?(' ')
command[:line] = [
executable,
extra_params.join(' ').strip,
build_arguments(tool_config[:arguments], *args),
].reject{|s| s.nil? || s.empty?}.join(' ').strip
command[:options] = {
:stderr_redirect => @tool_executor_helper.stderr_redirection(tool_config, @configurator.project_logging),
:background_exec => tool_config[:background_exec]
}
return command
end
# shell out, execute command, and return response
def exec(command, options={}, args=[])
options[:boom] = true if (options[:boom].nil?)
options[:stderr_redirect] = StdErrRedirect::NONE if (options[:stderr_redirect].nil?)
options[:background_exec] = BackgroundExec::NONE if (options[:background_exec].nil?)
# build command line
command_line = [
@tool_executor_helper.background_exec_cmdline_prepend( options ),
command.strip,
args,
@tool_executor_helper.stderr_redirect_cmdline_append( options ),
@tool_executor_helper.background_exec_cmdline_append( options ),
].flatten.compact.join(' ')
@streaminator.stderr_puts("Verbose: #{__method__.to_s}(): #{command_line}", Verbosity::DEBUG)
shell_result = {}
# depending on background exec option, we shell out differently
time = Benchmark.realtime do
if (options[:background_exec] != BackgroundExec::NONE)
shell_result = @system_wrapper.shell_system( command_line, options[:boom] )
else
shell_result = @system_wrapper.shell_backticks( command_line, options[:boom] )
end
end
shell_result[:time] = time
#scrub the string for illegal output
unless shell_result[:output].nil?
shell_result[:output] = shell_result[:output].scrub if "".respond_to?(:scrub)
shell_result[:output].gsub!(/\033\[\d\dm/,'')
end
@tool_executor_helper.print_happy_results( command_line, shell_result, options[:boom] )
@tool_executor_helper.print_error_results( command_line, shell_result, options[:boom] )
# go boom if exit code isn't 0 (but in some cases we don't want a non-0 exit code to raise)
raise ShellExecutionException.new(shell_result) if ((shell_result[:exit_code] != 0) and options[:boom])
return shell_result
end
private #############################
def build_arguments(config, *args)
build_string = ''
return nil if (config.nil?)
# iterate through each argument
# the yaml blob array needs to be flattened so that yaml substitution
# is handled correctly, since it creates a nested array when an anchor is
# dereferenced
config.flatten.each do |element|
argument = ''
case(element)
# if we find a simple string then look for string replacement operators
# and expand with the parameters in this method's argument list
when String then argument = expandify_element(element, *args)
# if we find a hash, then we grab the key as a substitution string and expand the
# hash's value(s) within that substitution string
when Hash then argument = dehashify_argument_elements(element)
end
build_string.concat("#{argument} ") if (argument.length > 0)
end
build_string.strip!
return build_string if (build_string.length > 0)
return nil
end
# handle simple text string argument & argument array string replacement operators
def expandify_element(element, *args)
match = //
to_process = nil
args_index = 0
# handle ${#} input replacement
if (element =~ TOOL_EXECUTOR_ARGUMENT_REPLACEMENT_PATTERN)
args_index = ($2.to_i - 1)
if (args.nil? or args[args_index].nil?)
@streaminator.stderr_puts("ERROR: Tool '#{@tool_name}' expected valid argument data to accompany replacement operator #{$1}.", Verbosity::ERRORS)
raise
end
match = /#{Regexp.escape($1)}/
to_process = args[args_index]
end
# simple string argument: replace escaped '\$' and strip
element.sub!(/\\\$/, '$')
element.strip!
# handle inline ruby execution
if (element =~ RUBY_EVAL_REPLACEMENT_PATTERN)
element.replace(eval($1))
end
build_string = ''
# handle array or anything else passed into method to be expanded in place of replacement operators
case (to_process)
when Array then to_process.each {|value| build_string.concat( "#{element.sub(match, value.to_s)} " ) } if (to_process.size > 0)
else build_string.concat( element.sub(match, to_process.to_s) )
end
# handle inline ruby string substitution
if (build_string =~ RUBY_STRING_REPLACEMENT_PATTERN)
build_string.replace(@system_wrapper.module_eval(build_string))
end
return build_string.strip
end
# handle argument hash: keys are substitution strings, values are data to be expanded within substitution strings
def dehashify_argument_elements(hash)
build_string = ''
elements = []
# grab the substitution string (hash key)
substitution = hash.keys[0].to_s
# grab the string(s) to squirt into the substitution string (hash value)
expand = hash[hash.keys[0]]
if (expand.nil?)
@streaminator.stderr_puts("ERROR: Tool '#{@tool_name}' could not expand nil elements for substitution string '#{substitution}'.", Verbosity::ERRORS)
raise
end
# array-ify expansion input if only a single string
expansion = ((expand.class == String) ? [expand] : expand)
expansion.each do |item|
# code eval substitution
if (item =~ RUBY_EVAL_REPLACEMENT_PATTERN)
elements << eval($1)
# string eval substitution
elsif (item =~ RUBY_STRING_REPLACEMENT_PATTERN)
elements << @system_wrapper.module_eval(item)
# global constants
elsif (@system_wrapper.constants_include?(item))
const = Object.const_get(item)
if (const.nil?)
@streaminator.stderr_puts("ERROR: Tool '#{@tool_name}' found constant '#{item}' to be nil.", Verbosity::ERRORS)
raise
else
elements << const
end
elsif (item.class == Array)
elements << item
elsif (item.class == String)
@streaminator.stderr_puts("ERROR: Tool '#{@tool_name}' cannot expand nonexistent value '#{item}' for substitution string '#{substitution}'.", Verbosity::ERRORS)
raise
else
@streaminator.stderr_puts("ERROR: Tool '#{@tool_name}' cannot expand value having type '#{item.class}' for substitution string '#{substitution}'.", Verbosity::ERRORS)
raise
end
end
# expand elements (whether string or array) into substitution string & replace escaped '\$'
elements.flatten!
elements.each do |element|
build_string.concat( substitution.sub(/([^\\]*)\$/, "\\1#{element}") ) # don't replace escaped '\$' but allow us to replace just a lonesome '$'
build_string.gsub!(/\\\$/, '$')
build_string.concat(' ')
end
return build_string.strip
end
end
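A simplified sketch of the ${n} replacement that build_command_line and expandify_element perform above: positional arguments fill ${1}, ${2}, and so on in the tool's argument strings, with array values expanded once per element. The gcc-style tool hash is hypothetical, and the real code also handles inline Ruby, hashes, and escaped dollars.
# Simplified sketch of ${n} argument expansion (the real expandify_element does much more).
tool = {
  :name       => 'test_compiler',                  # hypothetical tool definition
  :executable => 'gcc',
  :arguments  => ['-c', '${1}', '-o', '${2}']
}

def expand_sketch(arguments, *args)
  arguments.map do |arg|
    if arg =~ /\$\{(\d+)\}/
      value = args[$1.to_i - 1]
      Array(value).map { |v| arg.sub(/\$\{\d+\}/, v.to_s) }.join(' ')
    else
      arg
    end
  end.join(' ')
end

puts "#{tool[:executable]} #{expand_sketch(tool[:arguments], 'widget.c', 'widget.o')}"
# => gcc -c widget.c -o widget.o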

View File

@ -1,164 +0,0 @@
require 'ceedling/constants' # for Verbosity enumeration & $stderr redirect enumeration
##
# Helper functions for the tool executor
class ToolExecutorHelper
constructor :streaminator, :system_utils, :system_wrapper
##
# Returns the stderr redirection based on the config and logging.
# ==== Attributes
#
# * _tool_config_: A hash containing config information.
# * _logging_: A boolean representing if logging is enabled or not.
#
def stderr_redirection(tool_config, logging)
# if there's no logging enabled, return :stderr_redirect unmodified
return tool_config[:stderr_redirect] if (not logging)
# if there is logging enabled but the redirect is a custom value (not enum), return the custom string
return tool_config[:stderr_redirect] if (tool_config[:stderr_redirect].class == String)
# if logging is enabled but there's no custom string, return the AUTO enumeration so $stderr goes into the log
return StdErrRedirect::AUTO
end
##
# Returns the background execution prepend based on the config.
# ==== Attributes
#
# * _tool_config_: A hash containing config information.
#
def background_exec_cmdline_prepend(tool_config)
return nil if (tool_config.nil? || tool_config[:background_exec].nil?)
config_exec = tool_config[:background_exec]
if ((config_exec == BackgroundExec::AUTO) and (@system_wrapper.windows?))
return 'start'
end
if (config_exec == BackgroundExec::WIN)
return 'start'
end
return nil
end
##
# Modifies an executables path based on platform.
# ==== Attributes
#
# * _executable_: The executable's path.
#
def osify_path_separators(executable)
return executable.gsub(/\//, '\\') if (@system_wrapper.windows?)
return executable
end
##
# Returns the stderr redirect append based on the config.
# ==== Attributes
#
# * _tool_config_: A hash containing config information.
#
def stderr_redirect_cmdline_append(tool_config)
return nil if (tool_config.nil? || tool_config[:stderr_redirect].nil?)
config_redirect = tool_config[:stderr_redirect]
redirect = StdErrRedirect::NONE
if (config_redirect == StdErrRedirect::AUTO)
if (@system_wrapper.windows?)
redirect = StdErrRedirect::WIN
elsif (@system_utils.tcsh_shell?)
redirect = StdErrRedirect::TCSH
else
redirect = StdErrRedirect::UNIX
end
end
case redirect
# we may need more complicated processing after some learning with various environments
when StdErrRedirect::NONE then nil
when StdErrRedirect::WIN then '2>&1'
when StdErrRedirect::UNIX then '2>&1'
when StdErrRedirect::TCSH then '|&'
else redirect.to_s
end
end
##
# Returns the background execution append based on the config.
# ==== Attributes
#
# * _tool_config_: A hash containing config information.
#
def background_exec_cmdline_append(tool_config)
return nil if (tool_config.nil? || tool_config[:background_exec].nil?)
config_exec = tool_config[:background_exec]
# if :auto & windows, then we already prepended 'start' and should append nothing
return nil if ((config_exec == BackgroundExec::AUTO) and (@system_wrapper.windows?))
# if :auto & not windows, then we append standard '&'
return '&' if ((config_exec == BackgroundExec::AUTO) and (not @system_wrapper.windows?))
# if explicitly Unix, then append '&'
return '&' if (config_exec == BackgroundExec::UNIX)
# all other cases, including :none, :win, & anything unrecognized, append nothing
return nil
end
##
# Outputs success results if command succeeded and we have verbosity cranked up.
# ==== Attributes
#
# * _command_str_: The command ran.
# * _shell_results_: The outputs of the command including exit code and
# output.
# * _boom_: A boolean representing if a non zero result is erroneous.
#
def print_happy_results(command_str, shell_result, boom=true)
if ((shell_result[:exit_code] == 0) or ((shell_result[:exit_code] != 0) and not boom))
output = "> Shell executed command:\n"
output += "'#{command_str}'\n"
output += "> Produced output:\n" if (not shell_result[:output].empty?)
output += "#{shell_result[:output].strip}\n" if (not shell_result[:output].empty?)
output += "> And exited with status: [#{shell_result[:exit_code]}].\n" if (shell_result[:exit_code] != 0)
output += "\n"
@streaminator.stdout_puts(output, Verbosity::OBNOXIOUS)
end
end
##
# Outputs failure results if the command failed and verbosity is at least the error level.
# ==== Attributes
#
# * _command_str_: The command ran.
# * _shell_results_: The outputs of the command including exit code and
# output.
# * _boom_: A boolean representing if a non zero result is erroneous.
#
def print_error_results(command_str, shell_result, boom=true)
if ((shell_result[:exit_code] != 0) and boom)
output = "ERROR: Shell command failed.\n"
output += "> Shell executed command:\n"
output += "'#{command_str}'\n"
output += "> Produced output:\n" if (not shell_result[:output].empty?)
output += "#{shell_result[:output].strip}\n" if (not shell_result[:output].empty?)
output += "> And exited with status: [#{shell_result[:exit_code]}].\n" if (shell_result[:exit_code] != nil)
output += "> And then likely crashed.\n" if (shell_result[:exit_code] == nil)
output += "\n"
@streaminator.stderr_puts(output, Verbosity::ERRORS)
end
end
end

View File

@ -1,10 +0,0 @@
class Verbosinator
constructor :configurator
def should_output?(level)
return (level <= @configurator.project_verbosity)
end
end

View File

@ -1,54 +0,0 @@
# @private
module Ceedling
module Version
{ "UNITY" => File.join("unity","src","unity.h"),
"CMOCK" => File.join("cmock","src","cmock.h"),
"CEXCEPTION" => File.join("c_exception","lib","CException.h")
}.each_pair do |name, path|
# Check for local or global version of vendor directory in order to look up versions
path1 = File.expand_path( File.join("..","..","vendor",path) )
path2 = File.expand_path( File.join(File.dirname(__FILE__),"..","..","vendor",path) )
filename = if (File.exist?(path1))
path1
elsif (File.exist?(path2))
path2
elsif File.exist?(CEEDLING_VENDOR)
path3 = File.expand_path( File.join(CEEDLING_VENDOR,path) )
if (File.exist?(path3))
path3
else
basepath = File.join( CEEDLING_VENDOR, path.split(/\\|\//)[0] )
begin
[ @ceedling[:file_wrapper].read( File.join(basepath, 'release', 'version.info') ).strip,
@ceedling[:file_wrapper].read( File.join(basepath, 'release', 'build.info') ).strip ].join('.')
rescue
"#{name}"
end
end
else
module_eval("#{name} = 'unknown'")
next
end
# Actually look up the versions
a = [0,0,0]
begin
File.readlines(filename).each do |line|
["VERSION_MAJOR", "VERSION_MINOR", "VERSION_BUILD"].each_with_index do |field, i|
m = line.match(/#{name}_#{field}\s+(\d+)/)
a[i] = m[1] unless (m.nil?)
end
end
rescue
abort("Can't collect data for vendor component: \"#{filename}\" . \nPlease check your setup.")
end
# splat it to return the final value
eval("#{name} = '#{a.join(".")}'")
end
GEM = "0.31.1"
CEEDLING = GEM
end
end

View File

@ -1,17 +0,0 @@
require 'yaml'
require 'erb'
class YamlWrapper
def load(filepath)
return YAML.load(ERB.new(File.read(filepath)).result)
end
def dump(filepath, structure)
File.open(filepath, 'w') do |output|
YAML.dump(structure, output)
end
end
end

View File

@ -1,22 +0,0 @@
ceedling-beep
=============
This is a simple plugin that just beeps at the end of a build and/or test sequence. Are you getting too distracted surfing
the internet, chatting with coworkers, or swordfighting while it's building or testing? The friendly beep will let you know
it's time to pay attention again.
This plugin has very few configuration options. At this time it can beep on completion of a task and/or on an error condition.
For each of these, you can configure the method it uses to beep.
```
:tools:
  :beep_on_done: :bell
  :beep_on_error: :bell
```
Each of these accepts the following options:
- :bell - outputs the ASCII bell character to stdout
- :speaker_test - uses the Linux `speaker-test` command, if installed
Very likely, we'll be adding to this list if people find this to be useful.
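For example, a hypothetical configuration that uses the speaker for successful builds and falls back to the terminal bell on errors might look like this (just a sketch; `speaker-test` must be installed for the first option to do anything):
```
:tools:
  :beep_on_done: :speaker_test
  :beep_on_error: :bell
```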

View File

@ -1,39 +0,0 @@
require 'ceedling/plugin'
require 'ceedling/constants'
class Beep < Plugin
attr_reader :config
def setup
@config = {
:on_done => ((defined? TOOLS_BEEP_ON_DONE) ? TOOLS_BEEP_ON_DONE : :bell ),
:on_error => ((defined? TOOLS_BEEP_ON_ERROR) ? TOOLS_BEEP_ON_ERROR : :bell ),
}
end
def post_build
beep @config[:on_done]
end
def post_error
beep @config[:on_error]
end
private
def beep(method = :none)
case method
when :bell
if (SystemWrapper.windows?)
puts "echo '\007'"
else
puts "echo -ne '\007'"
end
when :speaker_test
`speaker-test -t sine -f 1000 -l 1`
else
#do nothing with illegal or :none
end
end
end

View File

@ -1,76 +0,0 @@
ceedling-bullseye
=================
# Plugin Overview
Plugin for integrating Bullseye code coverage tool into Ceedling projects.
This plugin requires a working license for the Bullseye code coverage tools. The tools
must be on your PATH, or their location should be added to the environment in the
`project.yml` file.
## Configuration
The bullseye plugin supports configuration options via your `project.yml` provided
by Ceedling. The following is a typical configuration example:
```
:bullseye:
  :auto_license: TRUE
:plugins:
  :bullseye_lib_path: []
:paths:
  :bullseye_toolchain_include: []
:tools:
  :bullseye_instrumentation:
    :executable: covc
    :arguments:
      - '--file $': ENVIRONMENT_COVFILE
      - -q
      - ${1}
  :bullseye_compiler:
    :executable: gcc
    :arguments:
      - -g
      - -I"$": COLLECTION_PATHS_TEST_SUPPORT_SOURCE_INCLUDE_VENDOR
      - -I"$": COLLECTION_PATHS_BULLSEYE_TOOLCHAIN_INCLUDE
      - -D$: COLLECTION_DEFINES_TEST_AND_VENDOR
      - -DBULLSEYE_COMPILER
      - -c "${1}"
      - -o "${2}"
  :bullseye_linker:
    :executable: gcc
    :arguments:
      - ${1}
      - -o ${2}
      - -L$: PLUGINS_BULLSEYE_LIB_PATH
      - -lcov
  :bullseye_fixture:
    :executable: ${1}
  :bullseye_report_covsrc:
    :executable: covsrc
    :arguments:
      - '--file $': ENVIRONMENT_COVFILE
      - -q
      - -w140
  :bullseye_report_covfn:
    :executable: covfn
    :stderr_redirect: :auto
    :arguments:
      - '--file $': ENVIRONMENT_COVFILE
      - --width 120
      - --no-source
      - '"${1}"'
  :bullseye_browser:
    :executable: CoverageBrowser
    :background_exec: :auto
    :optional: TRUE
    :arguments:
      - '"$"': ENVIRONMENT_COVFILE
```
## Example Usage
```sh
ceedling bullseye:all utils:bullseye
```

View File

@ -1,14 +0,0 @@
% function_string = hash[:coverage][:functions].to_s
% branch_string = hash[:coverage][:branches].to_s
% format_string = "%#{[function_string.length, branch_string.length].max}i"
<%=@ceedling[:plugin_reportinator].generate_banner("#{hash[:header]}: CODE COVERAGE SUMMARY")%>
% if (!hash[:coverage][:functions].nil?)
FUNCTIONS: <%=sprintf(format_string, hash[:coverage][:functions])%>%
% else
FUNCTIONS: none
% end
% if (!hash[:coverage][:branches].nil?)
BRANCHES: <%=sprintf(format_string, hash[:coverage][:branches])%>%
% else
BRANCHES: none
% end

View File

@ -1,173 +0,0 @@
directory(BULLSEYE_BUILD_OUTPUT_PATH)
directory(BULLSEYE_RESULTS_PATH)
directory(BULLSEYE_ARTIFACTS_PATH)
directory(BULLSEYE_DEPENDENCIES_PATH)
CLEAN.include(File.join(BULLSEYE_BUILD_OUTPUT_PATH, '*'))
CLEAN.include(File.join(BULLSEYE_RESULTS_PATH, '*'))
CLEAN.include(File.join(BULLSEYE_DEPENDENCIES_PATH, '*'))
CLOBBER.include(File.join(BULLSEYE_BUILD_PATH, '**/*'))
PLUGINS_BULLSEYE_LIB_PATH = 'C:\\tools\\BullseyeCoverage\\lib' if not defined?(PLUGINS_BULLSEYE_LIB_PATH)
rule(/#{BULLSEYE_BUILD_OUTPUT_PATH}\/#{'.+\\'+EXTENSION_OBJECT}$/ => [
proc do |task_name|
@ceedling[:file_finder].find_compilation_input_file(task_name)
end
]) do |object|
if File.basename(object.source) =~ /^(#{PROJECT_TEST_FILE_PREFIX}|#{CMOCK_MOCK_PREFIX}|#{BULLSEYE_IGNORE_SOURCES.join('|')})/i
@ceedling[:generator].generate_object_file(
TOOLS_BULLSEYE_COMPILER,
OPERATION_COMPILE_SYM,
BULLSEYE_SYM,
object.source,
object.name,
@ceedling[:file_path_utils].form_test_build_list_filepath(object.name)
)
else
@ceedling[BULLSEYE_SYM].generate_coverage_object_file(object.source, object.name)
end
end
rule(/#{BULLSEYE_BUILD_OUTPUT_PATH}\/#{'.+\\'+EXTENSION_EXECUTABLE}$/) do |bin_file|
lib_args = @ceedling[:test_invoker].convert_libraries_to_arguments()
lib_paths = @ceedling[:test_invoker].get_library_paths_to_arguments()
@ceedling[:generator].generate_executable_file(
TOOLS_BULLSEYE_LINKER,
BULLSEYE_SYM,
bin_file.prerequisites,
bin_file.name,
@ceedling[:file_path_utils].form_test_build_map_filepath(bin_file.name),
lib_args,
lib_paths
)
end
rule(/#{BULLSEYE_RESULTS_PATH}\/#{'.+\\'+EXTENSION_TESTPASS}$/ => [
proc do |task_name|
@ceedling[:file_path_utils].form_test_executable_filepath(task_name)
end
]) do |test_result|
@ceedling[:generator].generate_test_results(TOOLS_BULLSEYE_FIXTURE, BULLSEYE_SYM, test_result.source, test_result.name)
end
rule(/#{BULLSEYE_DEPENDENCIES_PATH}\/#{'.+\\'+EXTENSION_DEPENDENCIES}$/ => [
proc do |task_name|
@ceedling[:file_finder].find_compilation_input_file(task_name)
end
]) do |dep|
@ceedling[:generator].generate_dependencies_file(
TOOLS_TEST_DEPENDENCIES_GENERATOR,
BULLSEYE_SYM,
dep.source,
File.join(BULLSEYE_BUILD_OUTPUT_PATH, File.basename(dep.source).ext(EXTENSION_OBJECT) ),
dep.name
)
end
task :directories => [BULLSEYE_BUILD_OUTPUT_PATH, BULLSEYE_RESULTS_PATH, BULLSEYE_DEPENDENCIES_PATH, BULLSEYE_ARTIFACTS_PATH]
namespace BULLSEYE_SYM do
task source_coverage: COLLECTION_ALL_SOURCE.pathmap("#{BULLSEYE_BUILD_OUTPUT_PATH}/%n#{@ceedling[:configurator].extension_object}")
desc 'Run code coverage for all tests'
task all: [:test_deps] do
@ceedling[:configurator].replace_flattened_config(@ceedling[BULLSEYE_SYM].config)
@ceedling[BULLSEYE_SYM].enableBullseye(true)
@ceedling[:test_invoker].setup_and_invoke(COLLECTION_ALL_TESTS, BULLSEYE_SYM)
@ceedling[:configurator].restore_config
end
desc "Run single test w/ coverage ([*] real test or source file name, no path)."
task :* do
message = "\nOops! '#{BULLSEYE_ROOT_NAME}:*' isn't a real task. " +
"Use a real test or source file name (no path) in place of the wildcard.\n" +
"Example: rake #{BULLSEYE_ROOT_NAME}:foo.c\n\n"
@ceedling[:streaminator].stdout_puts( message )
end
desc 'Run tests by matching regular expression pattern.'
task :pattern, [:regex] => [:test_deps] do |_t, args|
matches = []
COLLECTION_ALL_TESTS.each do |test|
matches << test if test =~ /#{args.regex}/
end
if !matches.empty?
@ceedling[:configurator].replace_flattened_config(@ceedling[BULLSEYE_SYM].config)
@ceedling[BULLSEYE_SYM].enableBullseye(true)
@ceedling[:test_invoker].setup_and_invoke(matches, BULLSEYE_SYM, force_run: false)
@ceedling[:configurator].restore_config
else
@ceedling[:streaminator].stdout_puts("\nFound no tests matching pattern /#{args.regex}/.")
end
end
desc 'Run tests whose test path contains [dir] or [dir] substring.'
task :path, [:dir] => [:test_deps] do |_t, args|
matches = []
COLLECTION_ALL_TESTS.each do |test|
matches << test if File.dirname(test).include?(args.dir.tr('\\', '/'))
end
if !matches.empty?
@ceedling[:configurator].replace_flattened_config(@ceedling[BULLSEYE_SYM].config)
@ceedling[BULLSEYE_SYM].enableBullseye(true)
@ceedling[:test_invoker].setup_and_invoke(matches, BULLSEYE_SYM, force_run: false)
@ceedling[:configurator].restore_config
else
@ceedling[:streaminator].stdout_puts("\nFound no tests including the given path or path component.")
end
end
desc 'Run code coverage for changed files'
task delta: [:test_deps] do
@ceedling[:configurator].replace_flattened_config(@ceedling[BULLSEYE_SYM].config)
@ceedling[BULLSEYE_SYM].enableBullseye(true)
@ceedling[:test_invoker].setup_and_invoke(COLLECTION_ALL_TESTS, BULLSEYE_SYM, {:force_run => false})
@ceedling[:configurator].restore_config
end
# use a rule to increase efficiency for large projects
# bullseye test tasks by regex
rule(/^#{BULLSEYE_TASK_ROOT}\S+$/ => [
proc do |task_name|
test = task_name.sub(/#{BULLSEYE_TASK_ROOT}/, '')
test = "#{PROJECT_TEST_FILE_PREFIX}#{test}" unless test.start_with?(PROJECT_TEST_FILE_PREFIX)
@ceedling[:file_finder].find_test_from_file_path(test)
end
]) do |test|
@ceedling[:rake_wrapper][:test_deps].invoke
@ceedling[:configurator].replace_flattened_config(@ceedling[BULLSEYE_SYM].config)
@ceedling[BULLSEYE_SYM].enableBullseye(true)
@ceedling[:test_invoker].setup_and_invoke([test.source], BULLSEYE_SYM)
@ceedling[:configurator].restore_config
end
end
if PROJECT_USE_DEEP_DEPENDENCIES
namespace REFRESH_SYM do
task BULLSEYE_SYM do
@ceedling[:configurator].replace_flattened_config(@ceedling[BULLSEYE_SYM].config)
@ceedling[BULLSEYE_SYM].enableBullseye(true)
@ceedling[:test_invoker].refresh_deep_dependencies
@ceedling[:configurator].restore_config
end
end
end
namespace UTILS_SYM do
desc "Open Bullseye code coverage browser"
task BULLSEYE_SYM do
command = @ceedling[:tool_executor].build_command_line(TOOLS_BULLSEYE_BROWSER, [])
@ceedling[:tool_executor].exec(command[:line], command[:options])
end
end

View File

@ -1,57 +0,0 @@
---
:bullseye:
  :auto_license: TRUE
:plugins:
  :bullseye_lib_path: []
:paths:
  :bullseye_toolchain_include: []
:tools:
  :bullseye_instrumentation:
    :executable: covc
    :arguments:
      - '--file $': ENVIRONMENT_COVFILE
      - -q
      - ${1}
  :bullseye_compiler:
    :executable: gcc
    :arguments:
      - -g
      - -I"$": COLLECTION_PATHS_TEST_SUPPORT_SOURCE_INCLUDE_VENDOR
      - -I"$": COLLECTION_PATHS_BULLSEYE_TOOLCHAIN_INCLUDE
      - -D$: COLLECTION_DEFINES_TEST_AND_VENDOR
      - -DBULLSEYE_COMPILER
      - -c "${1}"
      - -o "${2}"
  :bullseye_linker:
    :executable: gcc
    :arguments:
      - ${1}
      - -o ${2}
      - -L$: PLUGINS_BULLSEYE_LIB_PATH
      - -lcov
  :bullseye_fixture:
    :executable: ${1}
  :bullseye_report_covsrc:
    :executable: covsrc
    :arguments:
      - '--file $': ENVIRONMENT_COVFILE
      - -q
      - -w140
  :bullseye_report_covfn:
    :executable: covfn
    :stderr_redirect: :auto
    :arguments:
      - '--file $': ENVIRONMENT_COVFILE
      - --width 120
      - --no-source
      - '"${1}"'
  :bullseye_browser:
    :executable: CoverageBrowser
    :background_exec: :auto
    :optional: TRUE
    :arguments:
      - '"$"': ENVIRONMENT_COVFILE
...

View File

@ -1,194 +0,0 @@
require 'ceedling/plugin'
require 'ceedling/constants'
BULLSEYE_ROOT_NAME = 'bullseye'
BULLSEYE_TASK_ROOT = BULLSEYE_ROOT_NAME + ':'
BULLSEYE_SYM = BULLSEYE_ROOT_NAME.to_sym
BULLSEYE_BUILD_PATH = "#{PROJECT_BUILD_ROOT}/#{BULLSEYE_ROOT_NAME}"
BULLSEYE_BUILD_OUTPUT_PATH = "#{BULLSEYE_BUILD_PATH}/out"
BULLSEYE_RESULTS_PATH = "#{BULLSEYE_BUILD_PATH}/results"
BULLSEYE_DEPENDENCIES_PATH = "#{BULLSEYE_BUILD_PATH}/dependencies"
BULLSEYE_ARTIFACTS_PATH = "#{PROJECT_BUILD_ARTIFACTS_ROOT}/#{BULLSEYE_ROOT_NAME}"
BULLSEYE_IGNORE_SOURCES = ['unity', 'cmock', 'cexception']
class Bullseye < Plugin
def setup
@result_list = []
@environment = [ {:covfile => File.join( BULLSEYE_ARTIFACTS_PATH, 'test.cov' )} ]
@plugin_root = File.expand_path(File.join(File.dirname(__FILE__), '..'))
@coverage_template_all = @ceedling[:file_wrapper].read(File.join(@plugin_root, 'assets/template.erb'))
end
def config
{
:project_test_build_output_path => BULLSEYE_BUILD_OUTPUT_PATH,
:project_test_results_path => BULLSEYE_RESULTS_PATH,
:project_test_dependencies_path => BULLSEYE_DEPENDENCIES_PATH,
:defines_test => DEFINES_TEST + ['CODE_COVERAGE'],
:collection_defines_test_and_vendor => COLLECTION_DEFINES_TEST_AND_VENDOR + ['CODE_COVERAGE']
}
end
def generate_coverage_object_file(source, object)
arg_hash = {:tool => TOOLS_BULLSEYE_INSTRUMENTATION, :context => BULLSEYE_SYM, :source => source, :object => object}
@ceedling[:plugin_manager].pre_compile_execute(arg_hash)
@ceedling[:streaminator].stdout_puts("Compiling #{File.basename(source)} with coverage...")
compile_command =
@ceedling[:tool_executor].build_command_line(
TOOLS_BULLSEYE_COMPILER,
@ceedling[:flaginator].flag_down( OPERATION_COMPILE_SYM, BULLSEYE_SYM, source ),
source,
object,
@ceedling[:file_path_utils].form_test_build_list_filepath( object ) )
coverage_command = @ceedling[:tool_executor].build_command_line(TOOLS_BULLSEYE_INSTRUMENTATION, [], compile_command[:line] )
shell_result = @ceedling[:tool_executor].exec( coverage_command[:line], coverage_command[:options] )
arg_hash[:shell_result] = shell_result
@ceedling[:plugin_manager].post_compile_execute(arg_hash)
end
def post_test_fixture_execute(arg_hash)
result_file = arg_hash[:result_file]
if ((result_file =~ /#{BULLSEYE_RESULTS_PATH}/) and (not @result_list.include?(result_file)))
@result_list << arg_hash[:result_file]
end
end
def post_build
return if (not @ceedling[:task_invoker].invoked?(/^#{BULLSEYE_TASK_ROOT}/))
# test results
results = @ceedling[:plugin_reportinator].assemble_test_results(@result_list)
hash = {
:header => BULLSEYE_ROOT_NAME.upcase,
:results => results
}
@ceedling[:plugin_reportinator].run_test_results_report(hash) do
message = ''
message = 'Unit test failures.' if (results[:counts][:failed] > 0)
message
end
# coverage results
return if (verify_coverage_file() == false)
if (@ceedling[:task_invoker].invoked?(/^#{BULLSEYE_TASK_ROOT}(all|delta)/))
command = @ceedling[:tool_executor].build_command_line(TOOLS_BULLSEYE_REPORT_COVSRC, [])
shell_result = @ceedling[:tool_executor].exec(command[:line], command[:options])
report_coverage_results_all(shell_result[:output])
else
report_per_function_coverage_results(@ceedling[:test_invoker].sources)
end
end
def summary
return if (verify_coverage_file() == false)
result_list = @ceedling[:file_path_utils].form_pass_results_filelist( BULLSEYE_RESULTS_PATH, COLLECTION_ALL_TESTS )
# test results
# get test results for only those tests in our configuration and of those only tests with results on disk
hash = {
:header => BULLSEYE_ROOT_NAME.upcase,
:results => @ceedling[:plugin_reportinator].assemble_test_results(result_list, {:boom => false})
}
@ceedling[:plugin_reportinator].run_test_results_report(hash)
# coverage results
command = @ceedling[:tool_executor].build_command_line(TOOLS_BULLSEYE_REPORT_COVSRC)
shell_result = @ceedling[:tool_executor].exec(command[:line], command[:options])
report_coverage_results_all(shell_result[:output])
end
def enableBullseye(enable)
if BULLSEYE_AUTO_LICENSE
if (enable)
args = ['push', 'on']
@ceedling[:streaminator].stdout_puts("Enabling Bullseye")
else
args = ['pop']
@ceedling[:streaminator].stdout_puts("Reverting Bullseye to previous state")
end
args.each do |arg|
command = @ceedling[:tool_executor].build_command_line(TOOLS_BULLSEYE_BUILD_ENABLE_DISABLE, [], arg)
shell_result = @ceedling[:tool_executor].exec(command[:line], command[:options])
end
end
end
private ###################################
def report_coverage_results_all(coverage)
results = {
:header => BULLSEYE_ROOT_NAME.upcase,
:coverage => {
:functions => nil,
:branches => nil
}
}
if (coverage =~ /^Total.*?=\s+([0-9]+)\%/)
results[:coverage][:functions] = $1.to_i
end
if (coverage =~ /^Total.*=\s+([0-9]+)\%\s*$/)
results[:coverage][:branches] = $1.to_i
end
@ceedling[:plugin_reportinator].run_report($stdout, @coverage_template_all, results)
end
def report_per_function_coverage_results(sources)
banner = @ceedling[:plugin_reportinator].generate_banner( "#{BULLSEYE_ROOT_NAME.upcase}: CODE COVERAGE SUMMARY" )
@ceedling[:streaminator].stdout_puts "\n" + banner
coverage_sources = sources.clone
coverage_sources.delete_if {|item| item =~ /#{CMOCK_MOCK_PREFIX}.+#{EXTENSION_SOURCE}$/}
coverage_sources.delete_if {|item| item =~ /#{BULLSEYE_IGNORE_SOURCES.join('|')}#{EXTENSION_SOURCE}$/}
coverage_sources.each do |source|
command = @ceedling[:tool_executor].build_command_line(TOOLS_BULLSEYE_REPORT_COVFN, [], source)
shell_results = @ceedling[:tool_executor].exec(command[:line], command[:options])
coverage_results = shell_results[:output].deep_clone
coverage_results.sub!(/.*\n.*\n/,'') # Remove the Bullseye tool banner
if (coverage_results =~ /warning cov814: report is empty/)
coverage_results = "WARNING: #{source} contains no coverage data!\n\n"
@ceedling[:streaminator].stdout_puts(coverage_results, Verbosity::COMPLAIN)
else
coverage_results += "\n"
@ceedling[:streaminator].stdout_puts(coverage_results)
end
end
end
def verify_coverage_file
exist = @ceedling[:file_wrapper].exist?( ENVIRONMENT_COVFILE )
if (!exist)
banner = @ceedling[:plugin_reportinator].generate_banner( "#{BULLSEYE_ROOT_NAME.upcase}: CODE COVERAGE SUMMARY" )
@ceedling[:streaminator].stdout_puts "\n" + banner + "\nNo coverage file.\n\n"
end
return exist
end
end
# end blocks always executed following rake run
END {
# cache our input configurations to use in comparison upon next execution
if (@ceedling[:task_invoker].invoked?(/^#{BULLSEYE_TASK_ROOT}/))
@ceedling[:cacheinator].cache_test_config( @ceedling[:setupinator].config_hash )
@ceedling[BULLSEYE_SYM].enableBullseye(false)
end
}

View File

@ -1,20 +0,0 @@
ceedling-colour-report
======================
## Overview
The colour_report replaces the normal ceedling "pretty" output with
a colorized variant, in order to make the results easier to read from
a standard command line. This is very useful on developer machines, but
can occasionally cause problems with parsing on CI servers.
## Setup
Enable the plugin in your project.yml by adding `colour_report`
to the list of enabled plugins.
``` YAML
:plugins:
  :enabled:
    - colour_report
```

View File

@ -1,16 +0,0 @@
require 'ceedling/plugin'
require 'ceedling/streaminator'
require 'ceedling/constants'
class ColourReport < Plugin
def setup
@ceedling[:stream_wrapper].stdout_override(&ColourReport.method(:colour_stdout))
end
def self.colour_stdout(string)
require 'colour_reporter.rb'
report string
end
end

View File

@ -1,53 +0,0 @@
ceedling-command-hooks
======================
Plugin for easily calling command line tools at various points in the build process
Define any of these sections in :tools: to provide additional hooks to be called on demand:
```
:pre_mock_generate
:post_mock_generate
:pre_runner_generate
:post_runner_generate
:pre_compile_execute
:post_compile_execute
:pre_link_execute
:post_link_execute
:pre_test_fixture_execute
:pre_test
:post_test
:pre_release
:post_release
:pre_build
:post_build
```
Each of these tools can support an :executable string and an :arguments list, like so:
```
:tools:
  :post_link_execute:
    :executable: objcopy.exe
    :arguments:
      - ${1} #This is replaced with the executable name
      - output.srec
      - --strip-all
```
You may also specify an array of executables to be called in a particular place, like so:
```
:tools:
  :post_test:
    - :executable: echo
      :arguments: "${1} was glorious!"
    - :executable: echo
      :arguments:
        - it kinda made me cry a little.
        - you?
```
Please note that it varies which arguments are passed down to each hook. For now, see
`command_hooks.rb` to figure out which suits you best.
Happy Tweaking!

View File

@ -1,91 +0,0 @@
require 'ceedling/plugin'
require 'ceedling/constants'
class CommandHooks < Plugin
attr_reader :config
def setup
@config = {
:pre_mock_generate => ((defined? TOOLS_PRE_MOCK_GENERATE) ? TOOLS_PRE_MOCK_GENERATE : nil ),
:post_mock_generate => ((defined? TOOLS_POST_MOCK_GENERATE) ? TOOLS_POST_MOCK_GENERATE : nil ),
:pre_runner_generate => ((defined? TOOLS_PRE_RUNNER_GENERATE) ? TOOLS_PRE_RUNNER_GENERATE : nil ),
:post_runner_generate => ((defined? TOOLS_POST_RUNNER_GENERATE) ? TOOLS_POST_RUNNER_GENERATE : nil ),
:pre_compile_execute => ((defined? TOOLS_PRE_COMPILE_EXECUTE) ? TOOLS_PRE_COMPILE_EXECUTE : nil ),
:post_compile_execute => ((defined? TOOLS_POST_COMPILE_EXECUTE) ? TOOLS_POST_COMPILE_EXECUTE : nil ),
:pre_link_execute => ((defined? TOOLS_PRE_LINK_EXECUTE) ? TOOLS_PRE_LINK_EXECUTE : nil ),
:post_link_execute => ((defined? TOOLS_POST_LINK_EXECUTE) ? TOOLS_POST_LINK_EXECUTE : nil ),
:pre_test_fixture_execute => ((defined? TOOLS_PRE_TEST_FIXTURE_EXECUTE) ? TOOLS_PRE_TEST_FIXTURE_EXECUTE : nil ),
:post_test_fixture_execute => ((defined? TOOLS_POST_TEST_FIXTURE_EXECUTE) ? TOOLS_POST_TEST_FIXTURE_EXECUTE : nil ),
:pre_test => ((defined? TOOLS_PRE_TEST) ? TOOLS_PRE_TEST : nil ),
:post_test => ((defined? TOOLS_POST_TEST) ? TOOLS_POST_TEST : nil ),
:pre_release => ((defined? TOOLS_PRE_RELEASE) ? TOOLS_PRE_RELEASE : nil ),
:post_release => ((defined? TOOLS_POST_RELEASE) ? TOOLS_POST_RELEASE : nil ),
:pre_build => ((defined? TOOLS_PRE_BUILD) ? TOOLS_PRE_BUILD : nil ),
:post_build => ((defined? TOOLS_POST_BUILD) ? TOOLS_POST_BUILD : nil ),
:post_error => ((defined? TOOLS_POST_ERROR) ? TOOLS_POST_ERROR : nil ),
}
@plugin_root = File.expand_path(File.join(File.dirname(__FILE__), '..'))
end
def pre_mock_generate(arg_hash); run_hook(:pre_mock_generate, arg_hash[:header_file] ); end
def post_mock_generate(arg_hash); run_hook(:post_mock_generate, arg_hash[:header_file] ); end
def pre_runner_generate(arg_hash); run_hook(:pre_runner_generate, arg_hash[:source ] ); end
def post_runner_generate(arg_hash); run_hook(:post_runner_generate, arg_hash[:runner_file] ); end
def pre_compile_execute(arg_hash); run_hook(:pre_compile_execute, arg_hash[:source_file] ); end
def post_compile_execute(arg_hash); run_hook(:post_compile_execute, arg_hash[:object_file] ); end
def pre_link_execute(arg_hash); run_hook(:pre_link_execute, arg_hash[:executable] ); end
def post_link_execute(arg_hash); run_hook(:post_link_execute, arg_hash[:executable] ); end
def pre_test_fixture_execute(arg_hash); run_hook(:pre_test_fixture_execute, arg_hash[:executable] ); end
def post_test_fixture_execute(arg_hash); run_hook(:post_test_fixture_execute, arg_hash[:executable] ); end
def pre_test(test); run_hook(:pre_test, test ); end
def post_test(test); run_hook(:post_test, test ); end
def pre_release; run_hook(:pre_release ); end
def post_release; run_hook(:post_release ); end
def pre_build; run_hook(:pre_build ); end
def post_build; run_hook(:post_build ); end
def post_error; run_hook(:post_error ); end
private
##
# Run a hook if it's available.
#
# :args:
# - hook: Name of the hook to run
# - name: Name of file (default: "")
#
# :return:
# shell_result.
#
def run_hook_step(hook, name="")
if (hook[:executable])
# Handle argument replacement (${1}), and get the command line
cmd = @ceedling[:tool_executor].build_command_line( hook, [], name )
shell_result = @ceedling[:tool_executor].exec(cmd[:line], cmd[:options])
end
end
##
# Run a hook if it's available.
#
# If __which_hook__ is an array, run each of them sequentially.
#
# :args:
# - which_hook: Name of the hook to run
# - name: Name of file
#
def run_hook(which_hook, name="")
if (@config[which_hook])
@ceedling[:streaminator].stdout_puts("Running Hook #{which_hook}...", Verbosity::NORMAL)
if (@config[which_hook].is_a? Array)
@config[which_hook].each do |hook|
run_hook_step(hook, name)
end
elsif (@config[which_hook].is_a? Hash)
run_hook_step( @config[which_hook], name )
else
@ceedling[:streaminator].stdout_puts("Hook #{which_hook} was poorly formed", Verbosity::COMPLAINT)
end
end
end
end

View File

@ -1,29 +0,0 @@
compile_commands_json
=====================
## Overview
Syntax highlighting and code completion are hard. Historically, each editor or IDE has implemented its own and then competed amongst themselves to offer the best experience for developers. Often developers would stick to an IDE that felt cumbersome and slow just because it had the best syntax highlighting on the market. If doing it for one language is hard (and it is), imagine doing it for dozens of them. Imagine a full stack developer who has to work with CSS, HTML, JavaScript and some Ruby - they need excellent support in all of those languages, which just makes things even harder.
In June of 2016, Microsoft, Red Hat, and Codenvy got together to create a standard called the Language Server Protocol (LSP). The idea was simple: by standardising on one protocol, all the IDEs and editors out there would only have to support LSP rather than custom plugins for each language. In turn, the backend code that actually does the highlighting can be written once and used by any IDE that supports LSP. Many editors already support it, such as Sublime Text, vim and emacs. This means that if you're using a crufty old IDE or, worse, a shiny new editor without code completion, then this could be just the upgrade you're looking for!
For C and C++ projects, many people use the `clangd` backend. So that it can do things like "go to definition", `clangd` needs to know how to build the project so that it can figure out all the pieces to the puzzle. There are manual tools such as `bear` which can be run with `gcc` or `clang` to extract this information, but they have a big limitation: if run with `ceedling release` you won't get any auto completion for Unity, and you'll also get error messages reported by your IDE because of what it perceives as missing headers. If you do the same with `ceedling test`, you get Unity but you might miss things that are only seen in the release build.
This plugin resolves that issue. As it is run by Ceedling, it has access to all the build information it needs to create the perfect `compile_commands.json`. Once enabled, this plugin will generate that file and place it in `./build/artifacts/compile_commands.json`. `clangd` will search your project for this file, but it is easier to symlink it into the root directory (for example `ln -s ./build/artifacts/compile_commands.json`).
For more information on LSP and to find out if your editor supports it, check out https://langserver.org/
## Setup
Enable the plugin in your project.yml by adding `compile_commands_json` to the list
of enabled plugins.
``` YAML
:plugins:
  :enabled:
    - compile_commands_json
```
## Configuration
There is no additional configuration necessary to run this plugin.

View File

@ -1,35 +0,0 @@
require 'ceedling/plugin'
require 'ceedling/constants'
require 'json'
class CompileCommandsJson < Plugin
def setup
@fullpath = File.join(PROJECT_BUILD_ARTIFACTS_ROOT, "compile_commands.json")
@database = if (File.exist?(@fullpath))
JSON.parse( File.read(@fullpath) )
else
[]
end
end
def post_compile_execute(arg_hash)
# Create the new Entry
value = {
"directory" => Dir.pwd,
"command" => arg_hash[:shell_command],
"file" => arg_hash[:source]
}
# Determine if we're updating an existing file description or adding a new one
index = @database.index {|h| h["file"] == arg_hash[:source]}
if index
@database[index] = value
else
@database << value
end
# Update the Actual compile_commands.json file
File.open(@fullpath,'w') {|f| f << JSON.pretty_generate(@database)}
end
end

View File

@ -1,254 +0,0 @@
ceedling-dependencies
=====================
Plugin for supporting release dependencies. It's rare for an embedded project to
be built completely free of other libraries and modules. Some of these may be
standard internal libraries. Some of these may be 3rd party libraries. In either
case, they become part of the project's ecosystem.
This plugin is intended to make that relationship easier. It allows you to specify
a source for dependencies. If required, it will automatically grab the appropriate
version of that dependency.
Most 3rd party libraries have a method of building already in place. While we'd
love to convert the world to a place where everything downloads with a test suite
in Ceedling, that's not likely to happen anytime soon. Until then, this plugin
will allow the developer to specify what calls Ceedling should make to oversee
the build process of those third party utilities. Are they using Make? CMake? A
custom series of scripts that only a mad scientist could possibly understand? No
matter. Ceedling has you covered. Just specify what should be called, and Ceedling
will make it happen whenever it notices that the output artifacts are missing.
Output artifacts? Sure! Things like static and dynamic libraries, or folders
containing header files that might want to be included by your release project.
So how does all this magic work?
First, you need to add the `:dependencies` plugin to your list. Then, we'll add a new
section called :dependencies. There, you can list as many dependencies as you desire. Each
has a series of fields which help Ceedling to understand your needs. Many of them are
optional. If you don't need that feature, just don't include it! In the end, it'll look
something like this:
```
:dependencies:
  :libraries:
    - :name: WolfSSL
      :source_path: third_party/wolfssl/source
      :build_path: third_party/wolfssl/build
      :artifact_path: third_party/wolfssl/install
      :fetch:
        :method: :zip
        :source: \\shared_drive\third_party_libs\wolfssl\wolfssl-4.2.0.zip
      :environment:
        - CFLAGS+=-DWOLFSSL_DTLS_ALLOW_FUTURE
      :build:
        - "autoreconf -i"
        - "./configure --enable-tls13 --enable-singlethreaded"
        - make
        - make install
      :artifacts:
        :static_libraries:
          - lib/wolfssl.a
        :dynamic_libraries:
          - lib/wolfssl.so
        :includes:
          - include/**
```
Let's take a deeper look at each of these features.
The Starting Dash & Name
------------------------
Yes, that opening dash tells the dependencies plugin that the rest of these fields
belong to our first dependency. If we had a second dependency, we'd have another
dash, lined up with the first, and followed by all the fields indented again.
By convention, we use the `:name` field as the first field for each dependency. Ceedling
honestly doesn't care which order the fields are given... but as humans, it makes
it easier for us to see the name of each dependency next to its starting dash.
The name field is only used to print progress while we're running Ceedling. You may
set the name to whatever you wish.
Working Folders
---------------
The `:source_path` field allows us to specify where the source code for each of our
dependencies is stored. If fetching the dependency from elsewhere, it will be fetched
to this location. All commands to build this dependency will be executed from
this location (override this by specifying a `:build_path`). Finally, the output
artifacts will be relative to this location (override this by specifying an `:artifact_path`).
If unspecified, the `:source_path` will be `dependencies\dep_name` where `dep_name`
is the name specified in `:name` above (with special characters removed). It's best,
though, if you specify exactly where you want your dependencies to live.
If the dependency is directly included in your project (you've specified `:none` as the
`:method` for fetching), then `:source_path` should be where Ceedling can find the
source for your dependency in your repo.
All artifacts are relative to the `:artifact_path` (which defaults to the same location as
`:source_path`).
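As an illustration, a dependency that keeps its source, build output, and installed artifacts in separate folders could be declared something like this (a sketch only; the name and paths are placeholders, not part of the plugin):
```
:dependencies:
  :libraries:
    - :name: MyLibrary
      :source_path: third_party/mylibrary/source
      :build_path: third_party/mylibrary/build
      :artifact_path: third_party/mylibrary/install
```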
Fetching Dependencies
---------------------
The `:dependencies` plugin can automatically fetch your dependencies for you, using some
common methods of fetching source. This section supports the following fields (a short
sketch follows the list):
- `:method` -- This is the method that this dependency is fetched.
- `:none` -- This tells Ceedling that the code is already included in the project.
- `:zip` -- This tells Ceedling that we want to unpack a zip file to our source path.
- `:git` -- This tells Ceedling that we want to clone a git repo to our source path.
- `:svn` -- This tells Ceedling that we want to checkout a subversion repo to our source path.
- `:custom` -- This tells Ceedling that we want to use a custom command or commands to fetch the code.
- `:source` -- This is the path or url to fetch code when using the zip or git method.
- `:tag`/`:branch` -- This is the specific tag or branch that you wish to retrieve (git only. optional).
- `:hash` -- This is the specific SHA1 hash you want to fetch (git only. optional, requires a deep clone).
- `:revision` -- This is the specific revision you want to fetch (svn only. optional).
- `:executable` -- This is a list of commands to execute when using the `:custom` method
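For instance, fetching a dependency from a git repository at a specific tag might look something like the fragment below, which would sit under a single entry in `:libraries:` (a sketch; the URL and tag are placeholders):
```
:fetch:
  :method: :git
  :source: https://github.com/example/some_library.git
  :tag: v1.2.3
```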
Environment Variables
---------------------
Many build systems support customization through environment variables. By specifying
an array of environment variables, Ceedling will customize the shell environment before
calling the build process.
Environment variables may be specified in three ways. Let's look at one of each:
```
:environment:
  - ARCHITECTURE=ARM9
  - CFLAGS+=-DADD_AWESOMENESS
  - CFLAGS-=-DWASTE
```
In the first example, you see the most straightforward method. The environment variable
`ARCHITECTURE` is set to the value `ARM9`. That's it. Simple.
The next two options modify an existing symbol. In the first one, we use `+=`, which tells
Ceedling to add the define `ADD_AWESOMENESS` to the environment variable `CFLAGS`. The second
tells Ceedling to remove the define `WASTE` from the same environment variable.
There are a couple of things to note here.
First, when adding to a variable, Ceedling has no way of knowing
what delimiter you are expecting. In this example you can see we manually added some whitespace.
If we had been modifying `PATH` instead, we might have had to use a `:` on a Unix-like system or a `;` on
Windows.
Second, removing an argument will have no effect if that argument isn't found
precisely. It's case sensitive and the entire string must match. If the symbol doesn't already exist,
it WILL after executing this command... however, it will be assigned an empty value.
Building Dependencies
---------------------
The heart of the `:dependencies` plugin is the ability for you, the developer, to specify the
build process for each of your dependencies. You will need to have any required tools installed
before using this feature.
The steps are specified as an array of strings. Ceedling will execute those steps in the order
specified, moving from step to step unless an error is encountered. By the end of the process,
the artifacts should have been created by your process... otherwise an error will be produced.
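As a sketch, a dependency driven by plain `make` might specify its steps like this (assuming `make` is on your PATH; the targets and install prefix are placeholders):
```
:build:
  - "make clean"
  - "make all"
  - "make install PREFIX=./install"
```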
Artifacts
---------
These are the outputs of the build process. There are a few types of artifacts. Any dependency
may have none or some of these. Calling out these files tells Ceedling that they are important.
Your dependency's build process may produce many other files... but these are the files that
Ceedling understands it needs to act on.
### `static_libraries`
Specifying one or more static libraries will tell Ceedling where it should find static libraries
output by your build process. These libraries are automatically added to the list of dependencies
and will be linked with the rest of your code to produce the final release.
If any of these libraries don't exist, Ceedling will trigger your build process in order for it
to produce them.
### `dynamic_libraries`
Specifying one or more dynamic libraries will tell Ceedling where it should find dynamic libraries
output by your build process. These libraries are automatically copied to the same folder as your
final release binary.
If any of these libraries don't exist, Ceedling will trigger your build process in order for it
to produce them.
### `includes`
Often when libraries are built, the same process will output a collection of includes so that
your release code knows how to interact with that library. It's the public API for that library.
By specifying the directories that will contain these includes (don't specify the files themselves,
Ceedling only needs the directories), Ceedling is able to automatically add these to its internal
include list. This allows these files to be used while building your release code, as well as making
them mockable during unit testing.
### `source`
It's possible that your external dependency will just produce additional C files as its output.
In this case, Ceedling is able to automatically add these to its internal source list. This allows
these files to be used while building your release code.
Tasks
-----
Once configured correctly, the `:dependencies` plugin should integrate seamlessly into your
workflow and you shouldn't have to think about it. In the real world, that doesn't always happen.
Here are a number of tasks that are added or modified by this plugin.
### `ceedling dependencies:clean`
This can be issued in order to completely remove the dependency from its source path. On the
next build, it will be refetched and rebuilt from scratch. This can also apply to a particular
dependency. For example, by specifying `dependencies:clean:DepName`.
### `ceedling dependencies:fetch`
This can be issued in order to fetch each dependency from its origin. This will have no effect on
dependencies that don't have fetch instructions specified. This can also apply to a particular
dependency. For example, by specifying `dependencies:fetch:DepName`.
### `ceedling dependencies:make`
This will force the dependencies to all build. This should happen automatically when a release
has been triggered... but if you're just getting your dependency configured at this moment, you
may want to just use this feature instead. A single dependency can also be built by specifying its
name, like `dependencies:make:MyTunaBoat`.
### `ceedling dependencies:deploy`
This will force any dynamic libraries produced by your dependencies to be copied to your release
build directory... just in case you clobbered them.
### `paths:include`
Maybe you want to verify that all the include paths are correct. If you query Ceedling with this
request, it will list all the header file paths that it's found, including those produced by
dependencies.
### `files:include`
Maybe you want to take that query further and actually get a list of ALL the header files
Ceedling has found, including those belonging to your dependencies.
Testing
=======
Hopefully all your dependencies are fully tested... but we can't always depend on that.
In the event that they are tested with Ceedling, you'll probably want to consider using
the `:subprojects` plugin instead of this one. The purpose of this plugin is to pull in
third party code for release... and to provide a mockable interface for Ceedling to use
during its tests of other modules.
If that's what you're after... you've found the right plugin!
Happy Testing!

View File

@ -1,5 +0,0 @@
---
:dependencies:
  :libraries: []
...

View File

@ -1,147 +0,0 @@
DEPENDENCIES_LIBRARIES.each do |deplib|
# Look up the name of this dependency library
deplib_name = @ceedling[DEPENDENCIES_SYM].get_name(deplib)
# Make sure the required working directories exists
# (don't worry about the subdirectories. That's the job of the dep's build tool)
paths = @ceedling[DEPENDENCIES_SYM].get_working_paths(deplib)
paths.each {|path| directory(path) }
task :directories => paths
all_deps = @ceedling[DEPENDENCIES_SYM].get_static_libraries_for_dependency(deplib) +
@ceedling[DEPENDENCIES_SYM].get_dynamic_libraries_for_dependency(deplib) +
@ceedling[DEPENDENCIES_SYM].get_include_directories_for_dependency(deplib) +
@ceedling[DEPENDENCIES_SYM].get_source_files_for_dependency(deplib)
# Add a rule for building the actual libraries from dependency list
(@ceedling[DEPENDENCIES_SYM].get_static_libraries_for_dependency(deplib) +
@ceedling[DEPENDENCIES_SYM].get_dynamic_libraries_for_dependency(deplib)
).each do |libpath|
file libpath do |filetask|
path = filetask.name
# We double-check that it doesn't already exist, because this process sometimes
# produces multiple files, but they may have already been flagged as invoked
unless (File.exist?(path))
# Set Environment Variables, Fetch, and Build
@ceedling[DEPENDENCIES_SYM].set_env_if_required(path)
@ceedling[DEPENDENCIES_SYM].fetch_if_required(path)
@ceedling[DEPENDENCIES_SYM].build_if_required(path)
end
end
end
# Add a rule for building the source and includes from dependency list
(@ceedling[DEPENDENCIES_SYM].get_include_directories_for_dependency(deplib) +
@ceedling[DEPENDENCIES_SYM].get_source_files_for_dependency(deplib)
).each do |libpath|
task libpath do |filetask|
path = filetask.name
unless (File.file?(path) || File.directory?(path))
# Set Environment Variables, Fetch, and Build
@ceedling[DEPENDENCIES_SYM].set_env_if_required(path)
@ceedling[DEPENDENCIES_SYM].fetch_if_required(path)
@ceedling[DEPENDENCIES_SYM].build_if_required(path)
end
end
end
# Give ourselves a way to trigger individual dependencies
namespace DEPENDENCIES_SYM do
namespace :deploy do
# Add task to directly just build this dependency
task(deplib_name => @ceedling[DEPENDENCIES_SYM].get_dynamic_libraries_for_dependency(deplib)) do |t,args|
@ceedling[DEPENDENCIES_SYM].deploy_if_required(deplib_name)
end
end
namespace :make do
# Add task to directly just build this dependency
task(deplib_name => all_deps)
end
namespace :clean do
# Add task to directly clobber this dependency
task(deplib_name) do
@ceedling[DEPENDENCIES_SYM].clean_if_required(deplib_name)
end
end
namespace :fetch do
# Add task to directly fetch this dependency
task(deplib_name) do
@ceedling[DEPENDENCIES_SYM].fetch_if_required(deplib_name)
end
end
end
# Add source files to our list of things to build during release
source_files = @ceedling[DEPENDENCIES_SYM].get_source_files_for_dependency(deplib)
task PROJECT_RELEASE_BUILD_TARGET => source_files
# Finally, add the static libraries to our RELEASE build dependency list
static_libs = @ceedling[DEPENDENCIES_SYM].get_static_libraries_for_dependency(deplib)
task RELEASE_SYM => static_libs
# Add the dynamic libraries to our RELEASE task dependency list so that they will be copied automatically
dynamic_libs = @ceedling[DEPENDENCIES_SYM].get_dynamic_libraries_for_dependency(deplib)
task RELEASE_SYM => dynamic_libs
# Add the include dirs / files to our list of dependencies for release
headers = @ceedling[DEPENDENCIES_SYM].get_include_directories_for_dependency(deplib)
task RELEASE_SYM => headers
# Paths to Libraries need to be Added to the Lib Path List
all_libs = static_libs + dynamic_libs
PATHS_LIBRARIES ||= []
all_libs.each {|lib| PATHS_LIBRARIES << File.dirname(lib) }
PATHS_LIBRARIES.uniq!
PATHS_LIBRARIES.reject!{|s| s.empty?}
# Libraries Need to be Added to the Library List
LIBRARIES_SYSTEM ||= []
all_libs.each {|lib| LIBRARIES_SYSTEM << File.basename(lib,'.*').sub(/^lib/,'') }
LIBRARIES_SYSTEM.uniq!
LIBRARIES_SYSTEM.reject!{|s| s.empty?}
end
# Add any artifact:include or :source folders to our release & test includes paths so linking and mocking work.
@ceedling[DEPENDENCIES_SYM].add_headers_and_sources()
# Add tasks for building or cleaning ALL dependencies
namespace DEPENDENCIES_SYM do
desc "Deploy missing dependencies."
task :deploy => DEPENDENCIES_LIBRARIES.map{|deplib| "#{DEPENDENCIES_SYM}:deploy:#{@ceedling[DEPENDENCIES_SYM].get_name(deplib)}"}
desc "Build any missing dependencies."
task :make => DEPENDENCIES_LIBRARIES.map{|deplib| "#{DEPENDENCIES_SYM}:make:#{@ceedling[DEPENDENCIES_SYM].get_name(deplib)}"}
desc "Clean all dependencies."
task :clean => DEPENDENCIES_LIBRARIES.map{|deplib| "#{DEPENDENCIES_SYM}:clean:#{@ceedling[DEPENDENCIES_SYM].get_name(deplib)}"}
desc "Fetch all dependencies."
task :fetch => DEPENDENCIES_LIBRARIES.map{|deplib| "#{DEPENDENCIES_SYM}:fetch:#{@ceedling[DEPENDENCIES_SYM].get_name(deplib)}"}
end
namespace :files do
desc "List all collected dependency libraries."
task :dependencies do
puts "dependency files:"
deps = []
DEPENDENCIES_LIBRARIES.each do |deplib|
deps << @ceedling[DEPENDENCIES_SYM].get_static_libraries_for_dependency(deplib)
deps << @ceedling[DEPENDENCIES_SYM].get_dynamic_libraries_for_dependency(deplib)
end
deps.flatten!
deps.sort.each {|dep| puts " - #{dep}"}
puts "file count: #{deps.size}"
end
end
# Make sure that we build dependencies before attempting to tackle any of the unit tests
Rake::Task[:test_deps].enhance ['dependencies:make']

View File

@ -1,237 +0,0 @@
require 'ceedling/plugin'
require 'ceedling/constants'
DEPENDENCIES_ROOT_NAME = 'dependencies'
DEPENDENCIES_TASK_ROOT = DEPENDENCIES_ROOT_NAME + ':'
DEPENDENCIES_SYM = DEPENDENCIES_ROOT_NAME.to_sym
class Dependencies < Plugin
def setup
@plugin_root = File.expand_path(File.join(File.dirname(__FILE__), '..'))
# Set up a fast way to look up dependencies by name or static lib path
@dependencies = {}
@dynamic_libraries = []
DEPENDENCIES_LIBRARIES.each do |deplib|
@dependencies[ deplib[:name] ] = deplib.clone
all_deps = get_static_libraries_for_dependency(deplib) +
get_dynamic_libraries_for_dependency(deplib) +
get_include_directories_for_dependency(deplib) +
get_source_files_for_dependency(deplib)
all_deps.each do |key|
@dependencies[key] = @dependencies[ deplib[:name] ]
end
@dynamic_libraries += get_dynamic_libraries_for_dependency(deplib)
end
end
def config
updates = {
:collection_paths_include => COLLECTION_PATHS_INCLUDE,
:collection_all_headers => COLLECTION_ALL_HEADERS,
}
# NOTE: iterate the configured libraries so that `deplib` is defined here
# (assumed reconstruction; the original referenced `deplib` without a surrounding loop)
DEPENDENCIES_LIBRARIES.each do |deplib|
@ceedling[DEPENDENCIES_SYM].get_include_directories_for_dependency(deplib).each do |incpath|
updates[:collection_paths_include] << incpath
Dir[ File.join(incpath, "*#{EXTENSION_HEADER}") ].each do |f|
updates[:collection_all_headers] << f
end
end
end
return updates
end
def get_name(deplib)
raise "Each dependency must have a name!" if deplib[:name].nil?
return deplib[:name].gsub(/\W*/,'')
end
def get_source_path(deplib)
return deplib[:source_path] || File.join('dependencies', get_name(deplib))
end
def get_build_path(deplib)
return deplib[:build_path] || deplib[:source_path] || File.join('dependencies', get_name(deplib))
end
def get_artifact_path(deplib)
return deplib[:artifact_path] || deplib[:source_path] || File.join('dependencies', get_name(deplib))
end
def get_working_paths(deplib)
paths = [deplib[:source_path], deplib[:build_path], deplib[:artifact_path]].compact.uniq
paths = [ File.join('dependencies', get_name(deplib)) ] if (paths.empty?)
return paths
end
def get_static_libraries_for_dependency(deplib)
(deplib[:artifacts][:static_libraries] || []).map {|path| File.join(get_artifact_path(deplib), path)}
end
def get_dynamic_libraries_for_dependency(deplib)
(deplib[:artifacts][:dynamic_libraries] || []).map {|path| File.join(get_artifact_path(deplib), path)}
end
def get_source_files_for_dependency(deplib)
(deplib[:artifacts][:source] || []).map {|path| File.join(get_artifact_path(deplib), path)}
end
def get_include_directories_for_dependency(deplib)
paths = (deplib[:artifacts][:includes] || []).map {|path| File.join(get_artifact_path(deplib), path)}
@ceedling[:file_system_utils].collect_paths(paths)
end
def set_env_if_required(lib_path)
blob = @dependencies[lib_path]
raise "Could not find dependency '#{lib_path}'" if blob.nil?
return if (blob[:environment].nil?)
return if (blob[:environment].empty?)
blob[:environment].each do |e|
m = e.match(/^(\w+)\s*(\+?\-?=)\s*(.*)$/)
unless m.nil?
case m[2]
when "+="
ENV[m[1]] = (ENV[m[1]] || "") + m[3]
when "-="
ENV[m[1]] = (ENV[m[1]] || "").gsub(m[3],'')
else
ENV[m[1]] = m[3]
end
end
end
end
def fetch_if_required(lib_path)
blob = @dependencies[lib_path]
raise "Could not find dependency '#{lib_path}'" if blob.nil?
return if (blob[:fetch].nil?)
return if (blob[:fetch][:method].nil?)
return if (directory(blob[:source_path]) && !Dir.empty?(blob[:source_path]))
steps = case blob[:fetch][:method]
when :none
return
when :zip
[ "gzip -d #{blob[:fetch][:source]}" ]
when :git
branch = blob[:fetch][:tag] || blob[:fetch][:branch] || ''
branch = ("-b " + branch) unless branch.empty?
unless blob[:fetch][:hash].nil?
# Do a deep clone to ensure the commit we want is available
retval = [ "git clone #{branch} #{blob[:fetch][:source]} ." ]
# Checkout the specified commit
retval << "git checkout #{blob[:fetch][:hash]}"
else
# Do a thin clone
retval = [ "git clone #{branch} --depth 1 #{blob[:fetch][:source]} ." ]
end
when :svn
revision = blob[:fetch][:revision] || ''
revision = ("--revision " + branch) unless branch.empty?
retval = [ "svn checkout #{revision} #{blob[:fetch][:source]} ." ]
retval
when :custom
blob[:fetch][:executable]
else
raise "Unknown fetch method '#{blob[:fetch][:method].to_s}' for dependency '#{blob[:name]}'"
end
# Perform the actual fetching
@ceedling[:streaminator].stdout_puts("Fetching dependency #{blob[:name]}...", Verbosity::NORMAL)
Dir.chdir(get_source_path(blob)) do
steps.each do |step|
@ceedling[:tool_executor].exec( step )
end
end
end
def build_if_required(lib_path)
blob = @dependencies[lib_path]
raise "Could not find dependency '#{lib_path}'" if blob.nil?
# We don't clean anything unless we know how to fetch a new copy
if (blob[:build].nil? || blob[:build].empty?)
@ceedling[:streaminator].stdout_puts("Nothing to build for dependency #{blob[:name]}", Verbosity::NORMAL)
return
end
# Perform the build
@ceedling[:streaminator].stdout_puts("Building dependency #{blob[:name]}...", Verbosity::NORMAL)
Dir.chdir(get_build_path(blob)) do
blob[:build].each do |step|
@ceedling[:tool_executor].exec( step )
end
end
end
def clean_if_required(lib_path)
blob = @dependencies[lib_path]
raise "Could not find dependency '#{lib_path}'" if blob.nil?
# We don't clean anything unless we know how to fetch a new copy
if (blob[:fetch].nil? || blob[:fetch][:method].nil? || (blob[:fetch][:method] == :none))
@ceedling[:streaminator].stdout_puts("Nothing to clean for dependency #{blob[:name]}", Verbosity::NORMAL)
return
end
# Perform the actual Cleaning
@ceedling[:streaminator].stdout_puts("Cleaning dependency #{blob[:name]}...", Verbosity::NORMAL)
get_working_paths(blob).each do |path|
FileUtils.rm_rf(path) if File.directory?(path)
end
end
def deploy_if_required(lib_path)
blob = @dependencies[lib_path]
raise "Could not find dependency '#{lib_path}'" if blob.nil?
# We don't need to deploy anything if there isn't anything to deploy
if (blob[:artifacts].nil? || blob[:artifacts][:dynamic_libraries].nil? || blob[:artifacts][:dynamic_libraries].empty?)
@ceedling[:streaminator].stdout_puts("Nothing to deploy for dependency #{blob[:name]}", Verbosity::NORMAL)
return
end
# Perform the actual Deploying
@ceedling[:streaminator].stdout_puts("Deploying dependency #{blob[:name]}...", Verbosity::NORMAL)
FileUtils.cp( lib_path, File.dirname(PROJECT_RELEASE_BUILD_TARGET) )
end
def add_headers_and_sources()
# Search for header file paths and files to add to our collections
DEPENDENCIES_LIBRARIES.each do |deplib|
get_include_directories_for_dependency(deplib).each do |header|
cfg = @ceedling[:configurator].project_config_hash
cfg[:collection_paths_include] << header
cfg[:collection_paths_source_and_include] << header
cfg[:collection_paths_test_support_source_include] << header
cfg[:collection_paths_test_support_source_include_vendor] << header
cfg[:collection_paths_release_toolchain_include] << header
Dir[ File.join(header, "*#{EXTENSION_HEADER}") ].each do |f|
cfg[:collection_all_headers] << f
end
end
get_source_files_for_dependency(deplib).each do |source|
cfg = @ceedling[:configurator].project_config_hash
cfg[:collection_paths_source_and_include] << source
cfg[:collection_paths_test_support_source_include] << source
cfg[:collection_paths_test_support_source_include_vendor] << source
cfg[:collection_paths_release_toolchain_include] << source
Dir[ File.join(source, "*#{EXTENSION_SOURCE}") ].each do |f|
cfg[:collection_all_source] << f
end
end
end
# Make all these updated files findable by Ceedling
@ceedling[:file_finder].prepare_search_sources()
end
end
# end blocks always executed following rake run
END {
}

View File

@ -1,250 +0,0 @@
# A Fake Function Framework Plug-in for Ceedling
This is a plug-in for [Ceedling](https://github.com/ThrowTheSwitch/Ceedling) to use the [Fake Function Framework](https://github.com/meekrosoft/fff) for mocking instead of CMock.
Using fff provides less strict mocking than CMock, and allows for more loosely-coupled tests.
And, when tests fail -- since you get the actual line number of the failure -- it's a lot easier to figure out what went wrong.
## Installing the plug-in
To use the plugin you need to 1) get the contents of this repo and 2) configure your project to use it.
### Get the source
The easiest way to get the source is to just clone this repo into the Ceedling plugin folder for your existing Ceedling project.
(Don't have a Ceedling project already? [Here are instructions to create one.](http://www.electronvector.com/blog/try-embedded-test-driven-development-right-now-with-ceedling))
From within `<your-project>/vendor/ceedling/plugins`, run:
`git clone https://github.com/ElectronVector/fake_function_framework.git`
This will create a new folder named `fake_function_framework` in the plugins folder.
### Enable the plug-in.
The plug-in is enabled from within your project.yml file.
In the `:plugins` configuration, add `fake_function_framework` to the list of enabled plugins:
```yaml
:plugins:
  :load_paths:
    - vendor/ceedling/plugins
  :enabled:
    - stdout_pretty_tests_report
    - module_generator
    - fake_function_framework
```
*Note that you could put the plugin source in some other location.
In that case you'd need to add a new path to `:load_paths`.*
## How to use it
You use fff with Ceedling the same way you used to use CMock.
Modules can still be generated with the default module generator: `rake module:create[my_module]`.
If you want to "mock" `some_module.h` in your tests, just `#include "mock_some_module.h"`.
This creates a fake function for each of the functions defined in `some_module.h`.
The name of each fake is the original function name with an appended `_fake`.
For example, if we're generating fakes for a stack module with `push` and `pop` functions, we would have the fakes `push_fake` and `pop_fake`.
These fakes are linked into our test executable so that any time our unit under test calls `push` or `pop` our fakes are called instead.
Each of these fakes is actually a structure containing information about how the function was called, and what it might return.
We can use Unity to inspect these fakes in our tests, and verify the interactions of our units.
There is also a global structure named `fff` which we can use to check the sequence of calls.
The fakes can also be configured to return particular values, so you can exercise the unit under test however you want.
The examples below explain how to use fff to test a variety of module interactions.
Each example uses fakes for a "display" module, created from a display.h file with `#include "mock_display.h"`. The `display.h` file must exist and must contain the prototypes for the functions to be faked.
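For reference, a minimal `display.h` for these examples might look like the sketch below; the exact signatures are assumptions inferred from the tests that follow, not something the plug-in provides.
```c
#ifndef DISPLAY_H
#define DISPLAY_H

#include <stdbool.h>

/* Hypothetical prototypes inferred from the example tests below. */
void display_turnOffStatusLed(void);
void display_turnOnStatusLed(void);
void display_setVolume(int volume);
void display_setModeToMinimum(void);
void display_setModeToMaximum(void);
void display_setModeToAverage(void);
bool display_isError(void);
void display_powerDown(void);
void display_getKeyboardEntry(char *entry, int length);
void display_updateData(int data, void (*callback)(void));

#endif /* DISPLAY_H */
```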
### Test that a function was called once
```c
void
test_whenTheDeviceIsReset_thenTheStatusLedIsTurnedOff()
{
// When
event_deviceReset();
// Then
TEST_ASSERT_EQUAL(1, display_turnOffStatusLed_fake.call_count);
}
```
### Test that a function was NOT called
```c
void
test_whenThePowerReadingIsLessThan5_thenTheStatusLedIsNotTurnedOn(void)
{
// When
event_powerReadingUpdate(4);
// Then
TEST_ASSERT_EQUAL(0, display_turnOnStatusLed_fake.call_count);
}
```
### Test that a single function was called with the correct argument
```c
void
test_whenTheVolumeKnobIsMaxed_thenVolumeDisplayIsSetTo11(void)
{
// When
event_volumeKnobMaxed();
// Then
TEST_ASSERT_EQUAL(1, display_setVolume_fake.call_count);
TEST_ASSERT_EQUAL(11, display_setVolume_fake.arg0_val);
}
```
### Test that calls are made in a particular sequence
```c
void
test_whenTheModeSelectButtonIsPressed_thenTheDisplayModeIsCycled(void)
{
// When
event_modeSelectButtonPressed();
event_modeSelectButtonPressed();
event_modeSelectButtonPressed();
// Then
TEST_ASSERT_EQUAL_PTR((void*)display_setModeToMinimum, fff.call_history[0]);
TEST_ASSERT_EQUAL_PTR((void*)display_setModeToMaximum, fff.call_history[1]);
TEST_ASSERT_EQUAL_PTR((void*)display_setModeToAverage, fff.call_history[2]);
}
```
### Fake a return value from a function
```c
void
test_givenTheDisplayHasAnError_whenTheDeviceIsPoweredOn_thenTheDisplayIsPoweredDown(void)
{
// Given
display_isError_fake.return_val = true;
// When
event_devicePoweredOn();
// Then
TEST_ASSERT_EQUAL(1, display_powerDown_fake.call_count);
}
```
### Fake a function with a value returned by reference
```c
void
test_givenTheUserHasTypedSleep_whenItIsTimeToCheckTheKeyboard_theDisplayIsPoweredDown(void)
{
// Given
char mockedEntry[] = "sleep";
void return_mock_value(char * entry, int length)
{
if (length > strlen(mockedEntry))
{
strncpy(entry, mockedEntry, length);
}
}
display_getKeyboardEntry_fake.custom_fake = return_mock_value;
// When
event_keyboardCheckTimerExpired();
// Then
TEST_ASSERT_EQUAL(1, display_powerDown_fake.call_count);
}
```
### Fake a function with a function pointer parameter
```c
void
test_givenNewDataIsAvailable_whenTheDisplayHasUpdated_thenTheEventIsComplete(void)
{
// A mock function for capturing the callback handler function pointer.
void(*registeredCallback)(void) = 0;
void mock_display_updateData(int data, void(*callback)(void))
{
//Save the callback function.
registeredCallback = callback;
}
display_updateData_fake.custom_fake = mock_display_updateData;
// Given
event_newDataAvailable(10);
// When
if (registeredCallback != 0)
{
registeredCallback();
}
// Then
TEST_ASSERT_EQUAL(true, eventProcessor_isLastEventComplete());
}
```
## Helper macros
For convenience, there are also some helper macros that create new Unity-style asserts:
- `TEST_ASSERT_CALLED(function)`: Asserts that a function was called once.
- `TEST_ASSERT_NOT_CALLED(function)`: Asserts that a function was never called.
- `TEST_ASSERT_CALLED_TIMES(times, function)`: Asserts that a function was called a particular number of times.
- `TEST_ASSERT_CALLED_IN_ORDER(order, function)`: Asserts that a function was called in a particular order.
Here's how you might use one of these instead of simply checking the call_count value:
```c
void
test_whenTheDeviceIsReset_thenTheStatusLedIsTurnedOff()
{
// When
event_deviceReset();
// Then
// This is how to directly use fff...
TEST_ASSERT_EQUAL(1, display_turnOffStatusLed_fake.call_count);
// ...and this is how to use the helper macro.
TEST_ASSERT_CALLED(display_turnOffStatusLed);
}
```
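The remaining helper macros follow the same pattern. Here is a hedged sketch reusing the mode-cycling fakes from earlier; it assumes the `order` argument indexes `fff.call_history` starting at zero, matching the sequence example above.
```c
void
test_whenTheModeSelectButtonIsPressedTwice_thenOnlyTheFirstTwoModesAreSet(void)
{
    // When
    event_modeSelectButtonPressed();
    event_modeSelectButtonPressed();

    // Then: each of the first two mode functions was called exactly once...
    TEST_ASSERT_CALLED_TIMES(1, display_setModeToMinimum);
    TEST_ASSERT_CALLED_TIMES(1, display_setModeToMaximum);
    TEST_ASSERT_NOT_CALLED(display_setModeToAverage);

    // ...and they were called in this order.
    TEST_ASSERT_CALLED_IN_ORDER(0, display_setModeToMinimum);
    TEST_ASSERT_CALLED_IN_ORDER(1, display_setModeToMaximum);
}
```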
## Test setup
All of the fake functions and any fff global state are reset automatically between each test.
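In practice that means `setUp()` does not need any manual reset calls. A sketch (reusing `event_deviceReset()` from the first example) of two tests that each observe a fresh fake:
```c
void setUp(void)
{
    // Intentionally empty: the plug-in resets all fakes and fff history between tests.
}

void test_firstReset_countsExactlyOneCall(void)
{
    event_deviceReset();
    TEST_ASSERT_EQUAL(1, display_turnOffStatusLed_fake.call_count);
}

void test_secondReset_alsoCountsExactlyOneCall(void)
{
    // Starts from zero again because the fake state was reset after the previous test.
    event_deviceReset();
    TEST_ASSERT_EQUAL(1, display_turnOffStatusLed_fake.call_count);
}
```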
## CMock configuration
You can still use some of the CMock configuration options for setting things like the mock prefix, and for including additional header files in the mock files.
```yaml
:cmock:
:mock_prefix: mock_
:includes:
-
:includes_h_pre_orig_header:
-
:includes_h_post_orig_header:
-
:includes_c_pre_header:
-
:includes_c_post_header:
```
## Running the tests
There are unit and integration tests for the plug-in itself.
These are run with the default `rake` task.
The integration test runs the tests for the example project in examples/fff_example.
For the integration tests to succeed, this repository must be placed in a Ceedling tree in the plugins folder.
## More examples
There is an example project in examples/fff_example.
It shows how to use the plug-in with some full-size examples.

Some files were not shown because too many files have changed in this diff.