Error in makefile

This commit is contained in:
Adam Procházka
2023-02-23 14:34:10 +01:00
parent 9d50a83a78
commit 276d443bb1
1281 changed files with 269818 additions and 54 deletions

View File

@ -0,0 +1,208 @@
/*
* The MIT License (MIT)
*
* Copyright (c) 2022 Nathaniel Brough
*
* Permission is hereby granted, free of charge, to any person obtaining a copy
* of this software and associated documentation files (the "Software"), to deal
* in the Software without restriction, including without limitation the rights
* to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
* copies of the Software, and to permit persons to whom the Software is
* furnished to do so, subject to the following conditions:
*
* The above copyright notice and this permission notice shall be included in
* all copies or substantial portions of the Software.
*
* THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
* IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
* FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
* AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
* LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
* OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN
* THE SOFTWARE.
*
*/
#include "device/dcd.h"
#include "fuzz/fuzz_private.h"
#include <assert.h>
#include <cstdint>
#include <limits>
#define UNUSED(x) (void)(x)
//--------------------------------------------------------------------+
// State tracker
//--------------------------------------------------------------------+
// Tracks the mock device controller's state across fuzzer callbacks.
struct State {
  bool interrupts_enabled; // Toggled by dcd_int_enable()/dcd_int_disable().
  bool sof_enabled;        // Toggled by dcd_sof_enable().
  uint8_t address;         // Device address recorded by dcd_set_address().
};
// Fix: initialize the bool member with `false` (was `0`) so the initializer
// list matches the member types in order.
static State state = {false, false, 0};
//--------------------------------------------------------------------+
// Controller API
// All no-ops as we are fuzzing.
//--------------------------------------------------------------------+
extern "C" {

// Controller init is a no-op: there is no hardware behind the fuzzed DCD.
void dcd_init(uint8_t rhport) { UNUSED(rhport); }

// Simulated interrupt service routine. Pulls decisions from the fuzzed data
// stream to inject bus events and/or a SETUP packet into the device stack.
void dcd_int_handler(uint8_t rhport) {
  assert(_fuzz_data_provider.has_value());
  if (!state.interrupts_enabled) {
    return;
  }
  // Choose if we want to generate a signal based on the fuzzed data.
  if (_fuzz_data_provider->ConsumeBool()) {
    dcd_event_bus_signal(
        rhport,
        // Choose a random event based on the fuzz data.
        (dcd_eventid_t)_fuzz_data_provider->ConsumeIntegralInRange<uint8_t>(
            DCD_EVENT_INVALID + 1, DCD_EVENT_COUNT - 1),
        // Identify trigger as either an interrupt or a synchronous call
        // depending on fuzz data.
        _fuzz_data_provider->ConsumeBool());
  }
  if (_fuzz_data_provider->ConsumeBool()) {
    constexpr size_t kSetupFrameLength = 8;
    std::vector<uint8_t> setup =
        _fuzz_data_provider->ConsumeBytes<uint8_t>(kSetupFrameLength);
    // Fuzz consumer may return less than requested. If this is the case
    // we want to make sure that at least that length is allocated and
    // available to the signal handler.
    if (setup.size() != kSetupFrameLength) {
      setup.resize(kSetupFrameLength);
    }
    dcd_event_setup_received(rhport, setup.data(),
                             // Identify trigger as either an interrupt or a
                             // synchronous call depending on fuzz data.
                             _fuzz_data_provider->ConsumeBool());
  }
}

// Gate for dcd_int_handler(): events are only generated while enabled.
void dcd_int_enable(uint8_t rhport) {
  UNUSED(rhport);
  state.interrupts_enabled = true;
}

void dcd_int_disable(uint8_t rhport) {
  UNUSED(rhport);
  state.interrupts_enabled = false;
}

// Records the address assigned by the host, then completes the SET_ADDRESS
// control transfer with a zero-length status packet.
// Fix: the original marked rhport as UNUSED even though it is forwarded to
// dcd_edpt_xfer() below; the misleading macro call is removed.
void dcd_set_address(uint8_t rhport, uint8_t dev_addr) {
  state.address = dev_addr;
  // Respond with status.
  dcd_edpt_xfer(rhport, tu_edpt_addr(0, TUSB_DIR_IN), NULL, 0);
}

// The remaining bus-level operations are no-ops for fuzzing purposes.
void dcd_remote_wakeup(uint8_t rhport) { UNUSED(rhport); }

void dcd_connect(uint8_t rhport) { UNUSED(rhport); }

void dcd_disconnect(uint8_t rhport) { UNUSED(rhport); }

void dcd_sof_enable(uint8_t rhport, bool en) {
  UNUSED(rhport);
  state.sof_enabled = en;
}

//--------------------------------------------------------------------+
// Endpoint API
//--------------------------------------------------------------------+

// Configure endpoint's registers according to descriptor.
// Success/failure is decided by the fuzz stream so both paths get coverage.
bool dcd_edpt_open(uint8_t rhport, tusb_desc_endpoint_t const *desc_ep) {
  UNUSED(rhport);
  UNUSED(desc_ep);
  return _fuzz_data_provider->ConsumeBool();
}

// Close all non-control endpoints, cancel all pending transfers if any.
// Invoked when switching from a non-zero Configuration by SET_CONFIGURE
// therefore required for multiple configuration support.
void dcd_edpt_close_all(uint8_t rhport) { UNUSED(rhport); }

// Close an endpoint.
// Since it is weak, caller must TU_ASSERT this function's existence before
// calling it.
void dcd_edpt_close(uint8_t rhport, uint8_t ep_addr) {
  UNUSED(rhport);
  UNUSED(ep_addr);
}

// Submit a transfer. When complete dcd_event_xfer_complete() is invoked to
// notify the stack. IN transfers are filled with fuzzed bytes; OUT data is
// discarded. The reported success/failure also comes from the fuzz stream.
// Fix: the original marked buffer/total_bytes as UNUSED although both are
// used; also guard against a NULL buffer (e.g. the zero-length status stage
// issued by dcd_set_address) before copying.
bool dcd_edpt_xfer(uint8_t rhport, uint8_t ep_addr, uint8_t *buffer,
                   uint16_t total_bytes) {
  UNUSED(rhport);
  uint8_t const dir = tu_edpt_dir(ep_addr);
  if (dir == TUSB_DIR_IN && buffer != NULL) {
    // ConsumeBytes may yield fewer than total_bytes near the end of the
    // corpus; only the returned bytes are copied out.
    std::vector<uint8_t> temp =
        _fuzz_data_provider->ConsumeBytes<uint8_t>(total_bytes);
    std::copy(temp.begin(), temp.end(), buffer);
  }
  // Ignore output data as it's not useful for fuzzing without a more
  // complex fuzzed backend. But we need to make sure it's not
  // optimised out.
  volatile uint8_t *dont_optimise0 = buffer;
  volatile uint16_t dont_optimise1 = total_bytes;
  UNUSED(dont_optimise0);
  UNUSED(dont_optimise1);
  return _fuzz_data_provider->ConsumeBool();
}

/* TODO: implement a fuzzed version of this.
bool dcd_edpt_xfer_fifo(uint8_t rhport, uint8_t ep_addr, tu_fifo_t *ff,
                        uint16_t total_bytes) {}
*/

// Stall endpoint, any queuing transfer should be removed from endpoint
void dcd_edpt_stall(uint8_t rhport, uint8_t ep_addr) {
  UNUSED(rhport);
  UNUSED(ep_addr);
}

// clear stall, data toggle is also reset to DATA0
// This API never calls with control endpoints, since it is auto cleared when
// receiving setup packet
void dcd_edpt_clear_stall(uint8_t rhport, uint8_t ep_addr) {
  UNUSED(rhport);
  UNUSED(ep_addr);
}
}

View File

@ -0,0 +1,29 @@
# Build script for this TinyUSB example; board/family specifics are resolved
# by the shared family_support.cmake helpers.
cmake_minimum_required(VERSION 3.5)
include(${CMAKE_CURRENT_SOURCE_DIR}/../../../hw/bsp/family_support.cmake)
# gets PROJECT name for the example (e.g. <BOARD>-<DIR_NAME>)
family_get_project_name(PROJECT ${CMAKE_CURRENT_LIST_DIR})
project(${PROJECT})
# Checks this example is valid for the family and initializes the project
family_initialize_project(${PROJECT} ${CMAKE_CURRENT_LIST_DIR})
add_executable(${PROJECT})
# Example source
# NOTE(review): the list includes msc_disk.c -- confirm it matches this
# example's actual src/ directory contents.
target_sources(${PROJECT} PUBLIC
  ${CMAKE_CURRENT_SOURCE_DIR}/src/main.c
  ${CMAKE_CURRENT_SOURCE_DIR}/src/msc_disk.c
  ${CMAKE_CURRENT_SOURCE_DIR}/src/usb_descriptors.c
)
# Example include
target_include_directories(${PROJECT} PUBLIC
  ${CMAKE_CURRENT_SOURCE_DIR}/src
)
# Configure compilation flags and libraries for the example... see the corresponding function
# in hw/bsp/FAMILY/family.cmake for details.
family_configure_device_example(${PROJECT})

View File

@ -0,0 +1,12 @@
include ../../../../tools/top.mk
include ../../make.mk

# Include search paths for the example.
# Fix: no trailing backslash after the last entry -- a backslash-newline
# there continues the INC assignment onto the following comment line.
INC += \
  src \
  $(TOP)/hw

# Example source
SRC_C += $(addprefix $(CURRENT_PATH)/, $(wildcard src/*.c))
SRC_CXX += $(addprefix $(CURRENT_PATH)/, $(wildcard src/*.cc))

include ../../rules.mk

View File

@ -0,0 +1,174 @@
/*
* The MIT License (MIT)
*
* Copyright (c) 2022 Nathaniel Brough
*
* Permission is hereby granted, free of charge, to any person obtaining a copy
* of this software and associated documentation files (the "Software"), to deal
* in the Software without restriction, including without limitation the rights
* to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
* copies of the Software, and to permit persons to whom the Software is
* furnished to do so, subject to the following conditions:
*
* The above copyright notice and this permission notice shall be included in
* all copies or substantial portions of the Software.
*
* THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
* IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
* FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
* AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
* LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
* OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN
* THE SOFTWARE.
*
*/
#include <cassert>
#include <fuzzer/FuzzedDataProvider.h>
#include <stdio.h>
#include <stdlib.h>
#include <string.h>
#include "class/cdc/cdc_device.h"
#include "fuzz/fuzz.h"
#include "tusb.h"
#include <cstdint>
#include <string>
#include <vector>
extern "C" {
#define FUZZ_ITERATIONS 500
//--------------------------------------------------------------------+
// MACRO CONSTANT TYPEDEF PROTYPES
//--------------------------------------------------------------------+
void cdc_task(FuzzedDataProvider *provider);
// libFuzzer entry point: seeds the TinyUSB device stack with fuzz data and
// drives a bounded number of interrupt/task/CDC-API iterations.
extern "C" int LLVMFuzzerTestOneInput(const uint8_t *Data, size_t Size) {
  FuzzedDataProvider provider(Data, Size);
  // Reserve a fuzz-chosen prefix of the input for the callback layer.
  std::vector<uint8_t> callback_data = provider.ConsumeBytes<uint8_t>(
      provider.ConsumeIntegralInRange<size_t>(0, Size));
  fuzz_init(callback_data.data(), callback_data.size());
  // init device stack on configured roothub port
  tud_init(BOARD_TUD_RHPORT);
  for (int i = 0; i < FUZZ_ITERATIONS; i++) {
    // Stop early once the fuzz input is exhausted.
    if (provider.remaining_bytes() == 0) {
      return 0;
    }
    // Fake an interrupt on a fuzz-chosen (possibly out-of-range) port id.
    tud_int_handler(provider.ConsumeIntegral<uint8_t>());
    tud_task(); // tinyusb device task
    cdc_task(&provider);
  }
  return 0;
}
//--------------------------------------------------------------------+
// USB CDC
//--------------------------------------------------------------------+
// Enumerates the tud_cdc_n_* API entry points cdc_task() can exercise.
// FuzzedDataProvider::ConsumeEnum picks one per iteration; it requires
// kMaxValue to be the final enumerator.
enum CdcApiFuncs {
  kCdcNConnected,
  kCdcNGetLineState,
  kCdcNGetLineCoding,
  kCdcNSetWantedChar,
  kCdcNAvailable,
  kCdcNRead,
  kCdcNReadChar,
  kCdcNReadFlush,
  kCdcNPeek,
  kCdcNWrite,
  kCdcNWriteChar,
  kCdcNWriteStr,
  kCdcNWriteFlush,
  kCdcNWriteAvailable,
  kCdcNWriteClear,
  // We don't need to fuzz tud_cdc_<not n>* as they are just wrappers
  // calling with n==0.
  kMaxValue,
};
// Exercises one fuzz-chosen tud_cdc_n_* API call per invocation, with
// fuzz-derived arguments where applicable. Return values are deliberately
// discarded: the goal is coverage of the CDC device layer, not checking.
void cdc_task(FuzzedDataProvider *provider) {
  assert(provider != NULL);
  // Upper bound for fuzz-sized read/write buffers and strings.
  constexpr size_t kMaxBufferSize = 4096;
  switch (provider->ConsumeEnum<CdcApiFuncs>()) {
  case kCdcNConnected:
    // TODO: Fuzz interface number
    (void)tud_cdc_n_connected(0);
    break;
  case kCdcNGetLineState:
    // TODO: Fuzz interface number
    (void)tud_cdc_n_get_line_state(0);
    break;
  case kCdcNGetLineCoding: {
    cdc_line_coding_t coding;
    // TODO: Fuzz interface number
    (void)tud_cdc_n_get_line_coding(0, &coding);
  } break;
  case kCdcNSetWantedChar:
    // TODO: Fuzz interface number
    (void)tud_cdc_n_set_wanted_char(0, provider->ConsumeIntegral<char>());
    break;
  case kCdcNAvailable:
    // TODO: Fuzz interface number
    (void)tud_cdc_n_available(0);
    break;
  case kCdcNRead: {
    std::vector<uint8_t> buffer;
    buffer.resize(provider->ConsumeIntegralInRange<size_t>(0, kMaxBufferSize));
    // TODO: Fuzz interface number
    (void)tud_cdc_n_read(0, buffer.data(), buffer.size());
    break;
  }
  case kCdcNReadChar:
    // TODO: Fuzz interface number
    tud_cdc_n_read_char(0);
    break;
  case kCdcNReadFlush:
    // TODO: Fuzz interface number
    tud_cdc_n_read_flush(0);
    break;
  case kCdcNPeek: {
    // Fix: variable renamed from "peak" to "peek".
    uint8_t peek = 0;
    tud_cdc_n_peek(0, &peek);
    break;
  }
  case kCdcNWrite: {
    std::vector<uint8_t> buffer = provider->ConsumeBytes<uint8_t>(
        provider->ConsumeIntegralInRange<size_t>(0, kMaxBufferSize));
    // TODO: Fuzz interface number
    (void)tud_cdc_n_write(0, buffer.data(), buffer.size());
  } break;
  case kCdcNWriteChar:
    // TODO: Fuzz interface number
    (void)tud_cdc_n_write_char(0, provider->ConsumeIntegral<char>());
    break;
  case kCdcNWriteStr: {
    std::string str = provider->ConsumeRandomLengthString(kMaxBufferSize);
    // TODO: Fuzz interface number
    (void)tud_cdc_n_write_str(0, str.c_str());
    break;
  }
  case kCdcNWriteFlush:
    // TODO: Fuzz interface number
    (void)tud_cdc_n_write_flush(0);
    break;
  case kCdcNWriteAvailable:
    // TODO: Fuzz interface number
    (void)tud_cdc_n_write_available(0);
    break;
  case kCdcNWriteClear:
    // TODO: Fuzz interface number
    (void)tud_cdc_n_write_clear(0);
    break;
  case kMaxValue:
    // Noop.
    break;
  }
}
}

View File

@ -0,0 +1,114 @@
/*
* The MIT License (MIT)
*
* Copyright (c) 2022 Nathaniel Brough
*
* Permission is hereby granted, free of charge, to any person obtaining a copy
* of this software and associated documentation files (the "Software"), to deal
* in the Software without restriction, including without limitation the rights
* to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
* copies of the Software, and to permit persons to whom the Software is
* furnished to do so, subject to the following conditions:
*
* The above copyright notice and this permission notice shall be included in
* all copies or substantial portions of the Software.
*
* THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
* IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
* FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
* AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
* LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
* OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN
* THE SOFTWARE.
*
*/
// TinyUSB stack configuration for this fuzzing harness: device stack only,
// with the CDC class enabled (see the CLASS section below).
#ifndef _TUSB_CONFIG_H_
#define _TUSB_CONFIG_H_
#ifdef __cplusplus
extern "C" {
#endif
//--------------------------------------------------------------------+
// Board Specific Configuration
//--------------------------------------------------------------------+
// RHPort number used for device can be defined by board.mk, default to port 0
#ifndef BOARD_TUD_RHPORT
#define BOARD_TUD_RHPORT 0
#endif
// RHPort max operational speed can defined by board.mk
#ifndef BOARD_TUD_MAX_SPEED
#define BOARD_TUD_MAX_SPEED OPT_MODE_DEFAULT_SPEED
#endif
//--------------------------------------------------------------------
// Common Configuration
//--------------------------------------------------------------------
// defined by compiler flags for flexibility
#ifndef CFG_TUSB_MCU
#error CFG_TUSB_MCU must be defined
#endif
#ifndef CFG_TUSB_OS
#define CFG_TUSB_OS OPT_OS_NONE
#endif
#ifndef CFG_TUSB_DEBUG
#define CFG_TUSB_DEBUG 0
#endif
// Enable Device stack
#define CFG_TUD_ENABLED 1
// Default is max speed that hardware controller could support with on-chip PHY
#define CFG_TUD_MAX_SPEED BOARD_TUD_MAX_SPEED
/* USB DMA on some MCUs can only access a specific SRAM region with restriction on alignment.
 * Tinyusb use follows macros to declare transferring memory so that they can be put
 * into those specific section.
 * e.g
 * - CFG_TUSB_MEM SECTION : __attribute__ (( section(".usb_ram") ))
 * - CFG_TUSB_MEM_ALIGN : __attribute__ ((aligned(4)))
 */
#ifndef CFG_TUSB_MEM_SECTION
#define CFG_TUSB_MEM_SECTION
#endif
#ifndef CFG_TUSB_MEM_ALIGN
#define CFG_TUSB_MEM_ALIGN __attribute__ ((aligned(4)))
#endif
//--------------------------------------------------------------------
// DEVICE CONFIGURATION
//--------------------------------------------------------------------
#ifndef CFG_TUD_ENDPOINT0_SIZE
#define CFG_TUD_ENDPOINT0_SIZE 64
#endif
//------------- CLASS -------------//
// Only CDC is enabled in this harness.
#define CFG_TUD_CDC 1
#define CFG_TUD_MSC 0
#define CFG_TUD_HID 0
#define CFG_TUD_MIDI 0
#define CFG_TUD_VENDOR 0
// CDC FIFO size of TX and RX
#define CFG_TUD_CDC_RX_BUFSIZE (TUD_OPT_HIGH_SPEED ? 512 : 64)
#define CFG_TUD_CDC_TX_BUFSIZE (TUD_OPT_HIGH_SPEED ? 512 : 64)
// CDC Endpoint transfer buffer size, more is faster
#define CFG_TUD_CDC_EP_BUFSIZE (TUD_OPT_HIGH_SPEED ? 512 : 64)
// MSC Buffer size of Device Mass storage
// NOTE(review): CFG_TUD_MSC is 0 above, so this MSC setting is inert here.
#define CFG_TUD_MSC_EP_BUFSIZE 512
#ifdef __cplusplus
}
#endif
#endif /* _TUSB_CONFIG_H_ */

View File

@ -0,0 +1,229 @@
/*
* The MIT License (MIT)
*
* Copyright (c) 2022 Nathaniel Brough
*
* Permission is hereby granted, free of charge, to any person obtaining a copy
* of this software and associated documentation files (the "Software"), to deal
* in the Software without restriction, including without limitation the rights
* to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
* copies of the Software, and to permit persons to whom the Software is
* furnished to do so, subject to the following conditions:
*
* The above copyright notice and this permission notice shall be included in
* all copies or substantial portions of the Software.
*
* THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
* IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
* FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
* AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
* LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
* OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN
* THE SOFTWARE.
*
*/
#include "tusb.h"
/* A combination of interfaces must have a unique product id, since PC will save
* device driver after the first plug.
* Auto ProductID layout's Bitmap:
* [MSB] HID | CDC [LSB]
*/
#define _PID_MAP(itf, n) ((CFG_TUD_##itf) << (n))
#define USB_PID \
(0x4000 | _PID_MAP(CDC, 0) | _PID_MAP(HID, 2) | _PID_MAP(MIDI, 3) | \
_PID_MAP(VENDOR, 4))
#define USB_VID 0xCafe
#define USB_BCD 0x0200
//--------------------------------------------------------------------+
// Device Descriptors
//--------------------------------------------------------------------+
// Invoked when received GET DEVICE DESCRIPTOR
// Application return pointer to descriptor
// Invoked when received GET DEVICE DESCRIPTOR.
// Application return pointer to descriptor.
uint8_t const *tud_descriptor_device_cb(void) {
  // Static so the descriptor outlives this call (the transfer completes
  // asynchronously after the callback returns).
  static tusb_desc_device_t const desc_device = {
      .bLength = sizeof(tusb_desc_device_t),
      .bDescriptorType = TUSB_DESC_DEVICE,
      .bcdUSB = USB_BCD,
      // Use Interface Association Descriptor (IAD) for CDC
      // As required by USB Specs IAD's subclass must be common class (2) and
      // protocol must be IAD (1)
      .bDeviceClass = TUSB_CLASS_MISC,
      .bDeviceSubClass = MISC_SUBCLASS_COMMON,
      .bDeviceProtocol = MISC_PROTOCOL_IAD,
      .bMaxPacketSize0 = CFG_TUD_ENDPOINT0_SIZE,
      .idVendor = USB_VID,
      .idProduct = USB_PID,
      .bcdDevice = 0x0100,
      .iManufacturer = 0x01,
      .iProduct = 0x02,
      .iSerialNumber = 0x03,
      .bNumConfigurations = 0x01};
  return (uint8_t const *)&desc_device;
}
//--------------------------------------------------------------------+
// Configuration Descriptor
//--------------------------------------------------------------------+
// Interface numbering: CDC control interface followed by its data interface.
enum { ITF_NUM_CDC = 0, ITF_NUM_CDC_DATA, ITF_NUM_TOTAL };
// Endpoint addresses (bit 7 set = IN direction).
#define EPNUM_CDC_NOTIF 0x81
#define EPNUM_CDC_OUT 0x02
#define EPNUM_CDC_IN 0x82
#define CONFIG_TOTAL_LEN (TUD_CONFIG_DESC_LEN + TUD_CDC_DESC_LEN)
// full speed configuration
uint8_t const desc_fs_configuration[] = {
    // Config number, interface count, string index, total length, attribute,
    // power in mA
    TUD_CONFIG_DESCRIPTOR(1, ITF_NUM_TOTAL, 0, CONFIG_TOTAL_LEN, 0x00, 100),
    // Interface number, string index, EP notification address and size, EP data
    // address (out, in) and size.
    TUD_CDC_DESCRIPTOR(ITF_NUM_CDC, 4, EPNUM_CDC_NOTIF, 8, EPNUM_CDC_OUT,
                       EPNUM_CDC_IN, 64),
};
#if TUD_OPT_HIGH_SPEED
// Per USB specs: high speed capable device must report device_qualifier and
// other_speed_configuration
// high speed configuration
uint8_t const desc_hs_configuration[] = {
    // Config number, interface count, string index, total length, attribute,
    // power in mA
    TUD_CONFIG_DESCRIPTOR(1, ITF_NUM_TOTAL, 0, CONFIG_TOTAL_LEN, 0x00, 100),
    // Interface number, string index, EP notification address and size, EP data
    // address (out, in) and size.
    TUD_CDC_DESCRIPTOR(ITF_NUM_CDC, 4, EPNUM_CDC_NOTIF, 8, EPNUM_CDC_OUT,
                       EPNUM_CDC_IN, 512),
};
// other speed configuration
// Scratch buffer, filled on demand by tud_descriptor_other_speed_configuration_cb().
uint8_t desc_other_speed_config[CONFIG_TOTAL_LEN];
// device qualifier is mostly similar to device descriptor since we don't change
// configuration based on speed
tusb_desc_device_qualifier_t const desc_device_qualifier = {
    .bLength = sizeof(tusb_desc_device_qualifier_t),
    .bDescriptorType = TUSB_DESC_DEVICE_QUALIFIER,
    .bcdUSB = USB_BCD,
    .bDeviceClass = TUSB_CLASS_MISC,
    .bDeviceSubClass = MISC_SUBCLASS_COMMON,
    .bDeviceProtocol = MISC_PROTOCOL_IAD,
    .bMaxPacketSize0 = CFG_TUD_ENDPOINT0_SIZE,
    .bNumConfigurations = 0x01,
    .bReserved = 0x00};
// Invoked when received GET DEVICE QUALIFIER DESCRIPTOR request
// Application return pointer to descriptor, whose contents must exist long
// enough for transfer to complete. device_qualifier descriptor describes
// information about a high-speed capable device that would change if the device
// were operating at the other speed. If not highspeed capable stall this
// request.
uint8_t const *tud_descriptor_device_qualifier_cb(void) {
  return (uint8_t const *)&desc_device_qualifier;
}
// Invoked when received GET OTHER SPEED CONFIGURATION DESCRIPTOR request
// Application return pointer to descriptor, whose contents must exist long
// enough for transfer to complete Configuration descriptor in the other speed
// e.g if high speed then this is for full speed and vice versa
uint8_t const *tud_descriptor_other_speed_configuration_cb(uint8_t index) {
  (void)index; // for multiple configurations
  // if link speed is high return fullspeed config, and vice versa
  // Note: the descriptor type is OTHER_SPEED_CONFIG instead of CONFIG
  memcpy(desc_other_speed_config,
         (tud_speed_get() == TUSB_SPEED_HIGH) ? desc_fs_configuration
                                              : desc_hs_configuration,
         CONFIG_TOTAL_LEN);
  // Patch byte 1 (bDescriptorType) so the copied config reads as
  // OTHER_SPEED_CONFIG.
  desc_other_speed_config[1] = TUSB_DESC_OTHER_SPEED_CONFIG;
  return desc_other_speed_config;
}
#endif // highspeed
// Invoked when received GET CONFIGURATION DESCRIPTOR
// Application return pointer to descriptor
// Descriptor contents must exist long enough for transfer to complete
uint8_t const *tud_descriptor_configuration_cb(uint8_t index) {
  (void)index; // for multiple configurations
#if TUD_OPT_HIGH_SPEED
  // Although we are highspeed, host may be fullspeed.
  // Select the configuration matching the negotiated link speed.
  return (tud_speed_get() == TUSB_SPEED_HIGH) ? desc_hs_configuration
                                              : desc_fs_configuration;
#else
  return desc_fs_configuration;
#endif
}
//--------------------------------------------------------------------+
// String Descriptors
//--------------------------------------------------------------------+
// array of pointer to string descriptors
char const *string_desc_arr[] = {
    (const char[]){0x09, 0x04}, // 0: is supported language is English (0x0409)
    "TinyUSB",                  // 1: Manufacturer
    "TinyUSB Device",           // 2: Product
    "123456789012",             // 3: Serials, should use chip ID
    "TinyUSB CDC",              // 4: CDC Interface
};
// Scratch buffer returned to the stack: header word + up to 31 UTF-16 chars.
static uint16_t _desc_str[32];
// Invoked when received GET STRING DESCRIPTOR request
// Application return pointer to descriptor, whose contents must exist long
// enough for transfer to complete
uint16_t const *tud_descriptor_string_cb(uint8_t index, uint16_t langid) {
  (void)langid;
  uint8_t chr_count;
  if (index == 0) {
    // Index 0 is the language ID list: copy the 2-byte LANGID after the
    // header slot.
    memcpy(&_desc_str[1], string_desc_arr[0], 2);
    chr_count = 1;
  } else {
    // Note: the 0xEE index string is a Microsoft OS 1.0 Descriptors.
    // https://docs.microsoft.com/en-us/windows-hardware/drivers/usbcon/microsoft-defined-usb-descriptors
    if (!(index < sizeof(string_desc_arr) / sizeof(string_desc_arr[0])))
      return NULL;
    const char *str = string_desc_arr[index];
    // Cap at max char
    chr_count = (uint8_t)strlen(str);
    if (chr_count > 31)
      chr_count = 31;
    // Convert ASCII string into UTF-16
    for (uint8_t i = 0; i < chr_count; i++) {
      _desc_str[1 + i] = str[i];
    }
  }
  // first byte is length (including header), second byte is string type
  _desc_str[0] = (uint16_t)((TUSB_DESC_STRING << 8) | (2 * chr_count + 2));
  return _desc_str;
}

View File

@ -0,0 +1,29 @@
# Build script for this TinyUSB example; board/family specifics are resolved
# by the shared family_support.cmake helpers.
cmake_minimum_required(VERSION 3.5)
include(${CMAKE_CURRENT_SOURCE_DIR}/../../../hw/bsp/family_support.cmake)
# gets PROJECT name for the example (e.g. <BOARD>-<DIR_NAME>)
family_get_project_name(PROJECT ${CMAKE_CURRENT_LIST_DIR})
project(${PROJECT})
# Checks this example is valid for the family and initializes the project
family_initialize_project(${PROJECT} ${CMAKE_CURRENT_LIST_DIR})
add_executable(${PROJECT})
# Example source
# NOTE(review): confirm this source list matches this example's src/ contents.
target_sources(${PROJECT} PUBLIC
  ${CMAKE_CURRENT_SOURCE_DIR}/src/main.c
  ${CMAKE_CURRENT_SOURCE_DIR}/src/msc_disk.c
  ${CMAKE_CURRENT_SOURCE_DIR}/src/usb_descriptors.c
)
# Example include
target_include_directories(${PROJECT} PUBLIC
  ${CMAKE_CURRENT_SOURCE_DIR}/src
)
# Configure compilation flags and libraries for the example... see the corresponding function
# in hw/bsp/FAMILY/family.cmake for details.
family_configure_device_example(${PROJECT})

View File

@ -0,0 +1,12 @@
include ../../../../tools/top.mk
include ../../make.mk

# Include search paths for the example.
# Fix: no trailing backslash after the last entry -- a backslash-newline
# there continues the INC assignment onto the following comment line.
INC += \
  src \
  $(TOP)/hw

# Example source
SRC_C += $(addprefix $(CURRENT_PATH)/, $(wildcard src/*.c))
SRC_CXX += $(addprefix $(CURRENT_PATH)/, $(wildcard src/*.cc))

include ../../rules.mk

View File

@ -0,0 +1,62 @@
/*
* The MIT License (MIT)
*
* Copyright (c) 2022 Nathaniel Brough
*
* Permission is hereby granted, free of charge, to any person obtaining a copy
* of this software and associated documentation files (the "Software"), to deal
* in the Software without restriction, including without limitation the rights
* to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
* copies of the Software, and to permit persons to whom the Software is
* furnished to do so, subject to the following conditions:
*
* The above copyright notice and this permission notice shall be included in
* all copies or substantial portions of the Software.
*
* THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
* IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
* FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
* AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
* LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
* OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN
* THE SOFTWARE.
*
*/
#include <cassert>
#include <fuzzer/FuzzedDataProvider.h>
#include <stdio.h>
#include <stdlib.h>
#include <string.h>
#include "class/cdc/cdc_device.h"
#include "fuzz/fuzz.h"
#include "tusb.h"
#include <cstdint>
#include <string>
#include <vector>
//--------------------------------------------------------------------+
// MACRO CONSTANT TYPEDEF PROTYPES
//--------------------------------------------------------------------+
#define FUZZ_ITERATIONS 500
// libFuzzer entry point: seeds the TinyUSB device stack with fuzz data and
// drives a bounded number of fake-interrupt/task iterations.
extern "C" int LLVMFuzzerTestOneInput(const uint8_t *Data, size_t Size) {
  FuzzedDataProvider provider(Data, Size);
  // Reserve a fuzz-chosen prefix of the input for the callback layer.
  std::vector<uint8_t> callback_data = provider.ConsumeBytes<uint8_t>(
      provider.ConsumeIntegralInRange<size_t>(0, Size));
  fuzz_init(callback_data.data(), callback_data.size());
  // init device stack on configured roothub port
  tud_init(BOARD_TUD_RHPORT);
  for (int i = 0; i < FUZZ_ITERATIONS; i++) {
    // Stop early once the fuzz input is exhausted.
    if (provider.remaining_bytes() == 0) {
      return 0;
    }
    // Fake an interrupt on a fuzz-chosen (possibly out-of-range) port id.
    tud_int_handler(provider.ConsumeIntegral<uint8_t>());
    tud_task(); // tinyusb device task
  }
  return 0;
}

View File

@ -0,0 +1,114 @@
/*
* The MIT License (MIT)
*
* Copyright (c) 2022 Nathaniel Brough
*
* Permission is hereby granted, free of charge, to any person obtaining a copy
* of this software and associated documentation files (the "Software"), to deal
* in the Software without restriction, including without limitation the rights
* to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
* copies of the Software, and to permit persons to whom the Software is
* furnished to do so, subject to the following conditions:
*
* The above copyright notice and this permission notice shall be included in
* all copies or substantial portions of the Software.
*
* THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
* IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
* FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
* AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
* LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
* OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN
* THE SOFTWARE.
*
*/
// TinyUSB stack configuration for this fuzzing harness: device stack only,
// with the MSC class enabled (see the CLASS section below).
#ifndef _TUSB_CONFIG_H_
#define _TUSB_CONFIG_H_
#ifdef __cplusplus
extern "C" {
#endif
//--------------------------------------------------------------------+
// Board Specific Configuration
//--------------------------------------------------------------------+
// RHPort number used for device can be defined by board.mk, default to port 0
#ifndef BOARD_TUD_RHPORT
#define BOARD_TUD_RHPORT 0
#endif
// RHPort max operational speed can defined by board.mk
#ifndef BOARD_TUD_MAX_SPEED
#define BOARD_TUD_MAX_SPEED OPT_MODE_DEFAULT_SPEED
#endif
//--------------------------------------------------------------------
// Common Configuration
//--------------------------------------------------------------------
// defined by compiler flags for flexibility
#ifndef CFG_TUSB_MCU
#error CFG_TUSB_MCU must be defined
#endif
#ifndef CFG_TUSB_OS
#define CFG_TUSB_OS OPT_OS_NONE
#endif
#ifndef CFG_TUSB_DEBUG
#define CFG_TUSB_DEBUG 0
#endif
// Enable Device stack
#define CFG_TUD_ENABLED 1
// Default is max speed that hardware controller could support with on-chip PHY
#define CFG_TUD_MAX_SPEED BOARD_TUD_MAX_SPEED
/* USB DMA on some MCUs can only access a specific SRAM region with restriction on alignment.
 * Tinyusb use follows macros to declare transferring memory so that they can be put
 * into those specific section.
 * e.g
 * - CFG_TUSB_MEM SECTION : __attribute__ (( section(".usb_ram") ))
 * - CFG_TUSB_MEM_ALIGN : __attribute__ ((aligned(4)))
 */
#ifndef CFG_TUSB_MEM_SECTION
#define CFG_TUSB_MEM_SECTION
#endif
#ifndef CFG_TUSB_MEM_ALIGN
#define CFG_TUSB_MEM_ALIGN __attribute__ ((aligned(4)))
#endif
//--------------------------------------------------------------------
// DEVICE CONFIGURATION
//--------------------------------------------------------------------
#ifndef CFG_TUD_ENDPOINT0_SIZE
#define CFG_TUD_ENDPOINT0_SIZE 64
#endif
//------------- CLASS -------------//
// Only MSC is enabled in this harness.
#define CFG_TUD_CDC 0
#define CFG_TUD_MSC 1
#define CFG_TUD_HID 0
#define CFG_TUD_MIDI 0
#define CFG_TUD_VENDOR 0
// CDC FIFO size of TX and RX
// NOTE(review): CFG_TUD_CDC is 0 above, so these CDC settings are inert here.
#define CFG_TUD_CDC_RX_BUFSIZE (TUD_OPT_HIGH_SPEED ? 512 : 64)
#define CFG_TUD_CDC_TX_BUFSIZE (TUD_OPT_HIGH_SPEED ? 512 : 64)
// CDC Endpoint transfer buffer size, more is faster
#define CFG_TUD_CDC_EP_BUFSIZE (TUD_OPT_HIGH_SPEED ? 512 : 64)
// MSC Buffer size of Device Mass storage
#define CFG_TUD_MSC_EP_BUFSIZE 512
#ifdef __cplusplus
}
#endif
#endif /* _TUSB_CONFIG_H_ */

View File

@ -0,0 +1,224 @@
/*
* The MIT License (MIT)
*
* Copyright (c) 2022 Nathaniel Brough
*
* Permission is hereby granted, free of charge, to any person obtaining a copy
* of this software and associated documentation files (the "Software"), to deal
* in the Software without restriction, including without limitation the rights
* to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
* copies of the Software, and to permit persons to whom the Software is
* furnished to do so, subject to the following conditions:
*
* The above copyright notice and this permission notice shall be included in
* all copies or substantial portions of the Software.
*
* THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
* IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
* FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
* AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
* LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
* OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN
* THE SOFTWARE.
*
*/
#include "tusb.h"
/* A combination of interfaces must have a unique product id, since PC will save
* device driver after the first plug.
* Auto ProductID layout's Bitmap:
* [MSB] HID | MSC | CDC [LSB]
*/
#define _PID_MAP(itf, n) ((CFG_TUD_##itf) << (n))
#define USB_PID \
(0x4000 | _PID_MAP(MSC, 0) | _PID_MAP(HID, 1) | _PID_MAP(MIDI, 2) | \
_PID_MAP(VENDOR, 3))
#define USB_VID 0xCafe
#define USB_BCD 0x0200
//--------------------------------------------------------------------+
// Device Descriptors
//--------------------------------------------------------------------+
// Invoked when received GET DEVICE DESCRIPTOR
// Application return pointer to descriptor
uint8_t const *tud_descriptor_device_cb(void) {
static tusb_desc_device_t const desc_device = {
.bLength = sizeof(tusb_desc_device_t),
.bDescriptorType = TUSB_DESC_DEVICE,
.bcdUSB = USB_BCD,
// Use Interface Association Descriptor (IAD) for CDC
// As required by USB Specs IAD's subclass must be common class (2) and
// protocol must be IAD (1)
.bDeviceClass = TUSB_CLASS_MISC,
.bDeviceSubClass = MISC_SUBCLASS_COMMON,
.bDeviceProtocol = MISC_PROTOCOL_IAD,
.bMaxPacketSize0 = CFG_TUD_ENDPOINT0_SIZE,
.idVendor = USB_VID,
.idProduct = USB_PID,
.bcdDevice = 0x0100,
.iManufacturer = 0x01,
.iProduct = 0x02,
.iSerialNumber = 0x03,
.bNumConfigurations = 0x01};
return (uint8_t const *)&desc_device;
}
//--------------------------------------------------------------------+
// Configuration Descriptor
//--------------------------------------------------------------------+
enum { ITF_NUM_MSC = 0, ITF_NUM_TOTAL };
#define EPNUM_MSC_OUT 0x05
#define EPNUM_MSC_IN 0x85
#define CONFIG_TOTAL_LEN (TUD_CONFIG_DESC_LEN + TUD_MSC_DESC_LEN)
// full speed configuration
uint8_t const desc_fs_configuration[] = {
// Config number, interface count, string index, total length, attribute,
// power in mA
TUD_CONFIG_DESCRIPTOR(1, ITF_NUM_TOTAL, 0, CONFIG_TOTAL_LEN, 0x00, 100),
// Interface number, string index, EP Out & EP In address, EP size
TUD_MSC_DESCRIPTOR(ITF_NUM_MSC, 4, EPNUM_MSC_OUT, EPNUM_MSC_IN, 64),
};
#if TUD_OPT_HIGH_SPEED
// Per USB specs: high speed capable device must report device_qualifier and
// other_speed_configuration
// high speed configuration
uint8_t const desc_hs_configuration[] = {
// Config number, interface count, string index, total length, attribute,
// power in mA
TUD_CONFIG_DESCRIPTOR(1, ITF_NUM_TOTAL, 0, CONFIG_TOTAL_LEN, 0x00, 100),
// Interface number, string index, EP Out & EP In address, EP size
TUD_MSC_DESCRIPTOR(ITF_NUM_MSC, 4, EPNUM_MSC_OUT, EPNUM_MSC_IN, 512),
};
// other speed configuration
uint8_t desc_other_speed_config[CONFIG_TOTAL_LEN];
// device qualifier is mostly similar to device descriptor since we don't change
// configuration based on speed
tusb_desc_device_qualifier_t const desc_device_qualifier = {
.bLength = sizeof(tusb_desc_device_qualifier_t),
.bDescriptorType = TUSB_DESC_DEVICE_QUALIFIER,
.bcdUSB = USB_BCD,
.bDeviceClass = TUSB_CLASS_MISC,
.bDeviceSubClass = MISC_SUBCLASS_COMMON,
.bDeviceProtocol = MISC_PROTOCOL_IAD,
.bMaxPacketSize0 = CFG_TUD_ENDPOINT0_SIZE,
.bNumConfigurations = 0x01,
.bReserved = 0x00};
// Invoked when received GET DEVICE QUALIFIER DESCRIPTOR request
// Application return pointer to descriptor, whose contents must exist long
// enough for transfer to complete. device_qualifier descriptor describes
// information about a high-speed capable device that would change if the device
// were operating at the other speed. If not highspeed capable stall this
// request.
uint8_t const *tud_descriptor_device_qualifier_cb(void) {
return (uint8_t const *)&desc_device_qualifier;
}
// Invoked when received GET OTHER SPEED CONFIGURATION DESCRIPTOR request
// Application return pointer to descriptor, whose contents must exist long
// enough for transfer to complete Configuration descriptor in the other speed
// e.g if high speed then this is for full speed and vice versa
uint8_t const *tud_descriptor_other_speed_configuration_cb(uint8_t index) {
(void)index; // for multiple configurations
// if link speed is high return fullspeed config, and vice versa
// Note: the descriptor type is OHER_SPEED_CONFIG instead of CONFIG
memcpy(desc_other_speed_config,
(tud_speed_get() == TUSB_SPEED_HIGH) ? desc_fs_configuration
: desc_hs_configuration,
CONFIG_TOTAL_LEN);
desc_other_speed_config[1] = TUSB_DESC_OTHER_SPEED_CONFIG;
return desc_other_speed_config;
}
#endif // highspeed
// Invoked when received GET CONFIGURATION DESCRIPTOR
// Application return pointer to descriptor
// Descriptor contents must exist long enough for transfer to complete
uint8_t const *tud_descriptor_configuration_cb(uint8_t index) {
(void)index; // for multiple configurations
#if TUD_OPT_HIGH_SPEED
// Although we are highspeed, host may be fullspeed.
return (tud_speed_get() == TUSB_SPEED_HIGH) ? desc_hs_configuration
: desc_fs_configuration;
#else
return desc_fs_configuration;
#endif
}
//--------------------------------------------------------------------+
// String Descriptors
//--------------------------------------------------------------------+
// array of pointer to string descriptors
char const *string_desc_arr[] = {
(const char[]){0x09, 0x04}, // 0: is supported language is English (0x0409)
"TinyUSB", // 1: Manufacturer
"TinyUSB Device", // 2: Product
"123456789012", // 3: Serials, should use chip ID
"TinyUSB MSC", // 4: MSC Interface
};
static uint16_t _desc_str[32];
// Invoked when received GET STRING DESCRIPTOR request
// Application return pointer to descriptor, whose contents must exist long
// enough for transfer to complete
uint16_t const *tud_descriptor_string_cb(uint8_t index, uint16_t langid) {
(void)langid;
uint8_t chr_count;
if (index == 0) {
memcpy(&_desc_str[1], string_desc_arr[0], 2);
chr_count = 1;
} else {
// Note: the 0xEE index string is a Microsoft OS 1.0 Descriptors.
// https://docs.microsoft.com/en-us/windows-hardware/drivers/usbcon/microsoft-defined-usb-descriptors
if (!(index < sizeof(string_desc_arr) / sizeof(string_desc_arr[0])))
return NULL;
const char *str = string_desc_arr[index];
// Cap at max char
chr_count = (uint8_t)strlen(str);
if (chr_count > 31)
chr_count = 31;
// Convert ASCII string into UTF-16
for (uint8_t i = 0; i < chr_count; i++) {
_desc_str[1 + i] = str[i];
}
}
// first byte is length (including header), second byte is string type
_desc_str[0] = (uint16_t)((TUSB_DESC_STRING << 8) | (2 * chr_count + 2));
return _desc_str;
}

View File

@ -0,0 +1,29 @@
cmake_minimum_required(VERSION 3.5)
include(${CMAKE_CURRENT_SOURCE_DIR}/../../../hw/bsp/family_support.cmake)
# gets PROJECT name for the example (e.g. <BOARD>-<DIR_NAME>)
family_get_project_name(PROJECT ${CMAKE_CURRENT_LIST_DIR})
project(${PROJECT})
# Checks this example is valid for the family and initializes the project
family_initialize_project(${PROJECT} ${CMAKE_CURRENT_LIST_DIR})
add_executable(${PROJECT})
# Example source
target_sources(${PROJECT} PUBLIC
${CMAKE_CURRENT_SOURCE_DIR}/src/main.c
${CMAKE_CURRENT_SOURCE_DIR}/src/msc_disk.c
${CMAKE_CURRENT_SOURCE_DIR}/src/usb_descriptors.c
)
# Example include
target_include_directories(${PROJECT} PUBLIC
${CMAKE_CURRENT_SOURCE_DIR}/src
)
# Configure compilation flags and libraries for the example... see the corresponding function
# in hw/bsp/FAMILY/family.cmake for details.
family_configure_device_example(${PROJECT})

View File

@ -0,0 +1,71 @@
DEPS_SUBMODULES += lib/lwip
include ../../../../tools/top.mk
include ../../make.mk
# suppress warning caused by lwip
CFLAGS += \
-Wno-error=null-dereference \
-Wno-error=unused-parameter \
-Wno-error=unused-variable
INC += \
src \
$(TOP)/hw \
$(TOP)/lib/lwip/src/include \
$(TOP)/lib/lwip/src/include/ipv4 \
$(TOP)/lib/lwip/src/include/lwip/apps \
$(TOP)/lib/networking
# Example source
SRC_C += $(addprefix $(CURRENT_PATH)/, $(wildcard src/*.c))
SRC_CXX += $(addprefix $(CURRENT_PATH)/, $(wildcard src/*.cc))
# lwip sources
SRC_C += \
lib/lwip/src/core/altcp.c \
lib/lwip/src/core/altcp_alloc.c \
lib/lwip/src/core/altcp_tcp.c \
lib/lwip/src/core/def.c \
lib/lwip/src/core/dns.c \
lib/lwip/src/core/inet_chksum.c \
lib/lwip/src/core/init.c \
lib/lwip/src/core/ip.c \
lib/lwip/src/core/mem.c \
lib/lwip/src/core/memp.c \
lib/lwip/src/core/netif.c \
lib/lwip/src/core/pbuf.c \
lib/lwip/src/core/raw.c \
lib/lwip/src/core/stats.c \
lib/lwip/src/core/sys.c \
lib/lwip/src/core/tcp.c \
lib/lwip/src/core/tcp_in.c \
lib/lwip/src/core/tcp_out.c \
lib/lwip/src/core/timeouts.c \
lib/lwip/src/core/udp.c \
lib/lwip/src/core/ipv4/autoip.c \
lib/lwip/src/core/ipv4/dhcp.c \
lib/lwip/src/core/ipv4/etharp.c \
lib/lwip/src/core/ipv4/icmp.c \
lib/lwip/src/core/ipv4/igmp.c \
lib/lwip/src/core/ipv4/ip4.c \
lib/lwip/src/core/ipv4/ip4_addr.c \
lib/lwip/src/core/ipv4/ip4_frag.c \
lib/lwip/src/core/ipv6/dhcp6.c \
lib/lwip/src/core/ipv6/ethip6.c \
lib/lwip/src/core/ipv6/icmp6.c \
lib/lwip/src/core/ipv6/inet6.c \
lib/lwip/src/core/ipv6/ip6.c \
lib/lwip/src/core/ipv6/ip6_addr.c \
lib/lwip/src/core/ipv6/ip6_frag.c \
lib/lwip/src/core/ipv6/mld6.c \
lib/lwip/src/core/ipv6/nd6.c \
lib/lwip/src/netif/ethernet.c \
lib/lwip/src/netif/slipif.c \
lib/lwip/src/apps/http/httpd.c \
lib/lwip/src/apps/http/fs.c \
lib/networking/dhserver.c \
lib/networking/dnserver.c \
lib/networking/rndis_reports.c
include ../../rules.mk

View File

@ -0,0 +1,75 @@
/*
* Copyright (c) 2001-2003 Swedish Institute of Computer Science.
* All rights reserved.
*
* Redistribution and use in source and binary forms, with or without modification,
* are permitted provided that the following conditions are met:
*
* 1. Redistributions of source code must retain the above copyright notice,
* this list of conditions and the following disclaimer.
* 2. Redistributions in binary form must reproduce the above copyright notice,
* this list of conditions and the following disclaimer in the documentation
* and/or other materials provided with the distribution.
* 3. The name of the author may not be used to endorse or promote products
* derived from this software without specific prior written permission.
*
* THIS SOFTWARE IS PROVIDED BY THE AUTHOR ``AS IS'' AND ANY EXPRESS OR IMPLIED
* WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES OF
* MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT
* SHALL THE AUTHOR BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL,
* EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT
* OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS
* INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN
* CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING
* IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY
* OF SUCH DAMAGE.
*
* This file is part of the lwIP TCP/IP stack.
*
* Author: Adam Dunkels <adam@sics.se>
*
*/
#ifndef __CC_H__
#define __CC_H__
//#include "cpu.h"
typedef int sys_prot_t;
/* define compiler specific symbols */
#if defined (__ICCARM__)
#define PACK_STRUCT_BEGIN
#define PACK_STRUCT_STRUCT
#define PACK_STRUCT_END
#define PACK_STRUCT_FIELD(x) x
#define PACK_STRUCT_USE_INCLUDES
#elif defined (__CC_ARM)
#define PACK_STRUCT_BEGIN __packed
#define PACK_STRUCT_STRUCT
#define PACK_STRUCT_END
#define PACK_STRUCT_FIELD(x) x
#elif defined (__GNUC__)
#define PACK_STRUCT_BEGIN
#define PACK_STRUCT_STRUCT __attribute__ ((__packed__))
#define PACK_STRUCT_END
#define PACK_STRUCT_FIELD(x) x
#elif defined (__TASKING__)
#define PACK_STRUCT_BEGIN
#define PACK_STRUCT_STRUCT
#define PACK_STRUCT_END
#define PACK_STRUCT_FIELD(x) x
#endif
#define LWIP_PLATFORM_ASSERT(x) do { if(!(x)) while(1); } while(0)
#endif /* __CC_H__ */

View File

@ -0,0 +1,99 @@
/*
* The MIT License (MIT)
*
* Copyright (c) 2022 Nathaniel Brough
*
* Permission is hereby granted, free of charge, to any person obtaining a copy
* of this software and associated documentation files (the "Software"), to deal
* in the Software without restriction, including without limitation the rights
* to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
* copies of the Software, and to permit persons to whom the Software is
* furnished to do so, subject to the following conditions:
*
* The above copyright notice and this permission notice shall be included in
* all copies or substantial portions of the Software.
*
* THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
* IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
* FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
* AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
* LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
* OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN
* THE SOFTWARE.
*
*/
#include <cassert>
#include <fuzzer/FuzzedDataProvider.h>
#include <stdio.h>
#include <stdlib.h>
#include <string.h>
#include "class/cdc/cdc_device.h"
#include "class/net/net_device.h"
#include "fuzz/fuzz.h"
#include "tusb.h"
#include <cstdint>
#include <string>
#include <vector>
extern "C" {
#define FUZZ_ITERATIONS 500
//--------------------------------------------------------------------+
// MACRO CONSTANT TYPEDEF PROTYPES
//--------------------------------------------------------------------+
void net_task(FuzzedDataProvider *provider);
extern "C" int LLVMFuzzerTestOneInput(const uint8_t *Data, size_t Size) {
FuzzedDataProvider provider(Data, Size);
std::vector<uint8_t> callback_data = provider.ConsumeBytes<uint8_t>(
provider.ConsumeIntegralInRange<size_t>(0, Size));
fuzz_init(callback_data.data(), callback_data.size());
// init device stack on configured roothub port
tud_init(BOARD_TUD_RHPORT);
for (int i = 0; i < FUZZ_ITERATIONS; i++) {
if (provider.remaining_bytes() == 0) {
return 0;
}
tud_int_handler(provider.ConsumeIntegral<uint8_t>());
tud_task(); // tinyusb device task
net_task(&provider);
}
return 0;
}
//--------------------------------------------------------------------+
// USB CDC
//--------------------------------------------------------------------+
enum NetApiFuncs {
kNetworkRecvRenew,
kNetworkCanXmit,
kNetworkXmit,
kMaxValue,
};
void net_task(FuzzedDataProvider *provider) {
assert(provider != NULL);
switch (provider->ConsumeEnum<NetApiFuncs>()) {
case kNetworkRecvRenew:
tud_network_recv_renew();
break;
case kNetworkCanXmit:
(void)tud_network_can_xmit(provider->ConsumeIntegral<uint16_t>());
case kNetworkXmit:
// TODO: Actually pass real values here later.
tud_network_xmit(NULL, 0);
case kMaxValue:
// Noop.
break;
}
}
}

View File

@ -0,0 +1,71 @@
/*
* Copyright (c) 2001-2003 Swedish Institute of Computer Science.
* All rights reserved.
*
* Redistribution and use in source and binary forms, with or without modification,
* are permitted provided that the following conditions are met:
*
* 1. Redistributions of source code must retain the above copyright notice,
* this list of conditions and the following disclaimer.
* 2. Redistributions in binary form must reproduce the above copyright notice,
* this list of conditions and the following disclaimer in the documentation
* and/or other materials provided with the distribution.
* 3. The name of the author may not be used to endorse or promote products
* derived from this software without specific prior written permission.
*
* THIS SOFTWARE IS PROVIDED BY THE AUTHOR ``AS IS'' AND ANY EXPRESS OR IMPLIED
* WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES OF
* MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT
* SHALL THE AUTHOR BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL,
* EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT
* OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS
* INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN
* CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING
* IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY
* OF SUCH DAMAGE.
*
* This file is part of the lwIP TCP/IP stack.
*
* Author: Simon Goldschmidt
*
*/
#ifndef __LWIPOPTS_H__
#define __LWIPOPTS_H__
/* Prevent having to link sys_arch.c (we don't test the API layers in unit tests) */
#define NO_SYS 1
#define MEM_ALIGNMENT 4
#define LWIP_RAW 0
#define LWIP_NETCONN 0
#define LWIP_SOCKET 0
#define LWIP_DHCP 0
#define LWIP_ICMP 1
#define LWIP_UDP 1
#define LWIP_TCP 1
#define LWIP_IPV4 1
#define LWIP_IPV6 0
#define ETH_PAD_SIZE 0
#define LWIP_IP_ACCEPT_UDP_PORT(p) ((p) == PP_NTOHS(67))
#define TCP_MSS (1500 /*mtu*/ - 20 /*iphdr*/ - 20 /*tcphhr*/)
#define TCP_SND_BUF (2 * TCP_MSS)
#define TCP_WND (TCP_MSS)
#define ETHARP_SUPPORT_STATIC_ENTRIES 1
#define LWIP_HTTPD_CGI 0
#define LWIP_HTTPD_SSI 0
#define LWIP_HTTPD_SSI_INCLUDE_TAG 0
#define LWIP_SINGLE_NETIF 1
#define PBUF_POOL_SIZE 2
#define HTTPD_USE_CUSTOM_FSDATA 0
#define LWIP_MULTICAST_PING 1
#define LWIP_BROADCAST_PING 1
#define LWIP_IPV6_MLD 0
#define LWIP_IPV6_SEND_ROUTER_SOLICIT 0
#endif /* __LWIPOPTS_H__ */

View File

@ -0,0 +1,122 @@
/*
* The MIT License (MIT)
*
* Copyright (c) 2022 Nathaniel Brough
*
* Permission is hereby granted, free of charge, to any person obtaining a copy
* of this software and associated documentation files (the "Software"), to deal
* in the Software without restriction, including without limitation the rights
* to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
* copies of the Software, and to permit persons to whom the Software is
* furnished to do so, subject to the following conditions:
*
* The above copyright notice and this permission notice shall be included in
* all copies or substantial portions of the Software.
*
* THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
* IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
* FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
* AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
* LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
* OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN
* THE SOFTWARE.
*
*/
#ifndef _TUSB_CONFIG_H_
#define _TUSB_CONFIG_H_
#ifdef __cplusplus
extern "C" {
#endif
//--------------------------------------------------------------------+
// Board Specific Configuration
//--------------------------------------------------------------------+
// RHPort number used for device can be defined by board.mk, default to port 0
#ifndef BOARD_TUD_RHPORT
#define BOARD_TUD_RHPORT 0
#endif
// RHPort max operational speed can defined by board.mk
#ifndef BOARD_TUD_MAX_SPEED
#define BOARD_TUD_MAX_SPEED OPT_MODE_DEFAULT_SPEED
#endif
//--------------------------------------------------------------------
// Common Configuration
//--------------------------------------------------------------------
// defined by compiler flags for flexibility
#ifndef CFG_TUSB_MCU
#error CFG_TUSB_MCU must be defined
#endif
#ifndef CFG_TUSB_OS
#define CFG_TUSB_OS OPT_OS_NONE
#endif
#ifndef CFG_TUSB_DEBUG
#define CFG_TUSB_DEBUG 0
#endif
// Enable Device stack
#define CFG_TUD_ENABLED 1
// Default is max speed that hardware controller could support with on-chip PHY
#define CFG_TUD_MAX_SPEED BOARD_TUD_MAX_SPEED
/* USB DMA on some MCUs can only access a specific SRAM region with restriction on alignment.
* Tinyusb use follows macros to declare transferring memory so that they can be put
* into those specific section.
* e.g
* - CFG_TUSB_MEM SECTION : __attribute__ (( section(".usb_ram") ))
* - CFG_TUSB_MEM_ALIGN : __attribute__ ((aligned(4)))
*/
#ifndef CFG_TUSB_MEM_SECTION
#define CFG_TUSB_MEM_SECTION
#endif
#ifndef CFG_TUSB_MEM_ALIGN
#define CFG_TUSB_MEM_ALIGN __attribute__ ((aligned(4)))
#endif
//--------------------------------------------------------------------
// DEVICE CONFIGURATION
//--------------------------------------------------------------------
#ifndef CFG_TUD_ENDPOINT0_SIZE
#define CFG_TUD_ENDPOINT0_SIZE 64
#endif
//------------- CLASS -------------//
#define CFG_TUD_CDC 1
#define CFG_TUD_MSC 0
#define CFG_TUD_HID 0
#define CFG_TUD_MIDI 0
#define CFG_TUD_VENDOR 0
// Network class has 2 drivers: ECM/RNDIS and NCM.
// Only one of the drivers can be enabled
#define CFG_TUD_ECM_RNDIS 1
#define CFG_TUD_NCM (1-CFG_TUD_ECM_RNDIS)
// CDC FIFO size of TX and RX
#define CFG_TUD_CDC_RX_BUFSIZE (TUD_OPT_HIGH_SPEED ? 512 : 64)
#define CFG_TUD_CDC_TX_BUFSIZE (TUD_OPT_HIGH_SPEED ? 512 : 64)
// CDC Endpoint transfer buffer size, more is faster
#define CFG_TUD_CDC_EP_BUFSIZE (TUD_OPT_HIGH_SPEED ? 512 : 64)
// MSC Buffer size of Device Mass storage
#define CFG_TUD_MSC_EP_BUFSIZE 512
#ifdef __cplusplus
}
#endif
#endif /* _TUSB_CONFIG_H_ */

View File

@ -0,0 +1,229 @@
/*
* The MIT License (MIT)
*
* Copyright (c) 2022 Ha Thach (tinyusb.org)
*
* Permission is hereby granted, free of charge, to any person obtaining a copy
* of this software and associated documentation files (the "Software"), to deal
* in the Software without restriction, including without limitation the rights
* to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
* copies of the Software, and to permit persons to whom the Software is
* furnished to do so, subject to the following conditions:
*
* The above copyright notice and this permission notice shall be included in
* all copies or substantial portions of the Software.
*
* THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
* IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
* FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
* AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
* LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
* OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN
* THE SOFTWARE.
*
*/
#include "tusb.h"
/* A combination of interfaces must have a unique product id, since the host
 * PC caches the device driver after the first plug.
 * Auto ProductID layout's bitmap (must match the _PID_MAP calls below):
 *   [MSB] VENDOR | MIDI | HID | CDC [LSB]
 */
#define _PID_MAP(itf, n) ((CFG_TUD_##itf) << (n))
#define USB_PID \
(0x4000 | _PID_MAP(CDC, 0) | _PID_MAP(HID, 2) | _PID_MAP(MIDI, 3) | \
_PID_MAP(VENDOR, 4))
#define USB_VID 0xCafe
#define USB_BCD 0x0200
//--------------------------------------------------------------------+
// Device Descriptors
//--------------------------------------------------------------------+
// Invoked when received GET DEVICE DESCRIPTOR
// Application return pointer to descriptor
uint8_t const *tud_descriptor_device_cb(void) {
static tusb_desc_device_t const desc_device = {
.bLength = sizeof(tusb_desc_device_t),
.bDescriptorType = TUSB_DESC_DEVICE,
.bcdUSB = USB_BCD,
// Use Interface Association Descriptor (IAD) for CDC
// As required by USB Specs IAD's subclass must be common class (2) and
// protocol must be IAD (1)
.bDeviceClass = TUSB_CLASS_MISC,
.bDeviceSubClass = MISC_SUBCLASS_COMMON,
.bDeviceProtocol = MISC_PROTOCOL_IAD,
.bMaxPacketSize0 = CFG_TUD_ENDPOINT0_SIZE,
.idVendor = USB_VID,
.idProduct = USB_PID,
.bcdDevice = 0x0100,
.iManufacturer = 0x01,
.iProduct = 0x02,
.iSerialNumber = 0x03,
.bNumConfigurations = 0x01};
return (uint8_t const *)&desc_device;
}
//--------------------------------------------------------------------+
// Configuration Descriptor
//--------------------------------------------------------------------+
enum { ITF_NUM_CDC = 0, ITF_NUM_CDC_DATA, ITF_NUM_TOTAL };
#define EPNUM_CDC_NOTIF 0x81
#define EPNUM_CDC_OUT 0x02
#define EPNUM_CDC_IN 0x82
#define CONFIG_TOTAL_LEN (TUD_CONFIG_DESC_LEN + TUD_CDC_DESC_LEN)
// full speed configuration
uint8_t const desc_fs_configuration[] = {
// Config number, interface count, string index, total length, attribute,
// power in mA
TUD_CONFIG_DESCRIPTOR(1, ITF_NUM_TOTAL, 0, CONFIG_TOTAL_LEN, 0x00, 100),
// Interface number, string index, EP notification address and size, EP data
// address (out, in) and size.
TUD_CDC_DESCRIPTOR(ITF_NUM_CDC, 4, EPNUM_CDC_NOTIF, 8, EPNUM_CDC_OUT,
EPNUM_CDC_IN, 64),
};
#if TUD_OPT_HIGH_SPEED
// Per USB specs: high speed capable device must report device_qualifier and
// other_speed_configuration
// high speed configuration
uint8_t const desc_hs_configuration[] = {
// Config number, interface count, string index, total length, attribute,
// power in mA
TUD_CONFIG_DESCRIPTOR(1, ITF_NUM_TOTAL, 0, CONFIG_TOTAL_LEN, 0x00, 100),
// Interface number, string index, EP notification address and size, EP data
// address (out, in) and size.
TUD_CDC_DESCRIPTOR(ITF_NUM_CDC, 4, EPNUM_CDC_NOTIF, 8, EPNUM_CDC_OUT,
EPNUM_CDC_IN, 512),
};
// other speed configuration
uint8_t desc_other_speed_config[CONFIG_TOTAL_LEN];
// device qualifier is mostly similar to device descriptor since we don't change
// configuration based on speed
tusb_desc_device_qualifier_t const desc_device_qualifier = {
.bLength = sizeof(tusb_desc_device_qualifier_t),
.bDescriptorType = TUSB_DESC_DEVICE_QUALIFIER,
.bcdUSB = USB_BCD,
.bDeviceClass = TUSB_CLASS_MISC,
.bDeviceSubClass = MISC_SUBCLASS_COMMON,
.bDeviceProtocol = MISC_PROTOCOL_IAD,
.bMaxPacketSize0 = CFG_TUD_ENDPOINT0_SIZE,
.bNumConfigurations = 0x01,
.bReserved = 0x00};
// Invoked when received GET DEVICE QUALIFIER DESCRIPTOR request
// Application return pointer to descriptor, whose contents must exist long
// enough for transfer to complete. device_qualifier descriptor describes
// information about a high-speed capable device that would change if the device
// were operating at the other speed. If not highspeed capable stall this
// request.
uint8_t const *tud_descriptor_device_qualifier_cb(void) {
return (uint8_t const *)&desc_device_qualifier;
}
// Invoked when received GET OTHER SPEED CONFIGURATION DESCRIPTOR request
// Application return pointer to descriptor, whose contents must exist long
// enough for transfer to complete Configuration descriptor in the other speed
// e.g if high speed then this is for full speed and vice versa
uint8_t const *tud_descriptor_other_speed_configuration_cb(uint8_t index) {
(void)index; // for multiple configurations
// if link speed is high return fullspeed config, and vice versa
// Note: the descriptor type is OHER_SPEED_CONFIG instead of CONFIG
memcpy(desc_other_speed_config,
(tud_speed_get() == TUSB_SPEED_HIGH) ? desc_fs_configuration
: desc_hs_configuration,
CONFIG_TOTAL_LEN);
desc_other_speed_config[1] = TUSB_DESC_OTHER_SPEED_CONFIG;
return desc_other_speed_config;
}
#endif // highspeed
// Invoked when received GET CONFIGURATION DESCRIPTOR
// Application return pointer to descriptor
// Descriptor contents must exist long enough for transfer to complete
uint8_t const *tud_descriptor_configuration_cb(uint8_t index) {
(void)index; // for multiple configurations
#if TUD_OPT_HIGH_SPEED
// Although we are highspeed, host may be fullspeed.
return (tud_speed_get() == TUSB_SPEED_HIGH) ? desc_hs_configuration
: desc_fs_configuration;
#else
return desc_fs_configuration;
#endif
}
//--------------------------------------------------------------------+
// String Descriptors
//--------------------------------------------------------------------+
// array of pointer to string descriptors
char const *string_desc_arr[] = {
(const char[]){0x09, 0x04}, // 0: is supported language is English (0x0409)
"TinyUSB", // 1: Manufacturer
"TinyUSB Device", // 2: Product
"123456789012", // 3: Serials, should use chip ID
"TinyUSB CDC", // 4: CDC Interface
};
static uint16_t _desc_str[32];
// Invoked when received GET STRING DESCRIPTOR request
// Application return pointer to descriptor, whose contents must exist long
// enough for transfer to complete
uint16_t const *tud_descriptor_string_cb(uint8_t index, uint16_t langid) {
(void)langid;
uint8_t chr_count;
if (index == 0) {
memcpy(&_desc_str[1], string_desc_arr[0], 2);
chr_count = 1;
} else {
// Note: the 0xEE index string is a Microsoft OS 1.0 Descriptors.
// https://docs.microsoft.com/en-us/windows-hardware/drivers/usbcon/microsoft-defined-usb-descriptors
if (!(index < sizeof(string_desc_arr) / sizeof(string_desc_arr[0])))
return NULL;
const char *str = string_desc_arr[index];
// Cap at max char
chr_count = (uint8_t)strlen(str);
if (chr_count > 31)
chr_count = 31;
// Convert ASCII string into UTF-16
for (uint8_t i = 0; i < chr_count; i++) {
_desc_str[1 + i] = str[i];
}
}
// first byte is length (including header), second byte is string type
_desc_str[0] = (uint16_t)((TUSB_DESC_STRING << 8) | (2 * chr_count + 2));
return _desc_str;
}

View File

@ -0,0 +1,74 @@
# List of supported OIDs
RNDIS_OID_GEN_SUPPORTED_LIST="\x00\x01\x01\x01"
# Hardware status
RNDIS_OID_GEN_HARDWARE_STATUS="\x00\x01\x01\x02"
# Media types supported (encoded)
RNDIS_OID_GEN_MEDIA_SUPPORTED="\x00\x01\x01\x03"
# Media types in use (encoded)
RNDIS_OID_GEN_MEDIA_IN_USE="\x00\x01\x01\x04"
RNDIS_OID_GEN_MAXIMUM_LOOKAHEAD="\x00\x01\x01\x05"
# Maximum frame size in bytes
RNDIS_OID_GEN_MAXIMUM_FRAME_SIZE="\x00\x01\x01\x06"
# Link speed in units of 100 bps
RNDIS_OID_GEN_LINK_SPEED="\x00\x01\x01\x07"
# Transmit buffer space
RNDIS_OID_GEN_TRANSMIT_BUFFER_SPACE="\x00\x01\x01\x08"
# Receive buffer space
RNDIS_OID_GEN_RECEIVE_BUFFER_SPACE="\x00\x01\x01\x09"
# NDIS version number used by the driver
RNDIS_OID_GEN_DRIVER_VERSION="\x00\x01\x01\x10"
# Maximum total packet length in bytes
RNDIS_OID_GEN_MAXIMUM_TOTAL_SIZE="\x00\x01\x01\x11"
# Optional protocol flags (encoded)
RNDIS_OID_GEN_PROTOCOL_OPTIONS="\x00\x01\x01\x12"
# Optional NIC flags (encoded)
RNDIS_OID_GEN_MAC_OPTIONS="\x00\x01\x01\x13"
# Whether the NIC is connected to the network
RNDIS_OID_GEN_MEDIA_CONNECT_STATUS="\x00\x01\x01\x14"
# The maximum number of send packets the driver can accept per call to its MiniportSendPackets function
RNDIS_OID_GEN_MAXIMUM_SEND_PACKETS="\x00\x01\x01\x15"
# Vendor-assigned version number of the driver
RNDIS_OID_GEN_VENDOR_DRIVER_VERSION="\x00\x01\x01\x16"
# The custom GUIDs (Globally Unique Identifier) supported by the miniport driver
RNDIS_OID_GEN_SUPPORTED_GUIDS="\x00\x01\x01\x17"
# List of network-layer addresses associated with the binding between a transport and the driver
RNDIS_OID_GEN_NETWORK_LAYER_ADDRESSES="\x00\x01\x01\x18"
# Size of packets' additional headers
RNDIS_OID_GEN_TRANSPORT_HEADER_OFFSET="\x00\x01\x01\x19"
RNDIS_OID_GEN_MEDIA_CAPABILITIES="\x00\x01\x02\x01"
# Physical media supported by the miniport driver (encoded)
RNDIS_OID_GEN_PHYSICAL_MEDIUM="\x00\x01\x02\x02"
# Permanent station address
RNDIS_OID_802_3_PERMANENT_ADDRESS="\x01\x01\x01\x01"
# Current station address
RNDIS_OID_802_3_CURRENT_ADDRESS="\x01\x01\x01\x02"
# Current multicast address list
RNDIS_OID_802_3_MULTICAST_LIST="\x01\x01\x01\x03"
# Maximum size of multicast address list
RNDIS_OID_802_3_MAXIMUM_LIST_SIZE="\x01\x01\x01\x04"
# Directed packets. Directed packets contain a destination address equal to the station address of the NIC.
RNDIS_PACKET_TYPE_DIRECTED="\x00\x00\x00\x01"
# Multicast address packets sent to addresses in the multicast address list.
RNDIS_PACKET_TYPE_MULTICAST="\x00\x00\x00\x02"
# All multicast address packets, not just the ones enumerated in the multicast address list.
RNDIS_PACKET_TYPE_ALL_MULTICAST="\x00\x00\x00\x04"
# Broadcast packets.
RNDIS_PACKET_TYPE_BROADCAST="\x00\x00\x00\x08"
# All source routing packets. If the protocol driver sets this bit, the NDIS library attempts to act as a source routing bridge.
RNDIS_PACKET_TYPE_SOURCE_ROUTING="\x00\x00\x00\x10"
# Specifies all packets regardless of whether VLAN filtering is enabled or not and whether the VLAN identifier matches or not.
RNDIS_PACKET_TYPE_PROMISCUOUS="\x00\x00\x00\x20"
# SMT packets that an FDDI NIC receives.
RNDIS_PACKET_TYPE_SMT="\x00\x00\x00\x40"
# All packets sent by installed protocols and all packets indicated by the NIC that is identified by a given NdisBindingHandle.
RNDIS_PACKET_TYPE_ALL_LOCAL="\x00\x00\x00\x80"
# Packets sent to the current group address.
RNDIS_PACKET_TYPE_GROUP="\x00\x00\x10\x00"
# All functional address packets, not just the ones in the current functional address.
RNDIS_PACKET_TYPE_ALL_FUNCTIONAL="\x00\x00\x20\x00"
# Functional address packets sent to addresses included in the current functional address.
RNDIS_PACKET_TYPE_FUNCTIONAL="\x00\x00\x40\x00"
# NIC driver frames that a Token Ring NIC receives.
RNDIS_PACKET_TYPE_MAC_FRAME="\x00\x00\x80\x00"
RNDIS_PACKET_TYPE_NO_LOCAL="\x00\x01\x00\x00"

View File

@ -0,0 +1,34 @@
/*
* The MIT License (MIT)
*
* Copyright (c) 2022 Nathaniel Brough
*
* Permission is hereby granted, free of charge, to any person obtaining a copy
* of this software and associated documentation files (the "Software"), to deal
* in the Software without restriction, including without limitation the rights
* to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
* copies of the Software, and to permit persons to whom the Software is
* furnished to do so, subject to the following conditions:
*
* The above copyright notice and this permission notice shall be included in
* all copies or substantial portions of the Software.
*
* THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
* IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
* FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
* AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
* LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
* OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN
* THE SOFTWARE.
*
*/
#include "fuzzer/FuzzedDataProvider.h"
#include <optional>
// Shared fuzz input source for all harness callbacks; empty until fuzz_init()
// has been called with the current libFuzzer input.
std::optional<FuzzedDataProvider> _fuzz_data_provider;
// Seeds the shared FuzzedDataProvider with the raw fuzz input buffer.
// Always returns 0 (success).
extern "C" int fuzz_init(const uint8_t *data, size_t size) {
  _fuzz_data_provider.emplace(data, size);
  return 0;
}

View File

@ -0,0 +1,37 @@
/*
* The MIT License (MIT)
*
* Copyright (c) 2022 Nathaniel Brough
*
* Permission is hereby granted, free of charge, to any person obtaining a copy
* of this software and associated documentation files (the "Software"), to deal
* in the Software without restriction, including without limitation the rights
* to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
* copies of the Software, and to permit persons to whom the Software is
* furnished to do so, subject to the following conditions:
*
* The above copyright notice and this permission notice shall be included in
* all copies or substantial portions of the Software.
*
* THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
* IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
* FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
* AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
* LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
* OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN
* THE SOFTWARE.
*
*/
#pragma once
// <stddef.h> is required for size_t: <stdint.h> alone does not guarantee that
// size_t is declared.
#include <stddef.h>
#include <stdint.h>
#ifdef __cplusplus
extern "C" {
#endif
// Seeds the global fuzz data provider with the libFuzzer input buffer.
// Returns 0 on success.
int fuzz_init(const uint8_t *data, size_t size);
#ifdef __cplusplus
}
#endif

View File

@ -0,0 +1,30 @@
/*
* The MIT License (MIT)
*
* Copyright (c) 2022 Nathaniel Brough
*
* Permission is hereby granted, free of charge, to any person obtaining a copy
* of this software and associated documentation files (the "Software"), to deal
* in the Software without restriction, including without limitation the rights
* to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
* copies of the Software, and to permit persons to whom the Software is
* furnished to do so, subject to the following conditions:
*
* The above copyright notice and this permission notice shall be included in
* all copies or substantial portions of the Software.
*
* THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
* IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
* FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
* AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
* LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
* OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN
* THE SOFTWARE.
*
*/
#pragma once
#include "fuzzer/FuzzedDataProvider.h"
#include <optional>
// Shared fuzz input source, defined in fuzz.cc and seeded by fuzz_init().
// Callbacks must check has_value() before dereferencing.
extern std::optional<FuzzedDataProvider> _fuzz_data_provider;

View File

@ -0,0 +1,108 @@
# ---------------------------------------
# Common make definition for all examples
# ---------------------------------------
# Build directory
BUILD := _build
PROJECT := $(notdir $(CURDIR))
# Handy check parameter function
check_defined = \
    $(strip $(foreach 1,$1, \
        $(call __check_defined,$1,$(strip $(value 2)))))
__check_defined = \
    $(if $(value $1),, \
      $(error Undefined make flag: $1$(if $2, ($2))))
#-------------- Fuzz harness compiler ------------
CC ?= clang
CXX ?= clang++
GDB ?= gdb
OBJCOPY = objcopy
SIZE = size
MKDIR = mkdir
# Windows cmd.exe vs POSIX tool names.
# NOTE(review): SED is only defined on the POSIX side — confirm nothing uses it
# under CMDEXE.
ifeq ($(CMDEXE),1)
  CP = copy
  RM = del
  PYTHON = python
else
  SED = sed
  CP = cp
  RM = rm
  PYTHON = python3
endif
#-------------- Fuzz harness flags ------------
COVERAGE_FLAGS ?= -fsanitize-coverage=trace-pc-guard
SANITIZER_FLAGS ?= -fsanitize=fuzzer \
  -fsanitize=address
CFLAGS += $(COVERAGE_FLAGS) $(SANITIZER_FLAGS)
#-------------- Source files and compiler flags --------------
INC += $(TOP)/test
# Compiler Flags
CFLAGS += \
  -ggdb \
  -fdata-sections \
  -ffunction-sections \
  -fno-strict-aliasing \
  -Wall \
  -Wextra \
  -Werror \
  -Wfatal-errors \
  -Wdouble-promotion \
  -Wstrict-prototypes \
  -Wstrict-overflow \
  -Werror-implicit-function-declaration \
  -Wfloat-equal \
  -Wundef \
  -Wshadow \
  -Wwrite-strings \
  -Wsign-compare \
  -Wmissing-format-attribute \
  -Wunreachable-code \
  -Wcast-align \
  -Wcast-qual \
  -Wnull-dereference \
  -Wuninitialized \
  -Wunused \
  -Wredundant-decls \
  -O1
CFLAGS += \
  -Wno-error=unreachable-code \
  -DOPT_MCU_FUZZ=1 \
  -DCFG_TUSB_MCU=OPT_MCU_FUZZ
CXXFLAGS += \
  -xc++ \
  -Wno-c++11-narrowing \
  -fno-implicit-templates
# conversion is too strict for most mcu driver, may be disable sign/int/arith-conversion
#  -Wconversion
# Debugging/Optimization
ifeq ($(DEBUG), 1)
  CFLAGS += -Og
else
  # NOTE(review): CFLAGS_OPTIMIZED is not defined anywhere in this file, so in
  # non-DEBUG builds this line adds nothing and the -O1 above stays in effect —
  # confirm intent.
  CFLAGS += $(CFLAGS_OPTIMIZED)
endif
# Log level is mapped to TUSB DEBUG option
ifneq ($(LOG),)
  CMAKE_DEFSYM +=	-DLOG=$(LOG)
  CFLAGS += -DCFG_TUSB_DEBUG=$(LOG)
endif
# Logger: default is uart, can be set to rtt or swo
ifneq ($(LOGGER),)
  CMAKE_DEFSYM +=	-DLOGGER=$(LOGGER)
endif

View File

@ -0,0 +1,162 @@
#include "fuzz/fuzz_private.h"
#include "tusb.h"
#include <cassert>
#include <array>
#include <limits>
#if CFG_TUD_MSC==1
// Whether host does safe eject.
// tud_msc_get_maxlun_cb returns a uint8_t so the max logical units that are
// allowed is 255, so we need to keep track of 255 fuzzed logical units.
static std::array<bool, std::numeric_limits<uint8_t>::max()> ejected = {false};
extern "C" {
// Invoked when received SCSI_CMD_INQUIRY
// Application fill vendor id, product id and revision with string up to 8, 16,
// 4 characters respectively
void tud_msc_inquiry_cb(uint8_t lun, uint8_t vendor_id[8],
                        uint8_t product_id[16], uint8_t product_rev[4]) {
  (void)lun;
  assert(_fuzz_data_provider.has_value());
  // Fill the inquiry fields with fuzz-controlled bytes of the maximum allowed
  // lengths (8/16/4).
  std::string vid = _fuzz_data_provider->ConsumeBytesAsString(8);
  std::string pid = _fuzz_data_provider->ConsumeBytesAsString(16);
  std::string rev = _fuzz_data_provider->ConsumeBytesAsString(4);
  // NOTE(review): strlen() stops at the first NUL byte the provider produced,
  // so fewer bytes than requested may be copied — presumably intentional.
  memcpy(vendor_id, vid.c_str(), strlen(vid.c_str()));
  memcpy(product_id, pid.c_str(), strlen(pid.c_str()));
  memcpy(product_rev, rev.c_str(), strlen(rev.c_str()));
}
// Invoked when received Test Unit Ready command.
// return true allowing host to read/write this LUN e.g SD card inserted
// Invoked when received Test Unit Ready command.
// Return true to allow host to read/write this LUN (e.g. SD card inserted).
bool tud_msc_test_unit_ready_cb(uint8_t lun) {
  // Guard against an unseeded provider (fuzz_init not yet called): every other
  // MSC callback in this file asserts before dereferencing the optional, and
  // dereferencing an empty std::optional is undefined behavior.
  assert(_fuzz_data_provider.has_value());
  // RAM disk is ready until ejected
  if (ejected[lun]) {
    // Additional Sense 3A-00 is NOT_FOUND
    tud_msc_set_sense(lun, SCSI_SENSE_NOT_READY, 0x3a, 0x00);
    return false;
  }
  return _fuzz_data_provider->ConsumeBool();
}
// Invoked when received SCSI_CMD_READ_CAPACITY_10 and
// SCSI_CMD_READ_FORMAT_CAPACITY to determine the disk size Application update
// block count and block size
// Invoked when received SCSI_CMD_READ_CAPACITY_10 and
// SCSI_CMD_READ_FORMAT_CAPACITY to determine the disk size. Application
// updates block count and block size.
void tud_msc_capacity_cb(uint8_t lun, uint32_t *block_count,
                         uint16_t *block_size) {
  (void)lun;
  // Guard against an unseeded provider, matching the other MSC callbacks
  // in this file (dereferencing an empty std::optional is UB).
  assert(_fuzz_data_provider.has_value());
  *block_count = _fuzz_data_provider->ConsumeIntegral<uint32_t>();
  *block_size = _fuzz_data_provider->ConsumeIntegral<uint16_t>();
}
// Invoked when received Start Stop Unit command
// - Start = 0 : stopped power mode, if load_eject = 1 : unload disk storage
// - Start = 1 : active mode, if load_eject = 1 : load disk storage
bool tud_msc_start_stop_cb(uint8_t lun, uint8_t power_condition, bool start,
                           bool load_eject) {
  (void)power_condition;
  assert(_fuzz_data_provider.has_value());
  if (load_eject) {
    if (start) {
      // load disk storage
    } else {
      // unload disk storage: mark the LUN ejected so that
      // tud_msc_test_unit_ready_cb reports it as not ready
      ejected[lun] = true;
    }
  }
  // Command success/failure status is fuzz-controlled.
  return _fuzz_data_provider->ConsumeBool();
}
// Callback invoked when received READ10 command.
// Copy disk's data to buffer (up to bufsize) and return number of copied bytes.
int32_t tud_msc_read10_cb(uint8_t lun, uint32_t lba, uint32_t offset,
                          void *buffer, uint32_t bufsize) {
  assert(_fuzz_data_provider.has_value());
  (void)lun;
  (void)lba;
  (void)offset;
  // Produce between 0 and bufsize fuzz-controlled bytes; the length is capped
  // at bufsize so the memcpy below can never overrun the caller's buffer.
  std::vector<uint8_t> consumed_buffer = _fuzz_data_provider->ConsumeBytes<uint8_t>(
      _fuzz_data_provider->ConsumeIntegralInRange<uint32_t>(0, bufsize));
  memcpy(buffer, consumed_buffer.data(), consumed_buffer.size());
  // Sometimes return an error code;
  if (_fuzz_data_provider->ConsumeBool()) {
    return _fuzz_data_provider->ConsumeIntegralInRange(
        std::numeric_limits<int32_t>::min(), -1);
  }
  return consumed_buffer.size();
}
// Whether the medium reports as writable — fuzz-controlled.
bool tud_msc_is_writable_cb(uint8_t lun) {
  assert(_fuzz_data_provider.has_value());
  (void)lun;
  return _fuzz_data_provider->ConsumeBool();
}
// Callback invoked when received WRITE10 command.
// Process data in buffer to disk's storage and return number of written bytes
int32_t tud_msc_write10_cb(uint8_t lun, uint32_t lba, uint32_t offset,
                           uint8_t *buffer, uint32_t bufsize) {
  // Ignore these as they are outputs and don't affect the return value.
  (void)lun;
  (void)lba;
  (void)offset;
  (void)buffer;
  assert(_fuzz_data_provider.has_value());
  // -ve error codes -> bufsize.
  // The written-byte count (or a negative error) is fully fuzz-controlled.
  return _fuzz_data_provider->ConsumeIntegralInRange<int32_t>(
      std::numeric_limits<int32_t>::min(), bufsize);
}
// Callback invoked when received an SCSI command not in built-in list below
// - READ_CAPACITY10, READ_FORMAT_CAPACITY, INQUIRY, MODE_SENSE6, REQUEST_SENSE
// - READ10 and WRITE10 has their own callbacks
// Invoked for SCSI commands without a built-in handler. Known commands are
// accepted with no extra data; unknown ones set ILLEGAL_REQUEST sense and
// return a fuzz-controlled negative status.
int32_t tud_msc_scsi_cb(uint8_t lun, uint8_t const scsi_cmd[16], void *buffer,
                        uint16_t bufsize) {
  (void)buffer;
  (void)bufsize;
  assert(_fuzz_data_provider.has_value());
  switch (scsi_cmd[0]) {
    // Commands the stack/harness understands: no additional response data.
    case SCSI_CMD_TEST_UNIT_READY:
    case SCSI_CMD_INQUIRY:
    case SCSI_CMD_MODE_SELECT_6:
    case SCSI_CMD_MODE_SENSE_6:
    case SCSI_CMD_START_STOP_UNIT:
    case SCSI_CMD_PREVENT_ALLOW_MEDIUM_REMOVAL:
    case SCSI_CMD_READ_CAPACITY_10:
    case SCSI_CMD_REQUEST_SENSE:
    case SCSI_CMD_READ_FORMAT_CAPACITY:
    case SCSI_CMD_READ_10:
    case SCSI_CMD_WRITE_10:
      return 0;
    default:
      // Set Sense = Invalid Command Operation
      tud_msc_set_sense(lun, SCSI_SENSE_ILLEGAL_REQUEST, 0x20, 0x00);
      return _fuzz_data_provider->ConsumeIntegralInRange<int32_t>(
          std::numeric_limits<int32_t>::min(), -1);
  }
}
}
#endif

View File

@ -0,0 +1,82 @@
/*
* The MIT License (MIT)
*
* Copyright (c) 2022 Nathaniel Brough
*
* Permission is hereby granted, free of charge, to any person obtaining a copy
* of this software and associated documentation files (the "Software"), to deal
* in the Software without restriction, including without limitation the rights
* to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
* copies of the Software, and to permit persons to whom the Software is
* furnished to do so, subject to the following conditions:
*
* The above copyright notice and this permission notice shall be included in
* all copies or substantial portions of the Software.
*
* THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
* IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
* FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
* AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
* LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
* OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN
* THE SOFTWARE.
*
*/
#include "tusb_config.h"
#if defined(CFG_TUD_ECM_RNDIS) || defined(CFG_TUD_NCM)
#include "class/net/net_device.h"
#include "fuzz_private.h"
#include <cassert>
#include <cstdint>
#include <vector>
#include "lwip/sys.h"
extern "C" {
// Frame contents are ignored; only the accept/reject decision is fuzzed.
bool tud_network_recv_cb(const uint8_t *src, uint16_t size) {
  assert(_fuzz_data_provider.has_value());
  (void)src;
  (void)size;
  return _fuzz_data_provider->ConsumeBool();
}
// client must provide this: copy from network stack packet pointer to dst
// client must provide this: copy from network stack packet pointer to dst.
// Returns the number of bytes actually written to dst.
uint16_t tud_network_xmit_cb(uint8_t *dst, void *ref, uint16_t arg) {
  (void)ref;
  (void)arg;
  assert(_fuzz_data_provider.has_value());
  uint16_t requested = _fuzz_data_provider->ConsumeIntegral<uint16_t>();
  // The provider may be exhausted and hand back fewer bytes than requested.
  std::vector<uint8_t> temp = _fuzz_data_provider->ConsumeBytes<uint8_t>(requested);
  memcpy(dst, temp.data(), temp.size());
  // Report the count actually copied (not `requested`), otherwise the stack
  // would transmit uninitialized tail bytes of dst.
  return static_cast<uint16_t>(temp.size());
}
/* lwip has provision for using a mutex, when applicable */
// No-op protection: the fuzz harness runs single-threaded.
sys_prot_t sys_arch_protect(void) { return 0; }
void sys_arch_unprotect(sys_prot_t pval) { (void)pval; }
//------------- ECM/RNDIS -------------//
// client must provide this: initialize any network state back to the beginning
void tud_network_init_cb(void) {
  // NoOp.
}
// client must provide this: 48-bit MAC address
// TODO removed later since it is not part of tinyusb stack
const uint8_t tud_network_mac_address[6] = {0};
//------------- NCM -------------//
// callback to client providing optional indication of internal state of network
// driver
void tud_network_link_state_cb(bool state) {
  (void)state;
  // NoOp.
}
}
#endif

View File

@ -0,0 +1,161 @@
# ---------------------------------------
# Common make rules for all examples
# ---------------------------------------
# Set all as default goal
.DEFAULT_GOAL := all
# ---------------------------------------
# Compiler Flags
# ---------------------------------------
LIBS_GCC ?= -lm
# libc
LIBS += $(LIBS_GCC)
ifneq ($(BOARD), spresense)
LIBS += -lc -Wl,-Bstatic -lc++ -Wl,-Bdynamic
endif
# TinyUSB Stack source
SRC_C += \
	src/tusb.c \
	src/common/tusb_fifo.c \
	src/device/usbd.c \
	src/device/usbd_control.c \
	src/class/audio/audio_device.c \
	src/class/cdc/cdc_device.c \
	src/class/dfu/dfu_device.c \
	src/class/dfu/dfu_rt_device.c \
	src/class/hid/hid_device.c \
	src/class/midi/midi_device.c \
	src/class/msc/msc_device.c \
	src/class/net/ecm_rndis_device.c \
	src/class/net/ncm_device.c \
	src/class/usbtmc/usbtmc_device.c \
	src/class/video/video_device.c \
	src/class/vendor/vendor_device.c
# Fuzzers are c++
SRC_CXX += \
	test/fuzz/dcd_fuzz.cc \
	test/fuzz/fuzz.cc \
	test/fuzz/msc_fuzz.cc \
	test/fuzz/net_fuzz.cc \
	test/fuzz/usbd_fuzz.cc
# TinyUSB stack include
INC += $(TOP)/src
CFLAGS += $(addprefix -I,$(INC))
CXXFLAGS += -std=c++17
# LTO makes it difficult to analyze map file for optimizing size purpose
# We will run this option in ci
ifeq ($(NO_LTO),1)
CFLAGS := $(filter-out -flto,$(CFLAGS))
endif
ifneq ($(LD_FILE),)
LDFLAGS_LD_FILE ?= -Wl,-T,$(TOP)/$(LD_FILE)
endif
# NOTE(review): -Wl,-gc-sections passes single-dash "-gc-sections" to the
# linker; the conventional spelling is --gc-sections — confirm lld accepts it.
LDFLAGS += $(CFLAGS) $(LDFLAGS_LD_FILE) -fuse-ld=lld -Wl,-Map=$@.map -Wl,--cref -Wl,-gc-sections
# NOTE(review): empty conditional — the nanolib handling appears to have been
# removed; consider deleting this block.
ifneq ($(SKIP_NANOLIB), 1)
endif
ASFLAGS += $(CFLAGS)
# Assembly files can be name with upper case .S, convert it to .s
SRC_S := $(SRC_S:.S=.s)
# Due to GCC LTO bug https://bugs.launchpad.net/gcc-arm-embedded/+bug/1747966
# assembly file should be placed first in linking order
# '_asm' suffix is added to object of assembly file
OBJ += $(addprefix $(BUILD)/obj/, $(SRC_S:.s=_asm.o))
OBJ += $(addprefix $(BUILD)/obj/, $(SRC_C:.c=.o))
OBJ += $(addprefix $(BUILD)/obj/, $(SRC_CXX:.cc=_cxx.o))
# Verbose mode
ifeq ("$(V)","1")
$(info CFLAGS  $(CFLAGS) ) $(info )
$(info LDFLAGS $(LDFLAGS)) $(info )
$(info ASFLAGS $(ASFLAGS)) $(info )
endif
# ---------------------------------------
# Rules
# ---------------------------------------
all: $(BUILD)/$(PROJECT)
OBJ_DIRS = $(sort $(dir $(OBJ)))
$(OBJ): | $(OBJ_DIRS)
$(OBJ_DIRS):
ifeq ($(CMDEXE),1)
	@$(MKDIR) $(subst /,\,$@)
else
	@$(MKDIR) -p $@
endif
$(BUILD)/$(PROJECT): $(OBJ)
	@echo LINK $@
	@ $(CXX) -o $@ $(LIB_FUZZING_ENGINE) $^ $(LIBS) $(LDFLAGS)
# We set vpath to point to the top of the tree so that the source files
# can be located. By following this scheme, it allows a single build rule
# to be used to compile all .c files.
vpath %.c . $(TOP)
$(BUILD)/obj/%.o: %.c
	@echo CC $(notdir $@)
	@$(CC) $(CFLAGS) -c -MD -o $@ $<
# All cpp srcs
vpath %.cc . $(TOP)
$(BUILD)/obj/%_cxx.o: %.cc
	@echo CXX $(notdir $@)
	@$(CXX) $(CFLAGS) $(CXXFLAGS) -c -MD -o $@ $<
# ASM sources lower case .s
vpath %.s . $(TOP)
$(BUILD)/obj/%_asm.o: %.s
	@echo AS $(notdir $@)
	@$(CC) -x assembler-with-cpp $(ASFLAGS) -c -o $@ $<
# ASM sources upper case .S
vpath %.S . $(TOP)
$(BUILD)/obj/%_asm.o: %.S
	@echo AS $(notdir $@)
	@$(CC) -x assembler-with-cpp $(ASFLAGS) -c -o $@ $<
.PHONY: clean
clean:
ifeq ($(CMDEXE),1)
	rd /S /Q $(subst /,\,$(BUILD))
else
	$(RM) -rf $(BUILD)
endif
# ---------------- GNU Make End -----------------------
# get dependencies
.PHONY: get-deps
get-deps:
ifdef DEPS_SUBMODULES
	git -C $(TOP) submodule update --init $(DEPS_SUBMODULES)
endif
size: $(BUILD)/$(PROJECT)
	-@echo ''
	@$(SIZE) $<
	-@echo ''
# linkermap must be install previously at https://github.com/hathach/linkermap
linkermap: $(BUILD)/$(PROJECT)
	@linkermap -v $<.map
# Print out the value of a make variable.
# https://stackoverflow.com/questions/16467718/how-to-print-out-a-variable-in-makefile
print-%:
	@echo $* = $($*)

View File

@ -0,0 +1,73 @@
/*
* The MIT License (MIT)
*
* Copyright (c) 2022 Nathaniel Brough
*
* Permission is hereby granted, free of charge, to any person obtaining a copy
* of this software and associated documentation files (the "Software"), to deal
* in the Software without restriction, including without limitation the rights
* to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
* copies of the Software, and to permit persons to whom the Software is
* furnished to do so, subject to the following conditions:
*
* The above copyright notice and this permission notice shall be included in
* all copies or substantial portions of the Software.
*
* THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
* IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
* FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
* AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
* LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
* OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN
* THE SOFTWARE.
*
*/
#include "fuzz/fuzz_private.h"
#include "tusb.h"
// #include "usb_descriptors.h"
#ifndef CFG_FUZZ_MAX_STRING_LEN
#define CFG_FUZZ_MAX_STRING_LEN 1000
#endif
extern "C" {
/* TODO: Implement a fuzzed version of this.
uint8_t const *tud_descriptor_bos_cb(void) { }
*/
/* TODO: Implement a fuzzed version of this.
uint8_t const *tud_descriptor_device_qualifier_cb(void) {}
*/
/* TODO: Implement a fuzzed version of this.
uint8_t const *tud_descriptor_other_speed_configuration_cb(uint8_t index) {}
*/
// Bus event callbacks: the fuzz target keeps no state across mount/suspend
// transitions, so these are intentionally empty.
void tud_mount_cb(void) {
  // NOOP
}
void tud_umount_cb(void) {
  // NOOP
}
void tud_suspend_cb(bool remote_wakeup_en) {
  (void)remote_wakeup_en;
  // NOOP
}
void tud_resume_cb(void) {
  // NOOP
}
/* TODO: Implement a fuzzed version of this.
bool tud_vendor_control_xfer_cb(uint8_t rhport, uint8_t stage,
                                tusb_control_request_t const *request) {}
*/
/* TODO: Implement a fuzzed version of this.
uint16_t const *tud_descriptor_string_cb(uint8_t index, uint16_t langid) {}
*/
}

View File

@ -0,0 +1,3 @@
#!/bin/bash
# Thin wrapper that forwards all CLI arguments to the vendored Ceedling runner.
# "$@" preserves each argument's word boundaries; the previous $* re-split
# quoted arguments containing spaces. exec replaces the shell so ceedling's
# exit status is returned directly.
exec ruby vendor/ceedling/bin/ceedling "$@"

View File

@ -0,0 +1,121 @@
---
# Notes:
# Sample project C code is not presently written to produce a release artifact.
# As such, release build options are disabled.
# This sample, therefore, only demonstrates running a collection of unit tests.
:project:
:use_exceptions: TRUE
:use_mocks: TRUE
:use_test_preprocessor: TRUE
:use_auxiliary_dependencies: TRUE
:use_deep_dependencies: TRUE
:build_root: _build
# :release_build: TRUE
:test_file_prefix: test_
:which_ceedling: vendor/ceedling
:ceedling_version: 0.31.1
:default_tasks:
- test:all
#:test_build:
# :use_assembly: TRUE
#:release_build:
# :output: MyApp.out
# :use_assembly: FALSE
:environment:
:extension:
:executable: .out
:paths:
:test:
- +:test/**
- -:test/support
:source:
- ../../src/**
:support:
- test/support
:defines:
# in order to add common defines:
# 1) remove the trailing [] from the :common: section
# 2) add entries to the :common: section (e.g. :test: has TEST defined)
:common: &common_defines
- _UNITY_TEST_
:test:
- *common_defines
:test_preprocess:
- *common_defines
:cmock:
:mock_prefix: mock_
:when_no_prototypes: :warn
:enforce_strict_ordering: TRUE
:plugins:
- :ignore
- :ignore_arg
- :return_thru_ptr
- :callback
- :array
:treat_as:
uint8: HEX8
uint16: HEX16
uint32: UINT32
int8: INT8
bool: UINT8
# Add -gcov to the plugins list to make use of the gcov plugin
# You will need to have gcov and gcovr both installed to make it work.
# For more information on these options, see docs in plugins/gcov
:gcov:
:html_report: TRUE
:html_report_type: detailed
:html_medium_threshold: 75
:html_high_threshold: 90
:xml_report: FALSE
:tools:
:test_compiler:
:executable: clang
:name: 'clang compiler'
:arguments:
- -I"$": COLLECTION_PATHS_TEST_TOOLCHAIN_INCLUDE #expands to -I search paths
- -I"$": COLLECTION_PATHS_TEST_SUPPORT_SOURCE_INCLUDE_VENDOR #expands to -I search paths
- -D$: COLLECTION_DEFINES_TEST_AND_VENDOR #expands to all -D defined symbols
- -fsanitize=address
- -c ${1} #source code input file (Ruby method call param list sub)
- -o ${2} #object file output (Ruby method call param list sub)
:test_linker:
:executable: clang
:name: 'clang linker'
:arguments:
- -fsanitize=address
- ${1} #list of object files to link (Ruby method call param list sub)
- -o ${2} #executable file output (Ruby method call param list sub)
# LIBRARIES
# These libraries are automatically injected into the build process. Those specified as
# common will be used in all types of builds. Otherwise, libraries can be injected in just
# tests or releases. These options are MERGED with the options in supplemental yaml files.
:libraries:
:placement: :end
:flag: "${1}" # or "-L ${1}" for example
:common: &common_libraries []
:test:
- *common_libraries
:release:
- *common_libraries
:plugins:
:load_paths:
- vendor/ceedling/plugins
:enabled:
- stdout_pretty_tests_report
- module_generator
- raw_output_report
- colour_report
...

View File

@ -0,0 +1,274 @@
/*
* The MIT License (MIT)
*
* Copyright (c) 2019, hathach (tinyusb.org)
*
* Permission is hereby granted, free of charge, to any person obtaining a copy
* of this software and associated documentation files (the "Software"), to deal
* in the Software without restriction, including without limitation the rights
* to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
* copies of the Software, and to permit persons to whom the Software is
* furnished to do so, subject to the following conditions:
*
* The above copyright notice and this permission notice shall be included in
* all copies or substantial portions of the Software.
*
* THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
* IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
* FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
* AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
* LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
* OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN
* THE SOFTWARE.
*
* This file is part of the TinyUSB stack.
*/
#include "unity.h"
// Files to test
#include "osal/osal.h"
#include "tusb_fifo.h"
#include "tusb.h"
#include "usbd.h"
TEST_FILE("usbd_control.c")
TEST_FILE("msc_device.c")
// Mock File
#include "mock_dcd.h"
//--------------------------------------------------------------------+
// MACRO TYPEDEF CONSTANT ENUM DECLARATION
//--------------------------------------------------------------------+
enum
{
EDPT_CTRL_OUT = 0x00,
EDPT_CTRL_IN = 0x80,
EDPT_MSC_OUT = 0x01,
EDPT_MSC_IN = 0x81,
};
uint8_t const rhport = 0;
enum
{
ITF_NUM_MSC,
ITF_NUM_TOTAL
};
#define CONFIG_TOTAL_LEN (TUD_CONFIG_DESC_LEN + TUD_MSC_DESC_LEN)
uint8_t const data_desc_configuration[] =
{
// Config number, interface count, string index, total length, attribute, power in mA
TUD_CONFIG_DESCRIPTOR(1, ITF_NUM_TOTAL, 0, CONFIG_TOTAL_LEN, TUSB_DESC_CONFIG_ATT_REMOTE_WAKEUP, 100),
// Interface number, string index, EP Out & EP In address, EP size
TUD_MSC_DESCRIPTOR(ITF_NUM_MSC, 0, EDPT_MSC_OUT, EDPT_MSC_IN, TUD_OPT_HIGH_SPEED ? 512 : 64),
};
tusb_control_request_t const request_set_configuration =
{
.bmRequestType = 0x00,
.bRequest = TUSB_REQ_SET_CONFIGURATION,
.wValue = 1,
.wIndex = 0,
.wLength = 0
};
uint8_t const* desc_configuration;
enum
{
DISK_BLOCK_NUM = 16, // 8KB is the smallest size that windows allow to mount
DISK_BLOCK_SIZE = 512
};
uint8_t msc_disk[DISK_BLOCK_NUM][DISK_BLOCK_SIZE];
// Invoked when received SCSI_CMD_INQUIRY
// Application fill vendor id, product id and revision with string up to 8, 16, 4 characters respectively
void tud_msc_inquiry_cb(uint8_t lun, uint8_t vendor_id[8], uint8_t product_id[16], uint8_t product_rev[4])
{
  (void) lun;
  // Fixed identity strings for the RAM-disk fixture; the NUL terminator is
  // deliberately not copied (SCSI inquiry fields are fixed-width, not C strings).
  const char vid[] = "TinyUSB";
  const char pid[] = "Mass Storage";
  const char rev[] = "1.0";
  memcpy(vendor_id  , vid, sizeof(vid) - 1);
  memcpy(product_id , pid, sizeof(pid) - 1);
  memcpy(product_rev, rev, sizeof(rev) - 1);
}
// Invoked when received Test Unit Ready command.
// return true allowing host to read/write this LUN e.g SD card inserted
bool tud_msc_test_unit_ready_cb(uint8_t lun)
{
  (void) lun;
  // The in-memory RAM disk backing this fixture never ejects.
  return true;
}
// Invoked when received SCSI_CMD_READ_CAPACITY_10 and SCSI_CMD_READ_FORMAT_CAPACITY to determine the disk size
// Application update block count and block size
void tud_msc_capacity_cb(uint8_t lun, uint32_t* block_count, uint16_t* block_size)
{
  (void) lun;
  // Report the fixed geometry of the in-memory disk (16 blocks x 512 bytes).
  *block_count = DISK_BLOCK_NUM;
  *block_size  = DISK_BLOCK_SIZE;
}
// Invoked when received Start Stop Unit command
// - Start = 0 : stopped power mode, if load_eject = 1 : unload disk storage
// - Start = 1 : active mode, if load_eject = 1 : load disk storage
// Always succeed: the fixture performs no power transitions and has no
// removable media. All parameters are explicitly voided (start/load_eject were
// previously unused without a cast, triggering -Wunused-parameter warnings).
bool tud_msc_start_stop_cb(uint8_t lun, uint8_t power_condition, bool start, bool load_eject)
{
  (void) lun;
  (void) power_condition;
  (void) start;
  (void) load_eject;
  return true;
}
// Callback invoked when received READ10 command.
// Copy disk's data to buffer (up to bufsize) and return number of copied bytes.
int32_t tud_msc_read10_cb(uint8_t lun, uint32_t lba, uint32_t offset, void* buffer, uint32_t bufsize)
{
  (void) lun;
  // NOTE(review): no bounds check on lba/offset/bufsize — acceptable for this
  // fixture only while the tests request reads within msc_disk.
  uint8_t const* addr = msc_disk[lba] + offset;
  memcpy(buffer, addr, bufsize);
  return bufsize;
}
// Callback invoked when received WRITE10 command.
// Process data in buffer to disk's storage and return number of written bytes
int32_t tud_msc_write10_cb(uint8_t lun, uint32_t lba, uint32_t offset, uint8_t* buffer, uint32_t bufsize)
{
  (void) lun;
  // NOTE(review): no bounds check on lba/offset/bufsize — acceptable for this
  // fixture only while the tests request writes within msc_disk.
  uint8_t* addr = msc_disk[lba] + offset;
  memcpy(addr, buffer, bufsize);
  return bufsize;
}
// Callback invoked when received an SCSI command not in built-in list below
// - READ_CAPACITY10, READ_FORMAT_CAPACITY, INQUIRY, MODE_SENSE6, REQUEST_SENSE
// - READ10 and WRITE10 has their own callbacks
int32_t tud_msc_scsi_cb (uint8_t lun, uint8_t const scsi_cmd[16], void* buffer, uint16_t bufsize)
{
  // read10 & write10 has their own callback and MUST not be handled here
  (void) lun;
  (void) scsi_cmd;
  (void) buffer;
  (void) bufsize;
  // This fixture implements no additional SCSI commands: always succeed with
  // zero response bytes. (Removed the dead `response` local that was assigned
  // but never used; parameters are now explicitly voided.)
  return 0;
}
//--------------------------------------------------------------------+
//
//--------------------------------------------------------------------+
uint8_t const * tud_descriptor_device_cb(void)
{
  // This fixture does not serve a device descriptor.
  uint8_t const *descriptor = NULL;
  return descriptor;
}
// Serves the configuration installed by the current test; the fixture has a
// single configuration, so the index parameter is ignored (now explicitly
// voided to avoid an unused-parameter warning).
uint8_t const * tud_descriptor_configuration_cb(uint8_t index)
{
  (void) index;
  return desc_configuration;
}
// No string descriptors in this fixture. Both parameters are ignored; index is
// now explicitly voided (previously only langid was, leaving an
// unused-parameter warning).
uint16_t const* tud_descriptor_string_cb(uint8_t index, uint16_t langid)
{
  (void) index;
  (void) langid;
  return NULL;
}
void setUp(void)
{
  // Interrupt enable/disable are irrelevant to these tests — ignore the mocks.
  dcd_int_disable_Ignore();
  dcd_int_enable_Ignore();
  // One-time stack initialization on the first test of the run.
  if ( !tusb_inited() )
  {
    dcd_init_Expect(rhport);
    tusb_init();
  }
  // Start every test from a freshly-reset high-speed bus.
  dcd_event_bus_reset(rhport, TUSB_SPEED_HIGH, false);
  tud_task();
}
void tearDown(void)
{
  // Nothing to clean up: setUp() re-resets the bus before each test.
}
//--------------------------------------------------------------------+
//
//--------------------------------------------------------------------+
// End-to-end MSC READ10 flow: SET_CONFIGURATION opens the MSC endpoints, a CBW
// arrives on the OUT endpoint, then the stack sends data, the CSW status, and
// queues the next CBW.
void test_msc(void)
{
  // Read 1 LBA = 0, Block count = 1
  msc_cbw_t cbw_read10 =
  {
    .signature   = MSC_CBW_SIGNATURE,
    .tag         = 0xCAFECAFE,
    .total_bytes = 512,
    .lun         = 0,
    .dir         = TUSB_DIR_IN_MASK,
    .cmd_len     = sizeof(scsi_read10_t)
  };
  scsi_read10_t cmd_read10 =
  {
    .cmd_code    = SCSI_CMD_READ_10,
    .lba         = tu_htonl(0),
    .block_count = tu_htons(1)
  };
  memcpy(cbw_read10.command, &cmd_read10, cbw_read10.cmd_len);
  desc_configuration = data_desc_configuration;
  // First endpoint descriptor: skip config + interface descriptors.
  uint8_t const* desc_ep = tu_desc_next(tu_desc_next(desc_configuration));
  dcd_event_setup_received(rhport, (uint8_t*) &request_set_configuration, false);
  // open endpoints
  dcd_edpt_open_ExpectAndReturn(rhport, (tusb_desc_endpoint_t const *) desc_ep, true);
  dcd_edpt_open_ExpectAndReturn(rhport, (tusb_desc_endpoint_t const *) tu_desc_next(desc_ep), true);
  // Prepare SCSI command
  dcd_edpt_xfer_ExpectAndReturn(rhport, EDPT_MSC_OUT, NULL, sizeof(msc_cbw_t), true);
  dcd_edpt_xfer_IgnoreArg_buffer();
  dcd_edpt_xfer_ReturnMemThruPtr_buffer( (uint8_t*) &cbw_read10, sizeof(msc_cbw_t));
  // command received
  dcd_event_xfer_complete(rhport, EDPT_MSC_OUT, sizeof(msc_cbw_t), 0, true);
  // control status
  dcd_edpt_xfer_ExpectAndReturn(rhport, EDPT_CTRL_IN, NULL, 0, true);
  // SCSI Data transfer
  dcd_edpt_xfer_ExpectAndReturn(rhport, EDPT_MSC_IN, NULL, 512, true);
  dcd_edpt_xfer_IgnoreArg_buffer();
  dcd_event_xfer_complete(rhport, EDPT_MSC_IN, 512, 0, true); // complete
  // SCSI Status
  dcd_edpt_xfer_ExpectAndReturn(rhport, EDPT_MSC_IN, NULL, 13, true);
  dcd_edpt_xfer_IgnoreArg_buffer();
  dcd_event_xfer_complete(rhport, EDPT_MSC_IN, 13, 0, true);
  // Prepare for next command
  dcd_edpt_xfer_ExpectAndReturn(rhport, EDPT_MSC_OUT, NULL, sizeof(msc_cbw_t), true);
  dcd_edpt_xfer_IgnoreArg_buffer();
  tud_task();
}

View File

@ -0,0 +1,244 @@
/*
* The MIT License (MIT)
*
* Copyright (c) 2019, Ha Thach (tinyusb.org)
*
* Permission is hereby granted, free of charge, to any person obtaining a copy
* of this software and associated documentation files (the "Software"), to deal
* in the Software without restriction, including without limitation the rights
* to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
* copies of the Software, and to permit persons to whom the Software is
* furnished to do so, subject to the following conditions:
*
* The above copyright notice and this permission notice shall be included in
* all copies or substantial portions of the Software.
*
* THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
* IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
* FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
* AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
* LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
* OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN
* THE SOFTWARE.
*/
#include "unity.h"
// Files to test
#include "osal/osal.h"
#include "tusb_fifo.h"
#include "tusb.h"
#include "usbd.h"
TEST_FILE("usbd_control.c")
// Mock File
#include "mock_dcd.h"
#include "mock_msc_device.h"
//--------------------------------------------------------------------+
// MACRO TYPEDEF CONSTANT ENUM DECLARATION
//--------------------------------------------------------------------+
enum
{
EDPT_CTRL_OUT = 0x00,
EDPT_CTRL_IN = 0x80
};
uint8_t const rhport = 0;
tusb_desc_device_t const data_desc_device =
{
.bLength = sizeof(tusb_desc_device_t),
.bDescriptorType = TUSB_DESC_DEVICE,
.bcdUSB = 0x0200,
// Use Interface Association Descriptor (IAD) for CDC
// As required by USB Specs IAD's subclass must be common class (2) and protocol must be IAD (1)
.bDeviceClass = TUSB_CLASS_MISC,
.bDeviceSubClass = MISC_SUBCLASS_COMMON,
.bDeviceProtocol = MISC_PROTOCOL_IAD,
.bMaxPacketSize0 = CFG_TUD_ENDPOINT0_SIZE,
.idVendor = 0xCafe,
.idProduct = 0xCafe,
.bcdDevice = 0x0100,
.iManufacturer = 0x01,
.iProduct = 0x02,
.iSerialNumber = 0x03,
.bNumConfigurations = 0x01
};
uint8_t const data_desc_configuration[] =
{
// Config number, interface count, string index, total length, attribute, power in mA
TUD_CONFIG_DESCRIPTOR(1, 0, 0, TUD_CONFIG_DESC_LEN, TUSB_DESC_CONFIG_ATT_REMOTE_WAKEUP, 100),
};
tusb_control_request_t const req_get_desc_device =
{
.bmRequestType = 0x80,
.bRequest = TUSB_REQ_GET_DESCRIPTOR,
.wValue = (TUSB_DESC_DEVICE << 8),
.wIndex = 0x0000,
.wLength = 64
};
tusb_control_request_t const req_get_desc_configuration =
{
.bmRequestType = 0x80,
.bRequest = TUSB_REQ_GET_DESCRIPTOR,
.wValue = (TUSB_DESC_CONFIGURATION << 8),
.wIndex = 0x0000,
.wLength = 256
};
uint8_t const* desc_device;
uint8_t const* desc_configuration;
//--------------------------------------------------------------------+
//
//--------------------------------------------------------------------+
// Invoked by the usbd core on GET_DESCRIPTOR(Device); returns the
// test-controlled global. Returning NULL makes the stack stall EP0
// (exercised by test_usbd_get_device_descriptor_null).
uint8_t const * tud_descriptor_device_cb(void)
{
return desc_device;
}
// Invoked by the usbd core on GET_DESCRIPTOR(Configuration); returns the
// test-controlled global (NULL makes the stack stall EP0).
// index: configuration index; this single-configuration fixture ignores it.
uint8_t const * tud_descriptor_configuration_cb(uint8_t index)
{
(void) index; // suppress unused-parameter warning, consistent with tud_descriptor_string_cb
return desc_configuration;
}
// String descriptor callback: this suite provides no string descriptors,
// so always return NULL (stack will stall the request).
uint16_t const* tud_descriptor_string_cb(uint8_t index, uint16_t langid)
{
(void) langid;
return NULL;
}
// Unity fixture: runs before every test. Ignores interrupt mask mock calls
// and initializes TinyUSB exactly once (with the one-time init expectations).
void setUp(void)
{
dcd_int_disable_Ignore();
dcd_int_enable_Ignore();
if ( !tusb_inited() )
{
mscd_init_Expect();
dcd_init_Expect(rhport);
tusb_init();
}
}
// Unity fixture: runs after every test; nothing to clean up.
void tearDown(void)
{
}
//--------------------------------------------------------------------+
// Get Descriptor
//--------------------------------------------------------------------+
//------------- Device -------------//
// GET_DESCRIPTOR(Device): the full device descriptor must be sent on EP0 IN,
// followed by a zero-length OUT status stage and the status-complete callback.
void test_usbd_get_device_descriptor(void)
{
desc_device = (uint8_t const *) &data_desc_device;
dcd_event_setup_received(rhport, (uint8_t*) &req_get_desc_device, false);
// data
dcd_edpt_xfer_ExpectWithArrayAndReturn(rhport, 0x80, (uint8_t*)&data_desc_device, sizeof(tusb_desc_device_t), sizeof(tusb_desc_device_t), true);
dcd_event_xfer_complete(rhport, EDPT_CTRL_IN, sizeof(tusb_desc_device_t), 0, false);
// status
dcd_edpt_xfer_ExpectAndReturn(rhport, EDPT_CTRL_OUT, NULL, 0, true);
dcd_event_xfer_complete(rhport, EDPT_CTRL_OUT, 0, 0, false);
dcd_edpt0_status_complete_ExpectWithArray(rhport, &req_get_desc_device, 1);
tud_task();
}
// GET_DESCRIPTOR(Device) when the application returns no descriptor:
// the stack must stall both EP0 directions.
void test_usbd_get_device_descriptor_null(void)
{
desc_device = NULL;
dcd_event_setup_received(rhport, (uint8_t*) &req_get_desc_device, false);
dcd_edpt_stall_Expect(rhport, EDPT_CTRL_OUT);
dcd_edpt_stall_Expect(rhport, EDPT_CTRL_IN);
tud_task();
}
//------------- Configuration -------------//
// GET_DESCRIPTOR(Configuration): wTotalLength bytes must be sent on EP0 IN,
// followed by a zero-length OUT status stage and the status-complete callback.
void test_usbd_get_configuration_descriptor(void)
{
desc_configuration = data_desc_configuration;
uint16_t total_len = ((tusb_desc_configuration_t const*) data_desc_configuration)->wTotalLength;
dcd_event_setup_received(rhport, (uint8_t*) &req_get_desc_configuration, false);
// data
dcd_edpt_xfer_ExpectWithArrayAndReturn(rhport, 0x80, (uint8_t*) data_desc_configuration, total_len, total_len, true);
dcd_event_xfer_complete(rhport, EDPT_CTRL_IN, total_len, 0, false);
// status
dcd_edpt_xfer_ExpectAndReturn(rhport, EDPT_CTRL_OUT, NULL, 0, true);
dcd_event_xfer_complete(rhport, EDPT_CTRL_OUT, 0, 0, false);
dcd_edpt0_status_complete_ExpectWithArray(rhport, &req_get_desc_configuration, 1);
tud_task();
}
// GET_DESCRIPTOR(Configuration) when the application returns no descriptor:
// the stack must stall both EP0 directions.
void test_usbd_get_configuration_descriptor_null(void)
{
desc_configuration = NULL;
dcd_event_setup_received(rhport, (uint8_t*) &req_get_desc_configuration, false);
dcd_edpt_stall_Expect(rhport, EDPT_CTRL_OUT);
dcd_edpt_stall_Expect(rhport, EDPT_CTRL_IN);
tud_task();
}
//--------------------------------------------------------------------+
// Control ZLP
//--------------------------------------------------------------------+
// Control IN transfer whose length (128) is an exact multiple of the EP0
// packet size (64) and shorter than the host request (256): after the two
// full packets the stack must send a zero-length packet to terminate the
// data stage before the OUT status stage.
void test_usbd_control_in_zlp(void)
{
// 128 byte total len, with EP0 size = 64, and request length = 256
// ZLP must be return
uint8_t zlp_desc_configuration[CFG_TUD_ENDPOINT0_SIZE*2] =
{
// Config number, interface count, string index, total length, attribute, power in mA
TUD_CONFIG_DESCRIPTOR(1, 0, 0, CFG_TUD_ENDPOINT0_SIZE*2, TUSB_DESC_CONFIG_ATT_REMOTE_WAKEUP, 100),
};
desc_configuration = zlp_desc_configuration;
// request, then 1st, 2nd xact + ZLP + status
dcd_event_setup_received(rhport, (uint8_t*) &req_get_desc_configuration, false);
// 1st transaction
dcd_edpt_xfer_ExpectWithArrayAndReturn(rhport, EDPT_CTRL_IN,
zlp_desc_configuration, CFG_TUD_ENDPOINT0_SIZE, CFG_TUD_ENDPOINT0_SIZE, true);
dcd_event_xfer_complete(rhport, EDPT_CTRL_IN, CFG_TUD_ENDPOINT0_SIZE, 0, false);
// 2nd transaction
dcd_edpt_xfer_ExpectWithArrayAndReturn(rhport, EDPT_CTRL_IN,
zlp_desc_configuration + CFG_TUD_ENDPOINT0_SIZE, CFG_TUD_ENDPOINT0_SIZE, CFG_TUD_ENDPOINT0_SIZE, true);
dcd_event_xfer_complete(rhport, EDPT_CTRL_IN, CFG_TUD_ENDPOINT0_SIZE, 0, false);
// Expect Zero length Packet
dcd_edpt_xfer_ExpectAndReturn(rhport, EDPT_CTRL_IN, NULL, 0, true);
dcd_event_xfer_complete(rhport, EDPT_CTRL_IN, 0, 0, false);
// Status
dcd_edpt_xfer_ExpectAndReturn(rhport, EDPT_CTRL_OUT, NULL, 0, true);
dcd_event_xfer_complete(rhport, EDPT_CTRL_OUT, 0, 0, false);
dcd_edpt0_status_complete_ExpectWithArray(rhport, &req_get_desc_configuration, 1);
tud_task();
}

View File

@ -0,0 +1,106 @@
/*
* The MIT License (MIT)
*
* Copyright (c) 2019 Ha Thach (tinyusb.org)
*
* Permission is hereby granted, free of charge, to any person obtaining a copy
* of this software and associated documentation files (the "Software"), to deal
* in the Software without restriction, including without limitation the rights
* to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
* copies of the Software, and to permit persons to whom the Software is
* furnished to do so, subject to the following conditions:
*
* The above copyright notice and this permission notice shall be included in
* all copies or substantial portions of the Software.
*
* THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
* IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
* FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
* AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
* LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
* OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN
* THE SOFTWARE.
*
*/
#ifndef _TUSB_CONFIG_H_
#define _TUSB_CONFIG_H_
// testing framework
#include "unity.h"
#ifdef __cplusplus
extern "C" {
#endif
//--------------------------------------------------------------------
// COMMON CONFIGURATION
//--------------------------------------------------------------------
// defined by compiler flags for flexibility
#ifndef CFG_TUSB_MCU
//#error CFG_TUSB_MCU must be defined
#define CFG_TUSB_MCU OPT_MCU_NRF5X
#endif
#ifndef CFG_TUSB_RHPORT0_MODE
#define CFG_TUSB_RHPORT0_MODE (OPT_MODE_DEVICE | OPT_MODE_HIGH_SPEED)
#endif
// Bare-metal (no RTOS) scheduling for unit tests
#define CFG_TUSB_OS OPT_OS_NONE
// CFG_TUSB_DEBUG is defined by compiler in DEBUG build
#ifndef CFG_TUSB_DEBUG
#define CFG_TUSB_DEBUG 1
#endif
/* USB DMA on some MCUs can only access a specific SRAM region with restriction on alignment.
 * Tinyusb use follows macros to declare transferring memory so that they can be put
 * into those specific section.
 * e.g
 * - CFG_TUSB_MEM SECTION : __attribute__ (( section(".usb_ram") ))
 * - CFG_TUSB_MEM_ALIGN   : __attribute__ ((aligned(4)))
 */
#ifndef CFG_TUSB_MEM_SECTION
#define CFG_TUSB_MEM_SECTION
#endif
#ifndef CFG_TUSB_MEM_ALIGN
#define CFG_TUSB_MEM_ALIGN __attribute__ ((aligned(4)))
#endif
//--------------------------------------------------------------------
// DEVICE CONFIGURATION
//--------------------------------------------------------------------
// NOTE(review): queue depth appears enlarged for test scenarios -- confirm
#define CFG_TUD_TASK_QUEUE_SZ 100
#define CFG_TUD_ENDPOINT0_SIZE 64
//------------- CLASS -------------//
//#define CFG_TUD_CDC 0
#define CFG_TUD_MSC 1
//#define CFG_TUD_HID 0
//#define CFG_TUD_MIDI 0
//#define CFG_TUD_VENDOR 0
//------------- CDC -------------//
// FIFO size of CDC TX and RX
#define CFG_TUD_CDC_RX_BUFSIZE 512
#define CFG_TUD_CDC_TX_BUFSIZE 512
//------------- MSC -------------//
// Buffer size of Device Mass storage
#define CFG_TUD_MSC_BUFSIZE 512
//------------- HID -------------//
// Should be sufficient to hold ID (if any) + Data
#define CFG_TUD_HID_EP_BUFSIZE 64
#ifdef __cplusplus
}
#endif
#endif /* _TUSB_CONFIG_H_ */

View File

@ -0,0 +1,82 @@
/*
* The MIT License (MIT)
*
* Copyright (c) 2023, Ha Thach (tinyusb.org)
*
* Permission is hereby granted, free of charge, to any person obtaining a copy
* of this software and associated documentation files (the "Software"), to deal
* in the Software without restriction, including without limitation the rights
* to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
* copies of the Software, and to permit persons to whom the Software is
* furnished to do so, subject to the following conditions:
*
* The above copyright notice and this permission notice shall be included in
* all copies or substantial portions of the Software.
*
* THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
* IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
* FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
* AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
* LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
* OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN
* THE SOFTWARE.
*
* This file is part of the TinyUSB stack.
*/
#include <string.h>
#include "unity.h"
#include "tusb_common.h"
//--------------------------------------------------------------------+
// MACRO TYPEDEF CONSTANT ENUM DECLARATION
//--------------------------------------------------------------------+
//------------- IMPLEMENTATION -------------//
// Unity fixture: no per-test setup needed for the TU_ARGS_NUM macro tests.
void setUp(void)
{
}
// Unity fixture: nothing to clean up.
void tearDown(void)
{
}
// Exhaustively verifies the TU_ARGS_NUM() variadic-argument-counting macro
// for every supported arity, 0 through 32.
void test_TU_ARGS_NUM(void)
{
TEST_ASSERT_EQUAL( 0, TU_ARGS_NUM());
TEST_ASSERT_EQUAL( 1, TU_ARGS_NUM(a1));
TEST_ASSERT_EQUAL( 2, TU_ARGS_NUM(a1, a2));
TEST_ASSERT_EQUAL( 3, TU_ARGS_NUM(a1, a2, a3));
TEST_ASSERT_EQUAL( 4, TU_ARGS_NUM(a1, a2, a3, a4));
TEST_ASSERT_EQUAL( 5, TU_ARGS_NUM(a1, a2, a3, a4, a5));
TEST_ASSERT_EQUAL( 6, TU_ARGS_NUM(a1, a2, a3, a4, a5, a6));
TEST_ASSERT_EQUAL( 7, TU_ARGS_NUM(a1, a2, a3, a4, a5, a6, a7));
TEST_ASSERT_EQUAL( 8, TU_ARGS_NUM(a1, a2, a3, a4, a5, a6, a7, a8));
TEST_ASSERT_EQUAL( 9, TU_ARGS_NUM(a1, a2, a3, a4, a5, a6, a7, a8, a9));
TEST_ASSERT_EQUAL(10, TU_ARGS_NUM(a1, a2, a3, a4, a5, a6, a7, a8, a9, a10));
TEST_ASSERT_EQUAL(11, TU_ARGS_NUM(a1, a2, a3, a4, a5, a6, a7, a8, a9, a10, a11));
TEST_ASSERT_EQUAL(12, TU_ARGS_NUM(a1, a2, a3, a4, a5, a6, a7, a8, a9, a10, a11, a12));
TEST_ASSERT_EQUAL(13, TU_ARGS_NUM(a1, a2, a3, a4, a5, a6, a7, a8, a9, a10, a11, a12, a13));
TEST_ASSERT_EQUAL(14, TU_ARGS_NUM(a1, a2, a3, a4, a5, a6, a7, a8, a9, a10, a11, a12, a13, a14));
TEST_ASSERT_EQUAL(15, TU_ARGS_NUM(a1, a2, a3, a4, a5, a6, a7, a8, a9, a10, a11, a12, a13, a14, a15));
TEST_ASSERT_EQUAL(16, TU_ARGS_NUM(a1, a2, a3, a4, a5, a6, a7, a8, a9, a10, a11, a12, a13, a14, a15, a16));
TEST_ASSERT_EQUAL(17, TU_ARGS_NUM(a1, a2, a3, a4, a5, a6, a7, a8, a9, a10, a11, a12, a13, a14, a15, a16, a17));
TEST_ASSERT_EQUAL(18, TU_ARGS_NUM(a1, a2, a3, a4, a5, a6, a7, a8, a9, a10, a11, a12, a13, a14, a15, a16, a17, a18));
TEST_ASSERT_EQUAL(19, TU_ARGS_NUM(a1, a2, a3, a4, a5, a6, a7, a8, a9, a10, a11, a12, a13, a14, a15, a16, a17, a18, a19));
TEST_ASSERT_EQUAL(20, TU_ARGS_NUM(a1, a2, a3, a4, a5, a6, a7, a8, a9, a10, a11, a12, a13, a14, a15, a16, a17, a18, a19, a20));
TEST_ASSERT_EQUAL(21, TU_ARGS_NUM(a1, a2, a3, a4, a5, a6, a7, a8, a9, a10, a11, a12, a13, a14, a15, a16, a17, a18, a19, a20, a21));
TEST_ASSERT_EQUAL(22, TU_ARGS_NUM(a1, a2, a3, a4, a5, a6, a7, a8, a9, a10, a11, a12, a13, a14, a15, a16, a17, a18, a19, a20, a21, a22));
TEST_ASSERT_EQUAL(23, TU_ARGS_NUM(a1, a2, a3, a4, a5, a6, a7, a8, a9, a10, a11, a12, a13, a14, a15, a16, a17, a18, a19, a20, a21, a22, a23));
TEST_ASSERT_EQUAL(24, TU_ARGS_NUM(a1, a2, a3, a4, a5, a6, a7, a8, a9, a10, a11, a12, a13, a14, a15, a16, a17, a18, a19, a20, a21, a22, a23, a24));
TEST_ASSERT_EQUAL(25, TU_ARGS_NUM(a1, a2, a3, a4, a5, a6, a7, a8, a9, a10, a11, a12, a13, a14, a15, a16, a17, a18, a19, a20, a21, a22, a23, a24, a25));
TEST_ASSERT_EQUAL(26, TU_ARGS_NUM(a1, a2, a3, a4, a5, a6, a7, a8, a9, a10, a11, a12, a13, a14, a15, a16, a17, a18, a19, a20, a21, a22, a23, a24, a25, a26));
TEST_ASSERT_EQUAL(27, TU_ARGS_NUM(a1, a2, a3, a4, a5, a6, a7, a8, a9, a10, a11, a12, a13, a14, a15, a16, a17, a18, a19, a20, a21, a22, a23, a24, a25, a26, a27));
TEST_ASSERT_EQUAL(28, TU_ARGS_NUM(a1, a2, a3, a4, a5, a6, a7, a8, a9, a10, a11, a12, a13, a14, a15, a16, a17, a18, a19, a20, a21, a22, a23, a24, a25, a26, a27, a28));
TEST_ASSERT_EQUAL(29, TU_ARGS_NUM(a1, a2, a3, a4, a5, a6, a7, a8, a9, a10, a11, a12, a13, a14, a15, a16, a17, a18, a19, a20, a21, a22, a23, a24, a25, a26, a27, a28, a29));
TEST_ASSERT_EQUAL(30, TU_ARGS_NUM(a1, a2, a3, a4, a5, a6, a7, a8, a9, a10, a11, a12, a13, a14, a15, a16, a17, a18, a19, a20, a21, a22, a23, a24, a25, a26, a27, a28, a29, a30));
TEST_ASSERT_EQUAL(31, TU_ARGS_NUM(a1, a2, a3, a4, a5, a6, a7, a8, a9, a10, a11, a12, a13, a14, a15, a16, a17, a18, a19, a20, a21, a22, a23, a24, a25, a26, a27, a28, a29, a30, a31));
TEST_ASSERT_EQUAL(32, TU_ARGS_NUM(a1, a2, a3, a4, a5, a6, a7, a8, a9, a10, a11, a12, a13, a14, a15, a16, a17, a18, a19, a20, a21, a22, a23, a24, a25, a26, a27, a28, a29, a30, a31, a32));
}

View File

@ -0,0 +1,378 @@
/*
* The MIT License (MIT)
*
* Copyright (c) 2019 Ha Thach (tinyusb.org)
*
* Permission is hereby granted, free of charge, to any person obtaining a copy
* of this software and associated documentation files (the "Software"), to deal
* in the Software without restriction, including without limitation the rights
* to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
* copies of the Software, and to permit persons to whom the Software is
* furnished to do so, subject to the following conditions:
*
* The above copyright notice and this permission notice shall be included in
* all copies or substantial portions of the Software.
*
* THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
* IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
* FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
* AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
* LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
* OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN
* THE SOFTWARE.
*
* This file is part of the TinyUSB stack.
*/
#include <string.h>
#include "unity.h"
#include "osal/osal.h"
#include "tusb_fifo.h"
#define FIFO_SIZE 64
uint8_t tu_ff_buf[FIFO_SIZE * sizeof(uint8_t)];
tu_fifo_t tu_ff = TU_FIFO_INIT(tu_ff_buf, FIFO_SIZE, uint8_t, false);
tu_fifo_t* ff = &tu_ff;
tu_fifo_buffer_info_t info;
uint8_t test_data[4096];
uint8_t rd_buf[FIFO_SIZE];
// Unity fixture: reset the shared byte FIFO, the buffer-info struct, and the
// read buffer; fill test_data with the ramp 0,1,2,... (wraps at 256).
void setUp(void)
{
tu_fifo_clear(ff);
memset(&info, 0, sizeof(tu_fifo_buffer_info_t));
for(int i=0; i<sizeof(test_data); i++) test_data[i] = i;
memset(rd_buf, 0, sizeof(rd_buf));
}
// Unity fixture: nothing to clean up.
void tearDown(void)
{
}
//--------------------------------------------------------------------+
// Tests
//--------------------------------------------------------------------+
// Write FIFO_SIZE sequential bytes, then read them back one at a time and
// verify FIFO (first-in-first-out) ordering.
void test_normal(void)
{
for(uint8_t i=0; i < FIFO_SIZE; i++) tu_fifo_write(ff, &i);
for(uint8_t i=0; i < FIFO_SIZE; i++)
{
uint8_t c;
tu_fifo_read(ff, &c);
TEST_ASSERT_EQUAL(i, c);
}
}
// Exercise a FIFO whose item size is 4 bytes (uint32_t) instead of 1:
// partial read, refill past the wrap point, then drain completely.
void test_item_size(void)
{
uint8_t ff4_buf[FIFO_SIZE * sizeof(uint32_t)];
tu_fifo_t ff4 = TU_FIFO_INIT(ff4_buf, FIFO_SIZE, uint32_t, false);
uint32_t data4[2*FIFO_SIZE];
for(uint32_t i=0; i<sizeof(data4)/4; i++) data4[i] = i;
// fill up fifo
tu_fifo_write_n(&ff4, data4, FIFO_SIZE);
uint32_t rd_buf4[FIFO_SIZE];
uint16_t rd_count;
// read 0 -> 4
rd_count = tu_fifo_read_n(&ff4, rd_buf4, 5);
TEST_ASSERT_EQUAL( 5, rd_count );
TEST_ASSERT_EQUAL_UINT32_ARRAY( data4, rd_buf4, rd_count ); // 0 -> 4
tu_fifo_write_n(&ff4, data4+FIFO_SIZE, 5);
// read all 5 -> 68
rd_count = tu_fifo_read_n(&ff4, rd_buf4, FIFO_SIZE);
TEST_ASSERT_EQUAL( FIFO_SIZE, rd_count );
TEST_ASSERT_EQUAL_UINT32_ARRAY( data4+5, rd_buf4, rd_count ); // 5 -> 68
}
// Bulk reads (tu_fifo_read_n): linear read, read that crosses the wrap
// boundary, and an over-sized read that must stop at empty.
void test_read_n(void)
{
uint16_t rd_count;
// fill up fifo
for(uint8_t i=0; i < FIFO_SIZE; i++) tu_fifo_write(ff, test_data+i);
// case 1: Read index + count < depth
// read 0 -> 4
rd_count = tu_fifo_read_n(ff, rd_buf, 5);
TEST_ASSERT_EQUAL( 5, rd_count );
TEST_ASSERT_EQUAL_MEMORY( test_data, rd_buf, rd_count ); // 0 -> 4
// case 2: Read index + count > depth
// write 10, 11, 12
tu_fifo_write(ff, test_data+FIFO_SIZE);
tu_fifo_write(ff, test_data+FIFO_SIZE+1);
tu_fifo_write(ff, test_data+FIFO_SIZE+2);
rd_count = tu_fifo_read_n(ff, rd_buf, 7);
TEST_ASSERT_EQUAL( 7, rd_count );
TEST_ASSERT_EQUAL_MEMORY( test_data+5, rd_buf, rd_count ); // 5 -> 11
// Should only read until empty
TEST_ASSERT_EQUAL( FIFO_SIZE-5+3-7, tu_fifo_read_n(ff, rd_buf, 100) );
}
// Bulk writes (tu_fifo_write_n): a linear write, then a write that crosses
// the wrap boundary; verify drained data and remaining count.
void test_write_n(void)
{
// case 1: wr + count < depth
tu_fifo_write_n(ff, test_data, 32); // wr = 32, count = 32
uint16_t rd_count;
rd_count = tu_fifo_read_n(ff, rd_buf, 16); // wr = 32, count = 16
TEST_ASSERT_EQUAL( 16, rd_count );
TEST_ASSERT_EQUAL_MEMORY( test_data, rd_buf, rd_count );
// case 2: wr + count > depth
tu_fifo_write_n(ff, test_data+32, 40); // wr = 72 -> 8, count = 56
tu_fifo_read_n(ff, rd_buf, 32); // count = 24
// NOTE(review): rd_count is still 16 from case 1, so only the first 16 of
// the 32 bytes just read are compared -- confirm this is intentional.
TEST_ASSERT_EQUAL_MEMORY( test_data+16, rd_buf, rd_count);
TEST_ASSERT_EQUAL(24, tu_fifo_count(ff));
}
// Overwritable mode: writing more than 2*FIFO_SIZE in total must keep the
// FIFO full and preserve at least the most recent data.
void test_write_double_overflowed(void)
{
tu_fifo_set_overwritable(ff, true);
uint8_t rd_buf[FIFO_SIZE] = { 0 }; // shadows the file-level rd_buf
uint8_t* buf = test_data;
// full
buf += tu_fifo_write_n(ff, buf, FIFO_SIZE);
TEST_ASSERT_EQUAL(FIFO_SIZE, tu_fifo_count(ff));
// write more, should still full
buf += tu_fifo_write_n(ff, buf, FIFO_SIZE-8);
TEST_ASSERT_EQUAL(FIFO_SIZE, tu_fifo_count(ff));
// double overflowed: in total, write more than > 2*FIFO_SIZE
buf += tu_fifo_write_n(ff, buf, 16);
TEST_ASSERT_EQUAL(FIFO_SIZE, tu_fifo_count(ff));
// reading back should give back data from last FIFO_SIZE write
tu_fifo_read_n(ff, rd_buf, FIFO_SIZE);
TEST_ASSERT_EQUAL_MEMORY(buf-16, rd_buf+FIFO_SIZE-16, 16);
// TODO whole buffer should match, but we deliberately not implement it
// TEST_ASSERT_EQUAL_MEMORY(buf-FIFO_SIZE, rd_buf, FIFO_SIZE);
}
// Helper for test_write_overwritable2: write n bytes, then assert the FIFO's
// count/remaining against the running total (clamped at FIFO_SIZE).
// Returns the updated expected total.
static uint16_t help_write(uint16_t total, uint16_t n)
{
tu_fifo_write_n(ff, test_data, n);
total = tu_min16(FIFO_SIZE, total + n);
TEST_ASSERT_EQUAL(total, tu_fifo_count(ff));
TEST_ASSERT_EQUAL(FIFO_SIZE - total, tu_fifo_remaining(ff));
return total;
}
// Regression test: sequence of overwritable writes (sizes taken from fuzzing
// crashes) must keep count/remaining consistent at every step.
void test_write_overwritable2(void)
{
tu_fifo_set_overwritable(ff, true);
// based on actual crash tests detected by fuzzing
uint16_t total = 0;
total = help_write(total, 12);
total = help_write(total, 55);
total = help_write(total, 73);
total = help_write(total, 55);
total = help_write(total, 75);
total = help_write(total, 84);
total = help_write(total, 1);
total = help_write(total, 10);
total = help_write(total, 12);
total = help_write(total, 25);
total = help_write(total, 192);
}
// tu_fifo_peek must return the oldest item without consuming it.
void test_peek(void)
{
uint8_t temp;
temp = 10; tu_fifo_write(ff, &temp);
temp = 20; tu_fifo_write(ff, &temp);
temp = 30; tu_fifo_write(ff, &temp);
temp = 0;
tu_fifo_peek(ff, &temp);
TEST_ASSERT_EQUAL(10, temp);
tu_fifo_read(ff, &temp);
tu_fifo_read(ff, &temp);
tu_fifo_peek(ff, &temp);
TEST_ASSERT_EQUAL(30, temp);
}
// Read-info when readable data is contiguous: all of it is linear, no wrap.
void test_get_read_info_when_no_wrap()
{
uint8_t ch = 1;
// write 6 items
for(uint8_t i=0; i < 6; i++) tu_fifo_write(ff, &ch);
// read 2 items
tu_fifo_read(ff, &ch);
tu_fifo_read(ff, &ch);
tu_fifo_get_read_info(ff, &info);
TEST_ASSERT_EQUAL(4, info.len_lin);
TEST_ASSERT_EQUAL(0, info.len_wrap);
TEST_ASSERT_EQUAL_PTR(ff->buffer+2, info.ptr_lin);
TEST_ASSERT_NULL(info.ptr_wrap);
}
// Read-info when readable data wraps: linear part up to the buffer end plus
// a wrapped part starting at the buffer base.
void test_get_read_info_when_wrapped()
{
uint8_t ch = 1;
// make fifo full
for(uint8_t i=0; i < FIFO_SIZE; i++) tu_fifo_write(ff, &ch);
// read 6 items
for(uint8_t i=0; i < 6; i++) tu_fifo_read(ff, &ch);
// write 2 items
tu_fifo_write(ff, &ch);
tu_fifo_write(ff, &ch);
tu_fifo_get_read_info(ff, &info);
TEST_ASSERT_EQUAL(FIFO_SIZE-6, info.len_lin);
TEST_ASSERT_EQUAL(2, info.len_wrap);
TEST_ASSERT_EQUAL_PTR(ff->buffer+6, info.ptr_lin);
TEST_ASSERT_EQUAL_PTR(ff->buffer, info.ptr_wrap);
}
// Write-info when free space is contiguous: all of it is linear, no wrap.
void test_get_write_info_when_no_wrap()
{
uint8_t ch = 1;
// write 2 items
tu_fifo_write(ff, &ch);
tu_fifo_write(ff, &ch);
tu_fifo_get_write_info(ff, &info);
TEST_ASSERT_EQUAL(FIFO_SIZE-2, info.len_lin);
TEST_ASSERT_EQUAL(0, info.len_wrap);
TEST_ASSERT_EQUAL_PTR(ff->buffer+2, info .ptr_lin);
// application should check len instead of ptr.
// TEST_ASSERT_NULL(info.ptr_wrap);
}
// Write-info when free space wraps: linear part to the buffer end plus a
// wrapped part starting at the buffer base.
void test_get_write_info_when_wrapped()
{
uint8_t ch = 1;
// write 6 items
for(uint8_t i=0; i < 6; i++) tu_fifo_write(ff, &ch);
// read 2 items
tu_fifo_read(ff, &ch);
tu_fifo_read(ff, &ch);
tu_fifo_get_write_info(ff, &info);
TEST_ASSERT_EQUAL(FIFO_SIZE-6, info.len_lin);
TEST_ASSERT_EQUAL(2, info.len_wrap);
TEST_ASSERT_EQUAL_PTR(ff->buffer+6, info .ptr_lin);
TEST_ASSERT_EQUAL_PTR(ff->buffer, info.ptr_wrap);
}
// Empty FIFO: read-info reports nothing readable, write-info reports the
// whole buffer writable; one write clears the empty state.
void test_empty(void)
{
uint8_t temp;
TEST_ASSERT_TRUE(tu_fifo_empty(ff));
// read info
tu_fifo_get_read_info(ff, &info);
TEST_ASSERT_EQUAL(0, info.len_lin);
TEST_ASSERT_EQUAL(0, info.len_wrap);
TEST_ASSERT_NULL(info.ptr_lin);
TEST_ASSERT_NULL(info.ptr_wrap);
// write info
tu_fifo_get_write_info(ff, &info);
TEST_ASSERT_EQUAL(FIFO_SIZE, info.len_lin);
TEST_ASSERT_EQUAL(0, info.len_wrap);
TEST_ASSERT_EQUAL_PTR(ff->buffer, info .ptr_lin);
// application should check len instead of ptr.
// TEST_ASSERT_NULL(info.ptr_wrap);
// write 1 then re-check empty
tu_fifo_write(ff, &temp);
TEST_ASSERT_FALSE(tu_fifo_empty(ff));
}
// Full FIFO: tu_fifo_full transitions after FIFO_SIZE writes and read-info
// reports the whole buffer as one linear readable region.
void test_full(void)
{
TEST_ASSERT_FALSE(tu_fifo_full(ff));
for(uint8_t i=0; i < FIFO_SIZE; i++) tu_fifo_write(ff, &i);
TEST_ASSERT_TRUE(tu_fifo_full(ff));
// read info
tu_fifo_get_read_info(ff, &info);
TEST_ASSERT_EQUAL(FIFO_SIZE, info.len_lin);
TEST_ASSERT_EQUAL(0, info.len_wrap);
TEST_ASSERT_EQUAL_PTR(ff->buffer, info.ptr_lin);
// skip this, application must check len instead of buffer
// TEST_ASSERT_NULL(info.ptr_wrap);
// write info
}
// Force rd_idx beyond the buffer depth (indices run 0..2*depth-1 internally)
// and verify successive reads wrap the index and drain the correct counts.
void test_rd_idx_wrap()
{
tu_fifo_t ff10;
uint8_t buf[10];
uint8_t dst[10];
tu_fifo_config(&ff10, buf, 10, 1, 1);
uint16_t n;
ff10.wr_idx = 6;
ff10.rd_idx = 15;
n = tu_fifo_read_n(&ff10, dst, 4);
TEST_ASSERT_EQUAL(n, 4);
TEST_ASSERT_EQUAL(ff10.rd_idx, 0);
n = tu_fifo_read_n(&ff10, dst, 4);
TEST_ASSERT_EQUAL(n, 4);
TEST_ASSERT_EQUAL(ff10.rd_idx, 4);
n = tu_fifo_read_n(&ff10, dst, 4);
TEST_ASSERT_EQUAL(n, 2);
TEST_ASSERT_EQUAL(ff10.rd_idx, 6);
}

View File

@ -0,0 +1,350 @@
#!/usr/bin/env ruby
# these are always used
require 'rubygems'
require 'fileutils'

# Check for the main project file (either the one defined in the ENV or the default).
# BUG FIX: File.exists? was deprecated for years and removed in Ruby 3.2;
# use File.exist? so the launcher runs on modern Rubies.
main_filepath = ENV['CEEDLING_MAIN_PROJECT_FILE']
project_found = (!main_filepath.nil? && File.exist?(main_filepath))
if (!project_found)
  main_filepath = "project.yml"
  project_found = File.exist?(main_filepath)
end
# True when running on a Windows host (mswin/mingw). Prefers RbConfig and
# falls back to the legacy Config constant when RbConfig is not defined.
def is_windows?
  host_config = defined?(RbConfig) ? RbConfig::CONFIG : Config::CONFIG
  !!(host_config['host_os'] =~ /mswin|mingw/)
end
unless (project_found)
#===================================== We Do Not Have A Project ================================================
puts "Welcome to Ceedling!"
require 'thor'
# Root of the ceedling installation: the parent of this script's directory.
# Deliberately returns the unexpanded "<dir>/.." string form.
def here
  "#{File.dirname(__FILE__)}/.."
end
# Thor CLI used when ceedling runs OUTSIDE an existing project: scaffolds new
# projects, upgrades existing ones, lists/copies examples, reports versions.
class CeedlingTasks < Thor
  include Thor::Actions

  desc "new PROJECT_NAME", "create a new ceedling project"
  method_option :docs, :type => :boolean, :default => false, :desc => "Add docs in project vendor directory"
  method_option :local, :type => :boolean, :default => false, :desc => "Create a copy of Ceedling in the project vendor directory"
  method_option :gitignore, :type => :boolean, :default => false, :desc => "Create a gitignore file for ignoring ceedling generated files"
  method_option :no_configs, :type => :boolean, :default => false, :desc => "Don't install starter configuration files"
  method_option :noconfigs, :type => :boolean, :default => false
  #deprecated:
  method_option :no_docs, :type => :boolean, :default => false
  method_option :nodocs, :type => :boolean, :default => false
  method_option :as_gem, :type => :boolean, :default => false
  method_option :asgem, :type => :boolean, :default => false
  method_option :with_ignore, :type => :boolean, :default => false
  method_option :withignore, :type => :boolean, :default => false
  def new(name, silent = false)
    copy_assets_and_create_structure(name, silent, false, options)
  end

  desc "upgrade PROJECT_NAME", "upgrade ceedling for a project (not req'd if gem used)"
  def upgrade(name, silent = false)
    as_local = true
    # BUG FIX: yaml_path was referenced in the rescue message below but never
    # defined, so a missing project file raised NameError instead of the
    # intended error. Build the path up front.
    yaml_path = File.join(name, "project.yml")
    begin
      require "yaml"
      as_local = (YAML.load_file(yaml_path)[:project][:which_ceedling] != 'gem')
    rescue
      raise "ERROR: Could not find valid project file '#{yaml_path}'"
    end
    # File.exists? was removed in Ruby 3.2; use File.exist?
    found_docs = File.exist?( File.join(name, "docs", "CeedlingPacket.md") )
    copy_assets_and_create_structure(name, silent, true, {:upgrade => true, :no_configs => true, :local => as_local, :docs => found_docs})
  end

  no_commands do
    # Shared worker for `new` and `upgrade`: creates directory structure and
    # copies docs/vendored-ceedling/config/gitignore per the option flags.
    def copy_assets_and_create_structure(name, silent=false, force=false, options = {})
      # BUG FIX: the deprecation notices advertised single-dash flags
      # (-docs, -local, -gitignore) that Thor does not accept; show the
      # correct double-dash spellings.
      puts "WARNING: --no_docs deprecated. It is now the default. Specify --docs if you want docs installed." if (options[:no_docs] || options[:nodocs])
      puts "WARNING: --as_gem deprecated. It is now the default. Specify --local if you want ceedling installed to this project." if (options[:as_gem] || options[:asgem])
      # BUG FIX: condition tested options[:with_ignore] twice; the second
      # operand must be the undashed alias :withignore.
      puts "WARNING: --with_ignore deprecated. It is now called --gitignore" if (options[:with_ignore] || options[:withignore])

      use_docs    = options[:docs] || false
      use_configs = !(options[:no_configs] || options[:noconfigs] || false)
      use_gem     = !(options[:local])
      use_ignore  = options[:gitignore] || false
      is_upgrade  = options[:upgrade] || false

      ceedling_path     = File.join(name, 'vendor', 'ceedling')
      source_path       = File.join(name, 'src')
      test_path         = File.join(name, 'test')
      test_support_path = File.join(name, 'test/support')

      # If it's not an upgrade, make sure we have the paths we expect
      if (!is_upgrade)
        [source_path, test_path, test_support_path].each do |d|
          FileUtils.mkdir_p d
        end
      end

      # Generate gitkeep in test support path
      FileUtils.touch(File.join(test_support_path, '.gitkeep'))

      # If documentation requested, create a place to dump them and do so
      doc_path = ""
      if use_docs
        doc_path = use_gem ? File.join(name, 'docs') : File.join(ceedling_path, 'docs')
        FileUtils.mkdir_p doc_path
        in_doc_path = lambda {|f| File.join(doc_path, f)}
        # Add documentation from main projects to list
        doc_files = {}
        ['docs','vendor/unity/docs','vendor/cmock/docs','vendor/cexception/docs'].each do |p|
          Dir[ File.expand_path(File.join(here, p, '*.md')) ].each do |f|
            doc_files[ File.basename(f) ] = f unless(doc_files.include? f)
          end
        end
        # Add documentation from plugins to list
        Dir[ File.join(here, 'plugins/**/README.md') ].each do |plugin_path|
          k = "plugin_" + plugin_path.split(/\\|\//)[-2] + ".md"
          doc_files[ k ] = File.expand_path(plugin_path)
        end
        # Copy all documentation
        doc_files.each_pair do |k, v|
          copy_file(v, in_doc_path.call(k), :force => force)
        end
      end

      # If installed locally to project, copy ceedling, unity, cmock, & supports to vendor
      unless use_gem
        FileUtils.mkdir_p ceedling_path
        #copy full folders from ceedling gem into project
        %w{plugins lib bin}.map do |f|
          {:src => f, :dst => File.join(ceedling_path, f)}
        end.each do |f|
          directory(f[:src], f[:dst], :force => force)
        end
        # mark ceedling as an executable
        File.chmod(0755, File.join(ceedling_path, 'bin', 'ceedling')) unless is_windows?
        #copy necessary subcomponents from ceedling gem into project
        sub_components = [
          {:src => 'vendor/c_exception/lib/', :dst => 'vendor/c_exception/lib'},
          {:src => 'vendor/cmock/config/',    :dst => 'vendor/cmock/config'},
          {:src => 'vendor/cmock/lib/',       :dst => 'vendor/cmock/lib'},
          {:src => 'vendor/cmock/src/',       :dst => 'vendor/cmock/src'},
          {:src => 'vendor/diy/lib',          :dst => 'vendor/diy/lib'},
          {:src => 'vendor/unity/auto/',      :dst => 'vendor/unity/auto'},
          {:src => 'vendor/unity/src/',       :dst => 'vendor/unity/src'},
        ]
        sub_components.each do |c|
          directory(c[:src], File.join(ceedling_path, c[:dst]), :force => force)
        end
      end

      # We're copying in a configuration file if we haven't said not to
      if (use_configs)
        dst_yaml = File.join(name, 'project.yml')
        src_yaml = if use_gem
          File.join(here, 'assets', 'project_as_gem.yml')
        else
          if is_windows?
            copy_file(File.join('assets', 'ceedling.cmd'), File.join(name, 'ceedling.cmd'), :force => force)
          else
            copy_file(File.join('assets', 'ceedling'), File.join(name, 'ceedling'), :force => force)
            File.chmod(0755, File.join(name, 'ceedling'))
          end
          File.join(here, 'assets', 'project_with_guts.yml')
        end

        # Perform the actual clone of the config file, while updating the version
        File.open(dst_yaml,'w') do |dst|
          require File.expand_path(File.join(File.dirname(__FILE__),"..","lib","ceedling","version.rb"))
          dst << File.read(src_yaml).gsub(":ceedling_version: '?'",":ceedling_version: #{Ceedling::Version::CEEDLING}")
          puts " create #{dst_yaml}"
        end
      end

      # Copy the gitignore file if requested
      if (use_ignore)
        copy_file(File.join('assets', 'default_gitignore'), File.join(name, '.gitignore'), :force => force)
      end

      unless silent
        puts "\n"
        puts "Project '#{name}' #{force ? "upgraded" : "created"}!"
        puts " - Tool documentation is located in #{doc_path}" if use_docs
        puts " - Execute 'ceedling help' from #{name} to view available test & build tasks"
        puts ''
      end
    end
  end

  desc "examples", "list available example projects"
  def examples()
    puts "Available sample projects:"
    FileUtils.cd(File.join(here, "examples")) do
      Dir["*"].each {|proj| puts " #{proj}"}
    end
  end

  desc "example PROJ_NAME [DEST]", "new specified example project (in DEST, if specified)"
  def example(proj_name, dest=nil)
    if dest.nil? then dest = proj_name end
    copy_assets_and_create_structure(dest, true, false, {:local=>true, :docs=>true})
    dest_src     = File.join(dest,'src')
    dest_test    = File.join(dest,'test')
    dest_project = File.join(dest,'project.yml')
    directory "examples/#{proj_name}/src", dest_src
    directory "examples/#{proj_name}/test", dest_test
    remove_file dest_project
    copy_file "examples/#{proj_name}/project.yml", dest_project
    puts "\n"
    puts "Example project '#{proj_name}' created!"
    puts " - Tool documentation is located in vendor/ceedling/docs"
    puts " - Execute 'ceedling help' to view available test & build tasks"
    puts ''
  end

  desc "version", "return the version of the tools installed"
  def version()
    require File.expand_path(File.join(File.dirname(__FILE__),"..","lib","ceedling","version.rb"))
    puts " Ceedling:: #{Ceedling::Version::CEEDLING}"
    puts " CMock:: #{Ceedling::Version::CMOCK}"
    puts " Unity:: #{Ceedling::Version::UNITY}"
    puts " CException:: #{Ceedling::Version::CEXCEPTION}"
  end
end
# No-project entry point: with -T, list only the project-creation commands;
# otherwise hand ARGV to Thor (CeedlingTasks.start parses it itself).
if (ARGV[0] =~ /^\-T$/)
puts "\n(No Project Detected, Therefore Showing Options to Create Projects)"
CeedlingTasks.tasks.each_pair do |k,v|
puts v.usage.ljust(25,' ') + v.description
end
puts "\n"
else
CeedlingTasks.source_root here
CeedlingTasks.start
end
#===================================== We Have A Project Already ================================================
else
require 'yaml'
require 'rbconfig'
# Determine the host platform symbol (:mswin, :osx, or :linux),
# defaulting to :linux on any lookup error.
platform =
  begin
    host_os = RbConfig::CONFIG['host_os']
    if host_os =~ /mswin|mingw|cygwin/i
      :mswin
    elsif host_os =~ /darwin/
      :osx
    else
      :linux
    end
  rescue
    :linux
  end
# Create our default meta-runner option set.
# BUG FIX: Dir.exists? / File.exists? were removed in Ruby 3.2; use the
# exist? forms so option detection works on modern Rubies.
options = {
  :pretest => nil,
  :args => [],
  :add_path => [],
  :path_connector => (platform == :mswin) ? ";" : ":",
  :graceful_fail => false,
  :which_ceedling => (Dir.exist?("vendor/ceedling") ? "vendor/ceedling" : 'gem'),
  :default_tasks => [ 'test:all' ],
  :list_tasks => false
}

# Guess that we need a special setup script first if it exists.
if (platform == :mswin)
  options[:pretest] = File.exist?("#{ platform.to_s }_setup.bat") ? "#{ platform.to_s }_setup.bat" : nil
else
  options[:pretest] = File.exist?("#{ platform.to_s }_setup.sh") ? "source #{ platform.to_s }_setup.sh" : nil
end
#merge in project settings if they can be found here
yaml_options = YAML.load_file(main_filepath)
if (yaml_options[:paths])
options[:add_path] = yaml_options[:paths][:tools] || []
else
options[:add_path] = []
end
options[:graceful_fail] = yaml_options[:graceful_fail] if yaml_options[:graceful_fail]
options[:which_ceedling] = yaml_options[:project][:which_ceedling] if (yaml_options[:project] && yaml_options[:project][:which_ceedling])
options[:default_tasks] = yaml_options[:default_tasks] if yaml_options[:default_tasks]
#sort through command line options
ARGV.each do |v|
case(v)
when /^(?:new|examples?|templates?)$/
puts "\nOops. You called ceedling with argument '#{v}'.\n" +
" This is an operation that will create a new project... \n" +
" but it looks like you're already in a project. If you really \n" +
" want to do this, try moving to an empty folder.\n\n"
abort
when /^help$/
options[:list_tasks] = true
when /^-T$/
options[:list_tasks] = true
when /^--tasks$/
options[:list_tasks] = true
when /^project:(\w+)/
ENV['CEEDLING_USER_PROJECT_FILE'] = "#{$1}.yml"
else
options[:args].push(v)
end
end
#add to the path
if (options[:add_path] && !options[:add_path].empty?)
path = ENV["PATH"]
options[:add_path].each do |p|
f = File.expand_path(File.dirname(__FILE__),p)
path = (f + options[:path_connector] + path) unless path.include? f
end
ENV["PATH"] = path
end
# Load Ceedling (either through the rakefile OR directly)
if (File.exists?("rakefile.rb"))
load 'rakefile.rb'
else
if (options[:which_ceedling] == 'gem')
require 'ceedling'
else
load "#{options[:which_ceedling]}/lib/ceedling.rb"
end
Ceedling.load_project
end
Rake.application.standard_exception_handling do
if options[:list_tasks]
# Display helpful task list when requested. This required us to dig into Rake internals a bit
Rake.application.define_singleton_method(:name=) {|n| @name = n}
Rake.application.name = 'ceedling'
Rake.application.options.show_tasks = :tasks
Rake.application.options.show_task_pattern = /^(?!.*build).*$/
Rake.application.display_tasks_and_comments()
else
task :default => options[:default_tasks]
# Run our Tasks!
Rake.application.collect_command_line_tasks(options[:args])
Rake.application.top_level
end
end
true
#===================================================================================================================
end

View File

@ -0,0 +1,99 @@
##
# Public interface for locating, configuring, and loading a Ceedling project.
module Ceedling
  class << self
    ##
    # Returns the location where the gem is installed.
    # === Return
    # _String_ - The location where the gem lives.
    def location
      File.join(File.dirname(__FILE__), '..')
    end

    ##
    # Return the path to the "built-in" plugins.
    # === Return
    # _String_ - The path where the default plugins live.
    def load_path
      File.join(location, 'plugins')
    end

    ##
    # Return the path to the Ceedling Rakefile.
    # === Return
    # _String_
    def rakefile
      File.join(location, 'lib', 'ceedling', 'rakefile.rb')
    end

    ##
    # Select the project file Ceedling will use by setting the
    # CEEDLING_MAIN_PROJECT_FILE environment variable before loading the
    # ceedling rakefile. An explicit :config option overrides the current
    # environment value; if neither is present, './project.yml' is used.
    #
    # === Arguments
    # +options+ _Hash_::
    #   * +config+  - path to the project YAML configuration file
    #   * +root+    - root of the project directory
    #   * +prefix+  - prefix prepended to plugin names to form gem names
    #   * +plugins+ - list of ceedling plugins to load
    def load_project(options = {})
      # Make sure our path to the yaml file is set up.
      if options.has_key? :config
        ENV['CEEDLING_MAIN_PROJECT_FILE'] = options[:config]
      elsif ENV['CEEDLING_MAIN_PROJECT_FILE'].nil?
        ENV['CEEDLING_MAIN_PROJECT_FILE'] = './project.yml'
      end

      # Register any requested plugins before the rakefile is loaded.
      options.fetch(:plugins, []).each do |plugin|
        register_plugin( plugin, options[:prefix] )
      end

      # Define the root of the project if specified.
      Object.const_set('PROJECT_ROOT', options[:root]) if options.has_key? :root

      # Load ceedling.
      load rakefile
    end

    ##
    # Register a plugin for ceedling to use when a project is loaded. Must be
    # called before _load_project_.
    #
    # Intended for plugins distributed as RubyGems, which are assumed to:
    # * be named 'ceedling-<plugin>' (ex. 'ceedling-bullseye')
    # * have their contents installed into a subdirectory of the gem named
    #   after the plugin (ex. 'bullseye/')
    #
    # === Arguments
    # +name+ _String_:: The name of the plugin to load.
    # +prefix+ _String_::
    #   (optional, default = nil) The prefix to use for the full gem name.
    def register_plugin(name, prefix=nil)
      # Figure out the full gem name and where it is installed.
      prefix ||= 'ceedling-'
      gem_dir = Gem::Specification.find_by_name(prefix + name).gem_dir

      # Register the plugin with Ceedling.
      require 'ceedling/defaults'
      DEFAULT_CEEDLING_CONFIG[:plugins][:enabled]    << name
      DEFAULT_CEEDLING_CONFIG[:plugins][:load_paths] << gem_dir
    end
  end
end

View File

@ -0,0 +1,39 @@
require 'ceedling/constants'
##
# Utilities for raising and reporting errors during building.
class BuildInvokerUtils

  constructor :configurator, :streaminator

  ##
  # Process a build exception and print a useful message for the user when
  # the failure is rake's "don't know how to build" (a likely stale
  # dependency); anything else is propagated untouched.
  #
  # ==== Attributes
  #
  # * _exception_:  The exception given by a rescue statement.
  # * _context_:    A symbol representing where in the build the exception occurs.
  # * _test_build_: A bool to signify if the exception occurred while building
  #                 from test or source.
  #
  def process_exception(exception, context, test_build=true)
    missing_task = exception.message.match(/Don't know how to build task '(.+)'/i)

    # Not the failure we know how to explain -- re-raise as-is.
    raise exception unless missing_task

    header  = "ERROR: Rake could not find file referenced in source"
    header += " or test" if test_build
    header += ": '#{missing_task[1]}'. Possible stale dependency."
    @streaminator.stderr_puts( header )

    if @configurator.project_use_deep_dependencies
      help_message = "Try fixing #include statements or adding missing file. Then run '#{REFRESH_TASK_ROOT}#{context.to_s}' task and try again."
      @streaminator.stderr_puts( help_message )
    end

    # Details already reported above; halt the build without a second message.
    raise ''
  end

end

View File

@ -0,0 +1,47 @@
# Caches per-build configuration and test files so subsequent builds can
# detect which inputs actually changed.
class Cacheinator

  constructor :cacheinator_helper, :file_path_utils, :file_wrapper, :yaml_wrapper

  # Persist the test-build input configuration into the test build cache.
  def cache_test_config(hash)
    @yaml_wrapper.dump( @file_path_utils.form_test_build_cache_path( INPUT_CONFIGURATION_CACHE_FILE), hash )
  end

  # Persist the release-build input configuration into the release build cache.
  def cache_release_config(hash)
    @yaml_wrapper.dump( @file_path_utils.form_release_build_cache_path( INPUT_CONFIGURATION_CACHE_FILE ), hash )
  end

  # Return +filepath+ (and refresh the cached copy) when the file is new or
  # differs from its cached version; otherwise return the cached path.
  def diff_cached_test_file( filepath )
    cached_filepath = @file_path_utils.form_test_build_cache_path( filepath )

    up_to_date = @file_wrapper.exist?( cached_filepath ) &&
                 @file_wrapper.compare( filepath, cached_filepath )
    return cached_filepath if up_to_date

    # Cache is stale or empty: refresh it and report the live file as changed.
    @file_wrapper.cp(filepath, cached_filepath, {:preserve => false})
    filepath
  end

  # Has the test-build configuration changed since it was last cached?
  def diff_cached_test_config?(hash)
    cached_filepath = @file_path_utils.form_test_build_cache_path(INPUT_CONFIGURATION_CACHE_FILE)
    @cacheinator_helper.diff_cached_config?( cached_filepath, hash )
  end

  # Have the compile defines for the given files changed since last cached?
  def diff_cached_test_defines?(files)
    cached_filepath = @file_path_utils.form_test_build_cache_path(DEFINES_DEPENDENCY_CACHE_FILE)
    @cacheinator_helper.diff_cached_defines?( cached_filepath, files )
  end

  # Has the release-build configuration changed since it was last cached?
  def diff_cached_release_config?(hash)
    cached_filepath = @file_path_utils.form_release_build_cache_path(INPUT_CONFIGURATION_CACHE_FILE)
    @cacheinator_helper.diff_cached_config?( cached_filepath, hash )
  end

end

View File

@ -0,0 +1,35 @@
# Comparison helpers backing Cacheinator's change detection.
class CacheinatorHelper

  constructor :file_wrapper, :yaml_wrapper

  # True when a cached config exists and differs from +hash+; a missing
  # cache file counts as "no difference" (nothing to compare yet).
  def diff_cached_config?(cached_filepath, hash)
    return false unless @file_wrapper.exist?(cached_filepath)
    @yaml_wrapper.load(cached_filepath) != hash
  end

  # True when the cached per-file define lists differ from the current ones.
  # Side effect: the cache file is (re)written with the merged current state.
  def diff_cached_defines?(cached_filepath, files)
    current_defines = COLLECTION_DEFINES_TEST_AND_VENDOR.reject(&:empty?)
    current_dependencies = files.each_with_object({}) do |source, deps|
      deps[source] = current_defines.dup
    end

    # First run: seed the cache; there is nothing to compare against yet.
    unless @file_wrapper.exist?(cached_filepath)
      @yaml_wrapper.dump(cached_filepath, current_dependencies)
      return false
    end

    cached_dependencies = @yaml_wrapper.load(cached_filepath)

    # Compare only files present in both current and cached sets.
    common = current_dependencies.select { |file, defines| cached_dependencies.has_key?(file) }
    changed_defines = cached_dependencies.values_at(*common.keys) != common.values

    # Always rewrite the cache with the freshest state.
    cached_dependencies.merge!(current_dependencies)
    @yaml_wrapper.dump(cached_filepath, cached_dependencies)

    changed_defines
  end

end

View File

@ -0,0 +1,15 @@
require 'cmock'
# Thin factory wrapper that holds the CMock instance used for mock generation.
class CmockBuilder

  attr_accessor :cmock

  # Start with no CMock instance; one is created later via #manufacture.
  def setup
    @cmock = nil
  end

  # Construct the CMock instance from the given configuration hash.
  def manufacture(cmock_config)
    @cmock = CMock.new(cmock_config)
  end

end

View File

@ -0,0 +1,382 @@
require 'ceedling/defaults'
require 'ceedling/constants'
require 'ceedling/file_path_utils'
require 'deep_merge'
##
# Central configuration manager for a Ceedling build: ingests the project
# YAML, merges defaults / plugins / imports, evaluates inline ruby and path
# entries, and finally exposes the flattened result as global constants and
# accessor methods for the rest of the build (see #build).
class Configurator

  attr_reader :project_config_hash, :script_plugins, :rake_plugins
  attr_accessor :project_logging, :project_debug, :project_verbosity, :sanity_checks

  constructor(:configurator_setup, :configurator_builder, :configurator_plugins, :cmock_builder, :yaml_wrapper, :system_wrapper) do
    @project_logging = false
    @project_debug = false
    @project_verbosity = Verbosity::NORMAL
    @sanity_checks = TestResultsSanityChecks::NORMAL
  end

  # Reset working hashes and plugin lists to a clean state.
  def setup
    # special copy of cmock config to provide to cmock for construction
    @cmock_config_hash = {}

    # note: project_config_hash is an instance variable so constants and accessors created
    # in eval() statements in build() have something of proper scope and persistence to reference
    @project_config_hash = {}
    @project_config_hash_backup = {}

    @script_plugins = []
    @rake_plugins = []
  end

  # Merge an already-flattened config fragment into the working hash and
  # regenerate the derived constants/accessors.
  def replace_flattened_config(config)
    @project_config_hash.merge!(config)
    @configurator_setup.build_constants_and_accessors(@project_config_hash, binding())
  end

  # Snapshot the working config (shallow clone) for later restore_config.
  def store_config
    @project_config_hash_backup = @project_config_hash.clone
  end

  # Restore the snapshot taken by store_config and rebuild accessors.
  def restore_config
    @project_config_hash = @project_config_hash_backup
    @configurator_setup.build_constants_and_accessors(@project_config_hash, binding())
  end

  # Remove user-supplied overrides for the standard tools so the defaults
  # can be re-applied from scratch.
  def reset_defaults(config)
    [:test_compiler,
     :test_linker,
     :test_fixture,
     :test_includes_preprocessor,
     :test_file_preprocessor,
     :test_file_preprocessor_directives,
     :test_dependencies_generator,
     :release_compiler,
     :release_assembler,
     :release_linker,
     :release_dependencies_generator].each do |tool|
      config[:tools].delete(tool) if (not (config[:tools][tool].nil?))
    end
  end

  # The default values defined in defaults.rb (eg. DEFAULT_TOOLS_TEST) are populated
  # into @param config
  def populate_defaults(config)
    new_config = DEFAULT_CEEDLING_CONFIG.deep_clone
    new_config.deep_merge!(config)
    config.replace(new_config)

    @configurator_builder.populate_defaults( config, DEFAULT_TOOLS_TEST )
    @configurator_builder.populate_defaults( config, DEFAULT_TOOLS_TEST_PREPROCESSORS ) if (config[:project][:use_test_preprocessor])
    @configurator_builder.populate_defaults( config, DEFAULT_TOOLS_TEST_DEPENDENCIES ) if (config[:project][:use_deep_dependencies])
    @configurator_builder.populate_defaults( config, DEFAULT_TOOLS_RELEASE ) if (config[:project][:release_build])
    @configurator_builder.populate_defaults( config, DEFAULT_TOOLS_RELEASE_ASSEMBLER ) if (config[:project][:release_build] and config[:release_build][:use_assembly])
    @configurator_builder.populate_defaults( config, DEFAULT_TOOLS_RELEASE_DEPENDENCIES ) if (config[:project][:release_build] and config[:project][:use_deep_dependencies])
  end

  # Seed the test-runner configuration from the :unity section (later merged
  # with cmock settings in populate_cmock_defaults).
  def populate_unity_defaults(config)
    unity = config[:unity] || {}
    @runner_config = unity.merge(@runner_config || config[:test_runner] || {})
  end

  def populate_cmock_defaults(config)
    # cmock has its own internal defaults handling, but we need to set these specific values
    # so they're present for the build environment to access;
    # note: these need to end up in the hash given to initialize cmock for this to be successful
    cmock = config[:cmock] || {}

    # yes, we're duplicating the default mock_prefix in cmock, but it's because we need CMOCK_MOCK_PREFIX always available in Ceedling's environment
    cmock[:mock_prefix] = 'Mock' if (cmock[:mock_prefix].nil?)

    # just because strict ordering is the way to go
    cmock[:enforce_strict_ordering] = true if (cmock[:enforce_strict_ordering].nil?)

    cmock[:mock_path] = File.join(config[:project][:build_root], TESTS_BASE_PATH, 'mocks') if (cmock[:mock_path].nil?)
    cmock[:verbosity] = @project_verbosity if (cmock[:verbosity].nil?)

    cmock[:plugins] = [] if (cmock[:plugins].nil?)
    cmock[:plugins].map! { |plugin| plugin.to_sym }
    cmock[:plugins] << (:cexception) if (!cmock[:plugins].include?(:cexception) and (config[:project][:use_exceptions]))
    cmock[:plugins].uniq!

    cmock[:unity_helper] = false if (cmock[:unity_helper].nil?)

    if (cmock[:unity_helper])
      cmock[:unity_helper] = [cmock[:unity_helper]] if cmock[:unity_helper].is_a? String
      cmock[:includes] += cmock[:unity_helper].map{|helper| File.basename(helper) }
      cmock[:includes].uniq!
    end

    @runner_config = cmock.merge(@runner_config || config[:test_runner] || {})

    @cmock_builder.manufacture(cmock)
  end

  # Accessor for the merged test-runner configuration built above.
  def get_runner_config
    @runner_config
  end

  # grab tool names from yaml and insert into tool structures so available for error messages
  # set up default values
  def tools_setup(config)
    config[:tools].each_key do |name|
      tool = config[:tools][name]

      # populate name if not given
      tool[:name] = name.to_s if (tool[:name].nil?)

      # handle inline ruby string substitution in executable
      if (tool[:executable] =~ RUBY_STRING_REPLACEMENT_PATTERN)
        tool[:executable].replace(@system_wrapper.module_eval(tool[:executable]))
      end

      # populate stderr redirect option
      tool[:stderr_redirect] = StdErrRedirect::NONE if (tool[:stderr_redirect].nil?)

      # populate background execution option
      tool[:background_exec] = BackgroundExec::NONE if (tool[:background_exec].nil?)

      # populate optional option to control verification of executable in search paths
      tool[:optional] = false if (tool[:optional].nil?)
    end
  end

  # Append top-level per-tool argument additions (config[:tools_<name>]) to
  # the corresponding flattened tool's argument list.
  def tools_supplement_arguments(config)
    tools_name_prefix = 'tools_'
    config[:tools].each_key do |name|
      tool = @project_config_hash[(tools_name_prefix + name.to_s).to_sym]

      # smoosh in extra arguments if specified at top-level of config (useful for plugins & default gcc tools)
      # arguments are squirted in at _end_ of list
      top_level_tool = (tools_name_prefix + name.to_s).to_sym
      if (not config[top_level_tool].nil?)
        # adding and flattening is not a good idea: might over-flatten if there's array nesting in tool args
        tool[:arguments].concat config[top_level_tool][:arguments]
      end
    end
  end

  # Discover plugins on the configured load paths and merge their
  # configuration fragments and defaults into config.
  def find_and_merge_plugins(config)
    # plugins must be loaded before generic path evaluation & magic that happen later;
    # perform path magic here as discrete step
    config[:plugins][:load_paths].each do |path|
      path.replace(@system_wrapper.module_eval(path)) if (path =~ RUBY_STRING_REPLACEMENT_PATTERN)
      FilePathUtils::standardize(path)
    end

    config[:plugins][:load_paths] << FilePathUtils::standardize(Ceedling.load_path)
    config[:plugins][:load_paths].uniq!

    paths_hash = @configurator_plugins.add_load_paths(config)

    @rake_plugins = @configurator_plugins.find_rake_plugins(config, paths_hash)
    @script_plugins = @configurator_plugins.find_script_plugins(config, paths_hash)
    config_plugins = @configurator_plugins.find_config_plugins(config, paths_hash)
    plugin_yml_defaults = @configurator_plugins.find_plugin_yml_defaults(config, paths_hash)
    plugin_hash_defaults = @configurator_plugins.find_plugin_hash_defaults(config, paths_hash)

    config_plugins.each do |plugin|
      plugin_config = @yaml_wrapper.load(plugin)
      # NOTE(review): non-bang deep_merge appears intentional here (existing
      # config values keep precedence over plugin values) -- confirm.
      config.deep_merge(plugin_config)
    end

    plugin_yml_defaults.each do |defaults|
      @configurator_builder.populate_defaults( config, @yaml_wrapper.load(defaults) )
    end

    plugin_hash_defaults.each do |defaults|
      @configurator_builder.populate_defaults( config, defaults )
    end

    # special plugin setting for results printing
    config[:plugins][:display_raw_test_results] = true if (config[:plugins][:display_raw_test_results].nil?)

    paths_hash.each_pair { |name, path| config[:plugins][name] = path }
  end

  # Fold any :import entries (array or hash of YAML file paths, with inline
  # ruby substitution) into config, then drop the :import key itself.
  def merge_imports(config)
    if config[:import]
      if config[:import].is_a? Array
        until config[:import].empty?
          path = config[:import].shift
          path = @system_wrapper.module_eval(path) if (path =~ RUBY_STRING_REPLACEMENT_PATTERN)
          config.deep_merge!(@yaml_wrapper.load(path))
        end
      else
        config[:import].each_value do |path|
          if !path.nil?
            path = @system_wrapper.module_eval(path) if (path =~ RUBY_STRING_REPLACEMENT_PATTERN)
            config.deep_merge!(@yaml_wrapper.load(path))
          end
        end
      end
    end
    config.delete(:import)
  end

  # Evaluate :environment entries (with inline ruby substitution) and export
  # them as environment variables; :path entries are joined with the
  # platform's PATH separator.
  def eval_environment_variables(config)
    config[:environment].each do |hash|
      key = hash.keys[0]
      value = hash[key]
      items = []

      interstitial = ((key == :path) ? File::PATH_SEPARATOR : '')
      items = ((value.class == Array) ? hash[key] : [value])

      items.each do |item|
        if item.is_a? String and item =~ RUBY_STRING_REPLACEMENT_PATTERN
          item.replace( @system_wrapper.module_eval( item ) )
        end
      end
      hash[key] = items.join( interstitial )

      @system_wrapper.env_set( key.to_s.upcase, hash[key] )
    end
  end

  # Apply inline ruby substitution across all configured path/file entries.
  def eval_paths(config)
    # [:plugins]:[load_paths] already handled

    paths = [ # individual paths that don't follow convention processed below
      config[:project][:build_root],
      config[:release_build][:artifacts]]

    eval_path_list( paths )

    config[:paths].each_pair { |collection, paths| eval_path_list( paths ) }

    config[:files].each_pair { |collection, files| eval_path_list( files ) }

    # all other paths at secondary hash key level processed by convention:
    # ex. [:toplevel][:foo_path] & [:toplevel][:bar_paths] are evaluated
    config.each_pair { |parent, child| eval_path_list( collect_path_list( child ) ) }
  end

  # Normalize path separators across all configured path/file entries.
  def standardize_paths(config)
    # [:plugins]:[load_paths] already handled

    paths = [ # individual paths that don't follow convention processed below
      config[:project][:build_root],
      config[:release_build][:artifacts]] # cmock path in case it was explicitly set in config

    paths.flatten.each { |path| FilePathUtils::standardize( path ) }

    config[:paths].each_pair do |collection, paths|
      # ensure that list is an array (i.e. handle case of list being a single string,
      # or a multidimensional array)
      config[:paths][collection] = [paths].flatten.map{|path| FilePathUtils::standardize( path )}
    end

    config[:files].each_pair { |collection, files| files.each{ |path| FilePathUtils::standardize( path ) } }

    config[:tools].each_pair { |tool, config| FilePathUtils::standardize( config[:executable] ) if (config.include? :executable) }

    # all other paths at secondary hash key level processed by convention:
    # ex. [:toplevel][:foo_path] & [:toplevel][:bar_paths] are standardized
    config.each_pair do |parent, child|
      collect_path_list( child ).each { |path| FilePathUtils::standardize( path ) }
    end
  end

  # Validate the fully assembled configuration; raises on any failure.
  def validate(config)
    # collect felonies and go straight to jail
    raise if (not @configurator_setup.validate_required_sections( config ))

    # collect all misdemeanors, everybody on probation
    blotter = []
    blotter << @configurator_setup.validate_required_section_values( config )
    blotter << @configurator_setup.validate_paths( config )
    blotter << @configurator_setup.validate_tools( config )
    blotter << @configurator_setup.validate_plugins( config )

    raise if (blotter.include?( false ))
  end

  # create constants and accessors (attached to this object) from given hash
  def build(config, *keys)
    # create flattened & expanded configuration hash
    built_config = @configurator_setup.build_project_config( config, @configurator_builder.flattenify( config ) )

    @project_config_hash = built_config.clone
    store_config()

    @configurator_setup.build_constants_and_accessors(built_config, binding())

    # top-level keys disappear when we flatten, so create global constants & accessors to any specified keys
    keys.each do |key|
      hash = { key => config[key] }
      @configurator_setup.build_constants_and_accessors(hash, binding())
    end
  end

  # add to constants and accessors as post build step
  def build_supplement(config_base, config_more)
    # merge in our post-build additions to base configuration hash
    config_base.deep_merge!( config_more )

    # flatten our addition hash
    config_more_flattened = @configurator_builder.flattenify( config_more )

    # merge our flattened hash with built hash from previous build
    @project_config_hash.deep_merge!( config_more_flattened )
    store_config()

    # create more constants and accessors
    @configurator_setup.build_constants_and_accessors(config_more_flattened, binding())

    # recreate constants & update accessors with new merged, base values
    config_more.keys.each do |key|
      hash = { key => config_base[key] }
      @configurator_setup.build_constants_and_accessors(hash, binding())
    end
  end

  # Append rake plugin files to the list of rakefile components to load.
  def insert_rake_plugins(plugins)
    plugins.each do |plugin|
      @project_config_hash[:project_rakefile_component_files] << plugin
    end
  end

  ### private ###

  private

  # Collect the values of any '*_path'/'*_paths' keys in a hash container.
  def collect_path_list( container )
    paths = []
    container.each_key { |key| paths << container[key] if (key.to_s =~ /_path(s)?$/) } if (container.class == Hash)
    return paths.flatten
  end

  # Apply inline ruby substitution to each path in a (possibly nested) list.
  def eval_path_list( paths )
    if paths.kind_of?(Array)
      paths = Array.new(paths)
    end

    paths.flatten.each do |path|
      path.replace( @system_wrapper.module_eval( path ) ) if (path =~ RUBY_STRING_REPLACEMENT_PATTERN)
    end
  end

end

View File

@ -0,0 +1,475 @@
require 'rubygems'
require 'rake' # for ext() method
require 'ceedling/file_path_utils' # for class methods
require 'ceedling/defaults'
require 'ceedling/constants' # for Verbosity constants class & base file paths
class ConfiguratorBuilder
constructor :file_system_utils, :file_wrapper, :system_wrapper
# Promote each key/value pair of the flattened configuration to a global
# constant (e.g. :project_build_root -> PROJECT_BUILD_ROOT), replacing any
# constant of the same name that already exists.
def build_global_constants(config)
  config.each_pair do |key, value|
    formatted_key = key.to_s.upcase
    # undefine global constant if it already exists
    Object.send(:remove_const, formatted_key.to_sym) if @system_wrapper.constants_include?(formatted_key)
    # create global constant
    # NOTE(review): string-form module_eval does not normally see the caller's
    # local `value` -- confirm this resolves as intended at runtime.
    Object.module_eval("#{formatted_key} = value")
  end
end
# Define a reader method for every flattened configuration key on the object
# whose binding is given as +context+ (the Configurator); each reader pulls
# its value from that object's @project_config_hash.
def build_accessor_methods(config, context)
  config.each_pair do |key, value|
    # fill configurator object with accessor methods
    eval("def #{key.to_s.downcase}() return @project_config_hash[:#{key.to_s}] end", context)
  end
end
# create a flattened hash from the original configuration structure
def flattenify(config)
new_hash = {}
config.each_key do | parent |
# gracefully handle empty top-level entries
next if (config[parent].nil?)
case config[parent]
when Array
config[parent].each do |hash|
key = "#{parent.to_s.downcase}_#{hash.keys[0].to_s.downcase}".to_sym
new_hash[key] = hash[hash.keys[0]]
end
when Hash
config[parent].each_pair do | child, value |
key = "#{parent.to_s.downcase}_#{child.to_s.downcase}".to_sym
new_hash[key] = value
end
# handle entries with no children, only values
else
new_hash["#{parent.to_s.downcase}".to_sym] = config[parent]
end
end
return new_hash
end
# Fill any missing entries of +config+ with deep clones of the corresponding
# +defaults+ values; entries already present in config are left untouched.
# Sections/entries are visited in sorted order for deterministic behavior.
def populate_defaults(config, defaults)
  defaults.keys.sort.each do |section|
    config[section] = {} if config[section].nil?
    defaults[section].keys.sort.each do |entry|
      next unless config[section][entry].nil?
      config[section][entry] = defaults[section][entry].deep_clone
    end
  end
end
# Ensure test-runner include files carry the configured header extension
# (replacing any extension they already have).
def clean(in_hash)
  header_extension = in_hash[:extension_header]
  in_hash[:test_runner_includes].map! { |header| header.ext(header_extension) }
end
# Derive every build output directory from the configured build root and
# return them keyed by symbol; also assembles :project_build_paths, the list
# of directories the build must create (release/preprocess paths included
# only when the corresponding feature is enabled).
def set_build_paths(in_hash)
  out_hash = {}

  project_build_artifacts_root = File.join(in_hash[:project_build_root], 'artifacts')
  project_build_tests_root = File.join(in_hash[:project_build_root], TESTS_BASE_PATH)
  project_build_release_root = File.join(in_hash[:project_build_root], RELEASE_BASE_PATH)

  # Each row: [ output hash key, path, condition to add it to :project_build_paths ]
  paths = [
    [:project_build_artifacts_root, project_build_artifacts_root, true ],
    [:project_build_tests_root, project_build_tests_root, true ],
    [:project_build_release_root, project_build_release_root, in_hash[:project_release_build] ],
    [:project_test_artifacts_path, File.join(project_build_artifacts_root, TESTS_BASE_PATH), true ],
    [:project_test_runners_path, File.join(project_build_tests_root, 'runners'), true ],
    [:project_test_results_path, File.join(project_build_tests_root, 'results'), true ],
    [:project_test_build_output_path, File.join(project_build_tests_root, 'out'), true ],
    [:project_test_build_output_asm_path, File.join(project_build_tests_root, 'out', 'asm'), true ],
    [:project_test_build_output_c_path, File.join(project_build_tests_root, 'out', 'c'), true ],
    [:project_test_build_cache_path, File.join(project_build_tests_root, 'cache'), true ],
    [:project_test_dependencies_path, File.join(project_build_tests_root, 'dependencies'), true ],
    [:project_release_artifacts_path, File.join(project_build_artifacts_root, RELEASE_BASE_PATH), in_hash[:project_release_build] ],
    [:project_release_build_cache_path, File.join(project_build_release_root, 'cache'), in_hash[:project_release_build] ],
    [:project_release_build_output_path, File.join(project_build_release_root, 'out'), in_hash[:project_release_build] ],
    [:project_release_build_output_asm_path, File.join(project_build_release_root, 'out', 'asm'), in_hash[:project_release_build] ],
    [:project_release_build_output_c_path, File.join(project_build_release_root, 'out', 'c'), in_hash[:project_release_build] ],
    [:project_release_dependencies_path, File.join(project_build_release_root, 'dependencies'), in_hash[:project_release_build] ],
    [:project_log_path, File.join(in_hash[:project_build_root], 'logs'), true ],
    [:project_temp_path, File.join(in_hash[:project_build_root], 'temp'), true ],
    [:project_test_preprocess_includes_path, File.join(project_build_tests_root, 'preprocess/includes'), in_hash[:project_use_test_preprocessor] ],
    [:project_test_preprocess_files_path, File.join(project_build_tests_root, 'preprocess/files'), in_hash[:project_use_test_preprocessor] ],
  ]

  out_hash[:project_build_paths] = []

  # fetch already set mock path
  out_hash[:project_build_paths] << in_hash[:cmock_mock_path] if (in_hash[:project_use_mocks])

  paths.each do |path|
    build_path_name = path[0]
    build_path = path[1]
    build_path_add_condition = path[2]

    # insert path into build paths if associated with true condition
    out_hash[:project_build_paths] << build_path if build_path_add_condition

    # set path symbol name and path for each entry in paths array
    out_hash[build_path_name] = build_path
  end

  return out_hash
end
# Compute the sentinel 'force_build' file paths used to trigger rebuilds;
# the release-side path is included only for release builds.
def set_force_build_filepaths(in_hash)
  filepaths = {
    :project_test_force_rebuild_filepath => File.join( in_hash[:project_test_dependencies_path], 'force_build' )
  }
  if in_hash[:project_release_build]
    filepaths[:project_release_force_rebuild_filepath] = File.join( in_hash[:project_release_dependencies_path], 'force_build' )
  end
  filepaths
end
# Assemble the ordered list of rake component files to load, driven by which
# project features (mocks, preprocessing, deep dependencies, release build)
# are enabled.
def set_rakefile_components(in_hash)
  release = in_hash[:project_release_build]
  deep    = in_hash[:project_use_deep_dependencies]

  names = %w[tasks_base.rake tasks_filesystem.rake tasks_tests.rake tasks_vendor.rake rules_tests.rake]
  names << 'rules_cmock.rake'                     if in_hash[:project_use_mocks]
  names << 'rules_preprocess.rake'                if in_hash[:project_use_test_preprocessor]
  names << 'rules_tests_deep_dependencies.rake'   if deep
  names << 'tasks_tests_deep_dependencies.rake'   if deep
  names << 'rules_release_deep_dependencies.rake' if (release and deep)
  names << 'rules_release.rake'                   if release
  names << 'tasks_release_deep_dependencies.rake' if (release and deep)
  names << 'tasks_release.rake'                   if release

  { :project_rakefile_component_files =>
      names.map { |name| File.join(CEEDLING_LIB, 'ceedling', name) } }
end
# Build the release target/map artifact paths; returns an empty hash for
# non-release builds. The artifact name comes from :release_build_output or
# falls back to the conventional default target name.
def set_release_target(in_hash)
  return {} unless in_hash[:project_release_build]

  configured_output = in_hash[:release_build_output]
  if configured_output.nil?
    release_target_file = DEFAULT_RELEASE_TARGET_NAME.ext(in_hash[:extension_executable])
    release_map_file    = DEFAULT_RELEASE_TARGET_NAME.ext(in_hash[:extension_map])
  else
    release_target_file = configured_output
    release_map_file    = configured_output.ext(in_hash[:extension_map])
  end

  # tempted to make a helper method in file_path_utils? stop right there, pal. you'll introduce a cyclical dependency
  {
    :project_release_build_target => File.join(in_hash[:project_build_release_root], release_target_file),
    :project_release_build_map    => File.join(in_hash[:project_build_release_root], release_map_file)
  }
end
# Gather every *.yml file beneath the configured project-options paths.
def collect_project_options(in_hash)
  listings = in_hash[:project_options_paths].map do |path|
    @file_wrapper.directory_listing( File.join(path, '*.yml') )
  end
  { :collection_project_options => listings.flatten }
end
# Expand every 'paths*' collection through the file-system globber and emit
# the results under matching 'collection_paths*' keys.
def expand_all_path_globs(in_hash)
  out_hash = {}
  globbable_keys = in_hash.keys.select { |key| key.to_s.start_with?('paths') }

  # sorted to provide assured order of traversal in test calls on mocks
  globbable_keys.sort.each do |key|
    out_hash["collection_#{key.to_s}".to_sym] = @file_system_utils.collect_paths( in_hash[key] )
  end

  out_hash
end
# Select the existing directories from the source and include path lists.
def collect_source_and_include_paths(in_hash)
  candidates = in_hash[:collection_paths_source] + in_hash[:collection_paths_include]
  { :collection_paths_source_and_include => candidates.select { |path| File.directory?(path) } }
end
# Source+include directories plus the CException vendor path when the
# project has exceptions enabled.
def collect_source_include_vendor_paths(in_hash)
  combined = in_hash[:collection_paths_source_and_include].dup
  combined << File.join(in_hash[:cexception_vendor_path], CEXCEPTION_LIB_PATH) if in_hash[:project_use_exceptions]
  { :collection_paths_source_include_vendor => combined }
end
# Select the existing directories from the test, support, source, and
# include path lists (in that order).
def collect_test_support_source_include_paths(in_hash)
  candidates =
    in_hash[:collection_paths_test] +
    in_hash[:collection_paths_support] +
    in_hash[:collection_paths_source] +
    in_hash[:collection_paths_include]
  { :collection_paths_test_support_source_include => candidates.select { |path| File.directory?(path) } }
end
# Wrap the vendor path list in its collection key.
def collect_vendor_paths(in_hash)
  { :collection_paths_vendor => get_vendor_paths(in_hash) }
end
# Vendor paths first, then the already-filtered test/support/source/include
# directory set.
def collect_test_support_source_include_vendor_paths(in_hash)
  combined = get_vendor_paths(in_hash) + in_hash[:collection_paths_test_support_source_include]
  { :collection_paths_test_support_source_include_vendor => combined }
end
# Build the FileList of all test files found in the test paths, then apply
# any user revisions from the :files:test section.
def collect_tests(in_hash)
  all_tests = @file_wrapper.instantiate_file_list
  test_glob = "#{in_hash[:project_test_file_prefix]}*#{in_hash[:extension_source]}"

  in_hash[:collection_paths_test].each do |path|
    all_tests.include( File.join(path, test_glob) )
  end

  @file_system_utils.revise_file_list( all_tests, in_hash[:files_test] )
  { :collection_all_tests => all_tests }
end
# Gathers assembly files from source and support paths plus any explicit
# :files:assembly: entries. Returns an empty file list when neither release
# nor test builds use assembly.
def collect_assembly(in_hash)
  all_assembly = @file_wrapper.instantiate_file_list
  use_assembly = in_hash[:release_build_use_assembly] || in_hash[:test_build_use_assembly]
  return { :collection_all_assembly => all_assembly } unless use_assembly
  glob = "*#{in_hash[:extension_assembly]}"
  # Sprinkle in all assembly files we can find in the source and support folders
  (in_hash[:collection_paths_source] + in_hash[:collection_paths_support]).each do |path|
    all_assembly.include( File.join(path, glob) )
  end
  # Also add files that we are explicitly adding via :files:assembly: section
  @file_system_utils.revise_file_list( all_assembly, in_hash[:files_assembly] )
  { :collection_all_assembly => all_assembly }
end
# Gathers all source files from the configured source paths. A path entry may
# be either a directory (globbed for files with the source extension) or an
# explicit file. Applies the user's add/subtract file revisions afterward.
#
# Fix: File.exists? was deprecated and removed in Ruby 3.2; use File.exist?.
def collect_source(in_hash)
  all_source = @file_wrapper.instantiate_file_list
  in_hash[:collection_paths_source].each do |path|
    if File.exist?(path) and not File.directory?(path)
      # explicit file entry rather than a directory to glob
      all_source.include( path )
    else
      all_source.include( File.join(path, "*#{in_hash[:extension_source]}") )
    end
  end
  @file_system_utils.revise_file_list( all_source, in_hash[:files_source] )
  return {:collection_all_source => all_source}
end
# Gathers all header files from test, support, source, and include paths,
# then applies the user's add/subtract revisions from :files:include:.
def collect_headers(in_hash)
  all_headers = @file_wrapper.instantiate_file_list
  search_paths =
    in_hash[:collection_paths_test] +
    in_hash[:collection_paths_support] +
    in_hash[:collection_paths_source] +
    in_hash[:collection_paths_include]
  glob = "*#{in_hash[:extension_header]}"
  search_paths.each { |path| all_headers.include( File.join(path, glob) ) }
  @file_system_utils.revise_file_list( all_headers, in_hash[:files_include] )
  { :collection_all_headers => all_headers }
end
# Gathers existing compilation input (headers + sources) for a release build
# from source/include paths plus the CException vendor path when exceptions
# are enabled. Applies user add/subtract file revisions afterward.
#
# Fix: File.exists? was deprecated and removed in Ruby 3.2; use File.exist?.
def collect_release_existing_compilation_input(in_hash)
  release_input = @file_wrapper.instantiate_file_list
  paths =
    in_hash[:collection_paths_source] +
    in_hash[:collection_paths_include]
  paths << File.join(in_hash[:cexception_vendor_path], CEXCEPTION_LIB_PATH) if (in_hash[:project_use_exceptions])
  paths.each do |path|
    release_input.include( File.join(path, "*#{in_hash[:extension_header]}") )
    if File.exist?(path) and not File.directory?(path)
      # explicit file entry rather than a directory to glob
      release_input.include( path )
    else
      release_input.include( File.join(path, "*#{in_hash[:extension_source]}") )
    end
  end
  @file_system_utils.revise_file_list( release_input, in_hash[:files_source] )
  @file_system_utils.revise_file_list( release_input, in_hash[:files_include] )
  # finding assembly files handled explicitly through other means
  return {:collection_release_existing_compilation_input => release_input}
end
# Gathers every existing compilation input (headers, sources, optionally
# assembly) across test, support, source, include, and vendor paths, then
# applies user add/subtract file revisions for all four file sections.
#
# Fix: File.exists? was deprecated and removed in Ruby 3.2; use File.exist?.
def collect_all_existing_compilation_input(in_hash)
  all_input = @file_wrapper.instantiate_file_list
  paths =
    in_hash[:collection_paths_test] +
    in_hash[:collection_paths_support] +
    in_hash[:collection_paths_source] +
    in_hash[:collection_paths_include] +
    [File.join(in_hash[:unity_vendor_path], UNITY_LIB_PATH)]
  paths << File.join(in_hash[:cexception_vendor_path], CEXCEPTION_LIB_PATH) if (in_hash[:project_use_exceptions])
  paths << File.join(in_hash[:cmock_vendor_path], CMOCK_LIB_PATH) if (in_hash[:project_use_mocks])
  paths.each do |path|
    all_input.include( File.join(path, "*#{in_hash[:extension_header]}") )
    if File.exist?(path) and not File.directory?(path)
      # explicit file entry rather than a directory to glob
      all_input.include( path )
    else
      all_input.include( File.join(path, "*#{in_hash[:extension_source]}") )
      all_input.include( File.join(path, "*#{in_hash[:extension_assembly]}") ) if (defined?(TEST_BUILD_USE_ASSEMBLY) && TEST_BUILD_USE_ASSEMBLY)
    end
  end
  @file_system_utils.revise_file_list( all_input, in_hash[:files_test] )
  @file_system_utils.revise_file_list( all_input, in_hash[:files_support] )
  @file_system_utils.revise_file_list( all_input, in_hash[:files_source] )
  @file_system_utils.revise_file_list( all_input, in_hash[:files_include] )
  # finding assembly files handled explicitly through other means
  return {:collection_all_existing_compilation_input => all_input}
end
# Assembles the vendor compile defines: Unity always, plus CMock and
# CException defines when those features are enabled.
def get_vendor_defines(in_hash)
  defines = in_hash[:unity_defines].clone
  defines += in_hash[:cmock_defines] if in_hash[:project_use_mocks]
  defines += in_hash[:cexception_defines] if in_hash[:project_use_exceptions]
  defines
end
# Wraps the vendor define list under its collection key.
def collect_vendor_defines(in_hash)
  { :collection_defines_vendor => get_vendor_defines(in_hash) }
end
# Combines test-build defines with the vendor defines.
def collect_test_and_vendor_defines(in_hash)
  defines = in_hash[:defines_test].clone
  vendor_defines = get_vendor_defines(in_hash)
  defines += vendor_defines if vendor_defines
  { :collection_defines_test_and_vendor => defines }
end
# Combines release-build defines with CException defines when exception
# support is enabled.
def collect_release_and_vendor_defines(in_hash)
  release_defines = in_hash[:defines_release].clone
  release_defines += in_hash[:cexception_defines] if in_hash[:project_use_exceptions]
  { :collection_defines_release_and_vendor => release_defines }
end
# Extra object files to link into the release artifact (currently only the
# CException object when exceptions are enabled).
# No build paths here so plugins can remap if necessary (i.e. path mapping
# happens at runtime).
def collect_release_artifact_extra_link_objects(in_hash)
  objects = []
  objects << CEXCEPTION_C_FILE.ext( in_hash[:extension_object] ) if in_hash[:project_use_exceptions]
  { :collection_release_artifact_extra_link_objects => objects }
end
# Note: Symbols passed to compiler at command line can change Unity and
# CException behavior / configuration; we also handle those dependencies
# elsewhere in compilation dependencies.
def collect_test_fixture_extra_link_objects(in_hash)
  sources = [UNITY_C_FILE] + in_hash[:files_support]
  # we don't include paths here because use of plugins or mixing different
  # compilers may require different build paths
  sources << CEXCEPTION_C_FILE if in_hash[:project_use_exceptions]
  sources << CMOCK_C_FILE if in_hash[:project_use_mocks]
  # if we're using mocks & a unity helper is defined & that unity helper
  # includes a source file component (not only a header of macros), then
  # link in the unity_helper object file too
  if in_hash[:project_use_mocks] and in_hash[:cmock_unity_helper]
    in_hash[:cmock_unity_helper].each do |helper|
      sources << helper if @file_wrapper.exist?( helper.ext(in_hash[:extension_source]) )
    end
  end
  # create object files from all the sources; no build paths here so plugins
  # can remap if necessary (i.e. path mapping happens at runtime)
  objects = sources.map { |file| File.basename(file).ext(in_hash[:extension_object]) }
  { :collection_all_support => sources,
    :collection_test_fixture_extra_link_objects => objects }
end
private
# Paths to vendor library sources: Unity always; CException and CMock
# (including the generated-mocks path) only when enabled.
def get_vendor_paths(in_hash)
  vendor_paths = [ File.join(in_hash[:unity_vendor_path], UNITY_LIB_PATH) ]
  vendor_paths << File.join(in_hash[:cexception_vendor_path], CEXCEPTION_LIB_PATH) if in_hash[:project_use_exceptions]
  if in_hash[:project_use_mocks]
    vendor_paths << File.join(in_hash[:cmock_vendor_path], CMOCK_LIB_PATH)
    vendor_paths << in_hash[:cmock_mock_path]
  end
  vendor_paths
end
end

View File

@ -0,0 +1,131 @@
require 'ceedling/constants'
# Discovers enabled plugins on disk and collects their rake tasks, ruby
# classes, YAML configs, and default configuration hashes.
class ConfiguratorPlugins

  constructor :stream_wrapper, :file_wrapper, :system_wrapper
  attr_reader :rake_plugins, :script_plugins

  def setup
    @rake_plugins   = []
    @script_plugins = []
  end

  # Locate each enabled plugin beneath the configured load paths; register
  # script plugins' lib/ and config/ directories on the Ruby load path.
  # Returns a hash mapping :<plugin>_path symbols to plugin root directories.
  def add_load_paths(config)
    plugin_paths = {}

    config[:plugins][:enabled].each do |plugin|
      config[:plugins][:load_paths].each do |root|
        path = File.join(root, plugin)

        # a script plugin ships ruby classes in lib/; a rake plugin ships .rake tasks
        is_script_plugin = ( not @file_wrapper.directory_listing( File.join( path, 'lib', '*.rb' ) ).empty? )
        is_rake_plugin   = ( not @file_wrapper.directory_listing( File.join( path, '*.rake' ) ).empty? )

        if is_script_plugin or is_rake_plugin
          plugin_paths[(plugin + '_path').to_sym] = path

          if is_script_plugin
            @system_wrapper.add_load_path( File.join( path, 'lib') )
            @system_wrapper.add_load_path( File.join( path, 'config') )
          end
          # first load path containing the plugin wins
          break
        end
      end
    end

    return plugin_paths
  end

  # gather up and return .rake filepaths that exist on-disk
  def find_rake_plugins(config, plugin_paths)
    @rake_plugins = []
    plugins_with_path = []

    config[:plugins][:enabled].each do |plugin|
      if path = plugin_paths[(plugin + '_path').to_sym]
        rake_plugin_path = File.join(path, "#{plugin}.rake")
        if (@file_wrapper.exist?(rake_plugin_path))
          plugins_with_path << rake_plugin_path
          @rake_plugins << plugin
        end
      end
    end

    return plugins_with_path
  end

  # gather up and return just names of .rb classes that exist on-disk
  def find_script_plugins(config, plugin_paths)
    @script_plugins = []

    config[:plugins][:enabled].each do |plugin|
      if path = plugin_paths[(plugin + '_path').to_sym]
        script_plugin_path = File.join(path, "lib", "#{plugin}.rb")
        if @file_wrapper.exist?(script_plugin_path)
          @script_plugins << plugin
        end
      end
    end

    return @script_plugins
  end

  # gather up and return configuration .yml filepaths that exist on-disk
  def find_config_plugins(config, plugin_paths)
    plugins_with_path = []

    config[:plugins][:enabled].each do |plugin|
      if path = plugin_paths[(plugin + '_path').to_sym]
        config_plugin_path = File.join(path, "config", "#{plugin}.yml")
        if @file_wrapper.exist?(config_plugin_path)
          plugins_with_path << config_plugin_path
        end
      end
    end

    return plugins_with_path
  end

  # gather up and return default .yml filepaths that exist on-disk
  def find_plugin_yml_defaults(config, plugin_paths)
    defaults_with_path = []

    config[:plugins][:enabled].each do |plugin|
      if path = plugin_paths[(plugin + '_path').to_sym]
        default_path = File.join(path, 'config', 'defaults.yml')
        if @file_wrapper.exist?(default_path)
          defaults_with_path << default_path
        end
      end
    end

    return defaults_with_path
  end

  # gather up and return default configuration hashes provided by plugin ruby code
  def find_plugin_hash_defaults(config, plugin_paths)
    defaults_hash = []

    config[:plugins][:enabled].each do |plugin|
      if path = plugin_paths[(plugin + '_path').to_sym]
        default_path = File.join(path, "config", "defaults_#{plugin}.rb")
        if @file_wrapper.exist?(default_path)
          @system_wrapper.require_file( "defaults_#{plugin}.rb")
          # Fix: call the plugin-provided hook directly; the original used
          # eval("get_default_config()"), which is equivalent but needlessly
          # routes a fixed call through eval.
          defaults_hash << get_default_config()
        end
      end
    end

    return defaults_hash
  end

end

View File

@ -0,0 +1,128 @@
# add sort-ability to symbol so we can order keys array in hash for test-ability
class Symbol
  include Comparable

  # Compare symbols via their string representations.
  def <=>(other)
    to_s <=> other.to_s
  end
end
# Orchestrates fleshing out, freezing, and validating the final project
# configuration by delegating to builder/validator/plugin collaborators.
class ConfiguratorSetup
constructor :configurator_builder, :configurator_validator, :configurator_plugins, :stream_wrapper
# Fleshes out flattened_config with derived values and file-system-discovered
# collections. NOTE(review): the merge! sequence is order-dependent -- later
# collectors consume keys produced by earlier ones (e.g. path globs must be
# expanded before path collections can be derived from them).
def build_project_config(config, flattened_config)
### flesh out config
@configurator_builder.clean(flattened_config)
### add to hash values we build up from configuration & file system contents
flattened_config.merge!(@configurator_builder.set_build_paths(flattened_config))
flattened_config.merge!(@configurator_builder.set_force_build_filepaths(flattened_config))
flattened_config.merge!(@configurator_builder.set_rakefile_components(flattened_config))
flattened_config.merge!(@configurator_builder.set_release_target(flattened_config))
flattened_config.merge!(@configurator_builder.collect_project_options(flattened_config))
### iterate through all entries in paths section and expand any & all globs to actual paths
flattened_config.merge!(@configurator_builder.expand_all_path_globs(flattened_config))
flattened_config.merge!(@configurator_builder.collect_vendor_paths(flattened_config))
flattened_config.merge!(@configurator_builder.collect_source_and_include_paths(flattened_config))
flattened_config.merge!(@configurator_builder.collect_source_include_vendor_paths(flattened_config))
flattened_config.merge!(@configurator_builder.collect_test_support_source_include_paths(flattened_config))
flattened_config.merge!(@configurator_builder.collect_test_support_source_include_vendor_paths(flattened_config))
flattened_config.merge!(@configurator_builder.collect_tests(flattened_config))
flattened_config.merge!(@configurator_builder.collect_assembly(flattened_config))
flattened_config.merge!(@configurator_builder.collect_source(flattened_config))
flattened_config.merge!(@configurator_builder.collect_headers(flattened_config))
flattened_config.merge!(@configurator_builder.collect_release_existing_compilation_input(flattened_config))
flattened_config.merge!(@configurator_builder.collect_all_existing_compilation_input(flattened_config))
flattened_config.merge!(@configurator_builder.collect_vendor_defines(flattened_config))
flattened_config.merge!(@configurator_builder.collect_test_and_vendor_defines(flattened_config))
flattened_config.merge!(@configurator_builder.collect_release_and_vendor_defines(flattened_config))
flattened_config.merge!(@configurator_builder.collect_release_artifact_extra_link_objects(flattened_config))
flattened_config.merge!(@configurator_builder.collect_test_fixture_extra_link_objects(flattened_config))
return flattened_config
end
# Freezes configuration into global constants and defines accessor methods
# on the given context object.
def build_constants_and_accessors(config, context)
@configurator_builder.build_global_constants(config)
@configurator_builder.build_accessor_methods(config, context)
end
# Returns false unless the required top-level config sections exist.
def validate_required_sections(config)
validation = []
validation << @configurator_validator.exists?(config, :project)
validation << @configurator_validator.exists?(config, :paths)
return false if (validation.include?(false))
return true
end
# Returns false unless required values within required sections exist.
def validate_required_section_values(config)
validation = []
validation << @configurator_validator.exists?(config, :project, :build_root)
validation << @configurator_validator.exists?(config, :paths, :test)
validation << @configurator_validator.exists?(config, :paths, :source)
return false if (validation.include?(false))
return true
end
# Validates on-disk existence of configured paths: cmock unity helpers,
# project option paths, plugin load paths, and every :paths entry.
def validate_paths(config)
validation = []
if config[:cmock][:unity_helper]
config[:cmock][:unity_helper].each do |path|
validation << @configurator_validator.validate_filepath_simple( path, :cmock, :unity_helper )
end
end
config[:project][:options_paths].each do |path|
validation << @configurator_validator.validate_filepath_simple( path, :project, :options_paths )
end
config[:plugins][:load_paths].each do |path|
validation << @configurator_validator.validate_filepath_simple( path, :plugins, :load_paths )
end
config[:paths].keys.sort.each do |key|
validation << @configurator_validator.validate_path_list(config, :paths, key)
end
return false if (validation.include?(false))
return true
end
# Validates each tool entry: executable key present, executable resolvable
# on disk or in search paths (unless the tool is marked :optional), and the
# stderr redirect option is recognized.
def validate_tools(config)
validation = []
config[:tools].keys.sort.each do |key|
validation << @configurator_validator.exists?(config, :tools, key, :executable)
validation << @configurator_validator.validate_executable_filepath(config, :tools, key, :executable) if (not config[:tools][key][:optional])
validation << @configurator_validator.validate_tool_stderr_redirect(config, :tools, key)
end
return false if (validation.include?(false))
return true
end
# Reports enabled plugins that provided neither a rake nor a script entry
# point. NOTE(review): relies on Set being available (autoloaded in Ruby
# 3.2+, otherwise via an earlier require) -- confirm for older rubies.
def validate_plugins(config)
missing_plugins =
Set.new( config[:plugins][:enabled] ) -
Set.new( @configurator_plugins.rake_plugins ) -
Set.new( @configurator_plugins.script_plugins )
missing_plugins.each do |plugin|
@stream_wrapper.stderr_puts("ERROR: Ceedling plugin '#{plugin}' contains no rake or ruby class entry point. (Misspelled or missing files?)")
end
return ( (missing_plugins.size > 0) ? false : true )
end
end

View File

@ -0,0 +1,193 @@
require 'rubygems'
require 'rake' # for ext()
require 'ceedling/constants'
require 'ceedling/tool_executor' # for argument replacement pattern
require 'ceedling/file_path_utils' # for glob handling class methods
# Validation helpers used during configuration setup to check config hash
# contents against the file system and tool conventions.
class ConfiguratorValidator
constructor :file_wrapper, :stream_wrapper, :system_wrapper
# walk into config hash verify existence of data at key depth
def exists?(config, *keys)
hash = retrieve_value(config, keys)
exist = !hash[:value].nil?
if (not exist)
# no verbosity checking since this is lowest level anyhow & verbosity checking depends on configurator
@stream_wrapper.stderr_puts("ERROR: Required config file entry #{format_key_sequence(keys, hash[:depth])} does not exist.")
end
return exist
end
# walk into config hash. verify directory path(s) at given key depth
def validate_path_list(config, *keys)
hash = retrieve_value(config, keys)
list = hash[:value]
# return early if we couldn't walk into hash and find a value
return false if (list.nil?)
path_list = []
exist = true
# normalize a lone string entry into a one-element list
case list
when String then path_list << list
when Array then path_list = list
end
path_list.each do |path|
base_path = FilePathUtils::extract_path(path) # lop off add/subtract notation & glob specifiers
if (not @file_wrapper.exist?(base_path))
# no verbosity checking since this is lowest level anyhow & verbosity checking depends on configurator
@stream_wrapper.stderr_puts("ERROR: Config path #{format_key_sequence(keys, hash[:depth])}['#{base_path}'] does not exist on disk.")
exist = false
end
end
return exist
end
# simple path verification
def validate_filepath_simple(path, *keys)
validate_path = path
if (not @file_wrapper.exist?(validate_path))
# no verbosity checking since this is lowest level anyhow & verbosity checking depends on configurator
@stream_wrapper.stderr_puts("ERROR: Config path '#{validate_path}' associated with #{format_key_sequence(keys, keys.size)} does not exist on disk.")
return false
end
return true
end
# walk into config hash. verify specified file exists.
def validate_filepath(config, *keys)
hash = retrieve_value(config, keys)
filepath = hash[:value]
# return early if we couldn't walk into hash and find a value
return false if (filepath.nil?)
# skip everything if we've got an argument replacement pattern
return true if (filepath =~ TOOL_EXECUTOR_ARGUMENT_REPLACEMENT_PATTERN)
if (not @file_wrapper.exist?(filepath))
# See if we can deal with it internally.
# NOTE(review): GENERATED_DIR_PATH must hold joined path strings for this
# membership test to ever match -- verify against ceedling/constants.
if GENERATED_DIR_PATH.include?(filepath)
# we already made this directory before let's make it again.
# NOTE(review): recreates relative to this source file's directory, not
# the project root -- confirm that is the intent.
FileUtils.mkdir_p File.join(File.dirname(__FILE__), filepath)
@stream_wrapper.stderr_puts("WARNING: Generated filepath #{format_key_sequence(keys, hash[:depth])}['#{filepath}'] does not exist on disk. Recreating")
else
# no verbosity checking since this is lowest level anyhow & verbosity checking depends on configurator
@stream_wrapper.stderr_puts("ERROR: Config filepath #{format_key_sequence(keys, hash[:depth])}['#{filepath}'] does not exist on disk.")
return false
end
end
return true
end
# walk into config hash. verify specified file exists.
def validate_executable_filepath(config, *keys)
exe_extension = config[:extension][:executable]
hash = retrieve_value(config, keys)
filepath = hash[:value]
# return early if we couldn't walk into hash and find a value
return false if (filepath.nil?)
# skip everything if we've got an argument replacement pattern
return true if (filepath =~ TOOL_EXECUTOR_ARGUMENT_REPLACEMENT_PATTERN)
# if there's no path included, verify file exists somewhere in system search paths
if (not filepath.include?('/'))
exists = false
@system_wrapper.search_paths.each do |path|
# try the bare name first
if (@file_wrapper.exist?( File.join(path, filepath)) )
exists = true
break
end
# then the name with the configured executable extension, and on Windows
# additionally the .exe fallback
if (@file_wrapper.exist?( (File.join(path, filepath)).ext( exe_extension ) ))
exists = true
break
elsif (@system_wrapper.windows? and @file_wrapper.exist?( (File.join(path, filepath)).ext( EXTENSION_WIN_EXE ) ))
exists = true
break
end
end
if (not exists)
# no verbosity checking since this is lowest level anyhow & verbosity checking depends on configurator
@stream_wrapper.stderr_puts("ERROR: Config filepath #{format_key_sequence(keys, hash[:depth])}['#{filepath}'] does not exist in system search paths.")
return false
end
# if there is a path included, check that explicit filepath exists
else
if (not @file_wrapper.exist?(filepath))
# no verbosity checking since this is lowest level anyhow & verbosity checking depends on configurator
@stream_wrapper.stderr_puts("ERROR: Config filepath #{format_key_sequence(keys, hash[:depth])}['#{filepath}'] does not exist on disk.")
return false
end
end
return true
end
# Verifies a tool's :stderr_redirect symbol is one of the StdErrRedirect
# options; non-symbol values (custom redirect strings) pass through unchecked.
def validate_tool_stderr_redirect(config, tools, tool)
redirect = config[tools][tool][:stderr_redirect]
if (redirect.class == Symbol)
# map constants and force to array of strings for runtime universality across ruby versions
if (not StdErrRedirect.constants.map{|constant| constant.to_s}.include?(redirect.to_s.upcase))
error = "ERROR: [:#{tools}][:#{tool}][:stderr_redirect][:#{redirect}] is not a recognized option " +
"{#{StdErrRedirect.constants.map{|constant| ':' + constant.to_s.downcase}.join(', ')}}."
@stream_wrapper.stderr_puts(error)
return false
end
end
return true
end
private #########################################
# Walks the key sequence into the config hash; returns {:value, :depth}
# where :value is nil if any key was missing and :depth records how far the
# walk proceeded (used for error message formatting).
def retrieve_value(config, keys)
value = nil
hash = config
depth = 0
# walk into hash & extract value at requested key sequence
keys.each do |symbol|
depth += 1
if (not hash[symbol].nil?)
hash = hash[symbol]
value = hash
else
value = nil
break
end
end
return {:value => value, :depth => depth}
end
# Renders the walked portion of a key sequence as "[:a][:b]" for messages.
def format_key_sequence(keys, depth)
walked_keys = keys.slice(0, depth)
formatted_keys = walked_keys.map{|key| "[:#{key.to_s}]"}
return formatted_keys.join
end
end

View File

@ -0,0 +1,99 @@
# Console output verbosity levels, ordered least to most chatty.
class Verbosity
SILENT = 0 # as silent as possible (though there are some messages that must be spit out)
ERRORS = 1 # only errors
COMPLAIN = 2 # spit out errors and warnings/notices
NORMAL = 3 # errors, warnings/notices, standard status messages
OBNOXIOUS = 4 # all messages including extra verbose output (used for lite debugging / verification)
DEBUG = 5 # special extra verbose output for hardcore debugging
end
# Levels of sanity checking applied to parsed test results.
class TestResultsSanityChecks
NONE = 0 # no sanity checking of test results
NORMAL = 1 # perform non-problematic checks
THOROUGH = 2 # perform checks that require inside knowledge of system workings
end
# Recognized stderr redirection schemes for tool execution.
class StdErrRedirect
NONE = :none
AUTO = :auto
WIN = :win
UNIX = :unix
TCSH = :tcsh
end
# Recognized background-execution schemes for tool execution.
class BackgroundExec
NONE = :none
AUTO = :auto
WIN = :win
UNIX = :unix
end
unless defined?(PROJECT_ROOT)
PROJECT_ROOT = Dir.pwd()
end
# Directories Ceedling itself may (re)create.
# Fix: Array#each returns its receiver, so the original constant held the raw
# template (with nested arrays) rather than joined path strings; Array#map
# produces the actual paths that validators compare filepaths against.
GENERATED_DIR_PATH = [['vendor', 'ceedling'], 'src', "test", ['test', 'support'], 'build'].map{|p| File.join(*p)}.freeze
EXTENSION_WIN_EXE = '.exe'
EXTENSION_NONWIN_EXE = '.out'
# vendor library layout (relative to each library's vendor path)
CEXCEPTION_ROOT_PATH = 'c_exception'
CEXCEPTION_LIB_PATH = "#{CEXCEPTION_ROOT_PATH}/lib"
CEXCEPTION_C_FILE = 'CException.c'
CEXCEPTION_H_FILE = 'CException.h'
UNITY_ROOT_PATH = 'unity'
UNITY_LIB_PATH = "#{UNITY_ROOT_PATH}/src"
UNITY_C_FILE = 'unity.c'
UNITY_H_FILE = 'unity.h'
UNITY_INTERNALS_H_FILE = 'unity_internals.h'
CMOCK_ROOT_PATH = 'cmock'
CMOCK_LIB_PATH = "#{CMOCK_ROOT_PATH}/src"
CMOCK_C_FILE = 'cmock.c'
CMOCK_H_FILE = 'cmock.h'
DEFAULT_CEEDLING_MAIN_PROJECT_FILE = 'project.yml' unless defined?(DEFAULT_CEEDLING_MAIN_PROJECT_FILE) # main project file
DEFAULT_CEEDLING_USER_PROJECT_FILE = 'user.yml' unless defined?(DEFAULT_CEEDLING_USER_PROJECT_FILE) # supplemental user config file
INPUT_CONFIGURATION_CACHE_FILE = 'input.yml' unless defined?(INPUT_CONFIGURATION_CACHE_FILE) # input configuration file dump
DEFINES_DEPENDENCY_CACHE_FILE = 'defines_dependency.yml' unless defined?(DEFINES_DEPENDENCY_CACHE_FILE) # preprocessor definitions for files
# rake task namespace roots and their symbol forms
TEST_ROOT_NAME = 'test' unless defined?(TEST_ROOT_NAME)
TEST_TASK_ROOT = TEST_ROOT_NAME + ':' unless defined?(TEST_TASK_ROOT)
TEST_SYM = TEST_ROOT_NAME.to_sym unless defined?(TEST_SYM)
RELEASE_ROOT_NAME = 'release' unless defined?(RELEASE_ROOT_NAME)
RELEASE_TASK_ROOT = RELEASE_ROOT_NAME + ':' unless defined?(RELEASE_TASK_ROOT)
RELEASE_SYM = RELEASE_ROOT_NAME.to_sym unless defined?(RELEASE_SYM)
REFRESH_ROOT_NAME = 'refresh' unless defined?(REFRESH_ROOT_NAME)
REFRESH_TASK_ROOT = REFRESH_ROOT_NAME + ':' unless defined?(REFRESH_TASK_ROOT)
REFRESH_SYM = REFRESH_ROOT_NAME.to_sym unless defined?(REFRESH_SYM)
UTILS_ROOT_NAME = 'utils' unless defined?(UTILS_ROOT_NAME)
UTILS_TASK_ROOT = UTILS_ROOT_NAME + ':' unless defined?(UTILS_TASK_ROOT)
UTILS_SYM = UTILS_ROOT_NAME.to_sym unless defined?(UTILS_SYM)
OPERATION_COMPILE_SYM = :compile unless defined?(OPERATION_COMPILE_SYM)
OPERATION_ASSEMBLE_SYM = :assemble unless defined?(OPERATION_ASSEMBLE_SYM)
OPERATION_LINK_SYM = :link unless defined?(OPERATION_LINK_SYM)
# patterns used when expanding tool argument templates and parsing test output
RUBY_STRING_REPLACEMENT_PATTERN = /#\{.+\}/
RUBY_EVAL_REPLACEMENT_PATTERN = /^\{(.+)\}$/
TOOL_EXECUTOR_ARGUMENT_REPLACEMENT_PATTERN = /(\$\{(\d+)\})/
TEST_STDOUT_STATISTICS_PATTERN = /\n-+\s*(\d+)\s+Tests\s+(\d+)\s+Failures\s+(\d+)\s+Ignored\s+(OK|FAIL)\s*/i
# NOTE(review): '/dev/null' is POSIX-only; Windows would need 'NUL' -- confirm
# callers guard by platform.
NULL_FILE_PATH = '/dev/null'
TESTS_BASE_PATH = TEST_ROOT_NAME
RELEASE_BASE_PATH = RELEASE_ROOT_NAME
VENDORS_FILES = %w(unity UnityHelper cmock CException).freeze

View File

@ -0,0 +1,471 @@
require 'ceedling/constants'
require 'ceedling/system_wrapper'
require 'ceedling/file_path_utils'
#this should be defined already, but not always during system specs
CEEDLING_VENDOR = File.expand_path(File.dirname(__FILE__) + '/../../vendor') unless defined? CEEDLING_VENDOR
# plugin list defaults to empty when the launcher did not populate it
CEEDLING_PLUGINS = [] unless defined? CEEDLING_PLUGINS
# Default test compiler: gcc unless overridden via the CC environment
# variable; extra CC words plus CPPFLAGS/CFLAGS are spliced into arguments.
DEFAULT_TEST_COMPILER_TOOL = {
:executable => ENV['CC'].nil? ? FilePathUtils.os_executable_ext('gcc').freeze : ENV['CC'].split[0],
:name => 'default_test_compiler'.freeze,
:stderr_redirect => StdErrRedirect::NONE.freeze,
:background_exec => BackgroundExec::NONE.freeze,
:optional => false.freeze,
:arguments => [
ENV['CC'].nil? ? "" : ENV['CC'].split[1..-1],
ENV['CPPFLAGS'].nil? ? "" : ENV['CPPFLAGS'].split,
{"-I\"$\"" => 'COLLECTION_PATHS_TEST_SUPPORT_SOURCE_INCLUDE_VENDOR'}.freeze,
{"-I\"$\"" => 'COLLECTION_PATHS_TEST_TOOLCHAIN_INCLUDE'}.freeze,
{"-D$" => 'COLLECTION_DEFINES_TEST_AND_VENDOR'}.freeze,
"-DGNU_COMPILER".freeze,
"-g".freeze,
ENV['CFLAGS'].nil? ? "" : ENV['CFLAGS'].split,
"-c \"${1}\"".freeze,
"-o \"${2}\"".freeze,
# gcc's list file output options are complex; no use of ${3} parameter in default config
"-MMD".freeze,
"-MF \"${4}\"".freeze,
].freeze
}
# Default test linker: gcc unless overridden via CCLD; honors CFLAGS,
# LDFLAGS, and LDLIBS.
DEFAULT_TEST_LINKER_TOOL = {
:executable => ENV['CCLD'].nil? ? FilePathUtils.os_executable_ext('gcc').freeze : ENV['CCLD'].split[0],
:name => 'default_test_linker'.freeze,
:stderr_redirect => StdErrRedirect::NONE.freeze,
:background_exec => BackgroundExec::NONE.freeze,
:optional => false.freeze,
:arguments => [
ENV['CCLD'].nil? ? "" : ENV['CCLD'].split[1..-1],
ENV['CFLAGS'].nil? ? "" : ENV['CFLAGS'].split,
ENV['LDFLAGS'].nil? ? "" : ENV['LDFLAGS'].split,
"\"${1}\"".freeze,
"${5}".freeze,
"-o \"${2}\"".freeze,
"".freeze,
"${4}".freeze,
ENV['LDLIBS'].nil? ? "" : ENV['LDLIBS'].split
].freeze
}
# Default fixture simply executes the built test binary (${1}).
DEFAULT_TEST_FIXTURE_TOOL = {
:executable => '${1}'.freeze,
:name => 'default_test_fixture'.freeze,
:stderr_redirect => StdErrRedirect::AUTO.freeze,
:background_exec => BackgroundExec::NONE.freeze,
:optional => false.freeze,
:arguments => [].freeze
}
# Preprocessor invocation used to discover #include dependencies (-MM -MG).
DEFAULT_TEST_INCLUDES_PREPROCESSOR_TOOL = {
:executable => ENV['CC'].nil? ? FilePathUtils.os_executable_ext('gcc').freeze : ENV['CC'].split[0],
:name => 'default_test_includes_preprocessor'.freeze,
:stderr_redirect => StdErrRedirect::NONE.freeze,
:background_exec => BackgroundExec::NONE.freeze,
:optional => false.freeze,
:arguments => [
ENV['CC'].nil? ? "" : ENV['CC'].split[1..-1],
ENV['CPPFLAGS'].nil? ? "" : ENV['CPPFLAGS'].split,
'-E'.freeze, # OSX clang
'-MM'.freeze,
'-MG'.freeze,
# avoid some possibility of deep system lib header file complications by omitting vendor paths
# if cpp is run on *nix system, escape spaces in paths; if cpp on windows just use the paths collection as is
# {"-I\"$\"" => "{SystemWrapper.windows? ? COLLECTION_PATHS_TEST_SUPPORT_SOURCE_INCLUDE : COLLECTION_PATHS_TEST_SUPPORT_SOURCE_INCLUDE.map{|path| path.gsub(\/ \/, \'\\\\ \') }}"}.freeze,
{"-I\"$\"" => 'COLLECTION_PATHS_TEST_SUPPORT_SOURCE_INCLUDE_VENDOR'}.freeze,
{"-I\"$\"" => 'COLLECTION_PATHS_TEST_TOOLCHAIN_INCLUDE'}.freeze,
{"-D$" => 'COLLECTION_DEFINES_TEST_AND_VENDOR'}.freeze,
{"-D$" => 'DEFINES_TEST_PREPROCESS'}.freeze,
"-DGNU_COMPILER".freeze, # OSX clang
# '-nostdinc'.freeze, # disabled temporarily due to stdio access violations on OSX
"\"${1}\"".freeze
].freeze
}
# Full-file preprocessing (-E) of a test source into an output file.
DEFAULT_TEST_FILE_PREPROCESSOR_TOOL = {
:executable => ENV['CC'].nil? ? FilePathUtils.os_executable_ext('gcc').freeze : ENV['CC'].split[0],
:name => 'default_test_file_preprocessor'.freeze,
:stderr_redirect => StdErrRedirect::NONE.freeze,
:background_exec => BackgroundExec::NONE.freeze,
:optional => false.freeze,
:arguments => [
ENV['CC'].nil? ? "" : ENV['CC'].split[1..-1],
ENV['CPPFLAGS'].nil? ? "" : ENV['CPPFLAGS'].split,
'-E'.freeze,
{"-I\"$\"" => 'COLLECTION_PATHS_TEST_SUPPORT_SOURCE_INCLUDE_VENDOR'}.freeze,
{"-I\"$\"" => 'COLLECTION_PATHS_TEST_TOOLCHAIN_INCLUDE'}.freeze,
{"-D$" => 'COLLECTION_DEFINES_TEST_AND_VENDOR'}.freeze,
{"-D$" => 'DEFINES_TEST_PREPROCESS'}.freeze,
"-DGNU_COMPILER".freeze,
# '-nostdinc'.freeze, # disabled temporarily due to stdio access violations on OSX
"\"${1}\"".freeze,
"-o \"${2}\"".freeze
].freeze
}
# Directives-only preprocessing (-fdirectives-only); note this tool does not
# honor the CC environment variable.
DEFAULT_TEST_FILE_PREPROCESSOR_DIRECTIVES_TOOL = {
:executable => FilePathUtils.os_executable_ext('gcc').freeze,
:name => 'default_test_file_preprocessor_directives'.freeze,
:stderr_redirect => StdErrRedirect::NONE.freeze,
:background_exec => BackgroundExec::NONE.freeze,
:optional => false.freeze,
:arguments => [
'-E'.freeze,
{"-I\"$\"" => 'COLLECTION_PATHS_TEST_SUPPORT_SOURCE_INCLUDE_VENDOR'}.freeze,
{"-I\"$\"" => 'COLLECTION_PATHS_TEST_TOOLCHAIN_INCLUDE'}.freeze,
{"-D$" => 'COLLECTION_DEFINES_TEST_AND_VENDOR'}.freeze,
{"-D$" => 'DEFINES_TEST_PREPROCESS'}.freeze,
"-DGNU_COMPILER".freeze,
'-fdirectives-only'.freeze,
# '-nostdinc'.freeze, # disabled temporarily due to stdio access violations on OSX
"\"${1}\"".freeze,
"-o \"${2}\"".freeze
].freeze
}
# Disable the -MD flag for OSX LLVM Clang, since unsupported
# NOTE(review): shells out to `gcc --version` at load time to detect Apple
# clang masquerading as gcc.
if RUBY_PLATFORM =~ /darwin/ && `gcc --version 2> /dev/null` =~ /Apple LLVM version .* \(clang/m # OSX w/LLVM Clang
MD_FLAG = '' # Clang doesn't support the -MD flag
else
MD_FLAG = '-MD'
end
DEFAULT_TEST_DEPENDENCIES_GENERATOR_TOOL = {
:executable => ENV['CC'].nil? ? FilePathUtils.os_executable_ext('gcc').freeze : ENV['CC'].split[0],
:name => 'default_test_dependencies_generator'.freeze,
:stderr_redirect => StdErrRedirect::NONE.freeze,
:background_exec => BackgroundExec::NONE.freeze,
:optional => false.freeze,
:arguments => [
ENV['CC'].nil? ? "" : ENV['CC'].split[1..-1],
ENV['CPPFLAGS'].nil? ? "" : ENV['CPPFLAGS'].split,
'-E'.freeze,
{"-I\"$\"" => 'COLLECTION_PATHS_TEST_SUPPORT_SOURCE_INCLUDE_VENDOR'}.freeze,
{"-I\"$\"" => 'COLLECTION_PATHS_TEST_TOOLCHAIN_INCLUDE'}.freeze,
{"-D$" => 'COLLECTION_DEFINES_TEST_AND_VENDOR'}.freeze,
{"-D$" => 'DEFINES_TEST_PREPROCESS'}.freeze,
"-DGNU_COMPILER".freeze,
"-MT \"${3}\"".freeze,
'-MM'.freeze,
MD_FLAG.freeze,
'-MG'.freeze,
"-MF \"${2}\"".freeze,
"-c \"${1}\"".freeze,
# '-nostdinc'.freeze,
].freeze
}
DEFAULT_RELEASE_DEPENDENCIES_GENERATOR_TOOL = {
:executable => ENV['CC'].nil? ? FilePathUtils.os_executable_ext('gcc').freeze : ENV['CC'].split[0],
:name => 'default_release_dependencies_generator'.freeze,
:stderr_redirect => StdErrRedirect::NONE.freeze,
:background_exec => BackgroundExec::NONE.freeze,
:optional => false.freeze,
:arguments => [
ENV['CC'].nil? ? "" : ENV['CC'].split[1..-1],
ENV['CPPFLAGS'].nil? ? "" : ENV['CPPFLAGS'].split,
'-E'.freeze,
{"-I\"$\"" => 'COLLECTION_PATHS_SOURCE_INCLUDE_VENDOR'}.freeze,
{"-I\"$\"" => 'COLLECTION_PATHS_RELEASE_TOOLCHAIN_INCLUDE'}.freeze,
{"-D$" => 'COLLECTION_DEFINES_RELEASE_AND_VENDOR'}.freeze,
{"-D$" => 'DEFINES_RELEASE_PREPROCESS'}.freeze,
"-DGNU_COMPILER".freeze,
"-MT \"${3}\"".freeze,
'-MM'.freeze,
MD_FLAG.freeze,
'-MG'.freeze,
"-MF \"${2}\"".freeze,
"-c \"${1}\"".freeze,
# '-nostdinc'.freeze,
].freeze
}
DEFAULT_RELEASE_COMPILER_TOOL = {
:executable => ENV['CC'].nil? ? FilePathUtils.os_executable_ext('gcc').freeze : ENV['CC'].split[0],
:name => 'default_release_compiler'.freeze,
:stderr_redirect => StdErrRedirect::NONE.freeze,
:background_exec => BackgroundExec::NONE.freeze,
:optional => false.freeze,
:arguments => [
ENV['CC'].nil? ? "" : ENV['CC'].split[1..-1],
ENV['CPPFLAGS'].nil? ? "" : ENV['CPPFLAGS'].split,
{"-I\"$\"" => 'COLLECTION_PATHS_SOURCE_INCLUDE_VENDOR'}.freeze,
{"-I\"$\"" => 'COLLECTION_PATHS_RELEASE_TOOLCHAIN_INCLUDE'}.freeze,
{"-D$" => 'COLLECTION_DEFINES_RELEASE_AND_VENDOR'}.freeze,
"-DGNU_COMPILER".freeze,
ENV['CFLAGS'].nil? ? "" : ENV['CFLAGS'].split,
"-c \"${1}\"".freeze,
"-o \"${2}\"".freeze,
# gcc's list file output options are complex; no use of ${3} parameter in default config
"-MMD".freeze,
"-MF \"${4}\"".freeze,
].freeze
}
# GNU assembler invocation for release builds. Honors $AS and $ASFLAGS when
# set; falls back to plain `as` otherwise.
#   ${1} = assembly source file, ${2} = object file
DEFAULT_RELEASE_ASSEMBLER_TOOL = {
  :executable => ENV['AS'].nil? ? FilePathUtils.os_executable_ext('as').freeze : ENV['AS'].split[0],
  :name => 'default_release_assembler'.freeze,
  :stderr_redirect => StdErrRedirect::NONE.freeze,
  :background_exec => BackgroundExec::NONE.freeze,
  :optional => false.freeze,
  :arguments => [
    ENV['AS'].nil? ? "" : ENV['AS'].split[1..-1],
    ENV['ASFLAGS'].nil? ? "" : ENV['ASFLAGS'].split,
    {"-I\"$\"" => 'COLLECTION_PATHS_SOURCE_AND_INCLUDE'}.freeze,
    "\"${1}\"".freeze,
    "-o \"${2}\"".freeze,
  ].freeze
}
# GCC-driven linker invocation for release builds. Honors $CCLD, $CFLAGS,
# $LDFLAGS, and $LDLIBS when set; falls back to `gcc` otherwise.
# Positional params (see Generator#generate_executable_file):
#   ${1} = object files, ${2} = executable, ${4} = libraries, ${5} = library paths
DEFAULT_RELEASE_LINKER_TOOL = {
  :executable => ENV['CCLD'].nil? ? FilePathUtils.os_executable_ext('gcc').freeze : ENV['CCLD'].split[0],
  :name => 'default_release_linker'.freeze,
  :stderr_redirect => StdErrRedirect::NONE.freeze,
  :background_exec => BackgroundExec::NONE.freeze,
  :optional => false.freeze,
  :arguments => [
    ENV['CCLD'].nil? ? "" : ENV['CCLD'].split[1..-1],
    ENV['CFLAGS'].nil? ? "" : ENV['CFLAGS'].split,
    ENV['LDFLAGS'].nil? ? "" : ENV['LDFLAGS'].split,
    "\"${1}\"".freeze,
    "${5}".freeze,
    "-o \"${2}\"".freeze,
    "".freeze,   # placeholder; ${3} (map file) unused in default config
    "${4}".freeze,
    ENV['LDLIBS'].nil? ? "" : ENV['LDLIBS'].split
  ].freeze
}
# Aggregation hashes merged into the final configuration's :tools section,
# grouping the default tool definitions by build mode / feature.
DEFAULT_TOOLS_TEST = {
  :tools => {
    :test_compiler => DEFAULT_TEST_COMPILER_TOOL,
    :test_linker   => DEFAULT_TEST_LINKER_TOOL,
    :test_fixture  => DEFAULT_TEST_FIXTURE_TOOL,
  }
}

DEFAULT_TOOLS_TEST_PREPROCESSORS = {
  :tools => {
    :test_includes_preprocessor        => DEFAULT_TEST_INCLUDES_PREPROCESSOR_TOOL,
    :test_file_preprocessor            => DEFAULT_TEST_FILE_PREPROCESSOR_TOOL,
    :test_file_preprocessor_directives => DEFAULT_TEST_FILE_PREPROCESSOR_DIRECTIVES_TOOL,
  }
}

DEFAULT_TOOLS_TEST_DEPENDENCIES = {
  :tools => {
    :test_dependencies_generator => DEFAULT_TEST_DEPENDENCIES_GENERATOR_TOOL,
  }
}

DEFAULT_TOOLS_RELEASE = {
  :tools => {
    :release_compiler => DEFAULT_RELEASE_COMPILER_TOOL,
    :release_linker   => DEFAULT_RELEASE_LINKER_TOOL,
  }
}

DEFAULT_TOOLS_RELEASE_ASSEMBLER = {
  :tools => {
    :release_assembler => DEFAULT_RELEASE_ASSEMBLER_TOOL,
  }
}

DEFAULT_TOOLS_RELEASE_DEPENDENCIES = {
  :tools => {
    :release_dependencies_generator => DEFAULT_RELEASE_DEPENDENCIES_GENERATOR_TOOL,
  }
}

# Default artifact base name for release builds (file extension applied later).
DEFAULT_RELEASE_TARGET_NAME = 'project'
# Baseline project configuration. A user's project file is merged on top of
# these defaults while the final configuration is assembled. Frozen so the
# shared defaults cannot be mutated accidentally.
DEFAULT_CEEDLING_CONFIG = {
  :project => {
    # :build_root must be set by user
    :use_exceptions => true,
    :use_mocks => true,
    :compile_threads => 1,
    :test_threads => 1,
    :use_test_preprocessor => false,
    :use_preprocessor_directives => false,
    :use_deep_dependencies => false,
    :generate_deep_dependencies => true, # only applicable if use_deep_dependencies is true
    :auto_link_deep_dependencies => false,
    :test_file_prefix => 'test_',
    :options_paths => [],
    :release_build => false,
  },

  :release_build => {
    # :output is set while building configuration -- allows smart default system-dependent file extension handling
    :use_assembly => false,
    :artifacts => [],
  },

  # search paths; grouped by role
  :paths => {
    :test => [], # must be populated by user
    :source => [], # must be populated by user
    :support => [],
    :include => [],
    :libraries => [],
    :test_toolchain_include => [],
    :release_toolchain_include => [],
  },

  # explicit file lists (supplement the path globs above)
  :files => {
    :test => [],
    :source => [],
    :assembly => [],
    :support => [],
    :include => [],
  },

  # unlike other top-level entries, environment's value is an array to preserve order
  :environment => [
    # when evaluated, this provides wider text field for rake task comments
    {:rake_columns => '120'},
  ],

  :defines => {
    :test => [],
    :test_preprocess => [],
    :release => [],
    :release_preprocess => [],
    :use_test_definition => false,
  },

  :libraries => {
    # ${1} is replaced with the library / path value
    :flag => '-l${1}',
    :path_flag => '-L ${1}',
    :test => [],
    :test_preprocess => [],
    :release => [],
    :release_preprocess => [],
  },

  :flags => {},

  # file extensions used when forming and matching build artifact paths
  :extension => {
    :header => '.h',
    :source => '.c',
    :assembly => '.s',
    :object => '.o',
    :libraries => ['.a','.so'],
    :executable => ( SystemWrapper.windows? ? EXTENSION_WIN_EXE : EXTENSION_NONWIN_EXE ),
    :map => '.map',
    :list => '.lst',
    :testpass => '.pass',
    :testfail => '.fail',
    :dependencies => '.d',
  },

  # vendored framework locations and their extra defines
  :unity => {
    :vendor_path => CEEDLING_VENDOR,
    :defines => []
  },

  :cmock => {
    :vendor_path => CEEDLING_VENDOR,
    :defines => [],
    :includes => []
  },

  :cexception => {
    :vendor_path => CEEDLING_VENDOR,
    :defines => []
  },

  :test_runner => {
    :includes => [],
    :file_suffix => '_runner',
  },

  # all tools populated while building up config structure
  :tools => {},

  # empty argument lists for default tools
  # (these can be overridden in project file to add arguments to tools without totally redefining tools)
  :test_compiler => { :arguments => [] },
  :test_linker => { :arguments => [] },
  :test_fixture => {
    :arguments => [],
    :link_objects => [], # compiled object files to always be linked in (e.g. cmock.o if using mocks)
  },
  :test_includes_preprocessor => { :arguments => [] },
  :test_file_preprocessor => { :arguments => [] },
  :test_file_preprocessor_directives => { :arguments => [] },
  :test_dependencies_generator => { :arguments => [] },
  :release_compiler => { :arguments => [] },
  :release_linker => { :arguments => [] },
  :release_assembler => { :arguments => [] },
  :release_dependencies_generator => { :arguments => [] },

  :plugins => {
    :load_paths => CEEDLING_PLUGINS,
    :enabled => [],
  }
}.freeze
# Results-report template rendered by the plugin reportinator ('%'-prefixed
# lines appear to be ERB control lines -- confirm trim mode at render site).
# NOTE: this is a %q string literal; its contents are runtime data, so no
# comments may be added inside the template body itself.
DEFAULT_TESTS_RESULTS_REPORT_TEMPLATE = %q{
% ignored = hash[:results][:counts][:ignored]
% failed = hash[:results][:counts][:failed]
% stdout_count = hash[:results][:counts][:stdout]
% header_prepend = ((hash[:header].length > 0) ? "#{hash[:header]}: " : '')
% banner_width = 25 + header_prepend.length # widest message
% if (stdout_count > 0)
<%=@ceedling[:plugin_reportinator].generate_banner(header_prepend + 'TEST OUTPUT')%>
% hash[:results][:stdout].each do |string|
% string[:collection].each do |item|
<%=string[:source][:path]%><%=File::SEPARATOR%><%=string[:source][:file]%>: "<%=item%>"
% end
% end
% end
% if (ignored > 0)
<%=@ceedling[:plugin_reportinator].generate_banner(header_prepend + 'IGNORED TEST SUMMARY')%>
% hash[:results][:ignores].each do |ignore|
% ignore[:collection].each do |item|
<%=ignore[:source][:path]%><%=File::SEPARATOR%><%=ignore[:source][:file]%>:<%=item[:line]%>:<%=item[:test]%>
% if (item[:message].length > 0)
: "<%=item[:message]%>"
% else
<%="\n"%>
% end
% end
% end
% end
% if (failed > 0)
<%=@ceedling[:plugin_reportinator].generate_banner(header_prepend + 'FAILED TEST SUMMARY')%>
% hash[:results][:failures].each do |failure|
% failure[:collection].each do |item|
<%=failure[:source][:path]%><%=File::SEPARATOR%><%=failure[:source][:file]%>:<%=item[:line]%>:<%=item[:test]%>
% if (item[:message].length > 0)
: "<%=item[:message]%>"
% else
<%="\n"%>
% end
% end
% end
% end
% total_string = hash[:results][:counts][:total].to_s
% format_string = "%#{total_string.length}i"
<%=@ceedling[:plugin_reportinator].generate_banner(header_prepend + 'OVERALL TEST SUMMARY')%>
% if (hash[:results][:counts][:total] > 0)
TESTED: <%=hash[:results][:counts][:total].to_s%>
PASSED: <%=sprintf(format_string, hash[:results][:counts][:passed])%>
FAILED: <%=sprintf(format_string, failed)%>
IGNORED: <%=sprintf(format_string, ignored)%>
% else
No tests executed.
% end
}

View File

@ -0,0 +1,97 @@
# Wires Rake dependency metadata into generated build files so that changes
# to project configuration, defines, or generated dependency (.d) files
# force the appropriate rebuilds.
class Dependinator

  constructor :configurator, :project_config_manager, :test_includes_extractor, :file_path_utils, :rake_wrapper, :file_wrapper

  # Touch the force-rebuild marker files; anything enhanced with them below
  # becomes out of date.
  def touch_force_rebuild_files
    @file_wrapper.touch( @configurator.project_test_force_rebuild_filepath )
    @file_wrapper.touch( @configurator.project_release_force_rebuild_filepath ) if (@configurator.project_release_build)
  end

  # Load make-style .d fragments for release objects into Rake.
  # FIX: File.exists? was deprecated and removed in Ruby 3.2; use File.exist?.
  def load_release_object_deep_dependencies(dependencies_list)
    dependencies_list.each do |dependencies_file|
      if File.exist?(dependencies_file)
        @rake_wrapper.load_dependencies( dependencies_file )
      end
    end
  end

  # Make release files depend on the release force-rebuild marker when the
  # release configuration has changed.
  def enhance_release_file_dependencies(files)
    files.each do |filepath|
      @rake_wrapper[filepath].enhance( [@configurator.project_release_force_rebuild_filepath] ) if (@project_config_manager.release_config_changed)
    end
  end

  # Load test-object .d fragments (derived from files_list) into Rake.
  # FIX: File.exists? -> File.exist? (see above).
  def load_test_object_deep_dependencies(files_list)
    dependencies_list = @file_path_utils.form_test_dependencies_filelist(files_list)
    dependencies_list.each do |dependencies_file|
      if File.exist?(dependencies_file)
        @rake_wrapper.load_dependencies(dependencies_file)
      end
    end
  end

  # Rebuild the runner when test configuration or test defines changed.
  def enhance_runner_dependencies(runner_filepath)
    @rake_wrapper[runner_filepath].enhance( [@configurator.project_test_force_rebuild_filepath] ) if (@project_config_manager.test_config_changed ||
      @project_config_manager.test_defines_changed)
  end

  # Rebuild shallow include lists when test configuration or defines changed.
  def enhance_shallow_include_lists_dependencies(include_lists)
    include_lists.each do |include_list_filepath|
      @rake_wrapper[include_list_filepath].enhance( [@configurator.project_test_force_rebuild_filepath] ) if (@project_config_manager.test_config_changed ||
        @project_config_manager.test_defines_changed)
    end
  end

  # Rebuild preprocessed files when test configuration or defines changed.
  # (Method name keeps its historical spelling; callers depend on it.)
  def enhance_preprocesed_file_dependencies(files)
    files.each do |filepath|
      @rake_wrapper[filepath].enhance( [@configurator.project_test_force_rebuild_filepath] ) if (@project_config_manager.test_config_changed ||
        @project_config_manager.test_defines_changed)
    end
  end

  # if input configuration or ceedling changes, make sure these guys get rebuilt
  def enhance_mock_dependencies(mocks_list)
    mocks_list.each do |mock_filepath|
      @rake_wrapper[mock_filepath].enhance( [@configurator.project_test_force_rebuild_filepath] ) if (@project_config_manager.test_config_changed ||
        @project_config_manager.test_defines_changed)
      # mocks also depend on the CMock unity helper, when one is configured
      @rake_wrapper[mock_filepath].enhance( @configurator.cmock_unity_helper ) if (@configurator.cmock_unity_helper)
    end
  end

  # Rebuild .d files themselves when test configuration or defines changed.
  def enhance_dependencies_dependencies(dependencies)
    dependencies.each do |dependencies_filepath|
      @rake_wrapper[dependencies_filepath].enhance( [@configurator.project_test_force_rebuild_filepath] ) if (@project_config_manager.test_config_changed ||
        @project_config_manager.test_defines_changed)
    end
  end

  # Rebuild test objects when test configuration or defines changed.
  def enhance_test_build_object_dependencies(objects)
    objects.each do |object_filepath|
      @rake_wrapper[object_filepath].enhance( [@configurator.project_test_force_rebuild_filepath] ) if (@project_config_manager.test_config_changed ||
        @project_config_manager.test_defines_changed)
    end
  end

  # Re-run tests (results file) when test configuration changed.
  def enhance_results_dependencies(result_filepath)
    @rake_wrapper[result_filepath].enhance( [@configurator.project_test_force_rebuild_filepath] ) if @project_config_manager.test_config_changed
  end

  # A test executable depends on all of its object files.
  def enhance_test_executable_dependencies(test, objects)
    @rake_wrapper[ @file_path_utils.form_test_executable_filepath(test) ].enhance( objects )
  end
end

View File

@ -0,0 +1,9 @@
require 'erb'

# Thin wrapper around ERB for rendering a template string to a file.
class ErbWrapper
  # Render +template+ (an ERB string) with +data+ visible to the template
  # via this method's binding, writing the result to +output_file+.
  #
  # FIX: ERB.new's positional safe_level/trim_mode arguments were deprecated
  # in Ruby 2.6 and removed in newer ERB (Ruby 3.x), where they raise
  # ArgumentError. Use the trim_mode: keyword instead ("<>" omits newlines
  # for lines starting with <% and ending with %>).
  def generate_file(template, data, output_file)
    File.open(output_file, "w") do |f|
      f << ERB.new(template, trim_mode: "<>").result(binding)
    end
  end
end

View File

@ -0,0 +1,149 @@
require 'rubygems'
require 'rake' # for adding ext() method to string
require 'thread'
# Maps generated/derived build files (mocks, runners, objects, preprocessed
# files) back to the test/source/header files they originate from, using the
# configurator's file collections.
class FileFinder
  # Serializes lookups that mix static collections with live directory
  # listings (find_compilation_input_file may run from compile threads).
  SEMAPHORE = Mutex.new

  constructor :configurator, :file_finder_helper, :cacheinator, :file_path_utils, :file_wrapper, :yaml_wrapper

  # Cache the combined test + source + header collection used by
  # find_test_or_source_or_header_file().
  def prepare_search_sources
    @all_test_source_and_header_file_collection =
      @configurator.collection_all_tests +
      @configurator.collection_all_source +
      @configurator.collection_all_headers
  end

  # Given a mock filepath, locate the original header it mocks
  # (strip mock prefix, swap in the header extension). Errors when missing.
  def find_header_file(mock_file)
    header = File.basename(mock_file).sub(/#{@configurator.cmock_mock_prefix}/, '').ext(@configurator.extension_header)
    found_path = @file_finder_helper.find_file_in_collection(header, @configurator.collection_all_headers, :error)
    return found_path
  end

  # Input used to (re)generate a mock: the raw header, or its preprocessed,
  # cache-diffed counterpart when test preprocessing is enabled.
  def find_header_input_for_mock_file(mock_file)
    found_path = find_header_file(mock_file)
    mock_input = found_path
    if (@configurator.project_use_test_preprocessor)
      mock_input = @cacheinator.diff_cached_test_file( @file_path_utils.form_preprocessed_file_filepath( found_path ) )
    end
    return mock_input
  end

  # Locate the source file conventionally paired with a test file
  # (test prefix stripped). +complain+ controls missing-file behavior.
  def find_source_from_test(test, complain)
    test_prefix = @configurator.project_test_file_prefix
    source_paths = @configurator.collection_all_source
    source = File.basename(test).sub(/#{test_prefix}/, '')
    # we don't blow up if a test file has no corresponding source file
    return @file_finder_helper.find_file_in_collection(source, source_paths, complain)
  end

  # Map a generated runner filepath back to its test file (errors if absent).
  def find_test_from_runner_path(runner_path)
    extension_source = @configurator.extension_source
    test_file = File.basename(runner_path).sub(/#{@configurator.test_runner_file_suffix}#{'\\'+extension_source}/, extension_source)
    found_path = @file_finder_helper.find_file_in_collection(test_file, @configurator.collection_all_tests, :error)
    return found_path
  end

  # Input used to (re)generate a runner: raw test file, or its preprocessed,
  # cache-diffed counterpart when test preprocessing is enabled.
  def find_test_input_for_runner_file(runner_path)
    found_path = find_test_from_runner_path(runner_path)
    runner_input = found_path
    if (@configurator.project_use_test_preprocessor)
      runner_input = @cacheinator.diff_cached_test_file( @file_path_utils.form_preprocessed_file_filepath( found_path ) )
    end
    return runner_input
  end

  # Locate the test file whose basename (with source extension) matches an
  # arbitrary derived filepath.
  def find_test_from_file_path(file_path)
    test_file = File.basename(file_path).ext(@configurator.extension_source)
    found_path = @file_finder_helper.find_file_in_collection(test_file, @configurator.collection_all_tests, :error)
    return found_path
  end

  # Search tests, sources, and headers together
  # (requires prepare_search_sources() to have run).
  def find_test_or_source_or_header_file(file_path)
    file = File.basename(file_path)
    return @file_finder_helper.find_file_in_collection(file, @all_test_source_and_header_file_collection, :error)
  end

  # Resolve the compilation input (runner, mock, source, or assembly file)
  # behind an object filepath.
  def find_compilation_input_file(file_path, complain=:error, release=false)
    found_file = nil
    source_file = File.basename(file_path).ext(@configurator.extension_source)

    # We only collect files that already exist when we start up.
    # FileLists can produce undesired results for dynamically generated files depending on when they're accessed.
    # So collect mocks and runners separately and right now.
    SEMAPHORE.synchronize {

      if (source_file =~ /#{@configurator.test_runner_file_suffix}/)
        # runners live in the runners output directory
        found_file =
          @file_finder_helper.find_file_in_collection(
            source_file,
            @file_wrapper.directory_listing( File.join(@configurator.project_test_runners_path, '*') ),
            complain)

      elsif (@configurator.project_use_mocks and (source_file =~ /#{@configurator.cmock_mock_prefix}/))
        # mocks live in the mocks output directory
        found_file =
          @file_finder_helper.find_file_in_collection(
            source_file,
            @file_wrapper.directory_listing( File.join(@configurator.cmock_mock_path, '*') ),
            complain)

      elsif release
        found_file =
          @file_finder_helper.find_file_in_collection(
            source_file,
            @configurator.collection_release_existing_compilation_input,
            complain)

      else
        # when assembly is enabled for test builds, suppress complaints here
        # and fall back to an assembly file if no C source is found
        temp_complain = (defined?(TEST_BUILD_USE_ASSEMBLY) && TEST_BUILD_USE_ASSEMBLY) ? :ignore : complain
        found_file =
          @file_finder_helper.find_file_in_collection(
            source_file,
            @configurator.collection_all_existing_compilation_input,
            temp_complain)
        found_file ||= find_assembly_file(file_path, false) if (defined?(TEST_BUILD_USE_ASSEMBLY) && TEST_BUILD_USE_ASSEMBLY)
      end
    }
    return found_file
  end

  # Locate a source file matching +file_path+'s basename.
  def find_source_file(file_path, complain)
    source_file = File.basename(file_path).ext(@configurator.extension_source)
    return @file_finder_helper.find_file_in_collection(source_file, @configurator.collection_all_source, complain)
  end

  # Locate an assembly file matching +file_path+'s basename.
  def find_assembly_file(file_path, complain = :error)
    assembly_file = File.basename(file_path).ext(@configurator.extension_assembly)
    return @file_finder_helper.find_file_in_collection(assembly_file, @configurator.collection_all_assembly, complain)
  end

  # Generic lookup against a caller-provided list.
  def find_file_from_list(file_path, file_list, complain)
    return @file_finder_helper.find_file_in_collection(file_path, file_list, complain)
  end
end

View File

@ -0,0 +1,56 @@
require 'fileutils'
require 'ceedling/constants' # for Verbosity enumeration
# Locates a filename within a collection of paths, diagnosing capitalization
# mismatches and reacting configurably when nothing is found.
class FileFinderHelper

  constructor :streaminator

  # Find +file_name+ (compared against each entry's basename, case-sensitively)
  # in +file_list+. A name that matches only case-insensitively is treated as
  # an error, since it usually indicates a miscapitalized reference.
  # When no match exists, +complain+ selects the reaction:
  #   :error -> raise (via blow_up), :warn -> warning (via gripe), else silent nil.
  def find_file_in_collection(file_name, file_list, complain, extra_message="")
    match = nil

    file_list.each do |candidate|
      candidate_name = File.basename(candidate)
      # case insensitive comparison first, then confirm exact case
      next unless candidate_name.casecmp(file_name).zero?
      if candidate_name == file_name
        match = candidate
        break
      end
      # same letters, different capitalization -- flag loudly and abort
      blow_up(file_name, "However, a filename having different capitalization was found: '#{candidate}'.")
    end

    if match.nil?
      blow_up(file_name, extra_message) if (complain == :error)
      gripe(file_name, extra_message)   if (complain == :warn)
      # :ignore (or anything else) falls through silently
    end

    return match
  end

  private

  # Emit an error message and raise.
  def blow_up(file_name, extra_message="")
    message = "ERROR: Found no file '#{file_name}' in search paths."
    message += ' ' unless extra_message.empty?
    @streaminator.stderr_puts(message + extra_message, Verbosity::ERRORS)
    raise
  end

  # Emit a warning message without raising.
  def gripe(file_name, extra_message="")
    message = "WARNING: Found no file '#{file_name}' in search paths."
    message += ' ' unless extra_message.empty?
    @streaminator.stderr_puts(message + extra_message, Verbosity::COMPLAIN)
  end
end

View File

@ -0,0 +1,202 @@
require 'rubygems'
require 'rake' # for ext()
require 'fileutils'
require 'ceedling/system_wrapper'
# global utility methods (for plugins, project files, etc.)
# Build a destination filepath: join destination_path (backslashes normalized
# to '/') with the basename of original_filepath, optionally swapping in
# new_extension (via rake's String#ext).
def ceedling_form_filepath(destination_path, original_filepath, new_extension=nil)
  basename = File.basename(original_filepath)
  basename = basename.ext(new_extension) unless new_extension.nil?
  File.join(destination_path.gsub(/\\/, '/'), basename)
end
# Centralizes construction of build-artifact filepaths (objects, lists, maps,
# results, dependencies, preprocessed files) plus class-level path-string
# utilities shared across the system.
class FilePathUtils

  # characters indicating a glob rather than a plain directory path
  GLOB_MATCHER = /[\*\?\{\}\[\]]/

  constructor :configurator, :file_wrapper

  ######### class methods ##########

  # standardize path to use '/' path separator & have no trailing path separator
  def self.standardize(path)
    if path.is_a? String
      path.strip!
      path.gsub!(/\\/, '/')
      path.chomp!('/')
    end
    return path
  end

  # append '.exe' on Windows so default tool executables resolve correctly
  def self.os_executable_ext(executable)
    return executable.ext('.exe') if SystemWrapper.windows?
    return executable
  end

  # extract directory path from between optional add/subtract aggregation modifiers and up to glob specifiers
  # note: slightly different than File.dirname in that /files/foo remains /files/foo and does not become /files
  def self.extract_path(path)
    path = path.sub(/^(\+|-):/, '')

    # find first occurrence of path separator followed by directory glob specifier: *, ?, {, }, [, ]
    find_index = (path =~ GLOB_MATCHER)

    # no changes needed (lop off final path separator)
    return path.chomp('/') if (find_index.nil?)

    # extract up to first glob specifier
    path = path[0..(find_index-1)]

    # lop off everything up to and including final path separator
    find_index = path.rindex('/')
    return path[0..(find_index-1)] if (not find_index.nil?)

    # return string up to first glob specifier if no path separator found
    return path
  end

  # return whether the given path is to be aggregated (no aggregation modifier defaults to same as +:)
  def self.add_path?(path)
    return (path =~ /^-:/).nil?
  end

  # get path (and glob) lopping off optional +: / -: prefixed aggregation modifiers
  def self.extract_path_no_aggregation_operators(path)
    return path.sub(/^(\+|-):/, '')
  end

  # all the globs that may be in a path string work fine with one exception;
  # to recurse through all subdirectories, the glob is dir/**/** but our paths use
  # convention of only dir/**
  def self.reform_glob(path)
    return path if (path =~ /\/\*\*$/).nil?
    return path + '/**'
  end

  ######### instance methods ##########

  # path in the temp directory for +filepath+'s basename, optionally prefixed
  def form_temp_path(filepath, prefix='')
    return File.join( @configurator.project_temp_path, prefix + File.basename(filepath) )
  end

  ### release ###

  def form_release_build_cache_path(filepath)
    return File.join( @configurator.project_release_build_cache_path, File.basename(filepath) )
  end

  def form_release_dependencies_filepath(filepath)
    return File.join( @configurator.project_release_dependencies_path, File.basename(filepath).ext(@configurator.extension_dependencies) )
  end

  def form_release_build_c_object_filepath(filepath)
    return File.join( @configurator.project_release_build_output_c_path, File.basename(filepath).ext(@configurator.extension_object) )
  end

  def form_release_build_asm_object_filepath(filepath)
    return File.join( @configurator.project_release_build_output_asm_path, File.basename(filepath).ext(@configurator.extension_object) )
  end

  # map many sources to object paths at once via Rake FileList#pathmap
  def form_release_build_c_objects_filelist(files)
    return (@file_wrapper.instantiate_file_list(files)).pathmap("#{@configurator.project_release_build_output_c_path}/%n#{@configurator.extension_object}")
  end

  def form_release_build_asm_objects_filelist(files)
    return (@file_wrapper.instantiate_file_list(files)).pathmap("#{@configurator.project_release_build_output_asm_path}/%n#{@configurator.extension_object}")
  end

  def form_release_build_c_list_filepath(filepath)
    return File.join( @configurator.project_release_build_output_c_path, File.basename(filepath).ext(@configurator.extension_list) )
  end

  def form_release_dependencies_filelist(files)
    return (@file_wrapper.instantiate_file_list(files)).pathmap("#{@configurator.project_release_dependencies_path}/%n#{@configurator.extension_dependencies}")
  end

  ### tests ###

  def form_test_build_cache_path(filepath)
    return File.join( @configurator.project_test_build_cache_path, File.basename(filepath) )
  end

  def form_test_dependencies_filepath(filepath)
    return File.join( @configurator.project_test_dependencies_path, File.basename(filepath).ext(@configurator.extension_dependencies) )
  end

  def form_pass_results_filepath(filepath)
    return File.join( @configurator.project_test_results_path, File.basename(filepath).ext(@configurator.extension_testpass) )
  end

  def form_fail_results_filepath(filepath)
    return File.join( @configurator.project_test_results_path, File.basename(filepath).ext(@configurator.extension_testfail) )
  end

  # e.g. test_foo.c -> <runners path>/test_foo<runner suffix>.c
  def form_runner_filepath_from_test(filepath)
    return File.join( @configurator.project_test_runners_path, File.basename(filepath, @configurator.extension_source)) + @configurator.test_runner_file_suffix + @configurator.extension_source
  end

  # inverse of the above: strip the runner suffix
  def form_test_filepath_from_runner(filepath)
    return filepath.sub(/#{TEST_RUNNER_FILE_SUFFIX}/, '')
  end

  # runner object path: test object path with the runner suffix inserted
  # before the object extension
  def form_runner_object_filepath_from_test(filepath)
    return (form_test_build_c_object_filepath(filepath)).sub(/(#{@configurator.extension_object})$/, "#{@configurator.test_runner_file_suffix}\\1")
  end

  def form_test_build_c_object_filepath(filepath)
    return File.join( @configurator.project_test_build_output_c_path, File.basename(filepath).ext(@configurator.extension_object) )
  end

  def form_test_build_asm_object_filepath(filepath)
    return File.join( @configurator.project_test_build_output_asm_path, File.basename(filepath).ext(@configurator.extension_object) )
  end

  def form_test_executable_filepath(filepath)
    return File.join( @configurator.project_test_build_output_path, File.basename(filepath).ext(@configurator.extension_executable) )
  end

  def form_test_build_map_filepath(filepath)
    return File.join( @configurator.project_test_build_output_path, File.basename(filepath).ext(@configurator.extension_map) )
  end

  def form_test_build_list_filepath(filepath)
    return File.join( @configurator.project_test_build_output_path, File.basename(filepath).ext(@configurator.extension_list) )
  end

  def form_preprocessed_file_filepath(filepath)
    return File.join( @configurator.project_test_preprocess_files_path, File.basename(filepath) )
  end

  def form_preprocessed_includes_list_filepath(filepath)
    return File.join( @configurator.project_test_preprocess_includes_path, File.basename(filepath) )
  end

  def form_test_build_objects_filelist(sources)
    return (@file_wrapper.instantiate_file_list(sources)).pathmap("#{@configurator.project_test_build_output_c_path}/%n#{@configurator.extension_object}")
  end

  # for each mock path, derive the preprocessed header it will be built from
  # (mock prefix and extension stripped, header extension appended)
  def form_preprocessed_mockable_headers_filelist(mocks)
    list = @file_wrapper.instantiate_file_list(mocks)
    headers = list.map do |file|
      module_name = File.basename(file).sub(/^#{@configurator.cmock_mock_prefix}/, '').sub(/\.[a-zA-Z]+$/,'')
      "#{@configurator.project_test_preprocess_files_path}/#{module_name}#{@configurator.extension_header}"
    end
    return headers
  end

  def form_mocks_source_filelist(mocks)
    list = (@file_wrapper.instantiate_file_list(mocks))
    sources = list.map{|file| "#{@configurator.cmock_mock_path}/#{file}#{@configurator.extension_source}"}
    return sources
  end

  def form_test_dependencies_filelist(files)
    list = @file_wrapper.instantiate_file_list(files)
    return list.pathmap("#{@configurator.project_test_dependencies_path}/%n#{@configurator.extension_dependencies}")
  end

  def form_pass_results_filelist(path, files)
    list = @file_wrapper.instantiate_file_list(files)
    return list.pathmap("#{path}/%n#{@configurator.extension_testpass}")
  end
end

View File

@ -0,0 +1,69 @@
require 'rubygems'
require 'rake'
require 'set'
require 'fileutils'
require 'ceedling/file_path_utils'
# Expands Ceedling's path/glob notation -- including the +:/-: aggregation
# modifiers -- into concrete directory and file lists.
class FileSystemUtils

  constructor :file_wrapper

  # build up path list from input of one or more strings or arrays of (+/-) paths & globs
  def collect_paths(*paths)
    raw   = []      # all paths and globs
    plus  = Set.new # all paths to expand and add
    minus = Set.new # all paths to remove from plus set

    # assemble all globs and simple paths, reforming our glob notation to ruby globs
    paths.each do |paths_container|
      case (paths_container)
        when String then raw << (FilePathUtils::reform_glob(paths_container))
        when Array then paths_container.each {|path| raw << (FilePathUtils::reform_glob(path))}
        else raise "Don't know how to handle #{paths_container.class}"
      end
    end

    # iterate through each path and glob
    raw.each do |path|

      dirs = [] # container for only (expanded) paths

      # if a glob, expand it and slurp up all non-file paths
      if path.include?('*')
        # grab base directory only if globs are snug up to final path separator
        if (path =~ /\/\*+$/)
          dirs << FilePathUtils.extract_path(path)
        end

        # grab expanded sub-directory globs
        expanded = @file_wrapper.directory_listing( FilePathUtils.extract_path_no_aggregation_operators(path) )
        expanded.each do |entry|
          dirs << entry if @file_wrapper.directory?(entry)
        end

      # else just grab simple path
      # note: we could just run this through glob expansion but such an
      #       approach doesn't handle a path not yet on disk)
      else
        dirs << FilePathUtils.extract_path_no_aggregation_operators(path)
      end

      # add dirs to the appropriate set based on path aggregation modifier if present
      FilePathUtils.add_path?(path) ? plus.merge(dirs) : minus.merge(dirs)
    end

    # subtract the -: set from the +: set, preserving insertion order
    return (plus - minus).to_a.uniq
  end

  # given a file list, add to it or remove from it
  def revise_file_list(list, revisions)
    revisions.each do |revision|
      # include or exclude file or glob to file list
      file = FilePathUtils.extract_path_no_aggregation_operators( revision )
      FilePathUtils.add_path?(revision) ? list.include(file) : list.exclude(file)
    end
  end
end

View File

@ -0,0 +1,10 @@
# Injectable wrapper around directory-changing so callers can be tested
# without touching the real working directory.
class FileSystemWrapper
  # Run the caller's block with the working directory set to +path+;
  # FileUtils.cd restores the previous directory afterward.
  def cd(path)
    FileUtils.cd(path) { yield }
  end
end

View File

@ -0,0 +1,83 @@
require 'rubygems'
require 'rake' # for FileList
require 'fileutils'
require 'ceedling/constants'
# Thin, injectable wrapper around File/FileUtils/Dir/FileList operations so
# the rest of the system can be exercised against a mocked file system.
class FileWrapper

  def get_expanded_path(path)
    return File.expand_path(path)
  end

  def basename(path, extension=nil)
    return File.basename(path, extension) if extension
    return File.basename(path)
  end

  # NULL_FILE_PATH (from ceedling/constants) is always reported as existing.
  def exist?(filepath)
    return true if (filepath == NULL_FILE_PATH)
    return File.exist?(filepath)
  end

  def directory?(path)
    return File.directory?(path)
  end

  def dirname(path)
    return File.dirname(path)
  end

  def directory_listing(glob)
    return Dir.glob(glob, File::FNM_PATHNAME)
  end

  def rm_f(filepath, options={})
    FileUtils.rm_f(filepath, **options)
  end

  def rm_r(filepath, options={})
    # BUGFIX: was `**options={}`, which reassigned options to an empty hash
    # and silently discarded any flags (e.g. force: true) passed by callers.
    FileUtils.rm_r(filepath, **options)
  end

  def cp(source, destination, options={})
    FileUtils.cp(source, destination, **options)
  end

  # byte-for-byte file comparison
  def compare(from, to)
    return FileUtils.compare_file(from, to)
  end

  # open with block form so the handle is closed automatically
  def open(filepath, flags)
    File.open(filepath, flags) do |file|
      yield(file)
    end
  end

  def read(filepath)
    return File.read(filepath)
  end

  def touch(filepath, options={})
    FileUtils.touch(filepath, **options)
  end

  def write(filepath, contents, flags='w')
    File.open(filepath, flags) do |file|
      file.write(contents)
    end
  end

  def readlines(filepath)
    return File.readlines(filepath)
  end

  # Rake FileList (requires rake to be loaded by the application)
  def instantiate_file_list(files=[])
    return FileList.new(files)
  end

  def mkdir(folder)
    return FileUtils.mkdir_p(folder)
  end
end

View File

@ -0,0 +1,74 @@
require 'rubygems'
require 'rake' # for ext()
require 'fileutils'
require 'ceedling/constants'
# :flags:
# :release:
# :compile:
# :'test_.+'
# - -pedantic # add '-pedantic' to every test file
# :*: # add '-foo' to compilation of all files not main.c
# - -foo
# :main: # add '-Wall' to compilation of main.c
# - -Wall
# :test:
# :link:
# :test_main: # add '--bar --baz' to linking of test_main.exe
# - --bar
# - --baz
# Split +hash+ into two hashes: entries for which the predicate is truthy,
# followed by the rest.
def partition(hash, &predicate)
  matching, rest = hash.partition(&predicate)
  [matching.to_h, rest.to_h]
end
# Resolves per-file tool flags from the :flags configuration section,
# supporting literal filename keys, regex keys, and wildcard keys
# (see the example configuration in the comment block above).
class Flaginator

  constructor :configurator

  # Look up the flag list for +file_name+ in +hash+, in priority order:
  #   1. literal key (e.g. :main for main.c)
  #   2. regex key (e.g. :'test_.+')
  #   3. wildcard key (:* or :'.*')
  #   4. otherwise, no flags
  def get_flag(hash, file_name)
    file_key = file_name.to_sym

    # 1. try literals (keys made only of word characters)
    literals, magic = partition(hash) { |k, v| k.to_s =~ /^\w+$/ }
    return literals[file_key] if literals.include?(file_key)

    any, regex = partition(magic) { |k, v| (k == :'*') || (k == :'.*') } # glob or regex wild card

    # 2. try regexes (key treated as a whole-name pattern)
    find_res = regex.find { |k, v| file_name =~ /^#{k.to_s}$/ }
    return find_res[1] if find_res

    # 3. try anything
    find_res = any.find { |k, v| file_name =~ /.*/ }
    return find_res[1] if find_res

    # 4. well, we've tried
    return []
  end

  # Fetch flags for +operation+ (e.g. compile/link) in +context+
  # (e.g. test/release) applicable to +file+.
  def flag_down( operation, context, file )
    # create configurator accessor method
    accessor = ('flags_' + context.to_s).to_sym

    # create simple filename key from whatever filename provided
    # (removed an unused duplicate `file_key` local; get_flag derives its own key)
    file_name = File.basename( file ).ext('')

    # if no entry in configuration for flags for this context, bail out
    return [] if not @configurator.respond_to?( accessor )

    # get flags sub hash associated with this context
    flags = @configurator.send( accessor )

    # if operation not represented in flags hash, bail out
    return [] if not flags.include?( operation )

    # redefine flags to sub hash associated with the operation
    flags = flags[operation]

    return get_flag(flags, file_name)
  end
end

View File

@ -0,0 +1,186 @@
require 'ceedling/constants'
class Generator
constructor :configurator,
:generator_helper,
:preprocessinator,
:cmock_builder,
:generator_test_runner,
:generator_test_results,
:flaginator,
:test_includes_extractor,
:tool_executor,
:file_finder,
:file_path_utils,
:streaminator,
:plugin_manager,
:file_wrapper
def generate_shallow_includes_list(context, file)
@streaminator.stdout_puts("Generating include list for #{File.basename(file)}...", Verbosity::NORMAL)
@preprocessinator.preprocess_shallow_includes(file)
end
def generate_preprocessed_file(context, file)
@streaminator.stdout_puts("Preprocessing #{File.basename(file)}...", Verbosity::NORMAL)
@preprocessinator.preprocess_file(file)
end
def generate_dependencies_file(tool, context, source, object, dependencies)
@streaminator.stdout_puts("Generating dependencies for #{File.basename(source)}...", Verbosity::NORMAL)
command =
@tool_executor.build_command_line(
tool,
[], # extra per-file command line parameters
source,
dependencies,
object)
@tool_executor.exec( command[:line], command[:options] )
end
def generate_mock(context, header_filepath)
arg_hash = {:header_file => header_filepath, :context => context}
@plugin_manager.pre_mock_generate( arg_hash )
begin
@cmock_builder.cmock.setup_mocks( arg_hash[:header_file] )
rescue
raise
ensure
@plugin_manager.post_mock_generate( arg_hash )
end
end
# test_filepath may be either preprocessed test file or original test file
def generate_test_runner(context, test_filepath, runner_filepath)
arg_hash = {:context => context, :test_file => test_filepath, :runner_file => runner_filepath}
@plugin_manager.pre_runner_generate(arg_hash)
# collect info we need
module_name = File.basename(arg_hash[:test_file])
test_cases = @generator_test_runner.find_test_cases( @file_finder.find_test_from_runner_path(runner_filepath) )
mock_list = @test_includes_extractor.lookup_raw_mock_list(arg_hash[:test_file])
@streaminator.stdout_puts("Generating runner for #{module_name}...", Verbosity::NORMAL)
test_file_includes = [] # Empty list for now, since apparently unused
# build runner file
begin
@generator_test_runner.generate(module_name, runner_filepath, test_cases, mock_list, test_file_includes)
rescue
raise
ensure
@plugin_manager.post_runner_generate(arg_hash)
end
end
def generate_object_file(tool, operation, context, source, object, list='', dependencies='')
shell_result = {}
arg_hash = {:tool => tool, :operation => operation, :context => context, :source => source, :object => object, :list => list, :dependencies => dependencies}
@plugin_manager.pre_compile_execute(arg_hash)
@streaminator.stdout_puts("Compiling #{File.basename(arg_hash[:source])}...", Verbosity::NORMAL)
command =
@tool_executor.build_command_line( arg_hash[:tool],
@flaginator.flag_down( operation, context, source ),
arg_hash[:source],
arg_hash[:object],
arg_hash[:list],
arg_hash[:dependencies])
@streaminator.stdout_puts("Command: #{command}", Verbosity::DEBUG)
begin
shell_result = @tool_executor.exec( command[:line], command[:options] )
rescue ShellExecutionException => ex
shell_result = ex.shell_result
raise ex
ensure
arg_hash[:shell_command] = command[:line]
arg_hash[:shell_result] = shell_result
@plugin_manager.post_compile_execute(arg_hash)
end
end
# Link object files into an executable with the given tool.
# Fires pre/post link plugin hooks; the post hook always runs and receives
# the shell result ({} if exec never completed).
#
# tool::       linker tool configuration for build_command_line
# context::    build context symbol (e.g. TEST_SYM)
# objects::    object files to link
# executable:: path of the executable to produce
# map::        path of the map file to produce (optional)
# libraries::  libraries to link against (optional)
# libpaths::   library search paths (optional)
def generate_executable_file(tool, context, objects, executable, map='', libraries=[], libpaths=[])
shell_result = {}
arg_hash = { :tool => tool,
:context => context,
:objects => objects,
:executable => executable,
:map => map,
:libraries => libraries,
:libpaths => libpaths
}
@plugin_manager.pre_link_execute(arg_hash)
@streaminator.stdout_puts("Linking #{File.basename(arg_hash[:executable])}...", Verbosity::NORMAL)
command =
@tool_executor.build_command_line( arg_hash[:tool],
# link-stage flags looked up for this context/executable
@flaginator.flag_down( OPERATION_LINK_SYM, context, executable ),
arg_hash[:objects],
arg_hash[:executable],
arg_hash[:map],
arg_hash[:libraries],
arg_hash[:libpaths]
)
@streaminator.stdout_puts("Command: #{command}", Verbosity::DEBUG)
begin
shell_result = @tool_executor.exec( command[:line], command[:options] )
rescue ShellExecutionException => ex
# on link failure, print a hint about common causes of missing symbols
notice = "\n" +
"NOTICE: If the linker reports missing symbols, the following may be to blame:\n" +
" 1. Test lacks #include statements corresponding to needed source files.\n" +
" 2. Project search paths do not contain source files corresponding to #include statements in the test.\n"
if (@configurator.project_use_mocks)
notice += " 3. Test does not #include needed mocks.\n\n"
else
notice += "\n"
end
@streaminator.stderr_puts(notice, Verbosity::COMPLAIN)
shell_result = ex.shell_result
# NOTE(review): raising '' discards the original exception's class/message;
# callers see a bare RuntimeError (the notice above already explains the
# failure) -- verify this is intentional
raise ''
ensure
# post hook always fires so plugins can report success or failure alike
arg_hash[:shell_result] = shell_result
@plugin_manager.post_link_execute(arg_hash)
end
end
# Run a test fixture executable and process its stdout into a results file.
# Fires pre/post test-fixture plugin hooks.
#
# tool::       tool configuration used to run the fixture
# context::    build context symbol (e.g. TEST_SYM)
# executable:: test fixture to run
# result::     path of the results file to write
def generate_test_results(tool, context, executable, result)
arg_hash = {:tool => tool, :context => context, :executable => executable, :result_file => result}
@plugin_manager.pre_test_fixture_execute(arg_hash)
@streaminator.stdout_puts("Running #{File.basename(arg_hash[:executable])}...", Verbosity::NORMAL)
# Unity's exit code is equivalent to the number of failed tests, so we tell @tool_executor not to fail out if there are failures
# so that we can run all tests and collect all results
command = @tool_executor.build_command_line(arg_hash[:tool], [], arg_hash[:executable])
@streaminator.stdout_puts("Command: #{command}", Verbosity::DEBUG)
command[:options][:boom] = false
shell_result = @tool_executor.exec( command[:line], command[:options] )
#Don't Let The Failure Count Make Us Believe Things Aren't Working
shell_result[:exit_code] = 0
# aborts with diagnostics if the fixture produced no usable output
@generator_helper.test_results_error_handler(executable, shell_result)
processed = @generator_test_results.process_and_write_results( shell_result,
arg_hash[:result_file],
@file_finder.find_test_from_file_path(arg_hash[:executable]) )
arg_hash[:result_file] = processed[:result_file]
# NOTE(review): process_and_write_results returns the hash under key
# :result (singular); this :results lookup appears to yield nil -- verify
arg_hash[:results] = processed[:results]
arg_hash[:shell_result] = shell_result # for raw output display if no plugins for formatted display
@plugin_manager.post_test_fixture_execute(arg_hash)
end
end

View File

@ -0,0 +1,40 @@
require 'ceedling/constants'

# Helper for Generator: validates a test fixture's shell output and aborts
# the build (with a diagnostic) when the output is missing or malformed.
class GeneratorHelper

  constructor :streaminator

  # Examine a test fixture's shell result and raise when the fixture
  # produced no usable output.
  #
  # executable::   path of the test fixture that was run (used in messages)
  # shell_result:: hash from ToolExecutor#exec (:output, :exit_code, ...)
  #
  # Raises RuntimeError after printing a diagnostic when $stdout is empty
  # or contains no final Unity statistics line.
  def test_results_error_handler(executable, shell_result)
    notice = ''
    error = false
    output = shell_result[:output]

    # `||` instead of `or` -- low-precedence `or` is a well-known trap
    if output.nil? || output.strip.empty?
      error = true
      # mirror style of generic tool_executor failure output
      notice = "\n" +
               "ERROR: Test executable \"#{File.basename(executable)}\" failed.\n" +
               "> Produced no output to $stdout.\n"
    elsif (output =~ TEST_STDOUT_STATISTICS_PATTERN).nil?
      error = true
      # mirror style of generic tool_executor failure output
      notice = "\n" +
               "ERROR: Test executable \"#{File.basename(executable)}\" failed.\n" +
               "> Produced no final test result counts in $stdout:\n" +
               "#{output.strip}\n"
    end

    if error
      # since we told the tool executor to ignore the exit code, handle it explicitly here
      notice += "> And exited with status: [#{shell_result[:exit_code]}] (count of failed tests).\n" unless shell_result[:exit_code].nil?
      notice += "> And then likely crashed.\n" if shell_result[:exit_code].nil?

      notice += "> This is often a symptom of a bad memory access in source or test code.\n\n"

      @streaminator.stderr_puts(notice, Verbosity::COMPLAIN)
      raise
    end
  end

end

View File

@ -0,0 +1,100 @@
require 'rubygems'
require 'rake' # for .ext()
require 'ceedling/constants'

# Parses a Unity test fixture's stdout into a structured results hash and
# writes it to a YAML results file (failing runs get the 'testfail' extension).
class GeneratorTestResults
constructor :configurator, :generator_test_results_sanity_checker, :yaml_wrapper
# Process raw fixture output and write the results file.
#
# unity_shell_result:: hash from ToolExecutor#exec (:output, :exit_code, :time, ...)
# results_file::       default output path (re-extensioned on failure)
# test_file::          original test source file the results correspond to
#
# Returns { :result_file => <path written>, :result => <results hash> }.
def process_and_write_results(unity_shell_result, results_file, test_file)
output_file = results_file
results = get_results_structure
results[:source][:path] = File.dirname(test_file)
results[:source][:file] = File.basename(test_file)
results[:time] = unity_shell_result[:time] unless unity_shell_result[:time].nil?
# process test statistics
if (unity_shell_result[:output] =~ TEST_STDOUT_STATISTICS_PATTERN)
results[:counts][:total] = $1.to_i
results[:counts][:failed] = $2.to_i
results[:counts][:ignored] = $3.to_i
results[:counts][:passed] = (results[:counts][:total] - results[:counts][:failed] - results[:counts][:ignored])
end
# remove test statistics lines
output_string = unity_shell_result[:output].sub(TEST_STDOUT_STATISTICS_PATTERN, '')
output_string.lines do |line|
# process unity output: sort each line into ignores/successes/failures,
# capturing any interleaved raw stdout as well
case line
when /(:IGNORE)/
elements = extract_line_elements(line, results[:source][:file])
results[:ignores] << elements[0]
results[:stdout] << elements[1] if (!elements[1].nil?)
when /(:PASS$)/
elements = extract_line_elements(line, results[:source][:file])
results[:successes] << elements[0]
results[:stdout] << elements[1] if (!elements[1].nil?)
when /(:PASS \(.* ms\)$)/
elements = extract_line_elements(line, results[:source][:file])
results[:successes] << elements[0]
results[:stdout] << elements[1] if (!elements[1].nil?)
when /(:FAIL)/
elements = extract_line_elements(line, results[:source][:file])
results[:failures] << elements[0]
results[:stdout] << elements[1] if (!elements[1].nil?)
else # collect up all other
results[:stdout] << line.chomp
end
end
# cross-check our tallies against Unity's own statistics / exit code
@generator_test_results_sanity_checker.verify(results, unity_shell_result[:exit_code])
output_file = results_file.ext(@configurator.extension_testfail) if (results[:counts][:failed] > 0)
@yaml_wrapper.dump(output_file, results)
# NOTE(review): key is :result (singular); at least one caller reads
# processed[:results] -- verify which key is intended
return { :result_file => output_file, :result => results }
end
private
# Empty skeleton hash filled in by process_and_write_results.
def get_results_structure
return {
:source => {:path => '', :file => ''},
:successes => [],
:failures => [],
:ignores => [],
:counts => {:total => 0, :passed => 0, :failed => 0, :ignored => 0},
:stdout => [],
:time => 0.0
}
end
# Split one Unity output line ("file:line:test:STATUS[: message]") into a
# result hash plus any extra stdout text preceding the filename.
# Returns [result_hash, stdout_or_nil] when the line parses (>= 3 fields),
# otherwise a fallback result hash alone.
def extract_line_elements(line, filename)
# handle anything preceding filename in line as extra output to be collected
stdout = nil
stdout_regex = /(.+)#{Regexp.escape(filename)}.+/i
unity_test_time = 0
if (line =~ stdout_regex)
stdout = $1.clone
line.sub!(/#{Regexp.escape(stdout)}/, '')
end
# collect up test results minus any extra output
elements = (line.strip.split(':'))[1..-1]
# find timestamp if available
if (elements[-1] =~ / \((\d*(?:\.\d*)?) ms\)/)
unity_test_time = $1.to_f / 1000
elements[-1].sub!(/ \((\d*(?:\.\d*)?) ms\)/, '')
end
return {:test => elements[1], :line => elements[0].to_i, :message => (elements[3..-1].join(':')).strip, :unity_test_time => unity_test_time}, stdout if elements.size >= 3
return {:test => '???', :line => -1, :message => nil, :unity_test_time => unity_test_time} #fallback safe option. TODO better handling
end
end

View File

@ -0,0 +1,65 @@
require 'rubygems'
require 'rake' # for ext() method
require 'ceedling/constants'

# Cross-checks Ceedling's tallied test results against Unity's own final
# statistics and exit code; prints a diagnostic and raises on mismatch.
class GeneratorTestResultsSanityChecker
constructor :configurator, :streaminator
# Verify that parsed results are internally consistent.
#
# results::         results hash built by GeneratorTestResults
# unity_exit_code:: fixture exit status (Unity: count of failed tests)
def verify(results, unity_exit_code)
# do no sanity checking if it's disabled
return if (@configurator.sanity_checks == TestResultsSanityChecks::NONE)
raise "results nil or empty" if results.nil? || results.empty?
ceedling_ignores_count = results[:ignores].size
ceedling_failures_count = results[:failures].size
ceedling_tests_summation = (ceedling_ignores_count + ceedling_failures_count + results[:successes].size)
# Exit code handling is not a sanity check that can always be performed because
# command line simulators may or may not pass through Unity's exit code
if (@configurator.sanity_checks >= TestResultsSanityChecks::THOROUGH)
# many platforms limit exit codes to a maximum of 255
if ((ceedling_failures_count != unity_exit_code) and (unity_exit_code < 255))
sanity_check_warning(results[:source][:file], "Unity's exit code (#{unity_exit_code}) does not match Ceedling's summation of failed test cases (#{ceedling_failures_count}).")
end
if ((ceedling_failures_count < 255) and (unity_exit_code == 255))
sanity_check_warning(results[:source][:file], "Ceedling's summation of failed test cases (#{ceedling_failures_count}) is less than Unity's exit code (255 or more).")
end
end
# Ceedling's per-line tallies must agree with Unity's final statistics line
if (ceedling_ignores_count != results[:counts][:ignored])
sanity_check_warning(results[:source][:file], "Unity's final ignore count (#{results[:counts][:ignored]}) does not match Ceedling's summation of ignored test cases (#{ceedling_ignores_count}).")
end
if (ceedling_failures_count != results[:counts][:failed])
sanity_check_warning(results[:source][:file], "Unity's final fail count (#{results[:counts][:failed]}) does not match Ceedling's summation of failed test cases (#{ceedling_failures_count}).")
end
if (ceedling_tests_summation != results[:counts][:total])
sanity_check_warning(results[:source][:file], "Unity's final test count (#{results[:counts][:total]}) does not match Ceedling's summation of all test cases (#{ceedling_tests_summation}).")
end
end
private
# Print the mismatch diagnostic and raise, unless the escape hatch
# constant CEEDLING_IGNORE_SANITY_CHECK is defined.
def sanity_check_warning(file, message)
unless defined?(CEEDLING_IGNORE_SANITY_CHECK)
notice = "\n" +
"ERROR: Internal sanity check for test fixture '#{file.ext(@configurator.extension_executable)}' finds that #{message}\n" +
" Possible causes:\n" +
" 1. Your test + source dereferenced a null pointer.\n" +
" 2. Your test + source indexed past the end of a buffer.\n" +
" 3. Your test + source committed a memory access violation.\n" +
" 4. Your test fixture produced an exit code of 0 despite execution ending prematurely.\n" +
" Sanity check failures of test results are usually a symptom of interrupted test execution.\n\n"
@streaminator.stderr_puts( notice )
raise
end
end
end

View File

@ -0,0 +1,58 @@
# Finds test cases in test files and generates runner C files, both by way
# of Unity's bundled test runner generator script.
class GeneratorTestRunner
constructor :configurator, :file_path_utils, :file_wrapper
# Locate test cases (with line numbers in the original file) in +test_file+.
# Returns the array of test-case hashes from UnityTestRunnerGenerator,
# each augmented with :line_number when preprocessing is in use.
def find_test_cases(test_file)
#Pull in Unity's Test Runner Generator
require 'generate_test_runner.rb'
@test_runner_generator ||= UnityTestRunnerGenerator.new( @configurator.get_runner_config )
if (@configurator.project_use_test_preprocessor)
#redirect to use the preprocessor file if we're doing that sort of thing
pre_test_file = @file_path_utils.form_preprocessed_file_filepath(test_file)
#actually look for the tests using Unity's test runner generator
contents = @file_wrapper.read(pre_test_file)
tests_and_line_numbers = @test_runner_generator.find_tests(contents)
@test_runner_generator.find_setup_and_teardown(contents)
#look up the line numbers in the original file
source_lines = @file_wrapper.read(test_file).split("\n")
source_index = 0;
# scan forward from the previous hit so repeated test names resolve in order
tests_and_line_numbers.size.times do |i|
source_lines[source_index..-1].each_with_index do |line, index|
if (line =~ /#{tests_and_line_numbers[i][:test]}/)
source_index += index
tests_and_line_numbers[i][:line_number] = source_index + 1
break
end
end
end
else
#Just look for the tests using Unity's test runner generator
contents = @file_wrapper.read(test_file)
tests_and_line_numbers = @test_runner_generator.find_tests(contents)
@test_runner_generator.find_setup_and_teardown(contents)
end
return tests_and_line_numbers
end
# Generate the runner C file for +module_name+ at +runner_filepath+,
# including headers for each mock and any extra test file includes.
def generate(module_name, runner_filepath, test_cases, mock_list, test_file_includes=[])
require 'generate_test_runner.rb'
header_extension = @configurator.extension_header
#actually build the test runner using Unity's test runner generator
#(there is no need to use preprocessor here because we've already looked up test cases and are passing them in here)
@test_runner_generator ||= UnityTestRunnerGenerator.new( @configurator.get_runner_config )
@test_runner_generator.generate( module_name,
runner_filepath,
test_cases,
mock_list.map{|f| File.basename(f,'.*')+header_extension},
test_file_includes.map{|f| File.basename(f,'.*')+header_extension})
end
end

View File

@ -0,0 +1,31 @@
# Writes timestamped entries to the project log file; the log's filename is
# derived from the set of project/user/options config files in play.
class Loginator

  constructor :configurator, :project_file_loader, :project_config_manager, :file_wrapper, :system_wrapper

  # Compose @project_log_filepath from the names of all config files used.
  def setup_log_filepath
    config_files = [
      @project_file_loader.main_file,
      @project_file_loader.user_file
    ]
    config_files += @project_config_manager.options_files
    names = config_files.compact.map { |file| file.ext('') }

    log_name = names.join('_')

    @project_log_filepath = File.join(@configurator.project_log_path, log_name.ext('.log'))
  end

  # Append a timestamped entry (with optional heading) to the project log.
  # No-op when project logging is disabled.
  def log(string, heading=nil)
    return unless @configurator.project_logging

    entry = "\n[#{@system_wrapper.time_now}]"
    entry += " :: #{heading}" unless heading.nil?
    entry += "\n#{string.strip}\n"

    @file_wrapper.write(@project_log_filepath, entry, 'a')
  end

end

View File

@ -0,0 +1,46 @@
# modified version of Rake's provided make-style dependency loader
# customizations:
#  (1) handles windows drives in paths -- colons don't confuse task demarcation
#  (2) handles spaces in directory paths
module Rake

  # Makefile loader to be used with the import file loader.
  class MakefileLoader
    # modern Rake no longer mixes DSL methods (e.g. `file`) into arbitrary
    # objects, so pull them in explicitly for process_line below
    include Rake::DSL

    # Load the makefile dependencies in +fn+, registering a Rake file task
    # (target => prerequisites) for each rule found.
    def load(fn)
      # File.open (not Kernel#open) so a path beginning with '|' can never
      # be interpreted as a command to spawn
      File.open(fn) do |mf|
        lines = mf.read
        lines.gsub!(/#[^\n]*\n/m, "") # remove comments
        lines.gsub!(/\\\n/, ' ')      # string together line continuations into single line
        lines.split("\n").each do |line|
          process_line(line)
        end
      end
    end

    private

    # Process one logical line of makefile data.
    def process_line(line)
      # split on presence of task demarcator followed by space (i.e. don't get confused by a colon in a win path)
      file_tasks, args = line.split(/:\s/)
      return if args.nil?

      # split at non-escaped space boundary between files (i.e. escaped spaces in paths are left alone)
      dependents = args.split(/\b\s+/)
      # replace escaped spaces and clean up any extra whitespace
      dependents.map! { |path| path.gsub(/\\ /, ' ').strip }

      file_tasks.strip.split.each do |file_task|
        file file_task => dependents
      end
    end
  end

  # Install the handler so imported '*.mf' files use this loader
  Rake.application.add_loader('mf', MakefileLoader.new)
end

View File

@ -0,0 +1,313 @@
# Ceedling system object map (DIY dependency injection).
# Each top-level key names a system object; its optional 'compose:' entry
# lists the other objects injected into it at construction time.
# --- leaf wrappers (no dependencies) ---
file_wrapper:
file_system_wrapper:
stream_wrapper:
rake_wrapper:
yaml_wrapper:
system_wrapper:
cmock_builder:
reportinator:
# --- utilities ---
rake_utils:
compose:
- rake_wrapper
system_utils:
compose:
- system_wrapper
file_path_utils:
compose:
- configurator
- file_wrapper
file_system_utils:
compose: file_wrapper
# --- project/config loading and caching ---
project_file_loader:
compose:
- yaml_wrapper
- stream_wrapper
- system_wrapper
- file_wrapper
project_config_manager:
compose:
- cacheinator
- configurator
- yaml_wrapper
- file_wrapper
cacheinator:
compose:
- cacheinator_helper
- file_path_utils
- file_wrapper
- yaml_wrapper
cacheinator_helper:
compose:
- file_wrapper
- yaml_wrapper
# --- tool execution ---
tool_executor:
compose:
- configurator
- tool_executor_helper
- streaminator
- system_wrapper
tool_executor_helper:
compose:
- streaminator
- system_utils
- system_wrapper
# --- configuration ---
configurator:
compose:
- configurator_setup
- configurator_plugins
- configurator_builder
- cmock_builder
- yaml_wrapper
- system_wrapper
configurator_setup:
compose:
- configurator_builder
- configurator_validator
- configurator_plugins
- stream_wrapper
configurator_plugins:
compose:
- stream_wrapper
- file_wrapper
- system_wrapper
configurator_validator:
compose:
- file_wrapper
- stream_wrapper
- system_wrapper
configurator_builder:
compose:
- file_system_utils
- file_wrapper
- system_wrapper
# --- logging / output streams ---
loginator:
compose:
- configurator
- project_file_loader
- project_config_manager
- file_wrapper
- system_wrapper
streaminator:
compose:
- streaminator_helper
- verbosinator
- loginator
- stream_wrapper
streaminator_helper:
setupinator:
# --- plugin infrastructure ---
plugin_builder:
plugin_manager:
compose:
- configurator
- plugin_manager_helper
- streaminator
- reportinator
- system_wrapper
plugin_manager_helper:
plugin_reportinator:
compose:
- plugin_reportinator_helper
- plugin_manager
- reportinator
plugin_reportinator_helper:
compose:
- configurator
- streaminator
- yaml_wrapper
- file_wrapper
verbosinator:
compose: configurator
# --- file lookup ---
file_finder:
compose:
- configurator
- file_finder_helper
- cacheinator
- file_path_utils
- file_wrapper
- yaml_wrapper
file_finder_helper:
compose: streaminator
test_includes_extractor:
compose:
- configurator
- yaml_wrapper
- file_wrapper
task_invoker:
compose:
- dependinator
- rake_utils
- rake_wrapper
- project_config_manager
flaginator:
compose:
- configurator
# --- generation (mocks, runners, compiling, linking, results) ---
generator:
compose:
- configurator
- generator_helper
- preprocessinator
- cmock_builder
- generator_test_runner
- generator_test_results
- flaginator
- test_includes_extractor
- tool_executor
- file_finder
- file_path_utils
- streaminator
- plugin_manager
- file_wrapper
generator_helper:
compose:
- streaminator
generator_test_results:
compose:
- configurator
- generator_test_results_sanity_checker
- yaml_wrapper
generator_test_results_sanity_checker:
compose:
- configurator
- streaminator
generator_test_runner:
compose:
- configurator
- file_path_utils
- file_wrapper
dependinator:
compose:
- configurator
- project_config_manager
- test_includes_extractor
- file_path_utils
- rake_wrapper
- file_wrapper
# --- preprocessing ---
preprocessinator:
compose:
- preprocessinator_helper
- preprocessinator_includes_handler
- preprocessinator_file_handler
- task_invoker
- file_path_utils
- yaml_wrapper
- project_config_manager
- configurator
preprocessinator_helper:
compose:
- configurator
- test_includes_extractor
- task_invoker
- file_finder
- file_path_utils
preprocessinator_includes_handler:
compose:
- configurator
- tool_executor
- task_invoker
- file_path_utils
- yaml_wrapper
- file_wrapper
- file_finder
preprocessinator_file_handler:
compose:
- preprocessinator_extractor
- configurator
- tool_executor
- file_path_utils
- file_wrapper
preprocessinator_extractor:
# --- build invokers ---
test_invoker:
compose:
- configurator
- test_invoker_helper
- plugin_manager
- streaminator
- preprocessinator
- task_invoker
- dependinator
- project_config_manager
- build_invoker_utils
- file_path_utils
- file_wrapper
test_invoker_helper:
compose:
- configurator
- task_invoker
- test_includes_extractor
- file_finder
- file_path_utils
- file_wrapper
release_invoker:
compose:
- configurator
- release_invoker_helper
- build_invoker_utils
- dependinator
- task_invoker
- file_path_utils
- file_wrapper
release_invoker_helper:
compose:
- configurator
- dependinator
- task_invoker
build_invoker_utils:
compose:
- configurator
- streaminator
erb_wrapper:

View File

@ -0,0 +1,19 @@
# Run +block+ over +things+ using up to +n+ worker threads.
# Items are drained from a shared queue via non-blocking pops; a worker
# exits when the queue raises ThreadError (empty). Blocks until all
# workers finish.
def par_map(n, things, &block)
  work_queue = Queue.new
  things.each { |item| work_queue << item }

  workers = Array.new(n) do
    Thread.new do
      begin
        # non-blocking pop raises ThreadError once the queue is drained
        loop { block.call(work_queue.pop(true)) }
      rescue ThreadError
        # queue empty -- this worker is done
      end
    end
  end

  workers.each { |worker| worker.join }
end

View File

@ -0,0 +1,80 @@
class String

  # reformat a multiline string to have the given number of whitespace
  # columns; helpful for formatting heredocs
  def left_margin(margin=0)
    # width of the leading-whitespace gutter, measured on the first line
    # containing any non-whitespace character
    gutter = 0
    each_line do |line|
      next unless line =~ /^\s*\S/
      gutter = $&.length - 1
      break
    end

    prefix = ' ' * margin

    # lop the gutter off every line and re-add the requested margin;
    # lines narrower than the gutter collapse to a bare newline
    reformatted = each_line.map do |line|
      if gutter < line.length
        "#{prefix}#{line[gutter..-1]}"
      else
        "\n"
      end
    end

    return reformatted.join
  end

end
# Base class for Ceedling plugins. Subclasses override whichever lifecycle
# hook methods they care about; every hook defaults to a no-op.
class Plugin
attr_reader :name, :environment
attr_accessor :plugin_objects
# system_objects:: hash of Ceedling system objects made available to the plugin
# name::           plugin name string
def initialize(system_objects, name)
@environment = []
@ceedling = system_objects
@name = name
self.setup
end
# subclass initialization hook, invoked at construction time
def setup; end
# mock generation
def pre_mock_generate(arg_hash); end
def post_mock_generate(arg_hash); end
# test runner generation
def pre_runner_generate(arg_hash); end
def post_runner_generate(arg_hash); end
# compilation (test or source)
def pre_compile_execute(arg_hash); end
def post_compile_execute(arg_hash); end
# linking (test or source)
def pre_link_execute(arg_hash); end
def post_link_execute(arg_hash); end
# test fixture execution
def pre_test_fixture_execute(arg_hash); end
def post_test_fixture_execute(arg_hash); end
# test task
def pre_test(test); end
def post_test(test); end
# release task
def pre_release; end
def post_release; end
# whole shebang (any use of Ceedling)
def pre_build; end
def post_build; end
# end-of-build summary hook
def summary; end
end

View File

@ -0,0 +1,53 @@
require 'ceedling/plugin'
require 'yaml' # object maps arrive as YAML strings

# Instantiates a plugin's internal objects from its YAML object map,
# resolving 'compose' dependencies depth-first (DIY dependency injection).
class PluginBuilder

  attr_accessor :plugin_objects

  # Build and return the hash of objects described by +object_map_yaml+.
  #
  # plugin_name::     plugin name (used in the error message only)
  # object_map_yaml:: YAML string mapping object names to 'compose' deps
  # system_objects::  Ceedling system objects handed to each new object
  #
  # Raises when the object map is missing/empty.
  def construct_plugin(plugin_name, object_map_yaml, system_objects)
    # @streaminator.stdout_puts("Constructing plugin #{plugin_name}...", Verbosity::OBNOXIOUS)
    @plugin_objects = {}
    @system_objects = system_objects

    raise "Invalid object map for plugin #{plugin_name}!" unless object_map_yaml

    @object_map = YAML.load(object_map_yaml)
    @object_map.each_key do |obj|
      construct_object(obj)
    end

    return @plugin_objects
  end

  private

  # snake_case -> CamelCase (e.g. "foo_bar" => "FooBar")
  def camelize(underscored_name)
    return underscored_name.gsub(/(_|^)([a-z0-9])/) {$2.upcase}
  end

  # Depth-first: construct an object's dependencies, then the object itself.
  def construct_object(obj)
    return unless @plugin_objects[obj].nil?

    if @object_map[obj] && @object_map[obj]['compose']
      @object_map[obj]['compose'].each { |dep| construct_object(dep) }
    end

    build_object(obj)
  end

  # Require the object's source file and instantiate its class (idempotent).
  def build_object(new_object)
    if @plugin_objects[new_object.to_sym].nil?
      # @streaminator.stdout_puts("Building plugin object #{new_object}", Verbosity::OBNOXIOUS)
      require new_object
      class_name = camelize(new_object)
      # constant lookup instead of eval -- same construction, no string execution
      new_instance = Object.const_get(class_name).new(@system_objects, class_name.to_s)
      new_instance.plugin_objects = @plugin_objects
      @plugin_objects[new_object.to_sym] = new_instance
    end
  end

end

View File

@ -0,0 +1,107 @@
require 'ceedling/constants'

# Loads scripted plugins, dispatches lifecycle hooks to every loaded plugin
# in load order, and accumulates build-failure messages for final reporting.
class PluginManager
constructor :configurator, :plugin_manager_helper, :streaminator, :reportinator, :system_wrapper
def setup
@build_fail_registry = []
@plugin_objects = [] # so we can preserve order
end
# Require and instantiate each scripted plugin; registers each instance in
# +system_objects+ and yields aggregated :environment entries (if any).
def load_plugin_scripts(script_plugins, system_objects)
environment = []
script_plugins.each do |plugin|
# protect against instantiating object multiple times due to processing config multiple times (option files, etc)
next if (@plugin_manager_helper.include?(@plugin_objects, plugin))
begin
@system_wrapper.require_file( "#{plugin}.rb" )
object = @plugin_manager_helper.instantiate_plugin_script( camelize(plugin), system_objects, plugin )
@plugin_objects << object
environment += object.environment
# add plugins to hash of all system objects
system_objects[plugin.downcase.to_sym] = object
rescue
puts "Exception raised while trying to load plugin: #{plugin}"
raise
end
end
yield( { :environment => environment } ) if (environment.size > 0)
end
# true if any plugin registered a build failure
def plugins_failed?
return (@build_fail_registry.size > 0)
end
# Print a banner plus every registered build-failure message (to stderr).
def print_plugin_failures
if (@build_fail_registry.size > 0)
report = @reportinator.generate_banner('BUILD FAILURE SUMMARY')
@build_fail_registry.each do |failure|
report += "#{' - ' if (@build_fail_registry.size > 1)}#{failure}\n"
end
report += "\n"
@streaminator.stderr_puts(report, Verbosity::ERRORS)
end
end
# Record a build-failure message (nil/empty messages are ignored).
def register_build_failure(message)
@build_fail_registry << message if (message and not message.empty?)
end
#### execute all plugin methods ####
def pre_mock_generate(arg_hash); execute_plugins(:pre_mock_generate, arg_hash); end
def post_mock_generate(arg_hash); execute_plugins(:post_mock_generate, arg_hash); end
def pre_runner_generate(arg_hash); execute_plugins(:pre_runner_generate, arg_hash); end
def post_runner_generate(arg_hash); execute_plugins(:post_runner_generate, arg_hash); end
def pre_compile_execute(arg_hash); execute_plugins(:pre_compile_execute, arg_hash); end
def post_compile_execute(arg_hash); execute_plugins(:post_compile_execute, arg_hash); end
def pre_link_execute(arg_hash); execute_plugins(:pre_link_execute, arg_hash); end
def post_link_execute(arg_hash); execute_plugins(:post_link_execute, arg_hash); end
def pre_test_fixture_execute(arg_hash); execute_plugins(:pre_test_fixture_execute, arg_hash); end
def post_test_fixture_execute(arg_hash)
# special arbitration: raw test results are printed or taken over by plugins handling the job
@streaminator.stdout_puts(arg_hash[:shell_result][:output]) if (@configurator.plugins_display_raw_test_results)
execute_plugins(:post_test_fixture_execute, arg_hash)
end
def pre_test(test); execute_plugins(:pre_test, test); end
def post_test(test); execute_plugins(:post_test, test); end
def pre_release; execute_plugins(:pre_release); end
def post_release; execute_plugins(:post_release); end
def pre_build; execute_plugins(:pre_build); end
def post_build; execute_plugins(:post_build); end
def post_error; execute_plugins(:post_error); end
def summary; execute_plugins(:summary); end
private ####################################
# snake_case -> CamelCase (e.g. "foo_bar" => "FooBar")
def camelize(underscored_name)
return underscored_name.gsub(/(_|^)([a-z0-9])/) {$2.upcase}
end
# Invoke +method+ on every plugin that implements it, in load order;
# identifies the offending plugin before re-raising any exception.
def execute_plugins(method, *args)
@plugin_objects.each do |plugin|
begin
plugin.send(method, *args) if plugin.respond_to?(method)
rescue
puts "Exception raised in plugin: #{plugin.name}, in method #{method}"
raise
end
end
end
end

View File

@ -0,0 +1,19 @@
# Small utilities for PluginManager: plugin-list membership checks and
# scripted-plugin instantiation.
class PluginManagerHelper

  # true if +plugins+ already contains a plugin whose name is +name+
  def include?(plugins, name)
    return plugins.any? { |plugin| plugin.name == name }
  end

  # Instantiate the plugin class named by +plugin+ (a CamelCase string),
  # passing it the system objects hash and its plugin name.
  def instantiate_plugin_script(plugin, system_objects, name)
    # constant lookup instead of eval -- same construction, no string execution
    return Object.const_get(plugin).new(system_objects, name)
  end

end

View File

@ -0,0 +1,76 @@
require 'ceedling/constants'
require 'ceedling/defaults'

# Facade plugins use for fetching/aggregating test results and rendering
# ERB-templated reports.
class PluginReportinator
constructor :plugin_reportinator_helper, :plugin_manager, :reportinator
def setup
@test_results_template = nil
end
# hand the helper the full set of system objects
def set_system_objects(system_objects)
@plugin_reportinator_helper.ceedling = system_objects
end
# Fetch the results hash for one test; options[:boom] raises when missing.
def fetch_results(results_path, test, options={:boom => false})
return @plugin_reportinator_helper.fetch_results( File.join(results_path, test), options )
end
def generate_banner(message)
return @reportinator.generate_banner(message)
end
# Merge a list of per-test results files into one aggregate structure.
def assemble_test_results(results_list, options={:boom => false})
aggregated_results = get_results_structure
results_list.each do |result_path|
results = @plugin_reportinator_helper.fetch_results( result_path, options )
@plugin_reportinator_helper.process_results(aggregated_results, results)
end
return aggregated_results
end
# first registration wins; subsequent registrations are ignored
def register_test_results_template(template)
@test_results_template = template if (@test_results_template.nil?)
end
# Render the test results report to $stdout with the registered template
# (or the default one when none was registered).
def run_test_results_report(hash, verbosity=Verbosity::NORMAL, &block)
run_report( $stdout,
((@test_results_template.nil?) ? DEFAULT_TESTS_RESULTS_REPORT_TEMPLATE : @test_results_template),
hash,
verbosity,
&block )
end
# Render +template+ to +stream+; an optional block may return a failure
# message, which is registered with the plugin manager.
def run_report(stream, template, hash=nil, verbosity=Verbosity::NORMAL)
failure = nil
failure = yield() if block_given?
@plugin_manager.register_build_failure( failure )
@plugin_reportinator_helper.run_report( stream, template, hash, verbosity )
end
private ###############################
# empty aggregate-results skeleton filled in by assemble_test_results
def get_results_structure
return {
:successes => [],
:failures => [],
:ignores => [],
:stdout => [],
:counts => {:total => 0, :passed => 0, :failed => 0, :ignored => 0, :stdout => 0},
:time => 0.0
}
end
end

View File

@ -0,0 +1,51 @@
require 'erb'
require 'rubygems'
require 'rake' # for ext()
require 'ceedling/constants'

# Worker behind PluginReportinator: loads results YAML files, folds
# per-test results into aggregate structures, and renders ERB templates.
class PluginReportinatorHelper

  attr_writer :ceedling

  constructor :configurator, :streaminator, :yaml_wrapper, :file_wrapper

  # Load the results hash for one test: the .fail file wins over the .pass
  # file; returns {} when neither exists (raises first if options[:boom]).
  def fetch_results(results_path, options)
    pass_path = File.join(results_path.ext( @configurator.extension_testpass ))
    fail_path = File.join(results_path.ext( @configurator.extension_testfail ))

    if (@file_wrapper.exist?(fail_path))
      return @yaml_wrapper.load(fail_path)
    elsif (@file_wrapper.exist?(pass_path))
      return @yaml_wrapper.load(pass_path)
    else
      if (options[:boom])
        @streaminator.stderr_puts("Could find no test results for '#{File.basename(results_path).ext(@configurator.extension_source)}'", Verbosity::ERRORS)
        raise
      end
    end

    return {}
  end

  # Fold one test's +results+ into +aggregate_results+ (lists, counts, time).
  def process_results(aggregate_results, results)
    return if (results.empty?)

    aggregate_results[:successes] << { :source => results[:source].clone, :collection => results[:successes].clone } if (results[:successes].size > 0)
    aggregate_results[:failures] << { :source => results[:source].clone, :collection => results[:failures].clone } if (results[:failures].size > 0)
    aggregate_results[:ignores] << { :source => results[:source].clone, :collection => results[:ignores].clone } if (results[:ignores].size > 0)
    aggregate_results[:stdout] << { :source => results[:source].clone, :collection => results[:stdout].clone } if (results[:stdout].size > 0)
    aggregate_results[:counts][:total] += results[:counts][:total]
    aggregate_results[:counts][:passed] += results[:counts][:passed]
    aggregate_results[:counts][:failed] += results[:counts][:failed]
    aggregate_results[:counts][:ignored] += results[:counts][:ignored]
    aggregate_results[:counts][:stdout] += results[:stdout].size
    aggregate_results[:time] += results[:time]
  end

  # Render ERB +template+ against the local binding (templates reference
  # the +hash+ local) and stream the result at the given verbosity.
  def run_report(stream, template, hash, verbosity)
    # Ruby 2.6+ takes trim_mode as a keyword argument; the old positional
    # safe_level/trim_mode arguments were removed outright in Ruby 3.x,
    # where ERB.new(template, 0, "%<>") raises. Pick the supported form.
    output =
      if ERB.instance_method(:initialize).parameters.assoc(:key)
        ERB.new(template, trim_mode: "%<>")
      else
        ERB.new(template, 0, "%<>")
      end
    @streaminator.stream_puts(stream, output.result(binding()), verbosity)
  end

end

View File

@ -0,0 +1,56 @@
# Orchestrates preprocessing of test files and mockable headers, handing
# callback procs to @preprocessinator_helper so it can call back into us.
class Preprocessinator

  constructor :preprocessinator_helper, :preprocessinator_includes_handler, :preprocessinator_file_handler, :task_invoker, :file_path_utils, :yaml_wrapper, :project_config_manager, :configurator

  # Build the callbacks @preprocessinator_helper invokes back on this object.
  def setup
    @preprocess_includes_proc             = proc { |filepath| preprocess_shallow_includes(filepath) }
    @preprocess_mock_file_proc            = proc { |filepath| preprocess_file(filepath) }
    @preprocess_test_file_directives_proc = proc { |filepath| preprocess_file_directives(filepath) }
    @preprocess_test_file_proc            = proc { |filepath| preprocess_file(filepath) }
  end

  # Extract shallow includes from a test's source files.
  def preprocess_shallow_source_includes(test)
    @preprocessinator_helper.preprocess_source_includes(test)
  end

  # Preprocess a test file, assemble and preprocess its mockable headers,
  # and kick off mock generation. Returns the list of mocks for the test.
  def preprocess_test_and_invoke_test_mocks(test)
    @preprocessinator_helper.preprocess_includes(test, @preprocess_includes_proc)

    mocks_list = @preprocessinator_helper.assemble_mocks_list(test)

    @project_config_manager.process_test_defines_change(mocks_list)

    @preprocessinator_helper.preprocess_mockable_headers(mocks_list, @preprocess_mock_file_proc)

    @task_invoker.invoke_test_mocks(mocks_list)

    # directives-only preprocessing leaves test code intact apart from
    # #ifdef handling; full preprocessing expands everything
    test_proc =
      if @configurator.project_use_preprocessor_directives
        @preprocess_test_file_directives_proc
      else
        @preprocess_test_file_proc
      end
    @preprocessinator_helper.preprocess_test_file(test, test_proc)

    return mocks_list
  end

  # Extract this file's shallow #includes and persist them to its
  # preprocessed-includes list file.
  def preprocess_shallow_includes(filepath)
    extracted = @preprocessinator_includes_handler.extract_includes(filepath)

    @preprocessinator_includes_handler.write_shallow_includes_list(
      @file_path_utils.form_preprocessed_includes_list_filepath(filepath), extracted)
  end

  # Fully preprocess a file using its previously generated includes list.
  def preprocess_file(filepath)
    @preprocessinator_includes_handler.invoke_shallow_includes_list(filepath)
    includes = @yaml_wrapper.load(@file_path_utils.form_preprocessed_includes_list_filepath(filepath))
    @preprocessinator_file_handler.preprocess_file(filepath, includes)
  end

  # Preprocess only the directives of a file using its includes list.
  def preprocess_file_directives(filepath)
    @preprocessinator_includes_handler.invoke_shallow_includes_list(filepath)
    includes = @yaml_wrapper.load(@file_path_utils.form_preprocessed_includes_list_filepath(filepath))
    @preprocessinator_file_handler.preprocess_file_directives(filepath, includes)
  end

end

View File

@ -0,0 +1,55 @@
class PreprocessinatorExtractor

  # Pull, out of a fully preprocessed file, only the lines that originated
  # in the base file itself (not in any #include'd file).
  #
  # The toolchain preprocessor expands macros, strips comments and #ifdef'd
  # code, and expands each #include in place, emitting '#'-prefixed
  # linemarker directives that name the file each following chunk came
  # from. We toggle collection on when a marker names our file and off at
  # the next non-#pragma directive.
  def extract_base_file_from_preprocessed_expansion(filepath)
    base_name  = File.basename(filepath)
    not_pragma = /^#(?!pragma\b)/ # preprocessor directive that's not a #pragma
    marker     = /^#.*(\s|\/|\\|\")#{Regexp.escape(base_name)}/

    collecting = false # are we inside a chunk that came from our file?
    collected  = []

    File.readlines(filepath).each do |line|
      # scrub bytes that are invalid UTF-8 so regex matching cannot blow up
      line.encode!('UTF-8', 'binary', invalid: :replace, undef: :replace, replace: '')

      if collecting && !(line =~ not_pragma)
        collected << line
      else
        collecting = false
      end

      collecting = true if line =~ marker
    end

    return collected
  end

  # Pull, out of a directives-only preprocessed file (only #ifdef's etc.
  # were processed), the final chunk: everything after the last linemarker
  # that names the base file.
  def extract_base_file_from_preprocessed_directives(filepath)
    base_name = File.basename(filepath)
    marker    = /^#.*(\s|\/|\\|\")#{Regexp.escape(base_name)}/

    collected = []

    File.readlines(filepath).each do |line|
      line.encode!('UTF-8', 'binary', invalid: :replace, undef: :replace, replace: '')
      collected << line
      # a marker naming our file restarts collection (marker line excluded)
      collected = [] if line =~ marker
    end

    return collected
  end

end

View File

@ -0,0 +1,34 @@
# Runs the configured preprocessor tool over a file and rewrites the
# preprocessed output in place, restricted to the file's own lines with its
# original #include statements re-prepended.
class PreprocessinatorFileHandler

  constructor :preprocessinator_extractor, :configurator, :tool_executor, :file_path_utils, :file_wrapper

  # Full macro-expansion preprocessing of filepath.
  def preprocess_file(filepath, includes)
    run_preprocessor(
      filepath,
      includes,
      @configurator.tools_test_file_preprocessor,
      :extract_base_file_from_preprocessed_expansion )
  end

  # Directives-only preprocessing of filepath (#ifdef handling only).
  def preprocess_file_directives(filepath, includes)
    run_preprocessor(
      filepath,
      includes,
      @configurator.tools_test_file_preprocessor_directives,
      :extract_base_file_from_preprocessed_directives )
  end

  private

  # Shared pipeline: execute the given tool producing the preprocessed file,
  # pull the relevant lines back out via the named extractor method, restore
  # the original #include statements at the top, and overwrite the
  # preprocessed file with the result.
  def run_preprocessor(filepath, includes, tool, extraction_method)
    preprocessed_filepath = @file_path_utils.form_preprocessed_file_filepath(filepath)

    command = @tool_executor.build_command_line(tool, [], filepath, preprocessed_filepath)
    @tool_executor.exec(command[:line], command[:options])

    contents = @preprocessinator_extractor.send(extraction_method, preprocessed_filepath)
    includes.each {|inc| contents.unshift("#include \"#{inc}\"")}

    @file_wrapper.write(preprocessed_filepath, contents.join("\n"))
  end

end

View File

@ -0,0 +1,50 @@
# Shared helpers for the preprocessing pipeline; behavior branches on whether
# test preprocessing / deep dependencies are enabled in project config.
class PreprocessinatorHelper

  constructor :configurator, :test_includes_extractor, :task_invoker, :file_finder, :file_path_utils

  # Populate the includes extractor for a test file: via preprocessed
  # includes list when preprocessing is enabled, by direct parse otherwise.
  def preprocess_includes(test, preprocess_includes_proc)
    if (@configurator.project_use_test_preprocessor)
      preprocessed_includes_list = @file_path_utils.form_preprocessed_includes_list_filepath(test)
      preprocess_includes_proc.call( @file_finder.find_test_from_file_path(preprocessed_includes_list) )
      @test_includes_extractor.parse_includes_list(preprocessed_includes_list)
    else
      @test_includes_extractor.parse_test_file(test)
    end
  end

  # Directly parse the test file for source-style includes (no preprocessing).
  def preprocess_source_includes(test)
    @test_includes_extractor.parse_test_file_source_include(test)
  end

  # Map the test's raw mock names to full mock source file paths.
  def assemble_mocks_list(test)
    return @file_path_utils.form_mocks_source_filelist( @test_includes_extractor.lookup_raw_mock_list(test) )
  end

  # Preprocess each mockable header (no-op when preprocessing is disabled).
  # The trailing block maps a mock header path back to its real header file.
  def preprocess_mockable_headers(mock_list, preprocess_file_proc)
    if (@configurator.project_use_test_preprocessor)
      preprocess_files_smartly(
        @file_path_utils.form_preprocessed_mockable_headers_filelist(mock_list),
        preprocess_file_proc ) { |file| @file_finder.find_header_file(file) }
    end
  end

  # Preprocess the test file itself (no-op when preprocessing is disabled).
  def preprocess_test_file(test, preprocess_file_proc)
    return if (!@configurator.project_use_test_preprocessor)

    preprocess_file_proc.call(test)
  end

  private ############################

  # With deep dependencies enabled, let rake tasks handle staleness checks;
  # otherwise preprocess each file eagerly via the supplied proc.
  def preprocess_files_smartly(file_list, preprocess_file_proc)
    if (@configurator.project_use_deep_dependencies)
      @task_invoker.invoke_test_preprocessed_files(file_list)
    else
      file_list.each { |file| preprocess_file_proc.call( yield(file) ) }
    end
  end

end

View File

@ -0,0 +1,189 @@
# Discovers the headers (and companion sources) a file directly depends on by
# abusing the toolchain preprocessor's make-rule generation mode.
class PreprocessinatorIncludesHandler

  constructor :configurator, :tool_executor, :task_invoker, :file_path_utils, :yaml_wrapper, :file_wrapper, :file_finder

  # Cache of preprocessor-generated make rules keyed by source filepath.
  # NOTE(review): @@ class variable — shared process-wide across all instances
  # and never invalidated; stale entries persist for the life of the process.
  @@makefile_cache = {}

  # shallow includes: only those headers a source file explicitly includes
  def invoke_shallow_includes_list(filepath)
    @task_invoker.invoke_test_shallow_include_lists( [@file_path_utils.form_preprocessed_includes_list_filepath(filepath)] )
  end

  ##
  # Ask the preprocessor for a make-style dependency rule of only the headers
  # the source file immediately includes.
  #
  # === Arguments
  # +filepath+ _String_:: Path to the test file to process.
  #
  # === Return
  # _String_:: The text of the dependency rule generated by the preprocessor.
  def form_shallow_dependencies_rule(filepath)
    if @@makefile_cache.has_key?(filepath)
      return @@makefile_cache[filepath]
    end
    # change filename (prefix of '_') to prevent preprocessor from finding
    # include files in temp directory containing file it's scanning
    temp_filepath = @file_path_utils.form_temp_path(filepath, '_')

    # read the file and replace all include statements with a decorated version
    # (decorating the names creates file names that don't exist, thus preventing
    # the preprocessor from snaking out and discovering the entire include path
    # that winds through the code). The decorated filenames indicate files that
    # are included directly by the test file.
    contents = @file_wrapper.read(filepath)

    if !contents.valid_encoding?
      # scrub invalid bytes via a UTF-16 round trip so the gsub! calls below can't raise
      contents = contents.encode("UTF-16be", :invalid=>:replace, :replace=>"?").encode('UTF-8')
    end

    contents.gsub!( /^\s*#include\s+[\"<]\s*(\S+)\s*[\">]/, "#include \"\\1\"\n#include \"@@@@\\1\"" )
    contents.gsub!( /^\s*TEST_FILE\(\s*\"\s*(\S+)\s*\"\s*\)/, "#include \"\\1\"\n#include \"@@@@\\1\"")

    @file_wrapper.write( temp_filepath, contents )

    # extract the make-style dependency rule telling the preprocessor to
    # ignore the fact that it can't find the included files
    command = @tool_executor.build_command_line(@configurator.tools_test_includes_preprocessor, [], temp_filepath)
    shell_result = @tool_executor.exec(command[:line], command[:options])

    @@makefile_cache[filepath] = shell_result[:output]
    return shell_result[:output]
  end

  ##
  # Extract the headers that are directly included by a source file using the
  # provided, annotated Make dependency rule.
  #
  # === Arguments
  # +filepath+ _String_:: C source or header file to extract includes for.
  #
  # === Return
  # _Array_ of _String_:: Array of the direct dependencies for the source file.
  def extract_includes(filepath)
    to_process = [filepath]
    ignore_list = []
    list = []
    all_mocks = []

    include_paths = @configurator.project_config_hash[:collection_paths_include]
    include_paths = [] if include_paths.nil?
    include_paths.map! {|path| File.expand_path(path)}

    # breadth-first walk; in shallow mode only the first file is processed
    while to_process.length > 0
      target = to_process.shift()
      ignore_list << target
      new_deps, new_to_process, all_mocks = extract_includes_helper(target, include_paths, ignore_list, all_mocks)
      list += new_deps
      to_process += new_to_process
      if !@configurator.project_config_hash[:project_auto_link_deep_dependencies]
        break
      else
        list = list.uniq()
        to_process = to_process.uniq()
      end
    end

    return list
  end

  # One step of the dependency walk: returns [new dependencies,
  # files still to process, accumulated mock list] for one file.
  def extract_includes_helper(filepath, include_paths, ignore_list, mocks)
    # Extract the dependencies from the make rule
    make_rule = self.form_shallow_dependencies_rule(filepath)
    target_file = make_rule.split[0].gsub(':', '').gsub('\\','/')
    base = File.basename(target_file, File.extname(target_file))
    make_rule_dependencies = make_rule.gsub(/.*\b#{Regexp.escape(base)}\S*/, '').gsub(/\\$/, '')

    # Extract the headers dependencies from the make rule
    hdr_ext = @configurator.extension_header
    headers_dependencies = make_rule_dependencies.split.find_all {|path| path.end_with?(hdr_ext) }.uniq
    headers_dependencies.map! {|hdr| hdr.gsub('\\','/') }
    full_path_headers_dependencies = extract_full_path_dependencies(headers_dependencies)

    # Extract the sources dependencies from the make rule
    src_ext = @configurator.extension_source
    sources_dependencies = make_rule_dependencies.split.find_all {|path| path.end_with?(src_ext) }.uniq
    sources_dependencies.map! {|src| src.gsub('\\','/') }
    full_path_sources_dependencies = extract_full_path_dependencies(sources_dependencies)

    list = full_path_headers_dependencies + full_path_sources_dependencies

    mock_prefix = @configurator.project_config_hash[:cmock_mock_prefix]
    # Creating list of mocks
    mocks += full_path_headers_dependencies.find_all do |header|
      File.basename(header) =~ /^#{mock_prefix}.*$/
    end.compact

    # ignore real file when both mock and real file exist
    # NOTE(review): the trailing .compact on this each-block is a no-op
    # (each returns its receiver and the value is discarded anyway).
    mocks.each do |mock|
      list.each do |filename|
        if File.basename(filename) == File.basename(mock).sub(mock_prefix, '')
          ignore_list << filename
        end
      end
    end.compact

    # Filtering list of final includes to only include mocks and anything that is NOT in the ignore_list
    list = list.select do |item|
      mocks.include? item or !(ignore_list.any? { |ignore_item| !item.match(/^(.*\/)?#{Regexp.escape(ignore_item)}$/).nil? })
    end

    to_process = []

    if @configurator.project_config_hash[:project_auto_link_deep_dependencies]
      # Creating list of headers that should be recursively pre-processed
      # Skipping mocks and vendor headers
      # NOTE(review): the literal ' + ' inside this regex matches "one-or-more
      # spaces plus a space" between the vendor name and the extension — this
      # looks like it was meant to be string concatenation (')' + ext) and
      # likely never matches real filenames. TODO confirm intent.
      headers_to_deep_link = full_path_headers_dependencies.select do |hdr|
        !(mocks.include? hdr) and (hdr.match(/^(.*\/)(#{VENDORS_FILES.join('|')}) + #{Regexp.escape(hdr_ext)}$/).nil?)
      end
      headers_to_deep_link.map! {|hdr| File.expand_path(hdr) }
      headers_to_deep_link.compact!

      headers_to_deep_link.each do |hdr|
        if (ignore_list.none? {|ignore_header| hdr.match(/^(.*\/)?#{Regexp.escape(ignore_header)}$/)} and
            include_paths.none? {|include_path| hdr =~ /^#{include_path}\.*/})
          if File.exist?(hdr)
            to_process << hdr
            src = @file_finder.find_compilation_input_file(hdr, :ignore)
            to_process << src if src
          end
        end
      end
    end

    return list, to_process, mocks
  end

  # Persist the shallow includes list as YAML.
  def write_shallow_includes_list(filepath, list)
    @yaml_wrapper.dump(filepath, list)
  end

  private

  # Resolve the '@@@@'-annotated names (direct includes) back to the full
  # paths reported by the make rule, dropping annotated entries whose real
  # counterpart the preprocessor never actually emitted.
  def extract_full_path_dependencies(dependencies)
    # Separate the real files from the annotated ones and remove the '@@@@'
    annotated_files, real_files = dependencies.partition {|file| file =~ /^@@@@/}
    annotated_files.map! {|file| file.gsub('@@@@','') }
    # Matching annotated_files values against real_files to ensure that
    # annotated_files contain full path entries (as returned by make rule)
    annotated_files.map! {|file| real_files.find {|real| !real.match(/^(.*\/)?#{Regexp.escape(file)}$/).nil?}}
    annotated_files = annotated_files.compact
    # Find which of our annotated files are "real" dependencies. This is
    # intended to weed out dependencies that have been removed due to build
    # options defined in the project yaml and/or in the files themselves.
    return annotated_files.find_all do |annotated_file|
      # find the index of the "real" file that matches the annotated one.
      idx = real_files.find_index do |real_file|
        real_file =~ /^(.*\/)?#{Regexp.escape(annotated_file)}$/
      end
      # If we found a real file, delete it from the array and return it,
      # otherwise return nil. Since nil is falsy this has the effect of making
      # find_all return only the annotated files for which a real file was
      # found/deleted
      idx ? real_files.delete_at(idx) : nil
    end.compact
  end
end

View File

@ -0,0 +1,52 @@
require 'ceedling/constants'
# Tracks the loaded project configuration and detects changes between builds
# (config hashes and define sets) via the cacheinator.
class ProjectConfigManager

  attr_reader   :options_files, :release_config_changed, :test_config_changed, :test_defines_changed
  attr_accessor :config_hash

  constructor :cacheinator, :configurator, :yaml_wrapper, :file_wrapper

  def setup
    @options_files          = []
    @release_config_changed = false
    @test_config_changed    = false
    @test_defines_changed   = false
  end

  # Deep-merge an options YAML file into the given config hash, remembering
  # the option file's basename.
  def merge_options(config_hash, option_filepath)
    @options_files.push( File.basename( option_filepath ) )
    config_hash.deep_merge!( @yaml_wrapper.load( option_filepath ) )
  end

  # Return a copy of sources with generated mock sources and vendor-supplied
  # sources removed.
  def filter_internal_sources(sources)
    mock_pattern   = /#{CMOCK_MOCK_PREFIX}.+#{Regexp.escape(EXTENSION_SOURCE)}$/
    vendor_pattern = /#{VENDORS_FILES.map{|source| '\b' + Regexp.escape(source.ext(EXTENSION_SOURCE)) + '\b'}.join('|')}$/
    sources.reject { |item| item =~ mock_pattern || item =~ vendor_pattern }
  end

  # has project configuration changed since last release build
  def process_release_config_change
    @release_config_changed = @cacheinator.diff_cached_release_config?( @config_hash )
  end

  # has project configuration changed since last test build
  def process_test_config_change
    @test_config_changed = @cacheinator.diff_cached_test_config?( @config_hash )
  end

  # has the set of compile definitions changed since last test build
  def process_test_defines_change(files)
    @test_defines_changed = @cacheinator.diff_cached_test_defines?( files )
    # push the force-rebuild file's mtime slightly into the future so rake
    # treats dependent prerequisites as stale
    @file_wrapper.touch( @configurator.project_test_force_rebuild_filepath, :mtime => Time.now + 10 ) if @test_defines_changed
  end

end

View File

@ -0,0 +1,99 @@
require 'ceedling/constants'
# Locates the main, user, and mixin project YAML files (via environment
# variables or default disk locations) and loads them into one merged
# configuration hash.
class ProjectFileLoader

  attr_reader :main_file, :user_file

  constructor :yaml_wrapper, :stream_wrapper, :system_wrapper, :file_wrapper

  def setup
    @main_file = nil
    @mixin_files = []
    @user_file = nil

    @main_project_filepath = ''
    @mixin_project_filepaths = []
    @user_project_filepath = ''
  end

  # Resolve the filepaths of all project files. Raises when no main project
  # file can be found; user and mixin files are optional.
  def find_project_files
    # first go hunting for optional user project file by looking for environment variable and then default location on disk
    user_filepath = @system_wrapper.env_get('CEEDLING_USER_PROJECT_FILE')

    if ( not user_filepath.nil? and @file_wrapper.exist?(user_filepath) )
      @user_project_filepath = user_filepath
    elsif (@file_wrapper.exist?(DEFAULT_CEEDLING_USER_PROJECT_FILE))
      @user_project_filepath = DEFAULT_CEEDLING_USER_PROJECT_FILE
    end

    # next check for mixin project files by looking for environment variable
    # (a PATH_SEPARATOR-delimited list; nonexistent entries are silently skipped)
    mixin_filepaths = @system_wrapper.env_get('CEEDLING_MIXIN_PROJECT_FILES')
    if ( not mixin_filepaths.nil? )
      mixin_filepaths.split(File::PATH_SEPARATOR).each do |filepath|
        if ( @file_wrapper.exist?(filepath) )
          @mixin_project_filepaths.push(filepath)
        end
      end
    end

    # next check for main project file by looking for environment variable and then default location on disk;
    # blow up if we don't find this guy -- like, he's so totally important
    main_filepath = @system_wrapper.env_get('CEEDLING_MAIN_PROJECT_FILE')

    if ( not main_filepath.nil? and @file_wrapper.exist?(main_filepath) )
      @main_project_filepath = main_filepath
    elsif (@file_wrapper.exist?(DEFAULT_CEEDLING_MAIN_PROJECT_FILE))
      @main_project_filepath = DEFAULT_CEEDLING_MAIN_PROJECT_FILE
    else
      # no verbosity checking since this is lowest level reporting anyhow &
      # verbosity checking depends on configurator which in turns needs this class (circular dependency)
      @stream_wrapper.stderr_puts('Found no Ceedling project file (*.yml)')
      raise
    end

    @main_file = File.basename( @main_project_filepath )
    @mixin_project_filepaths.each do |filepath|
      @mixin_files.push(File.basename( filepath ))
    end
    @user_file = File.basename( @user_project_filepath ) if ( not @user_project_filepath.empty? )
  end

  # Recursively merge y2 into y1: hashes merge deep, arrays concatenate,
  # scalars from y2 overwrite.
  # NOTE(review): mutates y1 in place and also returns it.
  def yaml_merger(y1, y2)
    o1 = y1
    y2.each_pair do |k,v|
      if o1[k].nil?
        o1[k] = v
      else
        if (o1[k].instance_of? Hash)
          o1[k] = yaml_merger(o1[k], v)
        elsif (o1[k].instance_of? Array)
          o1[k] += v
        else
          o1[k] = v
        end
      end
    end
    return o1
  end

  # Load main config, then layer mixin files, then the user file on top.
  def load_project_config
    config_hash = @yaml_wrapper.load(@main_project_filepath)

    # if there are mixin project files, then use them
    @mixin_project_filepaths.each do |filepath|
      mixin = @yaml_wrapper.load(filepath)
      config_hash = yaml_merger( config_hash, mixin )
    end

    # if there's a user project file, then use it
    if ( not @user_project_filepath.empty? )
      user_hash = @yaml_wrapper.load(@user_project_filepath)
      config_hash = yaml_merger( config_hash, user_hash )
    end

    return config_hash
  end

end

View File

@ -0,0 +1,17 @@
# Small convenience queries over rake's task registry.
class RakeUtils

  constructor :rake_wrapper

  # True if any rake task that has already been invoked matches task_regex.
  def task_invoked?(task_regex)
    @rake_wrapper.task_list.any? do |task|
      task.already_invoked and task.to_s =~ task_regex
    end
  end

end

View File

@ -0,0 +1,33 @@
require 'rubygems'
require 'rake'
require 'ceedling/makefile' # our replacement for rake's make-style dependency loader
include Rake::DSL if defined?(Rake::DSL)
# Reopen Rake::Task to expose its internal @already_invoked flag so we can
# query which tasks have run (used by RakeUtils#task_invoked?).
class Rake::Task
  attr_reader :already_invoked
end
# Thin seam around rake so the rest of the system can be tested without
# touching rake's global state directly.
class RakeWrapper

  def initialize
    # our replacement for rake's built-in make-style dependency loader
    @makefile_loader = Rake::MakefileLoader.new
  end

  # Look up a single rake task by name.
  def [](task)
    Rake::Task[task]
  end

  # Every task rake currently knows about.
  def task_list
    Rake::Task.tasks
  end

  # Define a file task with the given prerequisites (Rake DSL `file`).
  def create_file_task(file_task, dependencies)
    file(file_task => dependencies)
  end

  # Parse a make-style dependency file into rake prerequisites.
  def load_dependencies(dependencies_path)
    @makefile_loader.load(dependencies_path)
  end

end

View File

@ -0,0 +1,85 @@
require 'fileutils'

# get directory containing this here file, back up one directory, and expand to full path
CEEDLING_ROOT    = File.expand_path(File.dirname(__FILE__) + '/../..')
CEEDLING_LIB     = File.join(CEEDLING_ROOT, 'lib')
CEEDLING_VENDOR  = File.join(CEEDLING_ROOT, 'vendor')
CEEDLING_RELEASE = File.join(CEEDLING_ROOT, 'release')

# make Ceedling's own code and its vendored libraries loadable
$LOAD_PATH.unshift( CEEDLING_LIB )
$LOAD_PATH.unshift( File.join(CEEDLING_VENDOR, 'unity/auto') )
$LOAD_PATH.unshift( File.join(CEEDLING_VENDOR, 'diy/lib') )
$LOAD_PATH.unshift( File.join(CEEDLING_VENDOR, 'cmock/lib') )

require 'rake'

# Let's make sure we remember the task descriptions in case we need them
Rake::TaskManager.record_task_metadata = true

require 'diy'
require 'constructor'

require 'ceedling/constants'
require 'ceedling/target_loader'

# construct all our objects
# ensure load path contains all libraries needed first
lib_ceedling_load_path_temp = File.join(CEEDLING_LIB, 'ceedling')
$LOAD_PATH.unshift( lib_ceedling_load_path_temp )
# DIY wires up the object graph declared in objects.yml (dependency injection)
@ceedling = DIY::Context.from_yaml( File.read( File.join(lib_ceedling_load_path_temp, 'objects.yml') ) )
@ceedling.build_everything
# now that all objects are built, delete 'lib/ceedling' from load path
$LOAD_PATH.delete(lib_ceedling_load_path_temp)

# one-stop shopping for all our setup and such after construction
@ceedling[:setupinator].ceedling = @ceedling

project_config =
  begin
    cfg = @ceedling[:setupinator].load_project_files
    # the TARGET environment variable may redirect to a target-specific config
    TargetLoader.inspect(cfg, ENV['TARGET'])
  rescue TargetLoader::NoTargets
    cfg
  rescue TargetLoader::RequestReload
    # target loader asked for a fresh pass over the project files
    @ceedling[:setupinator].load_project_files
  end

@ceedling[:setupinator].do_setup( project_config )

# tell all our plugins we're about to do something
@ceedling[:plugin_manager].pre_build

# load rakefile component files (*.rake)
PROJECT_RAKEFILE_COMPONENT_FILES.each { |component| load(component) }

# tell rake to shut up by default (overridden in verbosity / debug tasks as appropriate)
verbose(false)

# end block always executed following rake run
END {
  $stdout.flush unless $stdout.nil?
  $stderr.flush unless $stderr.nil?

  # cache our input configurations to use in comparison upon next execution
  @ceedling[:cacheinator].cache_test_config( @ceedling[:setupinator].config_hash )    if (@ceedling[:task_invoker].test_invoked?)
  @ceedling[:cacheinator].cache_release_config( @ceedling[:setupinator].config_hash ) if (@ceedling[:task_invoker].release_invoked?)

  # delete all temp files unless we're in debug mode
  if (not @ceedling[:configurator].project_debug)
    @ceedling[:file_wrapper].rm_f( @ceedling[:file_wrapper].directory_listing( File.join(@ceedling[:configurator].project_temp_path, '*') ))
  end

  # only perform these final steps if we got here without runtime exceptions or errors
  if (@ceedling[:system_wrapper].ruby_success)
    # tell all our plugins the build is done and process results
    @ceedling[:plugin_manager].post_build
    @ceedling[:plugin_manager].print_plugin_failures
    exit(1) if (@ceedling[:plugin_manager].plugins_failed? && !@ceedling[:setupinator].config_hash[:graceful_fail])
  else
    puts "ERROR: Ceedling Failed"
    @ceedling[:plugin_manager].post_error
  end
}

View File

@ -0,0 +1,98 @@
require 'ceedling/constants'
# Drives the release build: invokes object/dependency rake tasks and prepares
# linker argument lists from optional project-defined constants.
class ReleaseInvoker

  constructor :configurator, :release_invoker_helper, :build_invoker_utils, :dependinator, :task_invoker, :file_path_utils, :file_wrapper

  # Build release objects for the given C files; exceptions are routed through
  # the shared build-exception handler. Returns the object filelist.
  def setup_and_invoke_c_objects( c_files )
    objects = @file_path_utils.form_release_build_c_objects_filelist( c_files )

    begin
      @release_invoker_helper.process_deep_dependencies( @file_path_utils.form_release_dependencies_filelist( c_files ) )

      @dependinator.enhance_release_file_dependencies( objects )
      @task_invoker.invoke_release_objects( objects )
    rescue => e
      @build_invoker_utils.process_exception( e, RELEASE_SYM, false )
    end

    return objects
  end

  # Build release objects for the given assembly files (no deep dependencies).
  # Returns the object filelist.
  def setup_and_invoke_asm_objects( asm_files )
    objects = @file_path_utils.form_release_build_asm_objects_filelist( asm_files )

    begin
      @dependinator.enhance_release_file_dependencies( objects )
      @task_invoker.invoke_release_objects( objects )
    rescue => e
      @build_invoker_utils.process_exception( e, RELEASE_SYM, false )
    end

    return objects
  end

  # Delete all cached release .d files and regenerate deep dependencies for
  # every source file. No-op when deep dependencies are disabled.
  def refresh_c_deep_dependencies
    return if (not @configurator.project_use_deep_dependencies)

    @file_wrapper.rm_f(
      @file_wrapper.directory_listing(
        File.join( @configurator.project_release_dependencies_path, '*' + @configurator.extension_dependencies ) ) )

    @release_invoker_helper.process_deep_dependencies(
      @file_path_utils.form_release_dependencies_filelist(
        @configurator.collection_all_source ) )
  end

  # Copy each existing file into the release artifacts directory.
  def artifactinate( *files )
    files.flatten.each do |file|
      @file_wrapper.cp( file, @configurator.project_release_artifacts_path ) if @file_wrapper.exist?( file )
    end
  end

  # Combine project libraries with optional LIBRARIES_SYSTEM and format each
  # through LIBRARIES_FLAG (substituting '${1}') when that constant is defined.
  def convert_libraries_to_arguments(libraries)
    args = ((libraries || []) + ((defined? LIBRARIES_SYSTEM) ? LIBRARIES_SYSTEM : [])).flatten
    if (defined? LIBRARIES_FLAG)
      args.map! {|v| LIBRARIES_FLAG.gsub(/\$\{1\}/, v) }
    end
    return args
  end

  # Library search paths, formatted through LIBRARIES_PATH_FLAG ('${1}'
  # substitution) when that constant is defined.
  def get_library_paths_to_arguments()
    paths = (defined? PATHS_LIBRARIES) ? (PATHS_LIBRARIES || []).clone : []
    if (defined? LIBRARIES_PATH_FLAG)
      paths.map! {|v| LIBRARIES_PATH_FLAG.gsub(/\$\{1\}/, v) }
    end
    return paths
  end

  # Partition a mixed prerequisite list into [objects, libraries] based on a
  # filename-extension regex assembled from whichever of
  # EXTENSION_SUBPROJECTS / EXTENSION_LIBRARIES constants are defined
  # (falling back to a never-matching '.LIBRARY' placeholder).
  def sort_objects_and_libraries(both)
    extension = if ((defined? EXTENSION_SUBPROJECTS) && (defined? EXTENSION_LIBRARIES))
                  extension_libraries = if (EXTENSION_LIBRARIES.class == Array)
                                          EXTENSION_LIBRARIES.join(")|(?:\\")
                                        else
                                          EXTENSION_LIBRARIES
                                        end
                  "(?:\\#{EXTENSION_SUBPROJECTS})|(?:\\#{extension_libraries})"
                elsif (defined? EXTENSION_SUBPROJECTS)
                  "\\#{EXTENSION_SUBPROJECTS}"
                elsif (defined? EXTENSION_LIBRARIES)
                  if (EXTENSION_LIBRARIES.class == Array)
                    "(?:\\#{EXTENSION_LIBRARIES.join(")|(?:\\")})"
                  else
                    "\\#{EXTENSION_LIBRARIES}"
                  end
                else
                  "\\.LIBRARY"
                end
    sorted_objects = both.group_by {|v| v.match(/.+#{extension}$/) ? :libraries : :objects }
    libraries = sorted_objects[:libraries] || []
    objects   = sorted_objects[:objects] || []
    return objects, libraries
  end
end

View File

@ -0,0 +1,19 @@
# Deep-dependency handling for the release build.
class ReleaseInvokerHelper

  constructor :configurator, :dependinator, :task_invoker

  # Generate (when configured) and then load deep dependency information for
  # the given .d filelist. No-op when deep dependencies are disabled.
  def process_deep_dependencies(dependencies_list)
    return unless @configurator.project_use_deep_dependencies

    if @configurator.project_generate_deep_dependencies
      @dependinator.enhance_release_file_dependencies( dependencies_list )
      @task_invoker.invoke_release_dependencies_files( dependencies_list )
    end

    @dependinator.load_release_object_deep_dependencies( dependencies_list )
  end

end

View File

@ -0,0 +1,26 @@
##
# Pretifies reports
class Reportinator

  ##
  # Generates a banner for a message based on the length of the message or a
  # given width.
  # ==== Attributes
  #
  # * _message_: The message to put.
  # * _width_: The width of the message. If nil the size of the banner is
  # determined by the length of the message.
  #
  # ==== Examples
  #
  #    rp = Reportinator.new
  #    rp.generate_banner("Hello world!") => "------------\nHello world!\n------------\n"
  #    rp.generate_banner("Hello world!", 3) => "---\nHello world!\n---\n"
  #
  def generate_banner(message, width = nil)
    # explicit nil? check so an explicit width of 0 is honored
    dash_count = width.nil? ? message.strip.length : width
    rule_line = '-' * dash_count
    "#{rule_line}\n#{message}\n#{rule_line}\n"
  end
end

View File

@ -0,0 +1,9 @@
# Rule: generate a mock source file (mock prefix + no directory separators +
# source extension) from its corresponding header, located via the file finder.
rule(/#{CMOCK_MOCK_PREFIX}[^\/\\]+#{'\\'+EXTENSION_SOURCE}$/ => [
    proc do |task_name|
      @ceedling[:file_finder].find_header_input_for_mock_file(task_name)
    end
  ]) do |mock|
  @ceedling[:generator].generate_mock(TEST_SYM, mock.source)
end

View File

@ -0,0 +1,26 @@
# invocations against this rule should only happen when enhanced dependencies are enabled;
# otherwise, dependency tracking will be too shallow and preprocessed files could intermittently
# fail to be updated when they actually need to be.
rule(/#{PROJECT_TEST_PREPROCESS_FILES_PATH}\/.+/ => [
    proc do |task_name|
      @ceedling[:file_finder].find_test_or_source_or_header_file(task_name)
    end
  ]) do |file|
  if (not @ceedling[:configurator].project_use_deep_dependencies)
    # fixed spelling in user-facing message: "neccessary" -> "necessary"
    raise 'ERROR: Ceedling preprocessing rule invoked though necessary auxiliary dependency support not enabled.'
  end

  @ceedling[:generator].generate_preprocessed_file(TEST_SYM, file.source)
end

# invocations against this rule can always happen as there are no deeper dependencies to consider
rule(/#{PROJECT_TEST_PREPROCESS_INCLUDES_PATH}\/.+/ => [
    proc do |task_name|
      @ceedling[:file_finder].find_test_or_source_or_header_file(task_name)
    end
  ]) do |file|
  @ceedling[:generator].generate_shallow_includes_list(TEST_SYM, file.source)
end

View File

@ -0,0 +1,99 @@
RELEASE_COMPILE_TASK_ROOT  = RELEASE_TASK_ROOT + 'compile:'  unless defined?(RELEASE_COMPILE_TASK_ROOT)
RELEASE_ASSEMBLE_TASK_ROOT = RELEASE_TASK_ROOT + 'assemble:' unless defined?(RELEASE_ASSEMBLE_TASK_ROOT)

# If GCC and Releasing a Library, Update Tools to Automatically Have Necessary Tags
if (TOOLS_RELEASE_COMPILER[:executable] == DEFAULT_RELEASE_COMPILER_TOOL[:executable])
  if (File.extname(PROJECT_RELEASE_BUILD_TARGET) == '.so')
    # shared object: position-independent code plus shared linking
    TOOLS_RELEASE_COMPILER[:arguments] << "-fPIC" unless TOOLS_RELEASE_COMPILER[:arguments].include?("-fPIC")
    TOOLS_RELEASE_LINKER[:arguments] << "-shared" unless TOOLS_RELEASE_LINKER[:arguments].include?("-shared")
  elsif (File.extname(PROJECT_RELEASE_BUILD_TARGET) == '.a')
    # static archive: swap the linker out for ar
    TOOLS_RELEASE_COMPILER[:arguments] << "-fPIC" unless TOOLS_RELEASE_COMPILER[:arguments].include?("-fPIC")
    TOOLS_RELEASE_LINKER[:executable] = 'ar'
    TOOLS_RELEASE_LINKER[:arguments] = ['rcs', '${2}', '${1}'].compact
  end
end

if (RELEASE_BUILD_USE_ASSEMBLY)
  # Rule: assemble a release object file from an assembly source file.
  rule(/#{PROJECT_RELEASE_BUILD_OUTPUT_ASM_PATH}\/#{'.+\\'+EXTENSION_OBJECT}$/ => [
      proc do |task_name|
        @ceedling[:file_finder].find_assembly_file(task_name)
      end
    ]) do |object|
    @ceedling[:generator].generate_object_file(
      TOOLS_RELEASE_ASSEMBLER,
      OPERATION_ASSEMBLE_SYM,
      RELEASE_SYM,
      object.source,
      object.name )
  end
end

# Rule: compile a release object file from a C source file.
rule(/#{PROJECT_RELEASE_BUILD_OUTPUT_C_PATH}\/#{'.+\\'+EXTENSION_OBJECT}$/ => [
    proc do |task_name|
      @ceedling[:file_finder].find_compilation_input_file(task_name, :error, true)
    end
  ]) do |object|
  @ceedling[:generator].generate_object_file(
    TOOLS_RELEASE_COMPILER,
    OPERATION_COMPILE_SYM,
    RELEASE_SYM,
    object.source,
    object.name,
    @ceedling[:file_path_utils].form_release_build_c_list_filepath( object.name ),
    @ceedling[:file_path_utils].form_release_dependencies_filepath( object.name ) )
end

# Rule: link the release build target from its prerequisite objects/libraries.
rule(/#{PROJECT_RELEASE_BUILD_TARGET}/) do |bin_file|
  objects, libraries = @ceedling[:release_invoker].sort_objects_and_libraries(bin_file.prerequisites)
  tool      = TOOLS_RELEASE_LINKER.clone
  lib_args  = @ceedling[:release_invoker].convert_libraries_to_arguments(libraries)
  lib_paths = @ceedling[:release_invoker].get_library_paths_to_arguments()
  map_file  = @ceedling[:configurator].project_release_build_map
  @ceedling[:generator].generate_executable_file(
    tool,
    RELEASE_SYM,
    objects,
    bin_file.name,
    map_file,
    lib_args,
    lib_paths )
  # copy the finished binary (and map file) into the artifacts directory
  @ceedling[:release_invoker].artifactinate( bin_file.name, map_file, @ceedling[:configurator].release_build_artifacts )
end

namespace RELEASE_SYM do
  # use rules to increase efficiency for large projects (instead of iterating through all sources and creating defined tasks)
  namespace :compile do
    rule(/^#{RELEASE_COMPILE_TASK_ROOT}\S+#{'\\'+EXTENSION_SOURCE}$/ => [ # compile task names by regex
        proc do |task_name|
          source = task_name.sub(/#{RELEASE_COMPILE_TASK_ROOT}/, '')
          @ceedling[:file_finder].find_source_file(source, :error)
        end
      ]) do |compile|
      @ceedling[:rake_wrapper][:directories].invoke
      @ceedling[:project_config_manager].process_release_config_change
      @ceedling[:release_invoker].setup_and_invoke_c_objects( [compile.source] )
    end
  end

  if (RELEASE_BUILD_USE_ASSEMBLY)
    namespace :assemble do
      rule(/^#{RELEASE_ASSEMBLE_TASK_ROOT}\S+#{'\\'+EXTENSION_ASSEMBLY}$/ => [ # assemble task names by regex
          proc do |task_name|
            source = task_name.sub(/#{RELEASE_ASSEMBLE_TASK_ROOT}/, '')
            @ceedling[:file_finder].find_assembly_file(source)
          end
        ]) do |assemble|
        @ceedling[:rake_wrapper][:directories].invoke
        @ceedling[:project_config_manager].process_release_config_change
        @ceedling[:release_invoker].setup_and_invoke_asm_objects( [assemble.source] )
      end
    end
  end
end

View File

@ -0,0 +1,15 @@
# Rule: generate a make-style dependencies (.d) file for a release build
# compilation input.
rule(/#{PROJECT_RELEASE_DEPENDENCIES_PATH}\/#{'.+\\'+EXTENSION_DEPENDENCIES}$/ => [
    proc do |task_name|
      @ceedling[:file_finder].find_compilation_input_file(task_name, :error, true)
    end
  ]) do |dep|
  @ceedling[:generator].generate_dependencies_file(
    TOOLS_RELEASE_DEPENDENCIES_GENERATOR,
    RELEASE_SYM,
    dep.source,
    @ceedling[:file_path_utils].form_release_build_c_object_filepath(dep.source),
    dep.name)
end

View File

@ -0,0 +1,73 @@
# Rule: generate a test runner source file from its originating test file.
rule(/#{PROJECT_TEST_FILE_PREFIX}#{'.+'+TEST_RUNNER_FILE_SUFFIX}#{'\\'+EXTENSION_SOURCE}$/ => [
    proc do |task_name|
      @ceedling[:file_finder].find_test_input_for_runner_file(task_name)
    end
  ]) do |runner|
  @ceedling[:generator].generate_test_runner(TEST_SYM, runner.source, runner.name)
end

# Rule: produce an object file for the test build — compile C sources;
# otherwise assemble, when assembly support is enabled.
rule(/#{PROJECT_TEST_BUILD_OUTPUT_C_PATH}\/#{'.+\\'+EXTENSION_OBJECT}$/ => [
    proc do |task_name|
      @ceedling[:file_finder].find_compilation_input_file(task_name)
    end
  ]) do |object|
  if (File.basename(object.source) =~ /#{EXTENSION_SOURCE}$/)
    @ceedling[:generator].generate_object_file(
      TOOLS_TEST_COMPILER,
      OPERATION_COMPILE_SYM,
      TEST_SYM,
      object.source,
      object.name,
      @ceedling[:file_path_utils].form_test_build_list_filepath( object.name ),
      @ceedling[:file_path_utils].form_test_dependencies_filepath( object.name ))
  elsif (defined?(TEST_BUILD_USE_ASSEMBLY) && TEST_BUILD_USE_ASSEMBLY)
    @ceedling[:generator].generate_object_file(
      TOOLS_TEST_ASSEMBLER,
      OPERATION_ASSEMBLE_SYM,
      TEST_SYM,
      object.source,
      object.name )
  end
end

# Rule: link a test executable from its prerequisite object files.
rule(/#{PROJECT_TEST_BUILD_OUTPUT_PATH}\/#{'.+\\'+EXTENSION_EXECUTABLE}$/) do |bin_file|
  lib_args  = @ceedling[:test_invoker].convert_libraries_to_arguments()
  lib_paths = @ceedling[:test_invoker].get_library_paths_to_arguments()
  @ceedling[:generator].generate_executable_file(
    TOOLS_TEST_LINKER,
    TEST_SYM,
    bin_file.prerequisites,
    bin_file.name,
    @ceedling[:file_path_utils].form_test_build_map_filepath( bin_file.name ),
    lib_args,
    lib_paths )
end

# Rule: execute a test binary via the test fixture and record its results file.
rule(/#{PROJECT_TEST_RESULTS_PATH}\/#{'.+\\'+EXTENSION_TESTPASS}$/ => [
    proc do |task_name|
      @ceedling[:file_path_utils].form_test_executable_filepath(task_name)
    end
  ]) do |test_result|
  @ceedling[:generator].generate_test_results(TOOLS_TEST_FIXTURE, TEST_SYM, test_result.source, test_result.name)
end

namespace TEST_SYM do
  # use rules to increase efficiency for large projects (instead of iterating through all sources and creating defined tasks)
  rule(/^#{TEST_TASK_ROOT}\S+$/ => [ # test task names by regex
      proc do |task_name|
        test = task_name.sub(/#{TEST_TASK_ROOT}/, '')
        # tolerate task names given without the test file prefix
        test = "#{PROJECT_TEST_FILE_PREFIX}#{test}" if not (test.start_with?(PROJECT_TEST_FILE_PREFIX))
        @ceedling[:file_finder].find_test_from_file_path(test)
      end
    ]) do |test|
    @ceedling[:rake_wrapper][:test_deps].invoke
    @ceedling[:test_invoker].setup_and_invoke([test.source])
  end
end

View File

@ -0,0 +1,15 @@
# Rule: generate a make-style dependencies (.d) file for a test build
# compilation input.
rule(/#{PROJECT_TEST_DEPENDENCIES_PATH}\/#{'.+\\'+EXTENSION_DEPENDENCIES}$/ => [
    proc do |task_name|
      @ceedling[:file_finder].find_compilation_input_file(task_name)
    end
  ]) do |dep|
  @ceedling[:generator].generate_dependencies_file(
    TOOLS_TEST_DEPENDENCIES_GENERATOR,
    TEST_SYM,
    dep.source,
    @ceedling[:file_path_utils].form_test_build_c_object_filepath(dep.source),
    dep.name)
end

View File

@ -0,0 +1,53 @@
# Orchestrates loading of project files and drives the configurator through
# its full setup pipeline.
class Setupinator

  attr_reader :config_hash
  attr_writer :ceedling

  # Reset state: no system objects registered, empty project configuration.
  def setup
    @ceedling = {}
    @config_hash = {}
  end

  # Locate project files on disk, then load and return the raw project config.
  def load_project_files
    @ceedling[:project_file_loader].find_project_files
    @ceedling[:project_file_loader].load_project_config
  end

  # Run the configurator pipeline over +config_hash+.
  # NOTE(review): the call order below is significant — each step builds on
  # hash mutations performed by the preceding ones; do not reorder.
  def do_setup(config_hash)
    @config_hash = config_hash

    # load up all the constants and accessors our rake files, objects, & external scripts will need;
    # note: configurator modifies the cmock section of the hash with a couple defaults to tie
    # project together - the modified hash is used to build cmock object
    @ceedling[:configurator].populate_defaults( config_hash )
    @ceedling[:configurator].populate_unity_defaults( config_hash )
    @ceedling[:configurator].populate_cmock_defaults( config_hash )
    @ceedling[:configurator].find_and_merge_plugins( config_hash )
    @ceedling[:configurator].merge_imports( config_hash )
    @ceedling[:configurator].eval_environment_variables( config_hash )
    @ceedling[:configurator].tools_setup( config_hash )
    @ceedling[:configurator].eval_paths( config_hash )
    @ceedling[:configurator].standardize_paths( config_hash )
    @ceedling[:configurator].validate( config_hash )
    @ceedling[:configurator].build( config_hash, :environment )

    @ceedling[:configurator].insert_rake_plugins( @ceedling[:configurator].rake_plugins )
    @ceedling[:configurator].tools_supplement_arguments( config_hash )

    # merge in any environment variables plugins specify, after the main build
    @ceedling[:plugin_manager].load_plugin_scripts( @ceedling[:configurator].script_plugins, @ceedling ) do |env|
      @ceedling[:configurator].eval_environment_variables( env )
      @ceedling[:configurator].build_supplement( config_hash, env )
    end

    @ceedling[:plugin_reportinator].set_system_objects( @ceedling )
    @ceedling[:file_finder].prepare_search_sources
    @ceedling[:loginator].setup_log_filepath
    @ceedling[:project_config_manager].config_hash = config_hash
  end

  # Restore the configurator's defaults for +config_hash+.
  def reset_defaults(config_hash)
    @ceedling[:configurator].reset_defaults( config_hash )
  end
end

View File

@ -0,0 +1,28 @@
# Wraps writes to the standard streams so stdout output can be intercepted
# by a handler and the whole wrapper mocked out in tests.
class StreamWrapper

  # Install +fnc+ as a replacement handler invoked for every stdout write.
  def stdout_override(&fnc)
    @stdout_override_fnc = fnc
  end

  # Write +string+ to stdout, or hand it to the override handler when set.
  def stdout_puts(string)
    handler = @stdout_override_fnc
    handler ? handler.call(string) : $stdout.puts(string)
  end

  def stdout_flush
    $stdout.flush
  end

  def stderr_puts(string)
    $stderr.puts(string)
  end

  def stderr_flush
    $stderr.flush
  end
end

View File

@ -0,0 +1,40 @@
require 'ceedling/constants'

# Convenience object for verbosity-filtered writes to the standard streams.
# Every message is also mirrored to the log, regardless of verbosity.
class Streaminator

  constructor :streaminator_helper, :verbosinator, :loginator, :stream_wrapper

  # Write +string+ to stdout (through the stream wrapper) when the current
  # verbosity allows; always log it.
  def stdout_puts(string, verbosity=Verbosity::NORMAL)
    allowed = @verbosinator.should_output?(verbosity)
    if allowed
      @stream_wrapper.stdout_puts(string)
      @stream_wrapper.stdout_flush
    end

    # write to log as though Verbosity::OBNOXIOUS
    @loginator.log( string, @streaminator_helper.extract_name($stdout) )
  end

  # Write +string+ to stderr (through the stream wrapper) when the current
  # verbosity allows; always log it.
  def stderr_puts(string, verbosity=Verbosity::NORMAL)
    allowed = @verbosinator.should_output?(verbosity)
    if allowed
      @stream_wrapper.stderr_puts(string)
      @stream_wrapper.stderr_flush
    end

    # write to log as though Verbosity::OBNOXIOUS
    @loginator.log( string, @streaminator_helper.extract_name($stderr) )
  end

  # Write +string+ directly to an arbitrary +stream+ when the current
  # verbosity allows; always log it.
  def stream_puts(stream, string, verbosity=Verbosity::NORMAL)
    allowed = @verbosinator.should_output?(verbosity)
    if allowed
      stream.puts(string)
      stream.flush
    end

    # write to log as though Verbosity::OBNOXIOUS
    @loginator.log( string, @streaminator_helper.extract_name(stream) )
  end
end

View File

@ -0,0 +1,15 @@
# Maps an IO stream to a human-readable name for log attribution.
class StreaminatorHelper

  # Standard descriptors (0/1/2) get canonical labels; any other stream
  # falls back to its #inspect representation.
  def extract_name(stream)
    labels = {
      0 => '#<IO:$stdin>',
      1 => '#<IO:$stdout>',
      2 => '#<IO:$stderr>'
    }
    labels.fetch(stream.fileno) { stream.inspect }
  end
end

View File

@ -0,0 +1,37 @@
class Object
  # Return a deep copy of self via a Marshal round-trip. Only valid for
  # marshalable objects (no IO, Proc, bindings, or singleton methods).
  def deep_clone
    Marshal.load(Marshal.dump(self))
  end
end
##
# Class containing system utility functions.
class SystemUtils

  constructor :system_wrapper

  ##
  # Initializes cached shell-detection state.
  def setup
    @tcsh_shell = nil
  end

  ##
  # Returns true when the current shell is tcsh. The detection shells out
  # once and memoizes the answer for the life of the object.
  def tcsh_shell?
    # once run a single time, return state determined at that execution
    return @tcsh_shell unless @tcsh_shell.nil?

    result = @system_wrapper.shell_backticks('echo $version')
    detected = (result[:exit_code] == 0) and (result[:output].strip =~ /^tcsh/)
    @tcsh_shell = detected ? true : false
  end
end

View File

@ -0,0 +1,80 @@
require 'rbconfig'

# Thin wrapper around process/OS interactions (platform detection,
# environment variables, shelling out, load path) so the rest of the
# system can be tested against a mock.
class SystemWrapper

  # static method for use in defaults
  def self.windows?
    return ((RbConfig::CONFIG['host_os'] =~ /mswin|mingw/) ? true : false) if defined?(RbConfig)
    # legacy fallback for very old rubies that only provided Config
    return ((Config::CONFIG['host_os'] =~ /mswin|mingw/) ? true : false)
  end

  # class method so as to be mockable for tests
  def windows?
    return SystemWrapper.windows?
  end

  # Interpolate +string+ as a double-quoted Ruby string literal,
  # resolving any #{...} sequences it contains.
  def module_eval(string)
    return Object.module_eval("\"" + string + "\"")
  end

  # Evaluate a Ruby expression string and return its result.
  # BUGFIX: the original body was `return eval(string)` — the implicit
  # receiver resolved back to this very method (which shadows
  # Kernel#eval), recursing infinitely until SystemStackError.
  # `super` dispatches to Kernel#eval as intended.
  def eval(string)
    return super(string)
  end

  # Directories on the executable search path, split per-platform.
  def search_paths
    return ENV['PATH'].split(File::PATH_SEPARATOR)
  end

  def cmdline_args
    return ARGV
  end

  def env_set(name, value)
    ENV[name] = value
  end

  def env_get(name)
    return ENV[name]
  end

  def time_now
    return Time.now.asctime
  end

  # Run +command+ via backticks, capturing its output.
  # When +boom+ is true, the command's exit status is recorded in the
  # $exit_code global consulted by ruby_success.
  # Returns a hash with :output and :exit_code.
  def shell_backticks(command, boom = true)
    retval = `#{command}`.freeze
    $exit_code = ($?.exitstatus).freeze if boom
    return {
      :output => retval.freeze,
      :exit_code => ($?.exitstatus).freeze
    }
  end

  # Run +command+ via Kernel#system (output goes straight to our streams).
  # Same $exit_code bookkeeping as shell_backticks; :output is always empty.
  def shell_system(command, boom = true)
    system( command )
    $exit_code = ($?.exitstatus).freeze if boom
    return {
      :output => "".freeze,
      :exit_code => ($?.exitstatus).freeze
    }
  end

  def add_load_path(path)
    $LOAD_PATH.unshift(path)
  end

  def require_file(path)
    require(path)
  end

  def ruby_success
    # We are successful if we've never had an exit code that went boom (either because it's empty or it was 0)
    return ($exit_code.nil? || ($exit_code == 0)) && ($!.nil? || $!.is_a?(SystemExit) && $!.success?)
  end

  def constants_include?(item)
    # forcing to strings provides consistency across Ruby versions
    return Object.constants.map{|constant| constant.to_s}.include?(item.to_s)
  end
end

View File

@ -0,0 +1,38 @@
# Resolves which target .yml project file to use, exports it via the
# CEEDLING_MAIN_PROJECT_FILE environment variable, and requests a reload.
module TargetLoader
  # NOTE(review): these deliberately subclass Exception rather than
  # StandardError — RequestReload in particular is used as control flow
  # to restart configuration loading; changing the superclass would
  # alter which rescue clauses catch them.
  class NoTargets < Exception; end
  class NoDirectory < Exception; end
  class NoDefault < Exception; end
  class NoSuchTarget < Exception; end
  class RequestReload < Exception; end

  # Validate the :targets section of +config+, resolve +target_name+
  # (or the configured default) to a .yml path, point
  # ENV['CEEDLING_MAIN_PROJECT_FILE'] at it, and raise RequestReload.
  #
  # Raises NoTargets / NoDirectory / NoDefault / NoSuchTarget on invalid
  # configuration; always raises RequestReload on success.
  def self.inspect(config, target_name=nil)
    unless config[:targets]
      raise NoTargets
    end

    targets = config[:targets]

    unless targets[:targets_directory]
      raise NoDirectory.new("No targets directory specified.")
    end

    unless targets[:default_target]
      raise NoDefault.new("No default target specified.")
    end

    target_path = lambda {|name| File.join(targets[:targets_directory], name + ".yml")}

    target = if target_name
               target_path.call(target_name)
             else
               target_path.call(targets[:default_target])
             end

    # BUGFIX: File.exists? was deprecated and removed in Ruby 3.2;
    # File.exist? is the supported API.
    unless File.exist? target
      raise NoSuchTarget.new("No such target: #{target}")
    end

    ENV['CEEDLING_MAIN_PROJECT_FILE'] = target

    raise RequestReload
  end
end

View File

@ -0,0 +1,122 @@
require 'ceedling/par_map'

# Central dispatcher for invoking the rake tasks created by the build
# rules, with thread-pooled invocation for per-file build steps.
class TaskInvoker

  attr_accessor :first_run

  constructor :dependinator, :rake_utils, :rake_wrapper, :project_config_manager

  def setup
    # patterns identifying test / release task names on the command line
    @test_regexs = [/^#{TEST_ROOT_NAME}:/]
    @release_regexs = [/^#{RELEASE_ROOT_NAME}(:|$)/]
    @first_run = true
  end

  # Register an additional pattern recognized as a test task.
  def add_test_task_regex(regex)
    @test_regexs << regex
  end

  # Register an additional pattern recognized as a release task.
  def add_release_task_regex(regex)
    @release_regexs << regex
  end

  # True when any invoked rake task matches a known test pattern.
  def test_invoked?
    @test_regexs.any? { |regex| @rake_utils.task_invoked?(regex) }
  end

  # True when any invoked rake task matches a known release pattern.
  def release_invoked?
    @release_regexs.any? { |regex| @rake_utils.task_invoked?(regex) }
  end

  def invoked?(regex)
    @rake_utils.task_invoked?(regex)
  end

  # When test preprocessor defines changed between runs, force the task
  # for +file+ to run again — unless it is a vendor-supplied file.
  def reset_rake_task_for_changed_defines(file)
    vendor_pattern = /#{VENDORS_FILES.map{|ignore| '\b' + ignore.ext(File.extname(file)) + '\b'}.join('|')}$/
    return if file =~ vendor_pattern

    if (@first_run == false) && @project_config_manager.test_defines_changed
      @rake_wrapper[file].clear_actions
      @rake_wrapper[file].reenable
    end
  end

  def invoke_test_mocks(mocks)
    @dependinator.enhance_mock_dependencies( mocks )
    mocks.each do |mock|
      reset_rake_task_for_changed_defines( mock )
      @rake_wrapper[mock].invoke
    end
  end

  def invoke_test_runner(runner)
    @dependinator.enhance_runner_dependencies( runner )
    reset_rake_task_for_changed_defines( runner )
    @rake_wrapper[runner].invoke
  end

  def invoke_test_shallow_include_lists(files)
    @dependinator.enhance_shallow_include_lists_dependencies( files )
    invoke_files_in_parallel( files )
  end

  def invoke_test_preprocessed_files(files)
    @dependinator.enhance_preprocesed_file_dependencies( files )
    invoke_files_in_parallel( files )
  end

  def invoke_test_dependencies_files(files)
    @dependinator.enhance_dependencies_dependencies( files )
    invoke_files_in_parallel( files )
  end

  def invoke_test_objects(objects)
    invoke_files_in_parallel( objects )
  end

  def invoke_test_executable(file)
    @rake_wrapper[file].invoke
  end

  def invoke_test_results(result)
    @dependinator.enhance_results_dependencies( result )
    @rake_wrapper[result].invoke
  end

  def invoke_release_dependencies_files(files)
    par_map(PROJECT_COMPILE_THREADS, files) do |file|
      @rake_wrapper[file].invoke
    end
  end

  def invoke_release_objects(objects)
    par_map(PROJECT_COMPILE_THREADS, objects) do |object|
      @rake_wrapper[object].invoke
    end
  end

  private

  # Invoke each file's task on the compile thread pool, resetting stale
  # tasks first when test defines changed.
  def invoke_files_in_parallel(files)
    par_map(PROJECT_COMPILE_THREADS, files) do |file|
      reset_rake_task_for_changed_defines( file )
      @rake_wrapper[file].invoke
    end
  end
end

Some files were not shown because too many files have changed in this diff Show More