DeepCover Embedded Security in IoT: Public-key Secured Data Paths
Dependencies: MaximInterface
The MAXREFDES155# is an internet-of-things (IoT) embedded-security reference design, built to authenticate and control a sensing node using elliptic-curve-based public-key cryptography with control and notification from a web server.
The hardware includes an ARM® mbed™ shield and an attached sensor endpoint. The shield contains a DS2476 DeepCover® ECDSA/SHA-2 coprocessor, Wi-Fi communication, an LCD, push-button controls, and status LEDs. The sensor endpoint is attached to the shield with a 300mm cable and contains a DS28C36 DeepCover ECDSA/SHA-2 authenticator, an IR thermal sensor, and an aiming laser for the IR sensor. The MAXREFDES155# is equipped with a standard Arduino® form-factor shield connector for immediate testing using an mbed board such as the MAX32600MBED#. Together, these two boards represent an IoT device. Communication to the web server is accomplished with the shield's Wi-Fi circuitry; communication from the shield to the attached sensor module is accomplished over I2C. The sensor module represents an IoT endpoint that generates small amounts of data requiring message authenticity/integrity and secure on/off operational control.
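The shield-to-endpoint link is ordinary I2C, so from the mbed host the DS28C36 looks like any other I2C slave. The sketch below only illustrates that transport, not the reference firmware: the pin aliases and the 0x36 slave address are placeholders (check the DS28C36 data sheet and the board schematic), and the actual design drives this traffic through the MaximInterface dependency rather than raw reads and writes.

#include "mbed.h"

I2C i2c(I2C_SDA, I2C_SCL);          // placeholder pin aliases; use the shield's actual pins

int main() {
    i2c.frequency(400000);          // 400 kHz fast-mode I2C

    const int addr = 0x36;          // placeholder 8-bit slave address for the DS28C36
    char cmd[] = { 0x00 };          // placeholder command byte
    char rsp[2] = { 0 };

    // mbed's I2C::write() returns 0 when the slave ACKs the transfer.
    if (i2c.write(addr, cmd, sizeof(cmd)) == 0) {
        i2c.read(addr, rsp, sizeof(rsp));   // read back a response from the endpoint
    }

    while (true) { }                // real firmware would loop on sensor reads here
}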
The design is hierarchical, with each mbed platform and shield communicating data from its sensor node to a web server that maintains a centralized log and dispatches notifications as necessary. The simplicity of this design enables rapid integration into any star-topology IoT network, providing security at the low overhead and cost of the ECDSA-P256 asymmetric-key and SHA-256 symmetric-key algorithms.
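As a purely host-side illustration of the two primitives named above, the hedged sketch below generates a P-256 key, hashes a mock sensor reading with SHA-256, signs the digest, and verifies the signature using mbed TLS 2.x (as bundled with Mbed OS 5; some function names differ in mbed TLS 3.x). In the reference design itself these operations are performed inside the DS2476/DS28C36 and by the web service, and device keys never leave the ICs.

#include "mbedtls/ecdsa.h"
#include "mbedtls/md.h"
#include "mbedtls/sha256.h"
#include "mbedtls/entropy.h"
#include "mbedtls/ctr_drbg.h"
#include <stdio.h>
#include <string.h>

int main(void) {
    mbedtls_entropy_context entropy;
    mbedtls_ctr_drbg_context drbg;
    mbedtls_ecdsa_context key;
    const char *pers = "ecdsa-demo";

    mbedtls_entropy_init(&entropy);
    mbedtls_ctr_drbg_init(&drbg);
    mbedtls_ecdsa_init(&key);
    mbedtls_ctr_drbg_seed(&drbg, mbedtls_entropy_func, &entropy,
                          (const unsigned char *)pers, strlen(pers));

    // Generate a P-256 key pair (stand-in for the pre-provisioned device key).
    mbedtls_ecdsa_genkey(&key, MBEDTLS_ECP_DP_SECP256R1,
                         mbedtls_ctr_drbg_random, &drbg);

    // Hash a mock "sensor reading" with SHA-256, then sign the digest.
    const char *msg = "object temp = 23.4 C";
    unsigned char hash[32];
    mbedtls_sha256_ret((const unsigned char *)msg, strlen(msg), hash, 0);

    unsigned char sig[MBEDTLS_ECDSA_MAX_LEN];
    size_t sig_len = 0;
    mbedtls_ecdsa_write_signature(&key, MBEDTLS_MD_SHA256, hash, sizeof(hash),
                                  sig, &sig_len, mbedtls_ctr_drbg_random, &drbg);

    // Verification is what the coprocessor / web-server side conceptually does.
    int ok = mbedtls_ecdsa_read_signature(&key, hash, sizeof(hash), sig, sig_len);
    printf("signature %s\n", ok == 0 ? "verified" : "rejected");

    mbedtls_ecdsa_free(&key);
    mbedtls_ctr_drbg_free(&drbg);
    mbedtls_entropy_free(&entropy);
    return 0;
}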
More information about the MAXREFDES155# is available on the Maxim Integrated website.
Diff: xternal/rapidjson/internal/stack.h
- Revision: 16:a004191a79ab
- Parent: 0:33d4e66780c0
--- /dev/null	Thu Jan 01 00:00:00 1970 +0000
+++ b/xternal/rapidjson/internal/stack.h	Thu Oct 03 11:40:13 2019 -0500
@@ -0,0 +1,230 @@
+// Tencent is pleased to support the open source community by making RapidJSON available.
+//
+// Copyright (C) 2015 THL A29 Limited, a Tencent company, and Milo Yip. All rights reserved.
+//
+// Licensed under the MIT License (the "License"); you may not use this file except
+// in compliance with the License. You may obtain a copy of the License at
+//
+// http://opensource.org/licenses/MIT
+//
+// Unless required by applicable law or agreed to in writing, software distributed
+// under the License is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR
+// CONDITIONS OF ANY KIND, either express or implied. See the License for the
+// specific language governing permissions and limitations under the License.
+
+#ifndef RAPIDJSON_INTERNAL_STACK_H_
+#define RAPIDJSON_INTERNAL_STACK_H_
+
+#include "../allocators.h"
+#include "swap.h"
+
+#if defined(__clang__)
+RAPIDJSON_DIAG_PUSH
+RAPIDJSON_DIAG_OFF(c++98-compat)
+#endif
+
+RAPIDJSON_NAMESPACE_BEGIN
+namespace internal {
+
+///////////////////////////////////////////////////////////////////////////////
+// Stack
+
+//! A type-unsafe stack for storing different types of data.
+/*! \tparam Allocator Allocator for allocating stack memory.
+*/
+template <typename Allocator>
+class Stack {
+public:
+    // Optimization note: Do not allocate memory for stack_ in constructor.
+    // Do it lazily when first Push() -> Expand() -> Resize().
+    Stack(Allocator* allocator, size_t stackCapacity) : allocator_(allocator), ownAllocator_(0), stack_(0), stackTop_(0), stackEnd_(0), initialCapacity_(stackCapacity) {
+    }
+
+#if RAPIDJSON_HAS_CXX11_RVALUE_REFS
+    Stack(Stack&& rhs)
+        : allocator_(rhs.allocator_),
+          ownAllocator_(rhs.ownAllocator_),
+          stack_(rhs.stack_),
+          stackTop_(rhs.stackTop_),
+          stackEnd_(rhs.stackEnd_),
+          initialCapacity_(rhs.initialCapacity_)
+    {
+        rhs.allocator_ = 0;
+        rhs.ownAllocator_ = 0;
+        rhs.stack_ = 0;
+        rhs.stackTop_ = 0;
+        rhs.stackEnd_ = 0;
+        rhs.initialCapacity_ = 0;
+    }
+#endif
+
+    ~Stack() {
+        Destroy();
+    }
+
+#if RAPIDJSON_HAS_CXX11_RVALUE_REFS
+    Stack& operator=(Stack&& rhs) {
+        if (&rhs != this)
+        {
+            Destroy();
+
+            allocator_ = rhs.allocator_;
+            ownAllocator_ = rhs.ownAllocator_;
+            stack_ = rhs.stack_;
+            stackTop_ = rhs.stackTop_;
+            stackEnd_ = rhs.stackEnd_;
+            initialCapacity_ = rhs.initialCapacity_;
+
+            rhs.allocator_ = 0;
+            rhs.ownAllocator_ = 0;
+            rhs.stack_ = 0;
+            rhs.stackTop_ = 0;
+            rhs.stackEnd_ = 0;
+            rhs.initialCapacity_ = 0;
+        }
+        return *this;
+    }
+#endif
+
+    void Swap(Stack& rhs) RAPIDJSON_NOEXCEPT {
+        internal::Swap(allocator_, rhs.allocator_);
+        internal::Swap(ownAllocator_, rhs.ownAllocator_);
+        internal::Swap(stack_, rhs.stack_);
+        internal::Swap(stackTop_, rhs.stackTop_);
+        internal::Swap(stackEnd_, rhs.stackEnd_);
+        internal::Swap(initialCapacity_, rhs.initialCapacity_);
+    }
+
+    void Clear() { stackTop_ = stack_; }
+
+    void ShrinkToFit() {
+        if (Empty()) {
+            // If the stack is empty, completely deallocate the memory.
+            Allocator::Free(stack_);
+            stack_ = 0;
+            stackTop_ = 0;
+            stackEnd_ = 0;
+        }
+        else
+            Resize(GetSize());
+    }
+
+    // Optimization note: try to minimize the size of this function for force inline.
+    // Expansion is run very infrequently, so it is moved to another (probably non-inline) function.
+    template<typename T>
+    RAPIDJSON_FORCEINLINE void Reserve(size_t count = 1) {
+        // Expand the stack if needed
+        if (RAPIDJSON_UNLIKELY(stackTop_ + sizeof(T) * count > stackEnd_))
+            Expand<T>(count);
+    }
+
+    template<typename T>
+    RAPIDJSON_FORCEINLINE T* Push(size_t count = 1) {
+        Reserve<T>(count);
+        return PushUnsafe<T>(count);
+    }
+
+    template<typename T>
+    RAPIDJSON_FORCEINLINE T* PushUnsafe(size_t count = 1) {
+        RAPIDJSON_ASSERT(stackTop_ + sizeof(T) * count <= stackEnd_);
+        T* ret = reinterpret_cast<T*>(stackTop_);
+        stackTop_ += sizeof(T) * count;
+        return ret;
+    }
+
+    template<typename T>
+    T* Pop(size_t count) {
+        RAPIDJSON_ASSERT(GetSize() >= count * sizeof(T));
+        stackTop_ -= count * sizeof(T);
+        return reinterpret_cast<T*>(stackTop_);
+    }
+
+    template<typename T>
+    T* Top() {
+        RAPIDJSON_ASSERT(GetSize() >= sizeof(T));
+        return reinterpret_cast<T*>(stackTop_ - sizeof(T));
+    }
+
+    template<typename T>
+    const T* Top() const {
+        RAPIDJSON_ASSERT(GetSize() >= sizeof(T));
+        return reinterpret_cast<T*>(stackTop_ - sizeof(T));
+    }
+
+    template<typename T>
+    T* End() { return reinterpret_cast<T*>(stackTop_); }
+
+    template<typename T>
+    const T* End() const { return reinterpret_cast<T*>(stackTop_); }
+
+    template<typename T>
+    T* Bottom() { return reinterpret_cast<T*>(stack_); }
+
+    template<typename T>
+    const T* Bottom() const { return reinterpret_cast<T*>(stack_); }
+
+    bool HasAllocator() const {
+        return allocator_ != 0;
+    }
+
+    Allocator& GetAllocator() {
+        RAPIDJSON_ASSERT(allocator_);
+        return *allocator_;
+    }
+
+    bool Empty() const { return stackTop_ == stack_; }
+    size_t GetSize() const { return static_cast<size_t>(stackTop_ - stack_); }
+    size_t GetCapacity() const { return static_cast<size_t>(stackEnd_ - stack_); }
+
+private:
+    template<typename T>
+    void Expand(size_t count) {
+        // Only expand the capacity if the current stack exists. Otherwise just create a stack with initial capacity.
+        size_t newCapacity;
+        if (stack_ == 0) {
+            if (!allocator_)
+                ownAllocator_ = allocator_ = RAPIDJSON_NEW(Allocator());
+            newCapacity = initialCapacity_;
+        } else {
+            newCapacity = GetCapacity();
+            newCapacity += (newCapacity + 1) / 2;
+        }
+        size_t newSize = GetSize() + sizeof(T) * count;
+        if (newCapacity < newSize)
+            newCapacity = newSize;
+
+        Resize(newCapacity);
+    }
+
+    void Resize(size_t newCapacity) {
+        const size_t size = GetSize();  // Backup the current size
+        stack_ = static_cast<char*>(allocator_->Realloc(stack_, GetCapacity(), newCapacity));
+        stackTop_ = stack_ + size;
+        stackEnd_ = stack_ + newCapacity;
+    }
+
+    void Destroy() {
+        Allocator::Free(stack_);
+        RAPIDJSON_DELETE(ownAllocator_); // Only delete if it is owned by the stack
+    }
+
+    // Prohibit copy constructor & assignment operator.
+    Stack(const Stack&);
+    Stack& operator=(const Stack&);
+
+    Allocator* allocator_;
+    Allocator* ownAllocator_;
+    char *stack_;
+    char *stackTop_;
+    char *stackEnd_;
+    size_t initialCapacity_;
+};
+
+} // namespace internal
+RAPIDJSON_NAMESPACE_END
+
+#if defined(__clang__)
+RAPIDJSON_DIAG_POP
+#endif
+
+#endif // RAPIDJSON_STACK_H_
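For context on how this type-unsafe byte stack is meant to be driven (RapidJSON uses it internally in its readers and writers), here is a small hedged usage sketch; the include paths assume a standard rapidjson checkout, and CrtAllocator is simply the easiest allocator to pair it with.

#include "rapidjson/internal/stack.h"   // path assumes a standard rapidjson layout
#include "rapidjson/allocators.h"
#include <cstdio>

int main() {
    using rapidjson::CrtAllocator;
    using rapidjson::internal::Stack;

    // Passing a null allocator: the stack lazily creates (and owns) one on the first Push().
    Stack<CrtAllocator> s(0, 256);

    // Push<T>() reserves sizeof(T) bytes on the byte stack and returns a typed pointer.
    for (int i = 0; i < 4; ++i)
        *s.Push<int>() = i * 10;

    std::printf("bytes used: %u, capacity: %u\n",
                static_cast<unsigned>(s.GetSize()),
                static_cast<unsigned>(s.GetCapacity()));

    // Pop<T>(n) rewinds the top pointer; the returned pointer stays valid until the next Push().
    const int* values = s.Pop<int>(4);
    std::printf("first value pushed: %d, last: %d\n", values[0], values[3]);

    return 0;   // ~Stack() frees the buffer and the owned allocator
}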