Skip to content

Commit

Permalink
Merge pull request #2544 from bitshares/release
Browse files Browse the repository at this point in the history
Merge release branch into master branch for 6.0.0 release
  • Loading branch information
abitmore committed Nov 30, 2021
2 parents b859cbf + 3843bf6 commit e52b9ff
Show file tree
Hide file tree
Showing 145 changed files with 21,729 additions and 2,886 deletions.
8 changes: 8 additions & 0 deletions .github/dependabot.yml
Original file line number Diff line number Diff line change
@@ -0,0 +1,8 @@
version: 2
updates:
# Maintain dependencies for GitHub Actions
- package-ecosystem: "github-actions"
directory: "/"
schedule:
interval: "daily"
target-branch: "develop"
4 changes: 2 additions & 2 deletions .github/workflows/build-and-test.mac.yml
Original file line number Diff line number Diff line change
Expand Up @@ -8,7 +8,7 @@ jobs:
name: Build and test in macOS
strategy:
matrix:
os: [macos-10.15, macos-11.0]
os: [macos-10.15, macos-11]
runs-on: ${{ matrix.os }}
steps:
- name: Install dependencies
Expand All @@ -28,7 +28,7 @@ jobs:
-D CMAKE_C_COMPILER_LAUNCHER=ccache \
-D CMAKE_CXX_COMPILER_LAUNCHER=ccache \
-D BOOST_ROOT=/usr/local/opt/boost@1.69 \
-D OPENSSL_ROOT_DIR=/usr/local/opt/openssl \
-D OPENSSL_ROOT_DIR=/usr/local/opt/openssl@1.1 \
..
- name: Load Cache
uses: actions/cache@v2
Expand Down
9 changes: 8 additions & 1 deletion .github/workflows/build-and-test.ubuntu-debug.yml
Original file line number Diff line number Diff line change
Expand Up @@ -8,7 +8,7 @@ jobs:
name: Build and test in Debug mode
strategy:
matrix:
os: [ ubuntu-16.04, ubuntu-18.04, ubuntu-20.04 ]
os: [ ubuntu-18.04, ubuntu-20.04 ]
runs-on: ${{ matrix.os }}
services:
elasticsearch:
Expand Down Expand Up @@ -85,6 +85,8 @@ jobs:
df -h
make -j 2 -C _build chain_test
make -j 2 -C _build cli_test
make -j 2 -C _build app_test
make -j 2 -C _build es_test
make -j 2 -C _build cli_wallet
make -j 2 -C _build witness_node
make -j 2 -C _build
Expand All @@ -96,14 +98,19 @@ jobs:
run: |
_build/tests/app_test -l test_suite
df -h
rm -rf /tmp/graphene*
curl -XPUT -H "Content-Type: application/json" http://localhost:9200/_cluster/settings \
-d '{ "transient": { "cluster.routing.allocation.disk.threshold_enabled": false } }'
echo
_build/tests/es_test -l test_suite
df -h
rm -rf /tmp/graphene*
libraries/fc/tests/run-parallel-tests.sh _build/tests/chain_test -l test_suite
df -h
rm -rf /tmp/graphene*
_build/tests/cli_test -l test_suite
df -h
rm -rf /tmp/graphene*
- name: Quick test for program arguments
run: |
_build/programs/witness_node/witness_node --version
Expand Down
2 changes: 1 addition & 1 deletion .github/workflows/build-and-test.ubuntu-release.yml
Original file line number Diff line number Diff line change
Expand Up @@ -8,7 +8,7 @@ jobs:
name: Build and test in Release mode
strategy:
matrix:
os: [ ubuntu-16.04, ubuntu-18.04, ubuntu-20.04 ]
os: [ ubuntu-18.04, ubuntu-20.04 ]
runs-on: ${{ matrix.os }}
services:
elasticsearch:
Expand Down
21 changes: 13 additions & 8 deletions .github/workflows/build-and-test.win.yml
Original file line number Diff line number Diff line change
Expand Up @@ -14,12 +14,15 @@ jobs:
name: Build required 3rd-party libraries
runs-on: ubuntu-latest
steps:
# Get OS version to be used in cache key - see https://github.com/actions/cache/issues/543
- run: |
echo "OS_VERSION=`lsb_release -sr`" >> $GITHUB_ENV
- name: Load Cache
id: cache-libs
uses: actions/cache@v1
uses: actions/cache@v2
with:
path: libs
key: mingw64-libs-${{ env.BOOST_VERSION }}_${{ env.CURL_VERSION }}_${{ env.OPENSSL_VERSION }}_${{ env.ZLIB_VERSION }}
key: mingw64-libs-${{ env.OS_VERSION }}-${{ env.BOOST_VERSION }}_${{ env.CURL_VERSION }}_${{ env.OPENSSL_VERSION }}_${{ env.ZLIB_VERSION }}
- name: Install dependencies
if: steps.cache-libs.outputs.cache-hit != 'true'
run: |
Expand Down Expand Up @@ -113,11 +116,13 @@ jobs:
- uses: actions/checkout@v2
with:
submodules: recursive
- run: |
echo "OS_VERSION=`lsb_release -sr`" >> $GITHUB_ENV
- name: Load external libraries
uses: actions/cache@v1
uses: actions/cache@v2
with:
path: libs
key: mingw64-libs-${{ env.BOOST_VERSION }}_${{ env.CURL_VERSION }}_${{ env.OPENSSL_VERSION }}_${{ env.ZLIB_VERSION }}
key: mingw64-libs-${{ env.OS_VERSION }}-${{ env.BOOST_VERSION }}_${{ env.CURL_VERSION }}_${{ env.OPENSSL_VERSION }}_${{ env.ZLIB_VERSION }}
- name: Configure
run: |
LIBS="`pwd`/libs"
Expand All @@ -138,13 +143,13 @@ jobs:
-D GRAPHENE_DISABLE_UNITY_BUILD=ON \
..
- name: Load Cache
uses: actions/cache@v1
uses: actions/cache@v2
with:
path: ccache
key: ccache-mingw64-${{ github.ref }}-${{ github.sha }}
key: ccache-mingw64-${{ env.OS_VERSION }}-${{ github.ref }}-${{ github.sha }}
restore-keys: |
ccache-mingw64-${{ github.ref }}-
ccache-mingw64-
ccache-mingw64-${{ env.OS_VERSION }}-${{ github.ref }}-
ccache-mingw64-${{ env.OS_VERSION }}-
- name: Build
run: |
export CCACHE_DIR="$GITHUB_WORKSPACE/ccache"
Expand Down
46 changes: 46 additions & 0 deletions .github/workflows/build-docker.yml
Original file line number Diff line number Diff line change
@@ -0,0 +1,46 @@
name: Build and push to DockerHub
on: [ push, pull_request ]
jobs:
docker:
runs-on: ubuntu-latest
steps:
- name: Inject slug/short environment variables
uses: rlespinasse/github-slug-action@v3.x
- name: Decide whether to push to DockerHub, and set tag
if: |
github.event_name == 'push' &&
( startsWith( github.ref, 'refs/tags/' ) ||
contains( fromJSON('["master","develop","testnet","hardfork"]'), env.GITHUB_REF_NAME ) )
run: |
if [[ "${GITHUB_REF_NAME}" == "master" ]] ; then
DOCKER_PUSH_TAG=latest
else
DOCKER_PUSH_TAG=${GITHUB_REF_NAME}
fi
echo "DOCKER_PUSH_TAG=${DOCKER_PUSH_TAG}"
echo "DOCKER_PUSH_TAG=${DOCKER_PUSH_TAG}" >> $GITHUB_ENV
- name: Test tag
if: env.DOCKER_PUSH_TAG != ''
run: echo "${DOCKER_PUSH_TAG}"
- name: Checkout
uses: actions/checkout@v2
- name: Set up Docker Buildx
uses: docker/setup-buildx-action@v1
- name: Build only
uses: docker/build-push-action@v2
with:
context: .
load: true
- name: Login to DockerHub
if: env.DOCKER_PUSH_TAG != ''
uses: docker/login-action@v1
with:
username: ${{ secrets.DOCKERHUB_USERNAME }}
password: ${{ secrets.DOCKERHUB_TOKEN }}
- name: Push to DockerHub
if: env.DOCKER_PUSH_TAG != ''
uses: docker/build-push-action@v2
with:
context: .
push: true
tags: bitshares/bitshares-core:${{ env.DOCKER_PUSH_TAG }}
65 changes: 47 additions & 18 deletions .github/workflows/sonar-scan.yml
Original file line number Diff line number Diff line change
Expand Up @@ -92,16 +92,19 @@ jobs:
-D Boost_USE_STATIC_LIBS=OFF \
..
popd
# Get OS version to be used in cache key - see https://github.com/actions/cache/issues/543
- run: |
echo "OS_VERSION=`lsb_release -sr`" >> $GITHUB_ENV
- name: Load Cache
uses: actions/cache@v2
with:
path: |
ccache
sonar_cache
key: sonar-${{ github.ref }}-${{ github.sha }}
key: sonar-${{ env.OS_VERSION }}-${{ github.ref }}-${{ github.sha }}
restore-keys: |
sonar-${{ github.ref }}-
sonar-
sonar-${{ env.OS_VERSION }}-${{ github.ref }}-
sonar-${{ env.OS_VERSION }}-
- name: Build
run: |
export CCACHE_DIR="$GITHUB_WORKSPACE/ccache"
Expand All @@ -114,21 +117,6 @@ jobs:
du -hs _build/libraries/* _build/programs/* _build/tests/*
du -hs _build/*
du -hs /_build/*
- name: Unit-Tests
run: |
_build/tests/app_test -l test_suite
df -h
curl -XPUT -H "Content-Type: application/json" http://localhost:9200/_cluster/settings \
-d '{ "transient": { "cluster.routing.allocation.disk.threshold_enabled": false } }'
echo
_build/tests/es_test -l test_suite
df -h
libraries/fc/tests/run-parallel-tests.sh _build/tests/chain_test -l test_suite
_build/tests/cli_test -l test_suite
df -h
echo "Cleanup"
rm -rf /tmp/graphene*
df -h
- name: Quick test for program arguments
run: |
_build/programs/witness_node/witness_node --version
Expand Down Expand Up @@ -156,6 +144,38 @@ jobs:
else \
echo "Pass: got expected error."; \
fi
- name: Remove binaries that we no longer need
run: |
df -h
echo "Cleanup"
rm -rf _build/programs/witness_node/witness_node
rm -rf _build/programs/cli_wallet/cli_wallet
rm -rf _build/programs/network_mapper/network_mapper
rm -rf _build/programs/js_operation_serializer/js_operation_serializer
rm -rf _build/programs/genesis_util/get_dev_key
df -h
- name: Unit-Tests
run: |
_build/tests/app_test -l test_suite
df -h
echo "Cleanup"
rm -rf /tmp/graphene*
curl -XPUT -H "Content-Type: application/json" http://localhost:9200/_cluster/settings \
-d '{ "transient": { "cluster.routing.allocation.disk.threshold_enabled": false } }'
echo
_build/tests/es_test -l test_suite
df -h
echo "Cleanup"
rm -rf /tmp/graphene*
libraries/fc/tests/run-parallel-tests.sh _build/tests/chain_test -l test_suite
df -h
echo "Cleanup"
rm -rf /tmp/graphene*
_build/tests/cli_test -l test_suite
df -h
echo "Cleanup"
rm -rf /tmp/graphene*
df -h
- name: Prepare for scanning with SonarScanner
run: |
mkdir -p sonar_cache
Expand Down Expand Up @@ -183,3 +203,12 @@ jobs:
run: |
sonar-scanner \
-Dsonar.login=${{ secrets.SONAR_TOKEN }}
- name: Cleanup
run: |
df -h
echo "Final cleanup"
rm -rf _build/tests/app_test
rm -rf _build/tests/chain_test
rm -rf _build/tests/cli_test
rm -rf _build/tests/es_test
df -h
2 changes: 1 addition & 1 deletion CONTRIBUTORS.txt
Original file line number Diff line number Diff line change
Expand Up @@ -44,6 +44,7 @@ Anzhy Cherrnyavski <a.chernyavski@pixelplex.io>
Tengfei Niu <spartucus@users.noreply.github.com>
Tiago Peralta <tperalta82@gmail.com>
ioBanker <37595908+ioBanker@users.noreply.github.com>
xiao93 <42384581+xiao93@users.noreply.github.com>
Karl Semich <0xloem@gmail.com>
SahkanDesertHawk <panasiuki@gmail.com>
Scott Howard <showard314@gmail.com>
Expand All @@ -55,7 +56,6 @@ d.yakovitsky <d.yakovitsky@aetsoft.by>
ddylko <ddylko@ddylkoPC>
iHashFury <iPerky@users.noreply.github.com>
necklace <necklace@163.com>
xiao93 <42384581+xiao93@users.noreply.github.com>
xuquan316 <xuquan316@vip.qq.com>
Bartek Wrona <wrona@syncad.com>
BhuzOr <giaquinta.adriano@gmail.com>
Expand Down
2 changes: 1 addition & 1 deletion Dockerfile
Original file line number Diff line number Diff line change
@@ -1,4 +1,4 @@
FROM phusion/baseimage:0.11
FROM phusion/baseimage:focal-1.0.0
MAINTAINER The bitshares decentralized organisation

ENV LANG=en_US.UTF-8
Expand Down
2 changes: 1 addition & 1 deletion Doxyfile
Original file line number Diff line number Diff line change
Expand Up @@ -38,7 +38,7 @@ PROJECT_NAME = "BitShares-Core"
# could be handy for archiving the generated documentation or if some version
# control system is used.

PROJECT_NUMBER = "5.2.1"
PROJECT_NUMBER = "6.0.0"

# Using the PROJECT_BRIEF tag one can provide an optional one line description
# for a project that appears at the top of each page and should give viewer a
Expand Down
2 changes: 1 addition & 1 deletion docs
Submodule docs updated from aa7661 to 961733
16 changes: 12 additions & 4 deletions libraries/app/api.cpp
Original file line number Diff line number Diff line change
Expand Up @@ -586,7 +586,7 @@ namespace graphene { namespace app {
const auto& idx = hist_idx.indices().get<by_pool_op_type_time>();
auto itr = start.valid() ? idx.lower_bound( boost::make_tuple( pool_id, *operation_type, *start ) )
: idx.lower_bound( boost::make_tuple( pool_id, *operation_type ) );
auto itr_stop = stop.valid() ? idx.upper_bound( boost::make_tuple( pool_id, *operation_type, *stop ) )
auto itr_stop = stop.valid() ? idx.lower_bound( boost::make_tuple( pool_id, *operation_type, *stop ) )
: idx.upper_bound( boost::make_tuple( pool_id, *operation_type ) );
while( itr != itr_stop && result.size() < limit )
{
Expand All @@ -599,7 +599,7 @@ namespace graphene { namespace app {
const auto& idx = hist_idx.indices().get<by_pool_time>();
auto itr = start.valid() ? idx.lower_bound( boost::make_tuple( pool_id, *start ) )
: idx.lower_bound( pool_id );
auto itr_stop = stop.valid() ? idx.upper_bound( boost::make_tuple( pool_id, *stop ) )
auto itr_stop = stop.valid() ? idx.lower_bound( boost::make_tuple( pool_id, *stop ) )
: idx.upper_bound( pool_id );
while( itr != itr_stop && result.size() < limit )
{
Expand Down Expand Up @@ -645,7 +645,11 @@ namespace graphene { namespace app {
const auto& idx_t = hist_idx.indices().get<by_pool_op_type_time>();
auto itr = start.valid() ? idx.lower_bound( boost::make_tuple( pool_id, *operation_type, *start ) )
: idx.lower_bound( boost::make_tuple( pool_id, *operation_type ) );
auto itr_temp = stop.valid() ? idx_t.upper_bound( boost::make_tuple( pool_id, *operation_type, *stop ) )
if( itr == idx.end() || itr->pool != pool_id || itr->op_type != *operation_type ) // empty result
return result;
if( stop.valid() && itr->time <= *stop ) // empty result
return result;
auto itr_temp = stop.valid() ? idx_t.lower_bound( boost::make_tuple( pool_id, *operation_type, *stop ) )
: idx_t.upper_bound( boost::make_tuple( pool_id, *operation_type ) );
auto itr_stop = ( itr_temp == idx_t.end() ? idx.end() : idx.iterator_to( *itr_temp ) );
while( itr != itr_stop && result.size() < limit )
Expand All @@ -660,7 +664,11 @@ namespace graphene { namespace app {
const auto& idx_t = hist_idx.indices().get<by_pool_time>();
auto itr = start.valid() ? idx.lower_bound( boost::make_tuple( pool_id, *start ) )
: idx.lower_bound( pool_id );
auto itr_temp = stop.valid() ? idx_t.upper_bound( boost::make_tuple( pool_id, *stop ) )
if( itr == idx.end() || itr->pool != pool_id ) // empty result
return result;
if( stop.valid() && itr->time <= *stop ) // empty result
return result;
auto itr_temp = stop.valid() ? idx_t.lower_bound( boost::make_tuple( pool_id, *stop ) )
: idx_t.upper_bound( pool_id );
auto itr_stop = ( itr_temp == idx_t.end() ? idx.end() : idx.iterator_to( *itr_temp ) );
while( itr != itr_stop && result.size() < limit )
Expand Down
Loading

0 comments on commit e52b9ff

Please sign in to comment.