Test stats #1411

Draft · wants to merge 1 commit into base: main
198 changes: 116 additions & 82 deletions .github/workflows/ci.yml
@@ -53,6 +53,7 @@ jobs:
build-hstream-admin-store:
needs: pre-build
runs-on: ubuntu-latest
if: false
strategy:
fail-fast: false
steps:
@@ -159,97 +160,128 @@ jobs:
${{ env.CABAL }} update
${{ env.SHELL }} "'make clean'"
${{ env.SHELL }} make
${{ env.CABAL }} ${{ env.EXTRA_CABAL_ARGS }} build --enable-tests --enable-benchmarks all
${{ env.CABAL }} ${{ env.EXTRA_CABAL_ARGS }} install hstream

- name: start hstream server
run: |
export CONTAINER_NAME=$TEST_CONTAINER_NAME
export IMAGE="docker.io/hstreamdb/haskell:${{ matrix.ghc }}"
export EXTRA_OPTS="--check --no-interactive --detach"
export COMMAND=" "
export EXE=$(find dist-newstyle -name "hstream-server" -type f)
./script/start-server.sh
sleep 5
docker logs --tail 100 $TEST_CONTAINER_NAME

- name: test
run: |
${{ env.CABAL }} ${{ env.EXTRA_CABAL_ARGS }} test --test-show-details=direct all
${{ env.CABAL }} ${{ env.EXTRA_CABAL_ARGS }} build hstream-common-stats-test
# ${{ env.CABAL }} ${{ env.EXTRA_CABAL_ARGS }} build --enable-tests --enable-benchmarks all
# ${{ env.CABAL }} ${{ env.EXTRA_CABAL_ARGS }} install hstream

# - name: start hstream server
# run: |
# export CONTAINER_NAME=$TEST_CONTAINER_NAME
# export IMAGE="docker.io/hstreamdb/haskell:${{ matrix.ghc }}"
# export EXTRA_OPTS="--check --no-interactive --detach"
# export COMMAND=" "
# export EXE=$(find dist-newstyle -name "hstream-server" -type f)
# ./script/start-server.sh
# sleep 5
# docker logs --tail 100 $TEST_CONTAINER_NAME
#
# - name: test
# run: |
# for i in {1..100}; do
# ${{ env.CABAL }} ${{ env.EXTRA_CABAL_ARGS }} test --test-show-details=direct all
# done
#
# - name: collect hserver logs
# if: ${{ success() }} || ${{ failure() }}
# run: |
# rm -rf hserver.log
# docker logs $TEST_CONTAINER_NAME &> hserver.log
#
# - name: upload hserver logs
# uses: actions/upload-artifact@v3
# if: ${{ success() }} || ${{ failure() }}
# with:
# name: hserver-logs-${{ matrix.ghc }}
# path: hserver.log
# retention-days: 7
#
# # Due to a [cabal bug](https://github.com/haskell/cabal/issues/7423),
# # `cabal check` will emit a warning even if the `-O2` option is just
# # a flag. This is disabled until the problem is fixed.
# #- name: check
# # run: |
# # python3 script/dev-tools cabal --check --no-interactive -i docker.io/hstreamdb/haskell:${{ matrix.ghc }} -- sdist all
#
# # # unfortunately, there is no `cabal check all`
# # #log_info "Run all cabal check..."
# # # Note that we ignore hstream-store package to run cabal check, because there
# # # is an unexpected warning:
# # # ...
# # # Warning: 'cpp-options': -std=c++17 is not portable C-preprocessor flag
# # # Warning: Hackage would reject this package.
# # for dir in hstream-sql hstream-processing hstream; do
# # python3 script/dev-tools shell --check --no-interactive -i docker.io/hstreamdb/haskell:${{ matrix.ghc }} "'cd $dir && cabal check'"
# # done
#
# # TODO
# #- name: haddock
# # python3 script/dev-tools cabal --check --no-interactive -i docker.io/hstreamdb/haskell:${{ matrix.ghc }} -- haddock haddock --enable-documentation --haddock-for-hackage all
#
# # -------------------------------------------------------------------------------
#
# - name: stop all started services
# run: docker rm -f $(docker ps -a -q)
#
# # NOTE: The quick-build-dev-image relies on the "hstreamdb/hstream" base image.
# # If you have installed any additional libraries in the builder image (hstreamdb/haskell),
# # and these libraries are required (e.g., if a lib.so file is needed), you may encounter a
# # linking error during the integration tests that follow. In such cases, you will need to
# # publish a new version of the hstreamdb/hstream image first, which includes the necessary
# # libraries.
# - name: quick build new hstream image
# run: |
# mkdir -p ~/data
# if [ "${{ env.GHC_MAJOR_VER }}" = "8" ]; then
# python3 script/dev-tools quick-build-dev-image \
# --builder-image docker.io/hstreamdb/haskell:${{ matrix.ghc }} \
# --project-file cabal.project.ghc810 \
# --only-hstream \
# -t $NEW_HSTREAM_IMAGE
# else
# python3 script/dev-tools quick-build-dev-image \
# --builder-image docker.io/hstreamdb/haskell:${{ matrix.ghc }} \
# --only-hstream \
# -t $NEW_HSTREAM_IMAGE
# fi
#
# docker save -o ~/data/new_hstream_image.tar $NEW_HSTREAM_IMAGE
#
# - uses: actions/upload-artifact@v3
# with:
# name: image-testing-${{ matrix.ghc }}
# path: ~/data/new_hstream_image.tar
# retention-days: 2

- name: collect hserver logs
if: ${{ success() }} || ${{ failure() }}
run: |
rm -rf hserver.log
docker logs $TEST_CONTAINER_NAME &> hserver.log
- uses: actions/upload-artifact@v3
with:
name: stats_test
path: dist-newstyle/build/x86_64-linux/ghc-9.2.7/hstream-common-stats-0.1.0.0/build/hstream-common-stats-test/hstream-common-stats-test
retention-days: 2

- name: upload hserver logs
uses: actions/upload-artifact@v3
if: ${{ success() }} || ${{ failure() }}
stats-tests:
needs: [build-and-test]
runs-on: ubuntu-latest
name: run-stats-tests
steps:
- uses: actions/download-artifact@v3
with:
name: hserver-logs-${{ matrix.ghc }}
path: hserver.log
retention-days: 7
name: stats_test
path: ~/data/hstream-common-stats-test

# Due to a [cabal bug](https://github.com/haskell/cabal/issues/7423),
# `cabal check` will emit a warning even if the `-O2` option is just
# a flag. This is disabled until the problem is fixed.
#- name: check
# run: |
# python3 script/dev-tools cabal --check --no-interactive -i docker.io/hstreamdb/haskell:${{ matrix.ghc }} -- sdist all

# # unfortunately, there is no `cabal check all`
# #log_info "Run all cabal check..."
# # Note that we ignore hstream-store package to run cabal check, because there
# # is an unexpected warning:
# # ...
# # Warning: 'cpp-options': -std=c++17 is not portable C-preprocessor flag
# # Warning: Hackage would reject this package.
# for dir in hstream-sql hstream-processing hstream; do
# python3 script/dev-tools shell --check --no-interactive -i docker.io/hstreamdb/haskell:${{ matrix.ghc }} "'cd $dir && cabal check'"
# done

# TODO
#- name: haddock
# python3 script/dev-tools cabal --check --no-interactive -i docker.io/hstreamdb/haskell:${{ matrix.ghc }} -- haddock haddock --enable-documentation --haddock-for-hackage all

# -------------------------------------------------------------------------------

- name: stop all started services
run: docker rm -f $(docker ps -a -q)

# NOTE: The quick-build-dev-image relies on the "hstreamdb/hstream" base image.
# If you have installed any additional libraries in the builder image (hstreamdb/haskell),
# and these libraries are required (e.g., if a lib.so file is needed), you may encounter a
# linking error during the integration tests that follow. In such cases, you will need to
# publish a new version of the hstreamdb/hstream image first, which includes the necessary
# libraries.
- name: quick build new hstream image
- name: CPU info
run: |
mkdir -p ~/data
if [ "${{ env.GHC_MAJOR_VER }}" = "8" ]; then
python3 script/dev-tools quick-build-dev-image \
--builder-image docker.io/hstreamdb/haskell:${{ matrix.ghc }} \
--project-file cabal.project.ghc810 \
--only-hstream \
-t $NEW_HSTREAM_IMAGE
else
python3 script/dev-tools quick-build-dev-image \
--builder-image docker.io/hstreamdb/haskell:${{ matrix.ghc }} \
--only-hstream \
-t $NEW_HSTREAM_IMAGE
fi
sudo apt-get install -y cpuid
cpuid

docker save -o ~/data/new_hstream_image.tar $NEW_HSTREAM_IMAGE

- uses: actions/upload-artifact@v3
with:
name: image-testing-${{ matrix.ghc }}
path: ~/data/new_hstream_image.tar
retention-days: 2
- run: |
chmod +x ~/data/hstream-common-stats-test/hstream-common-stats-test
for i in {1..1000}; do
docker run -t --rm -v $HOME/data:/data docker.io/hstreamdb/haskell:9.2.7 /data/hstream-common-stats-test/hstream-common-stats-test
done

integration-tests:
needs: [pre-build, build-and-test]
if: false
runs-on: ubuntu-latest
name: integration-tests-ghc-${{ matrix.ghc }}
strategy:
@@ -307,6 +339,7 @@ jobs:

integration-tests-rqlite:
needs: [pre-build, build-and-test]
if: false
runs-on: ubuntu-latest
name: integration-tests-rqlite-ghc-${{ matrix.ghc }}
strategy:
@@ -365,6 +398,7 @@ jobs:

distributed-tests:
needs: [pre-build, build-and-test]
if: false
runs-on: ubuntu-latest
name: distributed-tests-ghc-${{ matrix.ghc }}
strategy:
6 changes: 6 additions & 0 deletions common/stats/cbits/stats/Stats.h
@@ -7,6 +7,7 @@
#include <folly/small_vector.h>
#include <folly/stats/BucketedTimeSeries.h>
#include <folly/stats/MultiLevelTimeSeries.h>
#include <iostream>
#include <logdevice/common/UpdateableSharedPtr.h>
#include <logdevice/common/checks.h>
#include <logdevice/common/stats/Stats.h>
@@ -59,6 +60,7 @@ template <typename VT, typename CT>
void MultiLevelTimeSeriesWrapper<VT, CT>::addValue(const ValueType& n) {
auto now = std::chrono::duration_cast<Duration>(
std::chrono::steady_clock::now().time_since_epoch());
std::cout << "-> Add value: " << now.count() << std::endl;
folly::MultiLevelTimeSeries<VT, CT>::addValue(now, n);
}

@@ -523,6 +525,7 @@ int perXTimeSeriesGet(
folly::Synchronized<Map> Stats::*stats_member_map,
//
HsInt interval_size, HsInt* ms_intervals, HsDouble* aggregate_vals) {
printf("-> key: %s\n", key);
using Duration = typename PerXTimeSeries::Duration;
using TimePoint = typename PerXTimeSeries::TimePoint;

@@ -535,12 +538,14 @@

bool has_found = false;
stats_holder->runForEach([&](Stats& s) {
printf("-> each stats\n");
// Use synchronizedCopy() so we do not have to hold a read lock on
// per_x_stats map while we iterate over individual entries.
for (auto& entry : s.synchronizedCopy(stats_member_map)) {
std::lock_guard<std::mutex> guard(entry.second->mutex);

std::string& key_ = entry.first;
printf("-> key_: %s\n", key_.c_str());
auto time_series = entry.second.get()->*member_ptr;
if (!time_series) {
continue;
@@ -572,6 +577,7 @@
// Duration may not be seconds, convert to seconds
aggregate_vals[i] += rate_per_time_type * Duration::period::den /
Duration::period::num;
printf("-> aggregate_vals %d:%f\n", i, aggregate_vals[i]);
}

// We have aggregated the stat from this Stats, because stream name
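A quick aside on the two spots instrumented above: the `std::cout` in `addValue` prints a steady-clock timestamp measured in `Duration` ticks, and the final `printf` prints a rate after the `Duration::period::den / Duration::period::num` rescale. Below is a minimal standalone sketch of that arithmetic, assuming `std::chrono::milliseconds` for `Duration` and a made-up rate; the real tick type comes from `PerXTimeSeries` and may differ.

```cpp
#include <chrono>
#include <iostream>

int main() {
  // Assumed tick type, for illustration only; the real Duration comes from
  // PerXTimeSeries and may be a different chrono unit.
  using Duration = std::chrono::milliseconds;

  // The timestamp addValue() records: monotonic "now", expressed as
  // Duration ticks since the steady clock's epoch.
  auto now = std::chrono::duration_cast<Duration>(
      std::chrono::steady_clock::now().time_since_epoch());
  std::cout << "ticks since epoch: " << now.count() << '\n';

  // The conversion perXTimeSeriesGet applies: a rate measured per Duration
  // tick is scaled by period::den / period::num to give a per-second rate.
  // For milliseconds the period is std::ratio<1, 1000>, so the factor is
  // 1000; if Duration were already seconds the factor would collapse to 1.
  double rate_per_tick = 2.5;  // hypothetical: 2.5 events per millisecond
  double rate_per_second =
      rate_per_tick * Duration::period::den / Duration::period::num;
  std::cout << rate_per_second << " events/s\n";  // prints 2500
}
```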
2 changes: 1 addition & 1 deletion common/stats/test/HStream/StatsSpec.hs
@@ -18,7 +18,7 @@ spec = do
runIO setupFatalSignalHandler

statsSpec
threadedStatsSpec
--threadedStatsSpec

miscSpec

10 changes: 9 additions & 1 deletion common/stats/test/HStream/StatsSpecUtils.hs
@@ -7,7 +7,7 @@ import Data.Maybe (fromJust)
import Test.Hspec
import Z.Data.CBytes (CBytes)

import HStream.Stats (StatsHolder, resetStatsHolder)
import HStream.Stats

{-# ANN module ("HLint: ignore Use head" :: String) #-}

@@ -20,6 +20,7 @@ mkTimeSeriesTest
-> (StatsHolder -> CBytes -> [Int] -> IO (Either String (Map.Map CBytes [Double])))
-> Expectation
mkTimeSeriesTest h intervals stats_name stats_add stats_get stats_getall = do
print "================== start ================"
stats_add h "key_1" 1000
stats_add h "key_2" 10000
-- NOTE: we choose to sleep 1s so that we can assume the speed of key_1
@@ -28,18 +29,25 @@ mkTimeSeriesTest h intervals stats_name stats_add stats_get stats_getall = do
stats_add h "key_1" 1000
stats_add h "key_2" 10000

printStatsHolder h
print $ "=> stats_get " <> show stats_name <> " " <> show intervals

print "=> non-existed-key"
stats_get h stats_name "non-existed-key" intervals
`shouldReturn` Nothing

print "=> key1"
Just [rate1_p5s, rate1_p10s] <- stats_get h stats_name "key_1" intervals
rate1_p5s `shouldSatisfy` (\s -> s > 0 && s <= 2000)
rate1_p10s `shouldSatisfy` (\s -> s > 0 && s <= 2000)
print "=> key2"
Just [rate2_p5s, rate2_p10s] <- stats_get h stats_name "key_2" intervals
-- NOTE: There is a possibility that the speed is less than 2000. However, in
-- typical cases, it shouldn't be.
rate2_p5s `shouldSatisfy` (\s -> s > 2000 && s <= 20000)
rate2_p10s `shouldSatisfy` (\s -> s > 2000 && s <= 20000)

print "================== end ================"
Right m <- stats_getall h stats_name intervals
Map.lookup "key_1" m `shouldSatisfy` ((\s -> s!!0 > 0 && s!!0 <= 2000) . fromJust)
Map.lookup "key_2" m `shouldSatisfy` ((\s -> s!!1 > 2000 && s!!1 <= 20000) . fromJust)
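A rough sanity check of the bounds asserted in this test (my inference, not stated in the PR): the two adds of 1000 to key_1 land just over one second apart, so a rate computed over the elapsed time is positive and at most 2 × 1000 = 2000 events/s; likewise key_2's two adds of 10000 bound its rate by 20000 events/s, and in a typical run the elapsed time is short enough to keep it above 2000, the exception being the atypical case the NOTE above concedes.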
2 changes: 1 addition & 1 deletion external/proto3-suite