Skip to content

Commit

Permalink
Merge remote-tracking branch 'origin/candidate-9.6.x' into candidate-…
Browse files Browse the repository at this point in the history
…9.8.x

Signed-off-by: Gavin Halliday <[email protected]>
  • Loading branch information
ghalliday committed Oct 9, 2024
2 parents b8a79e1 + 6b285d9 commit 9f55380
Show file tree
Hide file tree
Showing 10 changed files with 228 additions and 19 deletions.
30 changes: 30 additions & 0 deletions .github/workflows/build-vcpkg.yml
Original file line number Diff line number Diff line change
Expand Up @@ -123,6 +123,36 @@ jobs:
asset-name: 'docker-ubuntu-22_04'
generate-zap: ""
secrets: inherit

check-documentation-changes:
if: ${{ contains('pull_request,push', github.event_name) }}
runs-on: ubuntu-22.04
outputs:
documentation_contents_changed: ${{ steps.variables.outputs.documentation_contents_changed }}
steps:
- name: Check for Documentation Changes
id: changed
uses: dorny/paths-filter@v3
with:
filters: |
src:
- 'docs/**'
- '.github/workflows/test-documentation.yml'
- name: Set Output
id: variables
run: |
echo "documentation_contents_changed=${{ steps.changed.outputs.src }}" >> $GITHUB_OUTPUT
- name: Print Variables
run: |
echo "${{ toJSON(steps.variables.outputs)}}"
test-documentation-ubuntu-22_04:
needs: check-documentation-changes
if: ${{ contains('pull_request,push', github.event_name) && needs.check-documentation-changes.outputs.documentation_contents_changed == 'true' }}
uses: ./.github/workflows/test-documentation.yml
with:
os: 'ubuntu-22.04'
asset-name: 'Documentation'

build-docker-ubuntu-20_04:
if: ${{ contains('schedule,push', github.event_name) }}
Expand Down
108 changes: 108 additions & 0 deletions .github/workflows/test-documentation.yml
Original file line number Diff line number Diff line change
@@ -0,0 +1,108 @@
name: Build Documentation

# Reusable workflow that builds the HPCC Platform documentation (EN_US and
# PT_BR PDFs plus Eclipse/HTML help) inside the platform vcpkg build image
# and uploads the results as a build artifact.
on:
  workflow_call:
    inputs:
      os:
        type: string
        description: 'Operating System'
        required: false
        default: 'ubuntu-22.04'
      asset-name:
        type: string
        description: 'Asset Name'
        required: false
        default: 'Documentation'

  workflow_dispatch:
    inputs:
      os:
        type: string
        description: 'Operating System'
        required: false
        default: 'ubuntu-22.04'
      asset-name:
        type: string
        description: 'Asset Name'
        required: false
        default: 'Documentation'

jobs:
  build-documentation:
    name: Build Documentation
    # NOTE(review): runner is pinned to ubuntu-22.04 rather than using
    # ${{ inputs.os }}; inputs.os only selects the docker build image below.
    runs-on: ubuntu-22.04

    steps:
      - name: Checkout HPCC-Platform
        uses: actions/checkout@v4
        with:
          ref: ${{ github.ref }}
          submodules: recursive
          path: ${{ github.workspace }}/HPCC-Platform

      # Derive the docker image tags from the vcpkg submodule SHA and the
      # branch/tag this workflow was triggered from.
      - name: Calculate vars
        id: vars
        working-directory: ${{ github.workspace }}/HPCC-Platform/vcpkg
        run: |
          vcpkg_sha_short=$(git rev-parse --short=8 HEAD)
          echo "vcpkg_sha_short=$vcpkg_sha_short" >> $GITHUB_OUTPUT
          docker_build_label=hpccsystems/platform-build-${{ inputs.os }}
          echo "docker_build_label=$docker_build_label" >> $GITHUB_OUTPUT
          echo "docker_tag=$docker_build_label:$vcpkg_sha_short" >> $GITHUB_OUTPUT
          # e.g. refs/heads/candidate-9.8.2 -> candidate-9.8.2 -> candidate-9.8.x
          community_base_ref=${{ github.event.base_ref || github.ref }}
          candidate_branch=$(echo $community_base_ref | cut -d'/' -f3)
          candidate_base_branch=$(echo $candidate_branch | awk -F'.' -v OFS='.' '{ $3="x"; print }')
          echo "docker_tag_candidate_base=$docker_build_label:$candidate_base_branch" >> $GITHUB_OUTPUT
          community_ref=${{ github.ref }}
          community_tag=$(echo $community_ref | cut -d'/' -f3)
          echo "community_tag=$community_tag" >> $GITHUB_OUTPUT

      - name: Print vars
        run: |
          echo "${{ toJSON(steps.vars.outputs) }}"

      - name: Set up Docker Buildx
        id: buildx
        uses: docker/setup-buildx-action@v3

      # Build (or reuse from the registry cache) the platform build image;
      # it is loaded locally only, never pushed.
      - name: Create Build Image
        uses: docker/build-push-action@v5
        with:
          builder: ${{ steps.buildx.outputs.name }}
          file: ${{ github.workspace }}/HPCC-Platform/dockerfiles/vcpkg/${{ inputs.os }}.dockerfile
          context: ${{ github.workspace }}/HPCC-Platform/dockerfiles/vcpkg
          push: false
          load: true
          build-args: |
            VCPKG_REF=${{ steps.vars.outputs.vcpkg_sha_short }}
          tags: |
            ${{ steps.vars.outputs.docker_tag_candidate_base }}
          cache-from: |
            type=registry,ref=${{ steps.vars.outputs.docker_tag_candidate_base }}
            type=registry,ref=${{ steps.vars.outputs.docker_tag }}
          cache-to: type=inline

      # Configure and build the docs-only targets inside the container, then
      # zip the generated PDFs per language. The version suffix strips the
      # leading 'community_' from the tag name.
      - name: CMake documentation
        run: |
          mkdir -p {${{ github.workspace }}/build,EN_US,PT_BR}
          docker run --rm --mount source="${{ github.workspace }}/HPCC-Platform",target=/hpcc-dev/HPCC-Platform,type=bind,consistency=cached --mount source="${{ github.workspace }}/build",target=/hpcc-dev/build,type=bind,consistency=cached ${{ steps.vars.outputs.docker_tag_candidate_base }} "\
            cmake -S /hpcc-dev/HPCC-Platform -B /hpcc-dev/build -DVCPKG_FILES_DIR=/hpcc-dev -DMAKE_DOCS_ONLY=ON -DUSE_NATIVE_LIBRARIES=ON -DDOCS_AUTO=ON -DDOC_LANGS=ALL && \
            cmake --build /hpcc-dev/build --parallel $(nproc) --target all"
          docker run --rm --mount source="${{ github.workspace }}/HPCC-Platform",target=/hpcc-dev/HPCC-Platform,type=bind,consistency=cached --mount source="${{ github.workspace }}/build",target=/hpcc-dev/build,type=bind,consistency=cached ${{ steps.vars.outputs.docker_tag_candidate_base }} "cd /hpcc-dev/build/Release/docs/EN_US && zip ALL_HPCC_DOCS_EN_US-$(echo '${{ steps.vars.outputs.community_tag }}' | sed 's/community_//' ).zip *.pdf"
          docker run --rm --mount source="${{ github.workspace }}/HPCC-Platform",target=/hpcc-dev/HPCC-Platform,type=bind,consistency=cached --mount source="${{ github.workspace }}/build",target=/hpcc-dev/build,type=bind,consistency=cached ${{ steps.vars.outputs.docker_tag_candidate_base }} "cd /hpcc-dev/build/Release/docs/PT_BR && zip ALL_HPCC_DOCS_PT_BR-$(echo '${{ steps.vars.outputs.community_tag }}' | sed 's/community_//' ).zip *.pdf"

      - name: Upload build artifacts
        uses: actions/upload-artifact@v4
        with:
          name: ${{ inputs.asset-name }}
          path: |
            ${{ github.workspace }}/build/Release/docs/*.zip
            ${{ github.workspace }}/build/Release/docs/EN_US/*.zip
            ${{ github.workspace }}/build/Release/docs/PT_BR/*.zip
            ${{ github.workspace }}/build/docs/EN_US/EclipseHelp/*.zip
            ${{ github.workspace }}/build/docs/EN_US/HTMLHelp/*.zip
            ${{ github.workspace }}/build/docs/PT_BR/HTMLHelp/*.zip
          # Zips are already compressed; skip artifact re-compression.
          compression-level: 0

14 changes: 14 additions & 0 deletions common/thorhelper/thorsoapcall.cpp
Original file line number Diff line number Diff line change
Expand Up @@ -2466,9 +2466,15 @@ class CWSCAsyncFor : implements IWSCAsyncFor, public CInterface, public CAsyncFo
checkTimeLimitExceeded(&remainingMS);
Url &connUrl = master->proxyUrlArray.empty() ? url : master->proxyUrlArray.item(0);

CCycleTimer dnsTimer;

// TODO: for DNS, do we use timeoutMS or remainingMS or remainingMS / maxRetries+1 or ?
ep.set(connUrl.host.get(), connUrl.port, master->timeoutMS);

unsigned __int64 dnsNs = dnsTimer.elapsedNs();
master->logctx.noteStatistic(StTimeSoapcallDNS, dnsNs);
master->activitySpanScope->setSpanAttribute("SoapcallDNSTimeNs", dnsNs);

if (ep.isNull())
throw MakeStringException(-1, "Failed to resolve host '%s'", nullText(connUrl.host.get()));

Expand All @@ -2489,6 +2495,8 @@ class CWSCAsyncFor : implements IWSCAsyncFor, public CInterface, public CAsyncFo
isReused = false;
keepAlive = true;

CCycleTimer connTimer;

// TODO: for each connect attempt, do we use timeoutMS or remainingMS or remainingMS / maxRetries or ?
socket.setown(blacklist->connect(ep, master->logctx, (unsigned)master->maxRetries, master->timeoutMS, master->roxieAbortMonitor, master->rowProvider));

Expand Down Expand Up @@ -2518,11 +2526,17 @@ class CWSCAsyncFor : implements IWSCAsyncFor, public CInterface, public CAsyncFo
throw makeStringException(0, err.str());
#endif
}

unsigned __int64 connNs = connTimer.elapsedNs();
master->logctx.noteStatistic(StTimeSoapcallConnect, connNs);
master->activitySpanScope->setSpanAttribute("SoapcallConnectTimeNs", connNs);
}
break;
}
catch (IException *e)
{
master->logctx.noteStatistic(StNumSoapcallConnectFailures, 1);

if (master->timeLimitExceeded)
{
master->activitySpanScope->recordError(SpanError("Time Limit Exceeded", e->errorCode(), true, true));
Expand Down
15 changes: 10 additions & 5 deletions helm/managed/logging/loki-stack/README.md
Original file line number Diff line number Diff line change
Expand Up @@ -10,7 +10,8 @@ A Loki Datasource is created automatically, which allows users to monitor/quer
### Helm Deployment
To deploy the light-weight Loki Stack for HPCC component log processing issue the following command:

>helm install myloki HPCC-Systems/helm/managed/logging/loki-stack/
>helm install myloki4hpcclogs HPCC-Systems/helm/managed/logging/loki-stack/
Note: the deployment name 'myloki4hpcclogs' is customizable; however, any changes need to be reflected in the LogAccess configuration (See section on configuring LogAccess below)

### Dependencies
This chart is dependent on the Grafana Loki-stack Helm charts which in turn is dependent on Loki, Grafana, Promtail.
Expand All @@ -23,7 +24,9 @@ Helm provides a convenient command to automatically pull appropriate dependencie
##### HELM Install parameter
Otherwise, provide the "--dependency-update" argument in the helm install command
For example:
> helm install myloki HPCC-Systems/helm/managed/logging/loki-stack/ --dependency-update
> helm install myloki4hpcclogs HPCC-Systems/helm/managed/logging/loki-stack/ --dependency-update
Note: the deployment name 'myloki4hpcclogs' is customizable; however, any changes need to be reflected in the LogAccess configuration (See section on configuring LogAccess below)

### Components
Grafana Loki Stack is comprised of a set of components which serve as a full-featured logging stack.
Expand Down Expand Up @@ -172,7 +175,7 @@ username: 5 bytes

The target HPCC deployment should be directed to use the desired Grafana endpoint with the Loki datasource, and the newly created secret by providing appropriate logAccess values (such as ./grafana-hpcc-logaccess.yaml).

Example use:
Example use for targeting a loki stack deployed as 'myloki4hpcclogs' on the default namespace:

```
helm install myhpcc hpcc/hpcc -f HPCC-Platform/helm/managed/logging/loki-stack/grafana-hpcc-logaccess.yaml
Expand All @@ -182,8 +185,10 @@ Example use:

The grafana hpcc logaccess values should provide Grafana connection information, such as the host, and port; the Loki datasource where the logs reside; the k8s namespace under which the logs were created (non-default namespace highly recommended); and the hpcc component log format (table|json|xml)

Example values file describing logAccess targeting loki stack deployed as 'myloki4hpcclogs' on the default namespace. Note that the "host" entry must reflect the name of the deployed Loki stack, as shown in the excerpt below (eg **_myloki4hpcclogs_**-grafana.default.svc.cluster.local):

```
Example use:
global:
logAccess:
name: "Grafana/loki stack log access"
Expand Down Expand Up @@ -220,4 +225,4 @@ For example:
<logMaps type="node" searchColumn="node_name" columnMode="ALL" storeName="stream" columnType="string"/>
<logMaps type="pod" searchColumn="pod" columnMode="DEFAULT" storeName="stream" columnType="string"/>
</logAccess>
```
```
61 changes: 50 additions & 11 deletions roxie/ccd/ccdqueue.cpp
Original file line number Diff line number Diff line change
Expand Up @@ -2426,6 +2426,31 @@ class RoxieSocketQueueManager : public RoxieReceiverBase
DelayedPacketQueueManager delayed;
#endif

// Tracks how the UDP worker/reader thread's time is divided between its
// phases, so that periodic reports can show where the thread spends time.
// The template arguments request 6 tracked states; the semantics of the
// 'false' flag come from TimeDivisionTracker — TODO confirm its meaning.
class WorkerUdpTracker : public TimeDivisionTracker<6, false>
{
public:
// Indices of the tracked states; each maps to a human-readable name
// registered in the constructor below.
enum
{
other,              // anything not covered by a more specific state
waiting,            // blocked in the socket read
allocating,         // reserving the packet buffer
processing,         // handling a received packet
pushing,            // queuing work for the worker pool
checkingRunning     // scanning running workers for a matching packet
};

// name: label used when reporting; reportIntervalSeconds: how often the
// division of time is reported (interval handling is in the base class).
WorkerUdpTracker(const char *name, unsigned reportIntervalSeconds) : TimeDivisionTracker<6, false>(name, reportIntervalSeconds)
{
stateNames[other] = "other";
stateNames[waiting] = "waiting";
stateNames[allocating] = "allocating";
stateNames[processing] = "processing";
stateNames[pushing] = "pushing";
stateNames[checkingRunning] = "checking running";
}

} timeTracker;

class ReceiverThread : public Thread
{
RoxieSocketQueueManager &parent;
Expand All @@ -2445,7 +2470,7 @@ class RoxieSocketQueueManager : public RoxieReceiverBase
} readThread;

public:
RoxieSocketQueueManager(unsigned _numWorkers) : RoxieReceiverBase(_numWorkers), logctx("RoxieSocketQueueManager"), readThread(*this)
RoxieSocketQueueManager(unsigned _numWorkers) : RoxieReceiverBase(_numWorkers), logctx("RoxieSocketQueueManager"), timeTracker("WorkerUdpReader", 60), readThread(*this)
{
maxPacketSize = multicastSocket->get_max_send_size();
if ((maxPacketSize==0)||(maxPacketSize>65535))
Expand Down Expand Up @@ -2758,21 +2783,26 @@ class RoxieSocketQueueManager : public RoxieReceiverBase
// if found, send an IBYTI and discard retry request

bool alreadyRunning = false;
Owned<IPooledThreadIterator> wi = queue.running();
ForEach(*wi)
{
CRoxieWorker &w = (CRoxieWorker &) wi->query();
if (w.match(header))
WorkerUdpTracker::TimeDivision division(timeTracker, WorkerUdpTracker::checkingRunning);

Owned<IPooledThreadIterator> wi = queue.running();
ForEach(*wi)
{
alreadyRunning = true;
ROQ->sendIbyti(header, logctx, mySubchannel);
if (doTrace(traceRoxiePackets, TraceFlags::Max))
CRoxieWorker &w = (CRoxieWorker &) wi->query();
if (w.match(header))
{
StringBuffer xx; logctx.CTXLOG("Ignored retry on subchannel %u for running activity %s", mySubchannel, header.toString(xx).str());
alreadyRunning = true;
ROQ->sendIbyti(header, logctx, mySubchannel);
if (doTrace(traceRoxiePackets, TraceFlags::Max))
{
StringBuffer xx; logctx.CTXLOG("Ignored retry on subchannel %u for running activity %s", mySubchannel, header.toString(xx).str());
}
break;
}
break;
}
}

if (!alreadyRunning && checkCompleted && ROQ->replyPending(header))
{
alreadyRunning = true;
Expand All @@ -2788,6 +2818,7 @@ class RoxieSocketQueueManager : public RoxieReceiverBase
{
StringBuffer xx; logctx.CTXLOG("Retry %d received on subchannel %u for %s", retries+1, mySubchannel, header.toString(xx).str());
}
WorkerUdpTracker::TimeDivision division(timeTracker, WorkerUdpTracker::pushing);
#ifdef NEW_IBYTI
// It's debatable whether we should delay for the primary here - they had one chance already...
// But then again, so did we, assuming the timeout is longer than the IBYTIdelay
Expand All @@ -2806,6 +2837,7 @@ class RoxieSocketQueueManager : public RoxieReceiverBase
}
else // first time (not a retry).
{
WorkerUdpTracker::TimeDivision division(timeTracker, WorkerUdpTracker::pushing);
#ifdef NEW_IBYTI
unsigned delay = 0;
if (mySubchannel != 0 && (header.activityId & ~ROXIE_PRIORITY_MASK) < ROXIE_ACTIVITY_SPECIAL_FIRST) // i.e. I am not the primary here, and never delay special
Expand All @@ -2830,6 +2862,7 @@ class RoxieSocketQueueManager : public RoxieReceiverBase
doIbytiDelay?"YES":"NO", minIbytiDelay, initIbytiDelay);

MemoryBuffer mb;
WorkerUdpTracker::TimeDivision division(timeTracker, WorkerUdpTracker::other);
for (;;)
{
mb.clear();
Expand All @@ -2844,8 +2877,14 @@ class RoxieSocketQueueManager : public RoxieReceiverBase
#else
unsigned timeout = 5000;
#endif
division.switchState(WorkerUdpTracker::allocating);
void * buffer = mb.reserve(maxPacketSize);

division.switchState(WorkerUdpTracker::waiting);
unsigned l;
multicastSocket->readtms(mb.reserve(maxPacketSize), sizeof(RoxiePacketHeader), maxPacketSize, l, timeout);
multicastSocket->readtms(buffer, sizeof(RoxiePacketHeader), maxPacketSize, l, timeout);
division.switchState(WorkerUdpTracker::processing);

mb.setLength(l);
RoxiePacketHeader &header = *(RoxiePacketHeader *) mb.toByteArray();
if (l != header.packetlength)
Expand Down
4 changes: 2 additions & 2 deletions roxie/ccd/ccdserver.cpp
Original file line number Diff line number Diff line change
Expand Up @@ -496,7 +496,7 @@ static const StatisticsMapping indexStatistics({StNumServerCacheHits, StNumIndex
static const StatisticsMapping diskStatistics({StNumServerCacheHits, StNumDiskRowsRead, StNumDiskSeeks, StNumDiskAccepted,
StNumDiskRejected, StSizeAgentReply, StTimeAgentWait, StTimeAgentQueue, StTimeAgentProcess, StTimeIBYTIDelay, StNumAckRetries, StNumAgentRequests, StSizeAgentRequests,
StSizeContinuationData, StNumContinuationRequests }, actStatistics);
static const StatisticsMapping soapStatistics({ StTimeSoapcall }, actStatistics);
static const StatisticsMapping soapStatistics({ StTimeSoapcall, StTimeSoapcallDNS, StTimeSoapcallConnect, StNumSoapcallConnectFailures }, actStatistics);
static const StatisticsMapping groupStatistics({ StNumGroups, StNumGroupMax }, actStatistics);
static const StatisticsMapping sortStatistics({ StTimeSortElapsed }, actStatistics);
static const StatisticsMapping indexWriteStatistics({ StNumDuplicateKeys, StNumLeafCacheAdds, StNumNodeCacheAdds, StNumBlobCacheAdds }, actStatistics);
Expand All @@ -518,7 +518,7 @@ extern const StatisticsMapping accumulatedStatistics({StWhenFirstRow, StTimeLoca
StCycleBlobFetchCycles, StCycleLeafFetchCycles, StCycleNodeFetchCycles, StTimeBlobFetch, StTimeLeafFetch, StTimeNodeFetch,
StNumNodeDiskFetches, StNumLeafDiskFetches, StNumBlobDiskFetches,
StNumDiskRejected, StSizeAgentReply, StTimeAgentWait,
StTimeSoapcall,
StTimeSoapcall, StTimeSoapcallDNS, StTimeSoapcallConnect, StNumSoapcallConnectFailures,
StNumGroups,
StTimeSortElapsed,
StNumDuplicateKeys,
Expand Down
5 changes: 5 additions & 0 deletions system/jlib/jstatcodes.h
Original file line number Diff line number Diff line change
Expand Up @@ -314,6 +314,11 @@ enum StatisticKind
StNumParallelExecute,
StNumAgentRequests,
StSizeAgentRequests,
StTimeSoapcallDNS, // Time spent in DNS lookups for soapcalls
StTimeSoapcallConnect, // Time spent in connect[+SSL_connect] for soapcalls
StCycleSoapcallDNSCycles,
StCycleSoapcallConnectCycles,
StNumSoapcallConnectFailures,
StMax,

//For any quantity there is potentially the following variants.
Expand Down
Loading

0 comments on commit 9f55380

Please sign in to comment.