Skip to content

Commit

Permalink
Merge pull request #7 from knackroot-technolabs-llp/main
Browse files Browse the repository at this point in the history
vardiff+backup merge
  • Loading branch information
argonmining authored Dec 24, 2024
2 parents c70e610 + f9b0b04 commit 2e976c6
Show file tree
Hide file tree
Showing 8 changed files with 281 additions and 151 deletions.
32 changes: 32 additions & 0 deletions README.md
Original file line number Diff line number Diff line change
Expand Up @@ -105,6 +105,38 @@ Optionally, you can add a backup process to the DB. Check the ./backup folder.
You can build the suggested image via `docker build -t katpool-backup:0.4 .` and uncomment its part in the docker-compose.yml file.
We recommend transferring the database dump files to another location for additional protection.

## Service Account Creation and Credentials for Google Cloud Backup

### Creating project in google cloud console
- Go to https://console.cloud.google.com/ and log in
- Go to Topbar right beside the Google Cloud logo
- Create New Project
- Select your newly created project

### Enabling the Google Drive API service
- From the navigation menu, select API & services (https://console.cloud.google.com/apis/dashboard)
- Click on ENABLE APIS AND SERVICES (https://console.cloud.google.com/apis/library)
- Go to the Google Workspace in sidebar
- Then click on Google Drive API (https://console.cloud.google.com/apis/library/drive.googleapis.com)
- Click on Enable button

### Creating the google cloud service account
- Go to (https://console.cloud.google.com/iam-admin/serviceaccounts)
- Click on CREATE SERVICE ACCOUNT, give the service account name, skip the optional fields

### Creating credentials for the service account
- Go to your newly created service account
- Go to KEYS tab and click on ADD KEY -> Create new key -> Key type : JSON
- Your credentials json file will be downloaded

### Running cloud backup script
- Add that json file to backup folder as "google-credentials.json"
- Configure the email address that should have access to the dump file in the config as "backupEmailAddress", then execute the commands below:
```bash
cd backup/
bun run cloudBackup.ts fileName.sql
```

## How to install locally using bun (not recommended)
To install dependencies:

Expand Down
50 changes: 50 additions & 0 deletions backup/cloudBackup.ts
Original file line number Diff line number Diff line change
@@ -0,0 +1,50 @@
import { createReadStream, access, constants } from 'fs';
import path from 'path';
import { google } from 'googleapis';
import config from '../config/config.json'
import googleCredentials from './google-credentials.json';

const SCOPES = ["https://www.googleapis.com/auth/drive.file"];
const fileNameArgs = process.argv.slice(2);

/**
 * Builds and authorizes a JWT client for the Google Drive API using the
 * service-account credentials bundled in this backup folder.
 *
 * @returns the authorized JWT client, ready to be passed as `auth` to
 *   `google.drive()`.
 */
async function authorize() {
  const { client_email, private_key } = googleCredentials;
  const client = new google.auth.JWT(client_email, undefined, private_key, SCOPES);
  await client.authorize();
  return client;
}

async function uploadFile(authClient: any) {
const drive = google.drive({ version: "v3", auth: authClient });

for(let i = 0; i < fileNameArgs.length; i++) {
access(fileNameArgs[i], constants.F_OK, async (err) => {
if (err) {
console.log(`The file ${fileNameArgs[i]} does not exist in the current directory.`);
} else {
const file = await drive.files.create({
media: {
body: createReadStream(fileNameArgs[i]),
},
fields: "id",
requestBody: {
name: path.basename(fileNameArgs[i]),
},
});
console.log("File Uploaded :", file.data.id);
const backupEmailAddress = config.backupEmailAddress
await drive.permissions.create({ fileId: file.data.id!, requestBody: { type: 'user', role: 'writer', emailAddress: backupEmailAddress } })
}
});
}
}

// Entry point: authorize against Google and upload the files named on the CLI.
// The trailing .catch reports auth/upload failures and sets a non-zero exit
// code instead of letting them surface as an unhandled promise rejection.
(async function main() {
  const authClient = await authorize();
  await uploadFile(authClient);
})().catch((e: unknown) => {
  console.error('Backup upload failed:', e instanceof Error ? e.message : e);
  process.exitCode = 1;
});

15 changes: 5 additions & 10 deletions config/config.json
Original file line number Diff line number Diff line change
Expand Up @@ -13,19 +13,14 @@
"templates": {
"cacheSize": 30000
},
"difficulty": 64,
"difficulty": 4096,
"sharesPerMinute": 20,
"extraNonceSize": 2,
"varDiff": {
"varDiffStats": true,
"clampPow2": true,
"minDifficulty": 0.5,
"intervalMs": 120000,
"minElapsedSeconds": 30,
"adjustmentFactor": 1.1
}
"clampPow2": true,
"varDiff": true
},
"treasury": {
"fee": 5
}
},
"backupEmailAddress": ""
}
54 changes: 27 additions & 27 deletions go/main.go
Original file line number Diff line number Diff line change
Expand Up @@ -51,7 +51,7 @@ func NewKaspaAPI(address string, blockWaitTime time.Duration) (*KaspaApi, error)

func fetchKaspaAccountFromPrivateKey(network, privateKeyHex string) (string, error) {
prefix := util.Bech32PrefixKaspa
if network == "testnet-10" {
if network == "testnet-10" || network == "testnet-11"{
prefix = util.Bech32PrefixKaspaTest
}

Expand Down Expand Up @@ -190,32 +190,32 @@ func main() {

templateMutex.Lock()
if currentTemplate != nil {
fmt.Printf(`
HashMerkleRoot : %v
AcceptedIDMerkleRoot : %v
UTXOCommitment : %v
Timestamp : %v
Bits : %v
Nonce : %v
DAAScore : %v
BlueWork : %v
BlueScore : %v
PruningPoint : %v
Transactions Length : %v
---------------------------------------
`,
currentTemplate.Block.Header.HashMerkleRoot,
currentTemplate.Block.Header.AcceptedIDMerkleRoot,
currentTemplate.Block.Header.UTXOCommitment,
currentTemplate.Block.Header.Timestamp,
currentTemplate.Block.Header.Bits,
currentTemplate.Block.Header.Nonce,
currentTemplate.Block.Header.DAAScore,
currentTemplate.Block.Header.BlueWork,
currentTemplate.Block.Header.BlueScore,
currentTemplate.Block.Header.PruningPoint,
len(currentTemplate.Block.Transactions),
)
// fmt.Printf(`
// HashMerkleRoot : %v
// AcceptedIDMerkleRoot : %v
// UTXOCommitment : %v
// Timestamp : %v
// Bits : %v
// Nonce : %v
// DAAScore : %v
// BlueWork : %v
// BlueScore : %v
// PruningPoint : %v
// Transactions Length : %v
// ---------------------------------------
// `,
// currentTemplate.Block.Header.HashMerkleRoot,
// currentTemplate.Block.Header.AcceptedIDMerkleRoot,
// currentTemplate.Block.Header.UTXOCommitment,
// currentTemplate.Block.Header.Timestamp,
// currentTemplate.Block.Header.Bits,
// currentTemplate.Block.Header.Nonce,
// currentTemplate.Block.Header.DAAScore,
// currentTemplate.Block.Header.BlueWork,
// currentTemplate.Block.Header.BlueScore,
// currentTemplate.Block.Header.PruningPoint,
// len(currentTemplate.Block.Transactions),
// )
} else {
fmt.Println("No block template fetched yet.")
}
Expand Down
1 change: 1 addition & 0 deletions package.json
Original file line number Diff line number Diff line change
Expand Up @@ -21,6 +21,7 @@
"denque": "^2.1.0",
"dotenv": "^16.4.5",
"fs": "^0.0.1-security",
"googleapis": "^144.0.0",
"json-bigint": "^1.0.0",
"lmdb": "^3.0.11",
"node-cron": "^3.0.3",
Expand Down
25 changes: 16 additions & 9 deletions src/stratum/index.ts
Original file line number Diff line number Diff line change
Expand Up @@ -41,9 +41,10 @@ export default class Stratum extends EventEmitter {
this.monitoring.log(`Stratum: Initialized with difficulty ${this.difficulty}`);

// Start the VarDiff thread
const varDiffStats = config.stratum.varDiff.varDiffStats || true; // Enable logging of VarDiff stats
const clampPow2 = config.stratum.varDiff.clampPow2 || true; // Enable clamping difficulty to powers of 2
this.sharesManager.startVardiffThread(sharesPerMin, varDiffStats, clampPow2);
const clampPow2 = config.stratum.clampPow2 || true; // Enable clamping difficulty to powers of 2
const varDiff = config.stratum.varDiff || false; // Enable variable difficulty
if (varDiff)
this.sharesManager.startVardiffThread(sharesPerMin, clampPow2);

this.extraNonceSize = Math.min(Number(config.stratum.extraNonceSize), 3 ) || 0;
this.maxExtranonce = Math.pow(2, 8 * Math.min(this.extraNonceSize, 3)) - 1;
Expand All @@ -68,17 +69,23 @@ export default class Stratum extends EventEmitter {
this.subscriptors.forEach((socket) => {
if (socket.readyState === "closed") {
this.subscriptors.delete(socket);
} else {
this.reflectDifficulty(socket);
} else {
socket.data.workers.forEach((worker, _) => {
var varDiff = this.sharesManager.getClientVardiff(worker)
if (varDiff != socket.data.difficulty && varDiff != 0) {
this.monitoring.log(`Stratum: Updating VarDiff for ${worker.name} from ${socket.data.difficulty} to ${varDiff}`);
this.sharesManager.updateSocketDifficulty(worker.address, varDiff)
this.reflectDifficulty(socket)
this.sharesManager.startClientVardiff(worker)
}
});

socket.write(tasksData[socket.data.encoding] + '\n');
}
});
}

reflectDifficulty(socket: Socket<Miner>) {
if (socket.data.encoding === Encoding.Bitmain) {
socket.data.difficulty = 4096
}
const event: Event<'mining.set_difficulty'> = {
method: 'mining.set_difficulty',
params: [socket.data.difficulty]
Expand Down Expand Up @@ -197,7 +204,7 @@ export default class Stratum extends EventEmitter {
const minerData = this.sharesManager.getMiners().get(worker.address);
const workerDiff = minerData?.workerStats.minDiff;
const socketDiff = socket.data.difficulty;
if (DEBUG) this.monitoring.debug(`Stratum: Current difficulties - Worker: ${workerDiff}, Socket: ${socketDiff}`);
if (DEBUG) this.monitoring.debug(`Stratum: Current difficulties , Worker Name: ${minerId} - Worker: ${workerDiff}, Socket: ${socketDiff}`);
const currentDifficulty = workerDiff || socketDiff;
if (DEBUG) this.monitoring.debug(`Stratum: Adding Share - Address: ${address}, Worker Name: ${name}, Hash: ${hash}, Difficulty: ${currentDifficulty}`);
// Add extranonce to noncestr if enabled and submitted nonce is shorter than
Expand Down
Loading

0 comments on commit 2e976c6

Please sign in to comment.