v1.0.0 beta release. Backwards compatible native auth for view data, deep param validations for mint and access data, comment out custom error methods and replace them with pure Error as they were hiding error details on the client-side catch, updated documentation.
newbreedofgeek committed Sep 8, 2023
1 parent 5ed670b commit 13efc37
Showing 7 changed files with 393 additions and 120 deletions.
27 changes: 23 additions & 4 deletions README.md
@@ -26,6 +26,8 @@ This SDK is currently focused on interacting with the Itheum's Data NFT technolo

## SDK DOCS

Note that all param requirements and method docs are marked up in the TypeScript code, so if you use TypeScript in your project, your development tool (e.g. Visual Studio Code) will provide IntelliSense for all methods and functions.

### 1. Interacting with Data NFTs

```typescript
@@ -57,15 +59,32 @@ const address = 'address';
let dataNfts = [];
dataNfts = await DataNft.ownedByAddress(address);

// Retrieves the DataNft message from marshal to sign
// Retrieves the specific DataNft
const dataNft = await DataNft.createFromApi(nonce);

// (A) Get a message from the Data Marshal node for you to sign to prove ownership
const message = await dataNft.getMessageToSign();

// Sign the message with a wallet
// (B) Sign the message with a wallet and obtain a signature
const signature = 'signature';

// Unlock the data inside the dataNft
dataNft.viewData(message, signature); // optional params "stream" (stream out data instead of downloading file), "fwdAllHeaders"/"fwdHeaderKeys", "fwdHeaderMapLookup" can be used to pass headers like Authorization to origin servers
// There are 2 methods to open a Data NFT and view the content:

// Method 1) Unlock the data inside the Data NFT via signature verification
dataNft.viewData({
  message,
  signature
}); // optional params "stream" (stream out data instead of downloading a file), "fwdAllHeaders"/"fwdHeaderKeys", "fwdHeaderMapLookup" can be used to pass headers like Authorization to origin Data Stream servers

// Method 2) OR, you can use a MultiversX Native Auth access token to unlock the data inside the Data NFT without the need for the signature steps (A)(B) above. This has a much better UX
dataNft.viewDataViaMVXNativeAuth({
  mvxNativeAuthOrigins: ["http://localhost:3000", "https://mycoolsite.com"], // the same whitelist of domains your client app used when generating the Native Auth token
  mvxNativeAuthMaxExpirySeconds: 300, // the same expiry seconds your client app used when generating the Native Auth token
  fwdHeaderMapLookup: {
    authorization: "Bearer myNativeAuthToken"
  }
}); // optional params "stream" (stream out data instead of downloading a file), "fwdAllHeaders"/"fwdHeaderKeys" can be used to pass on headers like Authorization to origin Data Stream servers
```
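
A minimal sketch of consuming the unlocked data follows. The exact shape of `ViewDataReturnType` is not shown in this diff, so the `data`, `contentType` and `error` fields used below are assumptions; check them against `src/interfaces`:

```typescript
// Minimal consumption sketch (assumes ViewDataReturnType exposes `data`,
// `contentType` and `error` fields - verify against src/interfaces).
const res = await dataNft.viewDataViaMVXNativeAuth({
  mvxNativeAuthOrigins: ["http://localhost:3000", "https://mycoolsite.com"],
  mvxNativeAuthMaxExpirySeconds: 300,
  fwdHeaderMapLookup: {
    authorization: "Bearer myNativeAuthToken"
  }
});

if (!res.error) {
  console.log(res.contentType); // e.g. "application/json"
  console.log(res.data);        // the unlocked Data Stream payload
} else {
  console.error(res.error);
}
```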

### 2. Interacting with Data NFT Minter
2 changes: 1 addition & 1 deletion package.json
@@ -1,6 +1,6 @@
{
"name": "@itheum/sdk-mx-data-nft",
"version": "0.3.0.beta.1",
"version": "1.0.0.beta.1",
"description": "SDK for Itheum's Data NFT Technology on MultiversX Blockchain",
"main": "out/index.js",
"types": "out/index.d.js",
106 changes: 58 additions & 48 deletions src/datanft.ts
@@ -14,17 +14,17 @@ import {
createNftIdentifier,
numberToPaddedHex,
parseDataNft,
validateSpecificParams
validateSpecificParamsViewData
} from './utils';
import minterAbi from './abis/datanftmint.abi.json';
import { NftType, ViewDataReturnType } from './interfaces';
import {
ErrDataNftCreation,
ErrDecodeAttributes,
ErrFailedOperation,
ErrAttributeNotSet,
ErrNetworkConfig
} from './errors';
// import {
// ErrDataNftCreation,
// ErrDecodeAttributes,
// ErrFailedOperation,
// ErrAttributeNotSet,
// ErrNetworkConfig
// } from './errors';

export class DataNft {
readonly tokenIdentifier: string = '';
@@ -86,11 +86,12 @@ export class DataNft {

return dataNft;
} catch (error) {
if (error instanceof Error) {
throw new ErrDataNftCreation(error);
} else {
throw ErrDataNftCreation;
}
throw error;
// if (error instanceof Error) {
// throw new ErrDataNftCreation(error);
// } else {
// throw ErrDataNftCreation;
// }
}
}

@@ -125,11 +126,12 @@ export class DataNft {
const dataNfts = data.map((value) => parseDataNft(value));
return dataNfts;
} catch (error) {
if (error instanceof Error) {
throw new ErrDataNftCreation(error);
} else {
throw ErrDataNftCreation;
}
throw error;
// if (error instanceof Error) {
// throw new ErrDataNftCreation(error);
// } else {
// throw ErrDataNftCreation;
// }
}
}

@@ -155,11 +157,12 @@ export class DataNft {
return dataNfts;
}
} catch (error) {
if (error instanceof Error) {
throw new ErrDataNftCreation(error);
} else {
throw ErrDataNftCreation;
}
throw error;
// if (error instanceof Error) {
// throw new ErrDataNftCreation(error);
// } else {
// throw ErrDataNftCreation;
// }
}
}

@@ -188,8 +191,9 @@ export class DataNft {
description: decodedAttributes['description'].toString(),
title: decodedAttributes['title'].toString()
};
} catch {
throw ErrDecodeAttributes;
} catch (error) {
throw error;
// throw ErrDecodeAttributes;
}
}

@@ -212,11 +216,12 @@ export class DataNft {

return dataNfts;
} catch (error) {
if (error instanceof Error) {
throw new ErrFailedOperation(this.ownedByAddress.name, error);
} else {
throw ErrFailedOperation;
}
throw error;
// if (error instanceof Error) {
// throw new ErrFailedOperation(this.ownedByAddress.name, error);
// } else {
// throw ErrFailedOperation;
// }
}
}

@@ -226,7 +231,8 @@ export class DataNft {
async getMessageToSign(): Promise<string> {
DataNft.ensureNetworkConfigSet();
if (!this.dataMarshal) {
throw new ErrAttributeNotSet('dataMarshal');
throw new Error('No data marshal set for getMessageToSign');
// throw new ErrAttributeNotSet('dataMarshal');
}
try {
const res = await fetch(
@@ -240,16 +246,17 @@ export class DataNft {

return data.nonce;
} catch (error) {
if (error instanceof Error) {
throw new ErrFailedOperation(this.getMessageToSign.name, error);
} else {
throw ErrFailedOperation;
}
throw error;
// if (error instanceof Error) {
// throw new ErrFailedOperation(this.getMessageToSign.name, error);
// } else {
// throw ErrFailedOperation;
// }
}
}

/**
* Method to get the data from the data marshal.
* Method to get the data via the Data Marshal.
* @param signedMessage Signed message from the data marshal
* @param signableMessage Signable message from the wallet
* @param stream [optional] Instead of auto-downloading if possible, request if data should always be streamed or not. i.e. true=stream, false/undefined=default behavior
@@ -269,16 +276,18 @@ export class DataNft {
}): Promise<ViewDataReturnType> {
DataNft.ensureNetworkConfigSet();
if (!this.dataMarshal) {
throw new ErrAttributeNotSet('dataMarshal');
throw new Error('No data marshal set for viewData');
// throw new ErrAttributeNotSet('dataMarshal');
}
const signResult = {
signature: '',
addrInHex: '',
success: false,
exception: ''
};

// S: run any format specific validation
const { allPassed, validationMessages } = validateSpecificParams({
const { allPassed, validationMessages } = validateSpecificParamsViewData({
signedMessage: p.signedMessage,
signableMessage: p.signableMessage,
stream: p.stream,
@@ -314,10 +323,9 @@ export class DataNft {
}

try {
// let url = `${this.dataMarshal}/access?nonce=${p.signedMessage}&NFTId=${
let url = `http://localhost:4000/datamarshalapi/achilles/v1/access?nonce=${
p.signedMessage
}&NFTId=${this.collection}-${numberToPaddedHex(this.nonce)}&signature=${
let url = `${this.dataMarshal}/access?nonce=${p.signedMessage}&NFTId=${
this.collection
}-${numberToPaddedHex(this.nonce)}&signature=${
signResult.signature
}&chainId=${
DataNft.networkConfiguration.chainID == 'D'
@@ -379,7 +387,7 @@ export class DataNft {
/**
* Method to get the data from the data marshal by authenticating and authorizing via MultiversX Native Auth. This has a better UX as it does not need a manually signed signableMessage
* @param mvxNativeAuthOrigins A string array of domains that the access token is validated against. e.g. ["http://localhost:3000", "https://mycoolsite.com"]
* @param mvxNativeAuthMaxExpirySeconds An number of that represents the "max expiry seconds" of your access token. e.g. if your client side access token is set fr 5 mins then send in 300
* @param mvxNativeAuthMaxExpirySeconds A number that represents the "max expiry seconds" of your access token. e.g. if your client side access token is set for 5 mins then send in 300
* @param fwdHeaderMapLookup Used with fwdHeaderKeys to set a front-end client side lookup map of headers the SDK uses to set up the forward. e.g. { cookie : "xyz", authorization : "Bearer zxy" }. As it's Native Auth, you must send in the authorization : "Bearer zxy" entry. Note that these are case-sensitive and need to match fwdHeaderKeys exactly for other entries.
* @param fwdHeaderKeys [optional] Forward only selected headers to the Origin Data Stream server. Has priority over fwdAllHeaders param. A comma separated lowercase string with less than 5 items. e.g. cookie,authorization
* @param fwdAllHeaders [optional] Forward all request headers to the Origin Data Stream server.
@@ -397,7 +405,7 @@ export class DataNft {
}): Promise<ViewDataReturnType> {
try {
// S: run any format specific validation
const { allPassed, validationMessages } = validateSpecificParams({
const { allPassed, validationMessages } = validateSpecificParamsViewData({
mvxNativeAuthOrigins: p.mvxNativeAuthOrigins,
mvxNativeAuthMaxExpirySeconds: p.mvxNativeAuthMaxExpirySeconds,
fwdHeaderKeys: p.fwdHeaderKeys,
@@ -427,8 +435,7 @@ export class DataNft {
mvxNativeAuthOriginsToBase64 = window.btoa(mvxNativeAuthOriginsToBase64); // convert to base64

// construct the api url
// let url = `${this.dataMarshal}/access?NFTId=${
let url = `http://localhost:4000/datamarshalapi/achilles/v1/access?NFTId=${
let url = `${this.dataMarshal}/access?NFTId=${
this.collection
}-${numberToPaddedHex(this.nonce)}&chainId=${
DataNft.networkConfiguration.chainID == 'D'
@@ -499,7 +506,10 @@ export class DataNft {

private static ensureNetworkConfigSet() {
if (!this.env || !this.apiConfiguration) {
throw ErrNetworkConfig;
throw new Error(
'Network configuration is not set. Call setNetworkConfig static method before calling any method that requires network configuration.'
);
// throw ErrNetworkConfig;
}
}
}
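
Since the custom error wrappers above are now commented out and the original errors are rethrown, a client-side catch sees the underlying failure directly. A minimal sketch of that flow follows; the `'devnet'` environment string and the `nonce`, `message` and `signature` placeholders are assumptions carried over from the README examples, and the exact error messages depend on the underlying failure.

```typescript
// Sketch: error handling after this change. The caught value is the original
// Error (no custom wrapper), so err.message carries the real detail.
DataNft.setNetworkConfig('devnet'); // required before any network-dependent call; env value assumed

try {
  const dataNft = await DataNft.createFromApi(nonce);
  const res = await dataNft.viewData({ message, signature });
  console.log(res);
} catch (err) {
  if (err instanceof Error) {
    console.error(err.message); // e.g. the fetch/validation detail that was previously hidden
  }
}
```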
2 changes: 1 addition & 1 deletion src/errors.ts
@@ -10,7 +10,7 @@ export class Err extends Error {
}

/**
* Returns a pretty, friendly summary for the error or for the chain of errros (if appropriate).
* Returns a pretty, friendly summary for the error or for the chain of errors (if appropriate).
*/
summary(): any[] {
let result = [];
28 changes: 17 additions & 11 deletions src/marketplace.ts
@@ -26,7 +26,7 @@ import {
import dataMarketAbi from './abis/data_market.abi.json';
import { MarketplaceRequirements, Offer } from './interfaces';
import { parseOffer } from './utils';
import { ErrContractQuery } from './errors';
// import { ErrContractQuery } from './errors';

export class DataNftMarket {
readonly contract: SmartContract;
@@ -281,9 +281,10 @@ export class DataNftMarket {
};
return requirements;
} else {
throw new ErrContractQuery(
'Error while retrieving the marketplace requirements'
);
throw new Error('Error while retrieving the marketplace requirements');
// throw new ErrContractQuery(
// 'Error while retrieving the marketplace requirements'
// );
}
}

@@ -303,7 +304,9 @@ export class DataNftMarket {
const returnValue = firstValue?.valueOf();
return new U8Value(returnValue).valueOf().toNumber();
}
throw new ErrContractQuery('Error while retrieving the number of offers');

throw new Error('Error while retrieving the number of offers');
// throw new ErrContractQuery('Error while retrieving the number of offers');
}

/**
@@ -322,9 +325,11 @@ export class DataNftMarket {
const returnValue = firstValue?.valueOf();
return new U64Value(returnValue).valueOf().toNumber();
}
throw new ErrContractQuery(
'Error while retrieving the last valid offer id'
);

throw new Error('Error while retrieving the last valid offer id');
// throw new ErrContractQuery(
// 'Error while retrieving the last valid offer id'
// );
}

/**
@@ -343,9 +348,10 @@ export class DataNftMarket {
const returnValue = firstValue?.valueOf();
return new BooleanValue(returnValue).valueOf();
} else {
throw new ErrContractQuery(
'Error while retrieving the contract pause state'
);
throw new Error('Error while retrieving the contract pause state');
// throw new ErrContractQuery(
// 'Error while retrieving the contract pause state'
// );
}
}

139 changes: 88 additions & 51 deletions src/minter.ts
@@ -27,12 +27,16 @@ import dataNftMintAbi from './abis/datanftmint.abi.json';
import { MinterRequirements } from './interfaces';
import { NFTStorage } from 'nft.storage';
import { File } from '@web-std/file';
import { checkTraitsUrl, checkUrlIsUp } from './utils';
import {
ErrArgumentNotSet,
ErrContractQuery,
ErrFailedOperation
} from './errors';
checkTraitsUrl,
checkUrlIsUp,
validateSpecificParamsMint
} from './utils';
// import {
// ErrArgumentNotSet,
// ErrContractQuery,
// ErrFailedOperation
// } from './errors';

export class DataNftMinter {
readonly contract: SmartContract;
@@ -110,7 +114,8 @@ export class DataNftMinter {
};
return requirements;
} else {
throw new ErrContractQuery('Could not retrieve requirements');
throw new Error('Could not retrieve minter contract requirements');
// throw new ErrContractQuery('Could not retrieve requirements');
}
}

@@ -130,9 +135,10 @@ export class DataNftMinter {
const returnValue = firstValue?.valueOf();
return new BooleanValue(returnValue).valueOf();
} else {
throw new ErrContractQuery(
'Error while retrieving the contract pause state'
);
throw new Error('Error while retrieving the contract pause state');
// throw new ErrContractQuery(
// 'Error while retrieving the contract pause state'
// );
}
}

@@ -177,16 +183,16 @@ export class DataNftMinter {
* For more information, see the [README documentation](https://github.com/Itheum/sdk-mx-data-nft#create-a-mint-transaction).
*
* @param senderAddress the address of the user
* @param tokenName the name of the DataNFT-FT
* @param dataMarshalUrl the url of the data marshal
* @param dataStreamUrl the url of the data stream to be encrypted
* @param dataPreviewUrl the url of the data preview
* @param royalties the royalties to be set for the Data NFT-FT
* @param supply the supply of the Data NFT-FT
* @param datasetTitle the title of the dataset
* @param datasetDescription the description of the dataset
* @param antiSpamTax the anti spam tax to be set for the Data NFT-FT with decimals
* @param options optional parameters
* @param tokenName the name of the DataNFT-FT. Between 3 and 20 alphanumeric characters, no spaces.
* @param dataMarshalUrl the url of the data marshal. A live HTTPS URL that returns a 200 OK HTTP code.
* @param dataStreamUrl the url of the data stream to be encrypted. A live HTTPS URL that returns a 200 OK HTTP code.
* @param dataPreviewUrl the url of the data preview. A live HTTPS URL that returns a 200 OK HTTP code.
* @param royalties the royalties to be set for the Data NFT-FT. A number between 0 and 50. This equates to a % value. e.g. 10%
* @param supply the supply of the Data NFT-FT. A number between 1 and 1000.
* @param datasetTitle the title of the dataset. Between 10 and 60 alphanumeric characters.
* @param datasetDescription the description of the dataset. Between 10 and 400 alphanumeric characters.
* @param antiSpamTax the anti spam tax to be set for the Data NFT-FT with decimals. Needs to be greater than 0 and should be obtained in real time via {@link viewMinterRequirements} prior to calling mint.
* @param options [optional] below parameters are all optional
* - imageUrl: the URL of the image for the Data NFT
* - traitsUrl: the URL of the traits for the Data NFT
* - nftStorageToken: the nft storage token to be used to upload the image and metadata to IPFS
@@ -220,16 +226,47 @@ export class DataNftMinter {
]
} = options ?? {};

// S: run any format specific validation
const { allPassed, validationMessages } = validateSpecificParamsMint({
senderAddress,
tokenName,
royalties,
supply,
datasetTitle,
datasetDescription,
antiSpamTax,
_mandatoryParamsList: [
'senderAddress',
'tokenName',
'royalties',
'supply',
'datasetTitle',
'datasetDescription',
'antiSpamTax'
]
});

if (!allPassed) {
throw new Error(`Params have validation issues = ${validationMessages}`);
// throw new ErrFailedOperation(
// this.mint.name,
// new Error(`params have validation issues = ${validationMessages}`)
// );
}
// E: run any format specific validation...

// deep validate all mandatory URLs
try {
await checkUrlIsUp(dataStreamUrl);
await checkUrlIsUp(dataPreviewUrl);
await checkUrlIsUp(dataMarshalUrl);
await checkUrlIsUp(dataStreamUrl, [200, 403]);
await checkUrlIsUp(dataPreviewUrl, [200]);
await checkUrlIsUp(dataMarshalUrl + '/health-check', [200]);
} catch (error) {
if (error instanceof Error) {
throw new ErrFailedOperation(this.mint.name, error);
} else {
throw new ErrFailedOperation(this.mint.name);
}
throw error;
// if (error instanceof Error) {
// throw new ErrFailedOperation(this.mint.name, error);
// } else {
// throw new ErrFailedOperation(this.mint.name);
// }
}

let imageOnIpfsUrl: string;
@@ -240,10 +277,13 @@ export class DataNftMinter {

if (!imageUrl) {
if (!nftStorageToken) {
throw new ErrArgumentNotSet(
'nftStorageToken',
throw new Error(
'NFT Storage token is required when not using custom image and traits'
);
// throw new ErrArgumentNotSet(
// 'nftStorageToken',
// 'NFT Storage token is required when not using custom image and traits'
// );
}
const { image, traits } = await this.createFileFromUrl(
`${this.imageServiceUrl}/v1/generateNFTArt?hash=${dataNftHash}`,
@@ -262,10 +302,11 @@ export class DataNftMinter {
metadataOnIpfsUrl = metadataIpfsUrl;
} else {
if (!traitsUrl) {
throw new ErrArgumentNotSet(
'traitsUrl',
'Traits URL is required when using custom image'
);
throw new Error('Traits URL is required when using custom image');
// throw new ErrArgumentNotSet(
// 'traitsUrl',
// 'Traits URL is required when using custom image'
// );
}

await checkTraitsUrl(traitsUrl);
@@ -323,13 +364,6 @@ export class DataNftMinter {
dataNFTStreamUrl: string,
dataMarshalUrl: string
): Promise<{ dataNftHash: string; dataNftStreamUrlEncrypted: string }> {
/*
1) Call the data marshal and get a encrypted data stream url and hash of url (s1)
2) Use the hash for to generate the gen img URL from the generative API (s2)
2.1) Save the new generative image to IPFS and get it's IPFS url (s3)
3) Mint the SFT via the Minter Contract (s4)
*/

const myHeaders = new Headers();
myHeaders.append('cache-control', 'no-cache');
myHeaders.append('Content-Type', 'application/json');
@@ -350,17 +384,19 @@ export class DataNftMinter {
dataNftStreamUrlEncrypted: data.encryptedMessage
};
} else {
throw new ErrFailedOperation(this.dataNFTDataStreamAdvertise.name);
throw new Error('Issue with data marshal generate payload');
// throw new ErrFailedOperation(this.dataNFTDataStreamAdvertise.name);
}
} catch (error) {
if (error instanceof Error) {
throw new ErrFailedOperation(
this.dataNFTDataStreamAdvertise.name,
error
);
} else {
throw new ErrFailedOperation(this.dataNFTDataStreamAdvertise.name);
}
throw error;
// if (error instanceof Error) {
// throw new ErrFailedOperation(
// this.dataNFTDataStreamAdvertise.name,
// error
// );
// } else {
// throw new ErrFailedOperation(this.dataNFTDataStreamAdvertise.name);
// }
}
}

@@ -376,8 +412,9 @@ export class DataNftMinter {
});
const dir = [image, traits];
res = await nftstorage.storeDirectory(dir);
} catch {
throw new ErrFailedOperation(this.storeToIpfs.name);
} catch (error) {
throw error;
// throw new ErrFailedOperation(this.storeToIpfs.name);
}
return {
imageOnIpfsUrl: `https://ipfs.io/ipfs/${res}/image.png`,
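
To tie the new mint-side validations and deep URL checks together, here is a hedged usage sketch. The positional parameter order mirrors the @param order in the mint JSDoc above; the constructor argument and the `senderAddress`/`antiSpamTax` placeholders are assumptions to align with the actual method signature, and the inline comments restate the documented constraints.

```typescript
// Hedged sketch only: parameter order follows the mint JSDoc above; the
// constructor argument and placeholder values are illustrative assumptions.
import { DataNftMinter } from '@itheum/sdk-mx-data-nft';

const minter = new DataNftMinter('devnet'); // environment string assumed

// `senderAddress` and `antiSpamTax` are placeholders, as in the README examples;
// antiSpamTax should be fetched in real time via viewMinterRequirements (see JSDoc above).
const tx = await minter.mint(
  senderAddress,                            // the minter's wallet address (type per SDK)
  'MYDATANFT',                              // tokenName: 3-20 alphanumeric chars, no spaces
  'https://my.data.marshal',                // dataMarshalUrl: /health-check must return 200
  'https://my.origin/stream',               // dataStreamUrl: must return 200 (403 allowed for protected streams)
  'https://my.origin/preview',              // dataPreviewUrl: must return 200
  10,                                       // royalties: whole number between 0 and 50 (%)
  100,                                      // supply: between 1 and 1000
  'My First Dataset Title',                 // datasetTitle: 10-60 alphanumeric chars
  'A plain description of the dataset',     // datasetDescription: 10-400 alphanumeric chars
  antiSpamTax,                              // > 0, with decimals, from viewMinterRequirements
  { nftStorageToken: 'nft-storage-token' }  // required when no custom imageUrl is supplied
);
```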
209 changes: 205 additions & 4 deletions src/utils.ts
@@ -91,7 +91,7 @@ export async function checkTraitsUrl(traitsUrl: string) {
}
}

export function validateSpecificParams(params: {
export function validateSpecificParamsViewData(params: {
signedMessage?: string | undefined;
signableMessage?: any;
stream?: boolean | undefined;
@@ -324,10 +324,211 @@ export function validateSpecificParams(params: {
};
}

export async function checkUrlIsUp(url: string) {
export function validateSpecificParamsMint(params: {
senderAddress?: any;
tokenName?: string | undefined;
datasetTitle?: string | undefined;
datasetDescription?: string | undefined;
royalties?: number | undefined;
supply?: number | undefined;
antiSpamTax?: number | undefined;
_mandatoryParamsList: string[]; // a pure JS fallback way to validate mandatory params, as typescript rules for mandatory can be bypassed by client app
}): {
allPassed: boolean;
validationMessages: string;
} {
let allPassed = true;
let validationMessages = '';

try {
// senderAddress test
let senderAddressValid = true;

if (
params.senderAddress !== undefined ||
params._mandatoryParamsList.includes('senderAddress')
) {
senderAddressValid = false;

if (params.senderAddress !== undefined) {
senderAddressValid = true;
} else {
validationMessages += '[senderAddress needs to be a valid type]';
}
}

// tokenName test
let tokenNameValid = true;

if (
params.tokenName !== undefined ||
params._mandatoryParamsList.includes('tokenName')
) {
tokenNameValid = false; // it exists or needs to exist, so we need to validate

if (
params.tokenName !== undefined &&
typeof params.tokenName === 'string' &&
params.tokenName.trim() !== '' &&
params.tokenName.trim().match(/^[a-zA-Z0-9]+$/) &&
params.tokenName.trim().length >= 3 &&
params.tokenName.trim().length <= 20
) {
tokenNameValid = true;
} else {
validationMessages +=
'[tokenName needs to be a string between 3 and 20 characters (Only alphanumeric characters allowed, no spaces allowed)]';
}
}

// datasetTitle test
let datasetTitleValid = true;

if (
params.datasetTitle !== undefined ||
params._mandatoryParamsList.includes('datasetTitle')
) {
datasetTitleValid = false; // it exists or needs to exist, so we need to validate

if (
params.datasetTitle !== undefined &&
typeof params.datasetTitle === 'string' &&
params.datasetTitle.trim() !== '' &&
params.datasetTitle.trim().match(/^[a-zA-Z0-9\s]+$/) &&
params.datasetTitle.trim().length >= 10 &&
params.datasetTitle.trim().length <= 60
) {
datasetTitleValid = true;
} else {
validationMessages +=
'[datasetTitle needs to be a string between 10 and 60 characters (Only alphanumeric characters)]';
}
}

// datasetDescription test
let datasetDescriptionValid = true;

if (
params.datasetDescription !== undefined ||
params._mandatoryParamsList.includes('datasetDescription')
) {
datasetDescriptionValid = false; // it exists or needs to exist, so we need to validate

if (
params.datasetDescription !== undefined &&
typeof params.datasetDescription === 'string' &&
params.datasetDescription.trim() !== '' &&
params.datasetDescription.trim().match(/^[a-zA-Z0-9\s]+$/) &&
params.datasetDescription.trim().length >= 10 &&
params.datasetDescription.trim().length <= 400
) {
datasetDescriptionValid = true;
} else {
validationMessages +=
'[datasetDescription needs to be a string between 10 and 400 characters (Only alphanumeric characters)]';
}
}

// royalties test
let royaltiesValid = true;

if (
params.royalties !== undefined ||
params._mandatoryParamsList.includes('royalties')
) {
royaltiesValid = false;

if (
params.royalties !== undefined &&
typeof params.royalties === 'number' &&
!(params.royalties % 1 != 0) && // modulus check: (10 % 1 != 0) is false, (10.5 % 1 != 0) is true, i.e. reject decimals
params.royalties >= 0 &&
params.royalties <= 50
) {
royaltiesValid = true;
} else {
validationMessages +=
'[royalties needs to be a whole number (not decimal) between 0 and 50]';
}
}

// supply test
let supplyValid = true;

if (
params.supply !== undefined ||
params._mandatoryParamsList.includes('supply')
) {
supplyValid = false;

if (
params.supply !== undefined &&
typeof params.supply === 'number' &&
params.supply >= 1 &&
params.supply <= 1000
) {
supplyValid = true;
} else {
validationMessages += '[supply needs to be a number between 1 and 1000]';
}
}

// antiSpamTax test
let antiSpamTaxValid = true;

if (
params.antiSpamTax !== undefined ||
params._mandatoryParamsList.includes('antiSpamTax')
) {
antiSpamTaxValid = false;

if (
params.antiSpamTax !== undefined &&
typeof params.antiSpamTax === 'number' &&
params.antiSpamTax >= 0
) {
antiSpamTaxValid = true;
} else {
validationMessages +=
'[antiSpamTax needs to be a number greater than or equal to 0]';
}
}

if (
!senderAddressValid ||
!tokenNameValid ||
!datasetTitleValid ||
!datasetDescriptionValid ||
!royaltiesValid ||
!supplyValid ||
!antiSpamTaxValid
) {
allPassed = false;
}
} catch (e: any) {
allPassed = false;
validationMessages = e.toString();
}

return {
allPassed,
validationMessages
};
}

export async function checkUrlIsUp(url: string, expectedHttpCodes: number[]) {
// also enforce that the URL is served over the https protocol
if (!url.trim().toLowerCase().includes('https://')) {
throw new Error(
`URLs need to be served via a 'https://' secure protocol : ${url}`
);
}

const response = await fetch(url);

if (!response.ok) {
throw new Error(`URL is not up: ${url}`);
if (!expectedHttpCodes.includes(response.status)) {
throw new Error(
`URL needs to return a 200 OK response code (or a 403 Forbidden error code is also allowed for protected Data Streams). url : ${url}, actual HTTP status: ${response.status}`
);
}
}
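
As a quick illustration of the new validator and URL check, a short sketch follows, grounded in the functions above; the parameter values are deliberately invalid examples and the example URL is a placeholder.

```typescript
// Sketch: calling the new mint validator directly with deliberately bad values;
// every failing check is appended to the aggregated validationMessages string.
import { checkUrlIsUp, validateSpecificParamsMint } from './utils';

const { allPassed, validationMessages } = validateSpecificParamsMint({
  senderAddress: undefined,   // mandatory but missing
  tokenName: 'my token!',     // spaces/punctuation are not allowed
  royalties: 10.5,            // must be a whole number between 0 and 50
  supply: 5000,               // must be between 1 and 1000
  datasetTitle: 'Too short',  // must be 10-60 alphanumeric characters
  datasetDescription: 'A plain description of the dataset',
  antiSpamTax: -1,            // must be greater than or equal to 0
  _mandatoryParamsList: [
    'senderAddress',
    'tokenName',
    'royalties',
    'supply',
    'datasetTitle',
    'datasetDescription',
    'antiSpamTax'
  ]
});

if (!allPassed) {
  console.log(validationMessages); // concatenated "[...]" failure messages for each invalid param
}

// checkUrlIsUp now takes the list of acceptable HTTP status codes explicitly
// and also rejects non-https URLs:
await checkUrlIsUp('https://my.origin/stream', [200, 403]);
```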
