diff --git a/.git_commit_template.txt b/.git_commit_template.txt deleted file mode 100755 index baf62a39..00000000 --- a/.git_commit_template.txt +++ /dev/null @@ -1,77 +0,0 @@ - - -# 1. `Separate subject from body with a blank line` -# 2. `Limit the subject line to 50 characters` -# 3. `Capitalize the subject line` -# 4. `Do not end the subject line with a period` -# 5. `Use the imperative mood in the subject line` -# 6. `Wrap the body at 72 characters` -# 7. `Use the body to explain what and why vs. how` - -# # For example # - -# 1. Fix typo in introduction to user guide - -# 5. Refactor subsystem X for readability -# Update getting started documentation -# Remove deprecated methods -# Release version 1.0.0 - -# Subject line should always be able to complete the following sentence: -# If applied, this commit will `your subject line here` -# If applied, this commit will refactor subsystem X for readability -# If applied, this commit will update getting started documentation -# If applied, this commit will remove deprecated methods -# If applied, this commit will release version 1.0.0 -# If applied, this commit will merge pull request #123 from user/branch - -# Example-1: ------------------------------------------------------ - -# Summarize changes in around 50 characters or less - -# More detailed explanatory text, if necessary. Wrap it to about 72 -# characters or so. In some contexts, the first line is treated as the -# subject of the commit and the rest of the text as the body. The -# blank line separating the summary from the body is critical (unless -# you omit the body entirely); various tools like `log`, `shortlog` -# and `rebase` can get confused if you run the two together. - -# Explain the problem that this commit is solving. Focus on why you -# are making this change as opposed to how (the code explains that). -# Are there side effects or other unintuitive consequences of this -# change? Here's the place to explain them. - -# Further paragraphs come after blank lines. - -# - Bullet points are okay, too - -# - Typically a hyphen or asterisk is used for the bullet, preceded -# by a single space, with blank lines in between, but conventions -# vary here - -# If you use an issue tracker, put references to them at the bottom, -# like this: - -# Resolves: #123 -# See also: #456, #789 - -# Example-2: ------------------------------------------------------ -# Simplify serialize.h's exception handling - -# Remove the 'state' and 'exceptmask' from serialize.h's stream -# implementations, as well as related methods. - -# As exceptmask always included 'failbit', and setstate was always -# called with bits = failbit, all it did was immediately raise an -# exception. Get rid of those variables, and replace the setstate -# with direct exception throwing (which also removes some dead -# code). - -# As a result, good() is never reached after a failure (there are -# only 2 calls, one of which is in tests), and can just be replaced -# by !eof(). - -# fail(), clear(n) and exceptions() are just never called. Delete -# them. 
- -# Link: https://chris.beams.io/posts/git-commit/ diff --git a/.gitmodules b/.gitmodules index 0abde01d..2d0576a3 100644 --- a/.gitmodules +++ b/.gitmodules @@ -1,3 +1,3 @@ [submodule "orcid-authentication"] path = orcid-authentication - url = https://github.com/avatar-lavventura/orcid-authentication + url = https://github.com/avatar-lavventura/orcid-authentication \ No newline at end of file diff --git a/Dockerfile b/Dockerfile index 221f9bc7..b692c250 100644 --- a/Dockerfile +++ b/Dockerfile @@ -1,16 +1,16 @@ # syntax=docker/dockerfile:1 FROM golang:latest RUN apt-get install -y ca-certificates -RUN wget --no-check-certificate -q "https://dist.ipfs.io/go-ipfs/v0.11.0/go-ipfs_v0.11.0_linux-amd64.tar.gz" \ - && tar -xvf "go-ipfs_v0.11.0_linux-amd64.tar.gz" \ - && rm -f go-ipfs_v0.11.0_linux-amd64.tar.gz -WORKDIR go-ipfs -RUN make install \ +RUN wget --no-check-certificate -q "https://dist.ipfs.io/go-ipfs/v0.13.0/go-ipfs_v0.13.0_linux-amd64.tar.gz" \ + && tar -xf "go-ipfs_v0.13.0_linux-amd64.tar.gz" \ + && rm -f go-ipfs_v0.13.0_linux-amd64.tar.gz \ + && cd go-ipfs \ + && make install \ && ./install.sh -RUN git clone https://github.com/prasmussen/gdrive.git /workspace -WORKDIR /workspace/gdrive -RUN go env -w GO111MODULE=auto \ +RUN git clone https://github.com/prasmussen/gdrive.git /workspace/gdrive \ + && cd /workspace/gdrive \ + && go env -w GO111MODULE=auto \ && go get github.com/prasmussen/gdrive FROM python:3.7 @@ -21,10 +21,37 @@ ARG DEBIAN_FRONTEND=noninteractive ARG DEBCONF_NOWARNINGS="yes" EXPOSE 6817 6818 6819 6820 3306 -## ebloc-broker -=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=- +COPY --from=0 /go /go +COPY --from=0 /usr/local/bin /usr/local/bin +COPY --from=0 /usr/local/go /usr/local/go +COPY --from=0 /workspace/gdrive /workspace/gdrive + +ENV GOPATH=/go +ENV GOROOT=/usr/local/go +ENV PATH /go/bin:/usr/local/go/bin:$PATH + +## Add Tini +## ======== +ENV TINI_VERSION v0.19.0 +ADD https://github.com/krallin/tini/releases/download/${TINI_VERSION}/tini /tini +RUN chmod +x /tini + +## mongodb +## ======= +RUN curl -fsSL https://www.mongodb.org/static/pgp/server-4.4.asc | apt-key add - \ + && echo "deb [ arch=amd64,arm64 ] https://repo.mongodb.org/apt/ubuntu focal/mongodb-org/4.4 multiverse" | \ + tee /etc/apt/sources.list.d/mongodb-org-4.4.list \ + && apt-get update \ + && apt-get install -y mongodb-org \ + && mkdir -p /data/db \ + && chown -R mongodb. /var/log/mongodb \ + && chown -R mongodb. /var/lib/mongodb \ + && chown mongodb:mongodb /data/db + RUN apt-get update \ && apt-get install -y --no-install-recommends --assume-yes apt-utils \ && apt-get install -y --no-install-recommends --assume-yes \ + aptitude \ build-essential \ libdbus-1-dev \ libdbus-glib-1-dev \ @@ -40,8 +67,8 @@ RUN apt-get update \ npm \ nodejs \ python3-venv \ - # sudo \ - # slurm-packages + sudo \ + ## required packages to install for Slurm gcc \ munge \ libmunge-dev \ @@ -72,44 +99,15 @@ RUN python3 -m venv /opt/venv #: enable venv ENV PATH="/opt/venv/bin:$PATH" -WORKDIR /workspace -RUN git clone https://github.com/ebloc/ebloc-broker.git -WORKDIR /workspace/ebloc-broker -#: `pip install -e .` takes few minutes -RUN git checkout dev >/dev/null 2>&1 \ - && git fetch --all --quiet >/dev/null 2>&1 \ - && git pull --all -r -v >/dev/null 2>&1 \ - && pip install --upgrade pip \ - && pip install -U pip wheel setuptools \ - && pip install -e . 
--use-deprecated=legacy-resolver \ - && eblocbroker >/dev/null 2>&1 \ - && ./broker/_utils/yaml.py >/dev/null 2>&1 - -COPY --from=0 /go /go -COPY --from=0 /usr/local/bin /usr/local/bin -COPY --from=0 /usr/local/go /usr/local/go -COPY --from=0 /workspace/gdrive /workspace/gdrive - -ENV GOPATH=/go -ENV GOROOT=/usr/local/go -ENV PATH /go/bin:/usr/local/go/bin:$PATH - -# Instal SLURM -# -=-=-=-=-=-= -# Add Tini -ENV TINI_VERSION v0.19.0 -ADD https://github.com/krallin/tini/releases/download/${TINI_VERSION}/tini /tini -RUN chmod +x /tini - +## Install SLURM +## -=-=-=-=-=-= RUN git config --global advice.detachedHead false WORKDIR /workspace -ARG JOBS=4 RUN git clone -b slurm-19-05-8-1 --single-branch --depth 1 https://github.com/SchedMD/slurm.git \ && cd slurm \ - && ./configure --prefix=/usr --sysconfdir=/etc/slurm --enable-slurmrestd \ - --with-mysql_config=/usr/bin --libdir=/usr/lib64 \ + && ./configure --prefix=/usr --sysconfdir=/etc/slurm --with-mysql_config=/usr/bin --libdir=/usr/lib64 \ && make \ - && make -j ${JOBS} install \ + && make -j 4 install \ && install -D -m644 etc/cgroup.conf.example /etc/slurm/cgroup.conf.example \ && install -D -m644 etc/slurm.conf.example /etc/slurm/slurm.conf.example \ && install -D -m600 etc/slurmdbd.conf.example /etc/slurm/slurmdbd.conf.example \ @@ -129,51 +127,57 @@ RUN git clone -b slurm-19-05-8-1 --single-branch --depth 1 https://github.com/Sc /var/log/slurm \ /var/run/slurm +## ebloc-broker +# -=-=-=-=-=-=- +WORKDIR /workspace +RUN git clone https://github.com/ebloc/ebloc-broker.git +WORKDIR /workspace/ebloc-broker +#: `pip install -e .` takes a few minutes +RUN git checkout dev >/dev/null 2>&1 \ + && git fetch --all --quiet >/dev/null 2>&1 \ + && git pull --all -r -v >/dev/null 2>&1 \ + && pip install --upgrade pip \ + && pip install -U pip wheel setuptools \ + && pip install -e . --use-deprecated=legacy-resolver \ + && mkdir -p ~/.cache/black/$(pip freeze | grep black | sed 's|black==||g') \ + && eblocbroker >/dev/null 2>&1 \ + && ./broker/_utils/yaml.py >/dev/null 2>&1 + +WORKDIR /workspace/ebloc-broker/empty_folder +RUN brownie init \ + && cd ~ \ + && rm -rf /workspace/ebloc-broker/empty_folder \ + && /workspace/ebloc-broker/broker/python_scripts/add_bloxberg_into_network_config.py \ + && cd /workspace/ebloc-broker/contract \ + && brownie compile + +# organize slurm files RUN chown root:munge -R /etc/munge /etc/munge/munge.key /var/lib/munge # works but root is alright?
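# note: a minimal sketch of building and entering this image by hand (the
# `ebb:latest` tag follows the repo's docker-compose workflow; adjust to taste):
#   docker build -t ebb:latest .
#   docker run --rm -it ebb:latest /bin/bash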
WORKDIR /var/log/slurm WORKDIR /var/run/supervisor -COPY broker/_slurm/files/supervisord.conf /etc/ +COPY docker/slurm/files/supervisord.conf /etc/ -# Mark externally mounted volumes +# mark externally mounted volumes VOLUME ["/var/lib/mysql", "/var/lib/slurmd", "/var/spool/slurm", "/var/log/slurm", "/run/munge"] - -COPY --chown=slurm broker/_slurm/files/slurm/slurm.conf /etc/slurm/slurm.conf -COPY --chown=slurm broker/_slurm/files/slurm/gres.conf /etc/slurm/gres.conf -COPY --chown=slurm broker/_slurm/files/slurm/slurmdbd.conf /etc/slurm/slurmdbd.conf +COPY --chown=slurm docker/slurm/files/slurm/slurm.conf /etc/slurm/slurm.conf +COPY --chown=slurm docker/slurm/files/slurm/gres.conf /etc/slurm/gres.conf +COPY --chown=slurm docker/slurm/files/slurm/slurmdbd.conf /etc/slurm/slurmdbd.conf RUN chmod 0600 /etc/slurm/slurmdbd.conf -## mongodb -RUN curl -fsSL https://www.mongodb.org/static/pgp/server-4.4.asc | apt-key add - \ - && echo "deb [ arch=amd64,arm64 ] https://repo.mongodb.org/apt/ubuntu focal/mongodb-org/4.4 multiverse" | \ - tee /etc/apt/sources.list.d/mongodb-org-4.4.list \ - && apt-get update \ - && apt-get install -y mongodb-org \ - && mkdir -p /data/db \ - && chown -R mongodb. /var/log/mongodb \ - && chown -R mongodb. /var/lib/mongodb \ - && chown mongodb:mongodb /data/db - -# RUN git clone --depth=1 https://github.com/romkatv/powerlevel10k.git ~/powerlevel10k \ -# && echo "source ~/powerlevel10k/powerlevel10k.zsh-theme" >> ~/.zshrc \ - -## Finally -WORKDIR /workspace/ebloc-broker/broker -RUN apt-get clean \ - && apt-get autoremove \ - && apt-get autoclean \ +## finally # sysctl -w net.core.rmem_max=2500000 ? +RUN gdrive version \ && ipfs version \ - && ganache --version + && ipfs init \ + && ipfs config Reprovider.Strategy roots \ + && ipfs config Routing.Type none \ + && ganache --version \ + && /workspace/ebloc-broker/broker/bash_scripts/ubuntu_clean.sh >/dev/null 2>&1 \ + && echo "alias ls='ls -h --color=always -v --author --time-style=long-iso'" >> ~/.bashrc \ + && echo "export SQUEUE_FORMAT=\"%8i %9u %5P %2t %12M %12l %5D %3C %30j\"" >> ~/.bashrc \ + && du -sh / 2>&1 | grep -v "cannot" -COPY broker/_slurm/docker-entrypoint.sh /usr/local/bin/docker-entrypoint.sh -ENTRYPOINT ["/tini", "--", "/usr/local/bin/docker-entrypoint.sh"] +WORKDIR /workspace/ebloc-broker/broker CMD ["/bin/bash"] -# -=-=-=-=-=-=-=-=-=-=-=-=- DELETE -=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-= -# COPY --from=1 /opt/venv /opt/venv # /opt/venv/bin -# COPY --from=1 /usr/local/lib/node_modules /usr/local/lib/node_modules -# COPY --from=1 /usr/local/bin /usr/local/bin -# COPY --from=1 /workspace/ebloc-broker /workspace/ebloc-broker - -# COPY --from=1 /usr/local/sbin/ /usr/local/sbin/ -# COPY --from=1 /usr/local/bin /usr/local/bin -# COPY --from=1 /usr/local/lib /usr/local/lib +COPY docker/slurm/docker-entrypoint.sh /usr/local/bin/docker-entrypoint.sh +ENTRYPOINT ["/tini", "--", "/usr/local/bin/docker-entrypoint.sh"] diff --git a/README.org b/README.org index 9146f3dd..bbdf9707 100755 --- a/README.org +++ b/README.org @@ -1,14 +1,16 @@ * eBlocBroker -eBlocBroker is a blockchain based autonomous computational resource broker. +eBlocBroker is a smart contract that applies blockchain technology to provide +computational and data resources to research communities.
-** Website: [[http://ebloc.cmpe.boun.edu.tr]] -# http://ebloc.org +# ** Website: [[http://ebloc.cmpe.boun.edu.tr]] +# # http://ebloc.org # - [[http://ebloc.cmpe.boun.edu.tr:3003/index.html][Documentation]] ** Prerequisites -- [[https://github.com/SchedMD/slurm][Slurm]],[[https://geth.ethereum.org/docs/getting-started][Geth]], +- [[https://github.com/SchedMD/slurm][Slurm]], + [[https://geth.ethereum.org/docs/getting-started][Geth]], [[https://ipfs.io][IPFS]], [[https://github.com/prasmussen/gdrive][prasmussen/gdrive]], [[https://github.com/owncloud/pyocclient][owncloud/pyocclient]], @@ -20,22 +22,19 @@ eBlocBroker is a blockchain based autonomous computational resource broker. You can use a sandbox container provided in the [[./docker-compose.yml]] file for testing inside a Docker environment. -This container provides everything you need to test using a Python 3.7 interpreter. - -Start the test environment: +This container provides everything you need to test using a ~Python 3.7~ interpreter. Start the test environment: #+begin_src bash -docker build -t ebb:latest . --progress plain docker-compose up -d #+end_src -To enter the shell of the running container in interactive mode, run: +To enter the shell of the running container in interactive mode, run: #+begin_src bash -docker exec -it ebloc-broker_slurm_1 /bin/bash +docker exec --detach-keys="ctrl-e,e" -it ebloc-broker_slurm_1 /bin/bash #+end_src -To stop the cluster container, run: +To stop the container, run: #+begin_src bash docker-compose down @@ -49,9 +48,9 @@ docker-compose down First, from [[https://b2access.eudat.eu/home/][B2ACCESS home page]] -~No account? Signup~ => ~Create B2ACCESS user account (username) only~ +~No account? SignUp~ => ~Create B2ACCESS user account (username) only~ -- [[b2drop.eudat.eu][B2DROP login site]] +- [[https://b2drop.eudat.eu/][B2DROP login site]] **** Create app password @@ -71,7 +70,6 @@ Next, type ~eblocbroker --help~ for basic usage information. *** Submit Job - In order to submit a job, each user should already be registered with eBlocBroker. You can use [[./broker/eblocbroker/register_requester.py]] to register. Please update the following arguments inside ~register.yaml~. @@ -117,13 +115,11 @@ config: - ~cache_type~ should be a value from [ ~public~, ~private~ ] - ~storage_id~ should be a value from [ ~ipfs~, ~ipfs_gpg~, ~none~, ~eudat~, ~gdrive~ ] --------------- +-------------------------------- ** Provider -Provider should run: [[./eblocbroker.py]] driver Python script. - -~$ ./eblocbroker.py driver~ +Each provider should run ~eblocbroker driver~ to start the driver Python script.
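For example, a minimal provider session might look like the following sketch (assuming the ~eblocbroker~ entry point is installed on the ~PATH~, as done by ~pip install -e .~ in the Dockerfile):

#+begin_src bash
eblocbroker --help    # print basic usage information
eblocbroker driver    # start the provider driver
#+end_src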
*** Screenshot of provider GUI: diff --git a/broker/Driver.py b/broker/Driver.py index bf6d857f..cd93a3a8 100755 --- a/broker/Driver.py +++ b/broker/Driver.py @@ -114,12 +114,12 @@ def _tools(block_continue): # noqa if not output: log( - f"E: provider's registered gmail=[magenta]{gmail}[/magenta] does not match\n" - f" with the set gdrive's gmail=[magenta]{gdrive_gmail}[/magenta]" + f"E: provider's registered gmail=[m]{gmail}[/m] does not match\n" + f" with the set gdrive's gmail=[m]{gdrive_gmail}[/m]" ) raise QuietExit - log(f"==> provider_gmail=[magenta]{gmail}") + log(f"==> provider_gmail=[m]{gmail}") if env.IS_IPFS_USE: if not os.path.isfile(env.GPG_PASS_FILE): @@ -171,6 +171,7 @@ def __init__(self): self.requester_id: str = "" self.storage_duration: List[int] = [] self.received_block: List[int] = [] + self.is_cached = {} #: indicates Lock check for the received job whether received or not self.is_provider_received_job = False @@ -210,8 +211,8 @@ def process_logged_job(self, idx): index = self.logged_job.args["index"] self.job_block_number = self.logged_job["blockNumber"] self.cloud_storage_id = self.logged_job.args["cloudStorageID"] - log(f"## job_key=[magenta]{job_key}[/magenta] | index={index}", "b") - log(f" received_block_number={self.job_block_number}", "b") + log(f"## job_key=[m]{job_key}[/m] | index={index}", "b") + log(f" received_bn={self.job_block_number}", "b") log(f" tx_hash={self.logged_job['transactionHash'].hex()} | log_index={self.logged_job['logIndex']}", "b") log(f" provider={self.logged_job.args['provider']}", "b") log(f" received={self.logged_job.args['received']}", "b") @@ -301,7 +302,7 @@ def process_logged_jobs(self): except Exception as e: print_tb(e) log(str(e)) - breakpoint() # DEBUG + # breakpoint() # DEBUG def run_driver(given_bn): diff --git a/broker/TODO.org b/broker/TODO.org index 41fa8a12..0fa5e75d 100644 --- a/broker/TODO.org +++ b/broker/TODO.org @@ -2,3 +2,4 @@ * TASKS ** TODO patch eudat and google-drive ** TODO verify downloaded data +** TODO re-run the tests diff --git a/broker/_slurm/DAG/dag.py b/broker/_slurm/DAG/dag.py index 9d4c9322..2a4c3910 100755 --- a/broker/_slurm/DAG/dag.py +++ b/broker/_slurm/DAG/dag.py @@ -1,7 +1,5 @@ #!/usr/bin/env python3 -# from random import randint - import networkx as nx G = nx.DiGraph() @@ -20,3 +18,4 @@ # now draw the graph: pos = {0: (0, 0), 1: (1, 1), 2: (-1, 1), 3: (0, 2), 4: (2, 2)} nx.draw(G, pos, edge_color="r") +print("end") diff --git a/broker/_slurm/DAG/ex.py b/broker/_slurm/DAG/ex.py new file mode 100755 index 00000000..4d97fbb4 --- /dev/null +++ b/broker/_slurm/DAG/ex.py @@ -0,0 +1,55 @@ +#!/usr/bin/env python3 + +import matplotlib.pyplot as plt +import networkx as nx + + +def ex_1(): + G = nx.DiGraph() + G.add_edges_from( + [("A", "B"), ("A", "C"), ("D", "B"), ("E", "C"), ("E", "F"), ("B", "H"), ("B", "G"), ("B", "F"), ("C", "G")] + ) + + val_map = {"A": 1.0, "D": 0.5714285714285714, "H": 0.0} + + values = [val_map.get(node, 0.25) for node in G.nodes()] + + # Specify the edges you want here + red_edges = [("A", "C"), ("E", "C")] + # edge_colours = ["black" if not edge in red_edges else "red" for edge in G.edges()] + black_edges = [edge for edge in G.edges() if edge not in red_edges] + + # Need to create a layout when doing + # separate calls to draw nodes and edges + pos = nx.spring_layout(G) + nx.draw_networkx_nodes(G, pos, cmap=plt.get_cmap("jet"), node_color=values, node_size=500) + nx.draw_networkx_labels(G, pos) + nx.draw_networkx_edges(G, pos, edgelist=red_edges, edge_color="r", arrows=True) + 
nx.draw_networkx_edges(G, pos, edgelist=black_edges, arrows=False) + plt.show() + + +def ex_2(): + G = nx.DiGraph() + G.add_edges_from([("A", "B")]) + + val_map = {"A": 1.0, "D": 0.5714285714285714, "H": 0.0} + + values = [val_map.get(node, 0.25) for node in G.nodes()] + + # Specify the edges you want here + # red_edges = [("A", "C"), ("E", "C")] + # edge_colours = ["black" if not edge in red_edges else "red" for edge in G.edges()] + # black_edges = [edge for edge in G.edges() if edge not in red_edges] + + # Need to create a layout when doing + # separate calls to draw nodes and edges + pos = nx.spring_layout(G) + nx.draw_networkx_nodes(G, pos, cmap=plt.get_cmap("jet"), node_color=values, node_size=500) + nx.draw_networkx_labels(G, pos) + # nx.draw_networkx_edges(G, pos, edgelist=red_edges, edge_color="r", arrows=True) + # nx.draw_networkx_edges(G, pos, edgelist=black_edges, arrows=False) + plt.show() + + +ex_2() diff --git a/broker/_slurm/README.org b/broker/_slurm/README.org index 631e31f2..e7f80d48 100644 --- a/broker/_slurm/README.org +++ b/broker/_slurm/README.org @@ -5,6 +5,7 @@ - [[https://stackoverflow.com/a/40707189/2402577]] - [[https://www.hpc.caltech.edu/documentation/slurm-commands]] - [[https://rolk.github.io/2015/04/20/slurm-cluster]] + ** Package install #+begin_src bash @@ -90,7 +91,7 @@ sacctmgr create user $user_name defaultaccount=$user_name adminlevel=None --imme sacctmgr remove user where user=user_name #+end_src ------------------- +--------------------------------------------------------------------------------------- *** Check registered provider and users @@ -131,7 +132,6 @@ alper question Can Slurm emulate a larger cluster?), it appears so – damienfrancois #+end_src - ** DOCKER #+begin_src bash diff --git a/broker/_utils/_log.py b/broker/_utils/_log.py index 61bfcefc..22543bed 100644 --- a/broker/_utils/_log.py +++ b/broker/_utils/_log.py @@ -18,17 +18,17 @@ install() # for rich, show_locals=True # pretty.install() +IS_WRITE = True # if False disable write into file for the process DRIVER_LOG = None IS_THREADING_MODE_PRINT = False thread_log_files: Dict[str, str] = {} custom_theme = Theme( { - "info": "dim cyan", - "warning": "magenta", + "info": "bold magenta", + # "info": "bold dim magenta", "danger": "bold red", "b": "bold", "m": "magenta", - # "magenta": "#ff79c6", } ) console = Console(theme=custom_theme) @@ -63,29 +63,23 @@ def __init__(self): self.LOG_FILENAME: Union[str, pathlib.Path] = "" self.console: Dict[str, Console] = {} - def print_color(self, text: str, color=None, is_bold=True, end=None) -> None: + def print_color(self, text: str, color=None, is_bold=True, end="\n") -> None: """Print string in color format.""" if text[0:3] in ["==>", "#> ", "## "]: if color and text == "==> ": - print(f"[bold {color}]{text[0:3]}[/bold {color}]", end="", flush=True) + console.print(f"[bold][{color}]{text[0:3]}[{color}][/bold]", end="") else: - print(f"[bold blue]{text[0:3]}[/bold blue]", end="", flush=True) + console.print(f"[bold blue]{text[0:3]}[/bold blue]", end="") text = text[3:] elif text[0:2] == "E:": - print("[bold red]E:[/bold red]", end="", flush=True) + console.print("[bold red]E:[/bold red]", end="") text = text[2:] - if end is None: - if is_bold: - print(f"[bold {color}]{text}[/bold {color}]") - else: - print(f"[{color}]{text}[/{color}]") + if is_bold: + console.print(f"[bold][{color}]{text}[{color}][/bold]", end=end) else: - if is_bold: - print(f"[bold {color}]{text}[/bold {color}]", end="", flush=True) - else: - print(f"[{color}]{text}[/{color}]", 
end="") + console.print(f"[{color}]{text}[/{color}]", end=end) def pre_color_check(self, text, color, is_bold): """Check color for substring.""" @@ -164,14 +158,14 @@ def console_ruler(msg="", character="=", color="cyan", fn=""): ll.console[fn] = Console(file=open(fn, "a"), force_terminal=True, theme=custom_theme) if msg: - console.rule(f"[bold {color}]{msg}", characters=character) - ll.console[fn].rule(f"[bold {color}]{msg}", characters=character) + console.rule(f"[bold][{color}]{msg}", characters=character) + ll.console[fn].rule(f"[bold][{color}]{msg}", characters=character) else: console.rule(characters=character) ll.console[fn].rule(characters=character) -def _log(text, color, is_bold, flush, fn, end, is_write=True, is_output=True): +def _log(text, color, is_bold, fn, end="\n", is_write=True, is_output=True): if not is_output: is_print = is_output else: @@ -191,11 +185,8 @@ def _log(text, color, is_bold, flush, fn, end, is_write=True, is_output=True): if is_print: if not IS_THREADING_MODE_PRINT or threading.current_thread().name == "MainThread": if is_bullet: - print( - f"[bold {_color}]{is_r}{text[:_len]}[/bold {_color}][{color}]{text[_len:]}[/{color}]", - end=end, - flush=flush, - ) + _msg = f"[bold][{_color}]{is_r}{text[:_len]}[/{_color}][/bold][{color}]{text[_len:]}[/{color}]" + console.print(_msg, end=end) else: ll.print_color(str(text), color, is_bold=is_bold, end=end) @@ -205,22 +196,22 @@ def _log(text, color, is_bold, flush, fn, end, is_write=True, is_output=True): _text = text[_len:] _text = text[_len:] - if is_write: + if is_write and IS_WRITE: if is_bullet: ll.console[fn].print( - f"[bold {_color}]{is_r}{text[:_len]}[/bold {_color}][{color}]{_text}[/{color}]", + f"[bold][{_color}]{is_r}{text[:_len]}[/{_color}][/bold][{color}]{_text}[/{color}]", end=end, soft_wrap=True, ) else: if color: - ll.console[fn].print(f"[bold {color}]{_text}[/bold {color}]", end="", soft_wrap=True) + ll.console[fn].print(f"[bold][{color}]{_text}[/{color}][/bold]", end=end, soft_wrap=True) else: - ll.console[fn].print(_text, end="", soft_wrap=True) + ll.console[fn].print(_text, end=end, soft_wrap=True) else: text_to_write = "" if is_bullet: - text_to_write = f"[bold {_color}]{is_r}{_text[:_len]}[/bold {_color}][bold]{_text[_len:]}[/bold]" + text_to_write = f"[bold][{_color}]{is_r}{_text[:_len]}[/{_color}][/bold][bold]{_text[_len:]}[/bold]" else: if _color: text_to_write = f"[{_color}]{_text}[/{_color}]" @@ -228,28 +219,17 @@ def _log(text, color, is_bold, flush, fn, end, is_write=True, is_output=True): text_to_write = _text if is_print: - if end == "": - print(text_to_write, end="") - else: - print(text_to_write, flush=flush) + console.print(text_to_write, end=end) - if is_write: + if is_write and IS_WRITE: ll.console[fn].print(text_to_write, end=end, soft_wrap=True) - if end is None: - if is_write: - ll.console[fn].print("") - - if color and is_bullet: - print() - def log( text="", color=None, fn=None, - end=None, - flush=False, + end="\n", is_write=True, where_back=0, is_code=False, @@ -263,9 +243,10 @@ def log( * colors: __ https://rich.readthedocs.io/en/latest/appendix/colors.html#appendix-colors - :param text: string to print - :param color: color of the complete string - :param fn: filename to write + :param end: (str, optional) Character to write at end of output. Defaults to "\\n". 
+ :param text: String to print + :param color: Color of the complete string + :param fn: Filename to write """ is_bold: bool = False if color in ["bold", "b"]: @@ -301,7 +282,7 @@ def log( if is_align: text = "\n".join(textwrap.wrap(text, 80, break_long_words=False, break_on_hyphens=False)) - if is_write: + if is_write and IS_WRITE: if threading.current_thread().name != "MainThread" and cfg.IS_THREADING_ENABLED: fn = thread_log_files[threading.current_thread().name] elif not fn: @@ -319,7 +300,7 @@ def log( if isinstance(text, list): pprint(text) - if is_write: + if is_write and IS_WRITE: ll.console[fn].print(text) elif isinstance(text, dict): if max_depth: @@ -327,10 +308,10 @@ def log( else: pprint(text) - if is_write: + if is_write and IS_WRITE: ll.console[fn].print(text) else: - _log(text, color, is_bold, flush, fn, end, is_write, is_output) + _log(text, color, is_bold, fn, end, is_write, is_output) def WHERE(back=0): diff --git a/broker/_utils/tools.py b/broker/_utils/tools.py index 823f86d8..baf49407 100755 --- a/broker/_utils/tools.py +++ b/broker/_utils/tools.py @@ -138,7 +138,7 @@ def print_tb(message=None, is_print_exc=True) -> None: sep_terminate = "raise Terminate" tb_text = "".join(traceback.format_exc()) if sep_terminate in tb_text: - tb_text = tb_text.split(sep_terminate, 1)[0] + "raise [magenta]Terminate[/magenta]()" + tb_text = tb_text.split(sep_terminate, 1)[0] + "raise [m]Terminate[/m]()" if is_print_exc and tb_text != "NoneType: None\n": log(tb_text.rstrip(), "bold", where_back=1) @@ -167,12 +167,12 @@ def _remove(path: str, is_verbose=False) -> None: shutil.rmtree(path) else: if is_verbose: - log(f"warning: {WHERE(1)} Nothing removed, following path does not exist:\n[magenta]{path}") + log(f"warning: {WHERE(1)} Nothing removed, the following path does not exist:\n[m]{path}") return if is_verbose: - log(f"#> {WHERE(1)} following path:\n[magenta]{path}[/magenta] is removed") + log(f"#> {WHERE(1)} the following path:\n[m]{path}[/m] was removed") except OSError as e: # Suppress the exception if it is a file not found error. # Otherwise, re-raise the exception.
@@ -251,7 +251,7 @@ def _percent_change(initial: float, final=None, change=None, decimal: int = 2): return 0.0 -def percent_change(initial, change, _decimal=8, end=None, is_arrow_print=True): +def percent_change(initial, change, _decimal=8, end=None, is_arrow_print=True, color=None): """Calculate the changed percent.""" try: initial = float(initial) @@ -263,11 +263,14 @@ def percent_change(initial, change, _decimal=8, end=None, is_arrow_print=True): percent = _percent_change(initial=initial, change=change, decimal=_decimal) if percent == -0.0: change = 0.0 - color = "white" + if not color: + color = "white" elif percent > 0: - color = "green" + if not color: + color = "green" else: - color = "red" + if not color: + color = "red" if abs(float(change)) < 0.1: change = "{0:.8f}".format(float(change)) @@ -395,7 +398,7 @@ def is_process_on(process_name, name="", process_count=0, port=None, is_print=Tr name = name.replace("\\", "").replace(">", "").replace("<", "") if is_print: - print_tb(f"[bold green]{name}[/bold green] is not running on the background {WHERE(1)}") + print_tb(f"[bold green]{name}[/bold green] is not running in the background {WHERE(1)}") return False diff --git a/broker/bash_scripts/clean_for_new_test.sh b/broker/bash_scripts/clean_for_new_test.sh index a510462c..55cfbfe3 100755 --- a/broker/bash_scripts/clean_for_new_test.sh +++ b/broker/bash_scripts/clean_for_new_test.sh @@ -65,7 +65,7 @@ rm -f $BASE/package-lock.json rm -rf docs/_build_html/ rm -rf docs/_build/ -rm /tmp/run/driver_popen.pid +rm -f /tmp/run/driver_popen.pid >/dev/null 2>&1 rm -f ~/.ebloc-broker/.oc_client.pckl rm -f /var/ebloc-broker/cache/*.tar.gz # rm -f .oc.pckl @@ -79,7 +79,7 @@ echo "#> Running: ~/ebloc-broker/broker/python_scripts/clean_gdrive.py" ~/ebloc-broker/broker/python_scripts/clean_gdrive.py echo "[ OK ]" -for i in `gpg --list-keys --with-colons --fingerprint | sed -n 's/^fpr:::::::::\([[:alnum:]]\+\):/\1/p'`; do +for i in `gpg --list-keys --with-colons --fingerprint | sed -n 's/^fpr:::::::::\([[:alnum:]]\+\):/\1/p'`; do gpg --batch --delete-key "$i" 2>/dev/null done diff --git a/broker/bash_scripts/decrypt_gpg.sh b/broker/bash_scripts/decrypt_gpg.sh index 66b14126..5151c39c 100755 --- a/broker/bash_scripts/decrypt_gpg.sh +++ b/broker/bash_scripts/decrypt_gpg.sh @@ -1,6 +1,7 @@ #!/bin/bash -for dir in */*; do - gpg --verbose --batch --yes --output=$(echo $dir | rev | cut -c5- | rev) --pinentry-mode loopback --passphrase-file=/home/alper/.ebloc-broker/.gpg_pass.txt --decrypt "$dir" +for fn in */*; do + echo "$fn" + gpg --verbose --batch --yes --output=$(echo $fn | rev | cut -c5- | rev) --pinentry-mode loopback --passphrase-file=/home/alper/.ebloc-broker/.gpg_pass.txt --decrypt "$fn" done rm */*.diff.gz.gpg diff --git a/broker/bash_scripts/fetch_ipfs_hashes.sh b/broker/bash_scripts/fetch_ipfs_hashes.sh index 4545f3ee..ba5bcf0a 100755 --- a/broker/bash_scripts/fetch_ipfs_hashes.sh +++ b/broker/bash_scripts/fetch_ipfs_hashes.sh @@ -1,5 +1,6 @@ #!/bin/bash +# run at requester-node while read p; do ipfs get "$p" done /dev/null 2>&1 cd .. + +cd ipfs_gpg +for fn in */*; do + echo "$fn" + gpg --verbose --batch --yes --output=$(echo $fn | rev | cut -c5- | rev) --pinentry-mode loopback \ + --passphrase-file=/home/alper/.ebloc-broker/.gpg_pass.txt --decrypt "$fn" +done +rm */*.diff.gz.gpg +cd ..
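# note: the `rev | cut -c5- | rev` pipeline used above strips the trailing
# ".gpg" (4 characters) from each filename to derive the decrypted output
# path; a minimal equivalent sketch using bash parameter expansion
# (assumption: every matched file ends in ".gpg"):
#   for fn in */*.gpg; do
#     gpg --batch --yes --output "${fn%.gpg}" --decrypt "$fn"
#   done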
diff --git a/broker/bash_scripts/folder_setup.sh b/broker/bash_scripts/folder_setup.sh index d2cc019a..2bb5cef5 100755 --- a/broker/bash_scripts/folder_setup.sh +++ b/broker/bash_scripts/folder_setup.sh @@ -89,6 +89,8 @@ if [ ! -d $LOG_DIR ]; then mkdir -p $LOG_DIR fi +touch $LOG_DIR/.eudat_client.txt + mkdir -p $LOG_DIR/private mkdir -p $LOG_DIR/drivers_output mkdir -p $LOG_DIR/links diff --git a/broker/bash_scripts/killall.sh b/broker/bash_scripts/killall.sh index db1cbe1c..0d340cd4 100755 --- a/broker/bash_scripts/killall.sh +++ b/broker/bash_scripts/killall.sh @@ -19,4 +19,4 @@ killall python 2> /dev/null killall python3 2> /dev/null echo "## killall all jobs in squeue" squeue | tail -n+2 | awk '{print $1}' | xargs scancel 2> /dev/null -printf "killall for ebloc-broker test [ ${GREEN}OK${NC} ] \n" +printf "killall for ebloc-broker test [ ${GREEN}OK${NC} ]\n" diff --git a/broker/bash_scripts/slurm_mail_prog.sh b/broker/bash_scripts/slurm_mail_prog.sh index 0c0a6606..c527f93b 100755 --- a/broker/bash_scripts/slurm_mail_prog.sh +++ b/broker/bash_scripts/slurm_mail_prog.sh @@ -42,7 +42,7 @@ if [[ $event == *"COMPLETED"* ]] || [[ $event == *"FAILED"* ]]; then fi arg0=$(echo $name | cut -d "$SEP" -f 1) # job_key arg1=$(echo $name | cut -d "$SEP" -f 2) # index - arg2=$(echo $name | cut -d "$SEP" -f 3) # received_block_number + arg2=$(echo $name | cut -d "$SEP" -f 3) # received_bn msg="$state fn=$name\n" msg="${msg}./end_code.py $arg0 $arg1 $arg2 \"$name\" $slurm_job_id" echo $msg | mail -s "Message Subject" $EMAIL @@ -57,7 +57,7 @@ if [[ $event == *"TIMEOUT"* ]]; then name=$(echo "$c" | grep -o -P '(?<=Name=).*(?=.sh Failed)') arg0=$(echo $name | cut -d "$SEP" -f 1) # job_key arg1=$(echo $name | cut -d "$SEP" -f 2) # index - arg2=$(echo $name | cut -d "$SEP" -f 3) # received_block_number + arg2=$(echo $name | cut -d "$SEP" -f 3) # received_bn msg="TIMEOUT fn=$name\n" msg="${msg}./end_code.py $arg0 $arg1 $arg2 \"$name\" $slurm_job_id" echo $msg | mail -s "Message Subject" $EMAIL @@ -72,7 +72,7 @@ if [[ $event == *"CANCELLED"* ]]; then name=$(echo "$c" | grep -o -P '(?<=Name=).*(?=.sh Ended)') arg0=$(echo $name | cut -d "$SEP" -f 1) # job_key arg1=$(echo $name | cut -d "$SEP" -f 2) # index - arg2=$(echo $name | cut -d "$SEP" -f 3) # received_block_number + arg2=$(echo $name | cut -d "$SEP" -f 3) # received_bn msg="CANCELLED fn=$name\n" msg="${msg}./end_code.py $arg0 $arg1 $arg2 \"$name\" $slurm_job_id" echo $msg | mail -s "Message Subject" $EMAIL diff --git a/broker/bash_scripts/ubuntu_clean.sh b/broker/bash_scripts/ubuntu_clean.sh new file mode 100755 index 00000000..91b41f0f --- /dev/null +++ b/broker/bash_scripts/ubuntu_clean.sh @@ -0,0 +1,36 @@ +#!/bin/bash + +# Adapted from 71529-ubucleaner.sh - http://www.opendesktop.org/CONTENT/content-files/71529-ubucleaner.sh +YELLOW="\033[1;33m"; RED="\033[0;31m"; NC="\033[0m" +OLDCONF=$(dpkg -l | grep "^rc" | awk '{print $2}') +LINUXPKG="linux-(image|headers|ubuntu-modules|restricted-modules)" +METALINUXPKG="linux-(image|headers|restricted-modules)-(generic|i386|server|common|rt|xen)" +if [ "$USER" != root ]; then + echo -e $RED"Error: must be root! 
Exiting..."$NC + exit 1 +fi + +echo -e $YELLOW"Cleaning apt ..."$NC +sudo apt-get autoremove blueman bluez-utils bluez bluetooth -y >/dev/null 2>&1 +# sudo apt-get remove --purge libreoffice* -y >/dev/null 2>&1 +sudo apt-get purge aisleriot gnome-sudoku mahjongg ace-of-penguins gnomine gbrainy -y >/dev/null 2>&1 +sudo aptitude clean +sudo apt-get clean -y +sudo apt-get autoremove -y +sudo apt-get autoclean -y + +if [ "$OLDCONF" != "" ]; then + echo -e $YELLOW"Those packages were uninstalled without --purge:"$NC + echo $OLDCONF + #apt-get purge "$OLDCONF" # fixes the error in the original script + for PKGNAME in $OLDCONF ; do # a better way to handle errors + echo -e $YELLOW"Purge package $PKGNAME"$NC + apt-cache show "$PKGNAME" | grep Description: -A3 + apt-get -y purge "$PKGNAME" + done +fi + +echo -e $YELLOW"Emptying all trash folders..."$NC +rm -rf ~/*/.local/share/Trash/*/** &> /dev/null +rm -rf /root/.local/share/Trash/*/** &> /dev/null +echo -e $YELLOW"Script Finished!"$NC diff --git a/broker/cfg.py b/broker/cfg.py index 907d7637..93e31ff5 100644 --- a/broker/cfg.py +++ b/broker/cfg.py @@ -6,7 +6,7 @@ """ from rich.console import Console -__version__ = "2.0.0" +__version__ = "2.1.0" ZERO_ADDRESS = "0x0000000000000000000000000000000000000000" IS_BROWNIE_TEST = False IS_THREADING_ENABLED = True diff --git a/broker/cfg_temp.yaml b/broker/cfg_temp.yaml index 14d27df6..268deab8 100644 --- a/broker/cfg_temp.yaml +++ b/broker/cfg_temp.yaml @@ -10,7 +10,7 @@ cfg: gdrive: /usr/local/bin/gdrive rpc_port: 8545 provider: - slurmuser: alper + slurm_user: alper is_bloxberg: true is_thread: true is_ipfs_use: true diff --git a/broker/config.py b/broker/config.py index 944821fb..3f3a3d97 100755 --- a/broker/config.py +++ b/broker/config.py @@ -55,7 +55,7 @@ def __init__(self) -> None: if "provider" in self.cfg: self.IS_PROVIDER = True - self.SLURMUSER = self.cfg["provider"]["slurmuser"] + self.SLURMUSER = self.cfg["provider"]["slurm_user"] self.IS_IPFS_USE = self.cfg["provider"]["is_ipfs_use"] self.IS_EUDAT_USE = self.cfg["provider"]["is_eudat_use"] self.IS_GDRIVE_USE = self.cfg["provider"]["is_gdrive_use"] @@ -139,7 +139,7 @@ def setup_logger(log_path="", is_brownie=False): ipfs = cfg.ipfs RECONNECT_ATTEMPTS = 5 RECONNECT_SLEEP = 15 -ebb = None # eBlocBroker Contract on the blockchain +ebb = None # ebloc-broker contract on the blockchain contract = None chain = None w3_ebb = None diff --git a/broker/drivers/driver_gc.py b/broker/drivers/driver_gc.py index ff108602..08c3648f 100644 --- a/broker/drivers/driver_gc.py +++ b/broker/drivers/driver_gc.py @@ -19,12 +19,10 @@ def main(): for document in cursor: # print(document) # TODO: requester parameter as get_storage_duration - received_block_number, storage_duration = Ebb.get_job_storage_duration( - env.PROVIDER_ID, document["sourceCodeHash"] - ) - end_block_time = received_block_number + storage_duration * cfg.ONE_HOUR_BLOCK_DURATION + received_bn, storage_duration = Ebb.get_job_storage_duration(env.PROVIDER_ID, document["sourceCodeHash"]) + end_block_time = received_bn + storage_duration * cfg.ONE_HOUR_BLOCK_DURATION storageID = document["storageID"] - if end_block_time < block_number and received_block_number != 0: + if end_block_time < block_number and received_bn != 0: if storageID in (StorageID.IPFS, StorageID.IPFS_GPG): ipfsHash = document["jobKey"] print(run(["ipfs", "pin", "rm", ipfsHash])) @@ -39,7 +37,7 @@ def main(): print(cached_file_name) _remove(cached_file_name) - print(received_block_number) + print(received_bn) coll.delete_one({"jobKey":
ipfsHash}) diff --git a/broker/drivers/gdrive.py b/broker/drivers/gdrive.py index cf9da71a..dce38f21 100755 --- a/broker/drivers/gdrive.py +++ b/broker/drivers/gdrive.py @@ -192,7 +192,7 @@ def get_data_init(self, key, _id, is_job_key=False): mime_type = gdrive.get_file_info(gdrive_output, _type="Mime") folder_name = gdrive.get_file_info(gdrive_output, _type="Name") - log(f"==> mime_type=[magenta]{mime_type}") + log(f"==> mime_type=[m]{mime_type}") if is_job_key: # key for the sourceCode tar.gz file is obtained try: @@ -241,7 +241,7 @@ def get_data(self, key, _id, is_job_key=False): # folder is already stored by its code_hash code_hash = name.replace(".tar.gz", "") log(f"==> name={name}") - log(f"==> mime_type=[magenta]{mime_type}") + log(f"==> mime_type=[m]{mime_type}") if _id == 0: # source code folder, ignore downloading result-* name = f"{name}.tar.gz" diff --git a/broker/drivers/storage_class.py b/broker/drivers/storage_class.py index 4998c369..162295f9 100755 --- a/broker/drivers/storage_class.py +++ b/broker/drivers/storage_class.py @@ -229,14 +229,13 @@ def is_run_exists_in_tar(self, tar_path) -> bool: ) if output.count("/") == 1: # main folder should contain the 'run.sh' file - log(f"[magenta]./run.sh[/magenta] exists under the parent folder{ok()}", "bold") + log(f"[m]./run.sh[/m] exists under the parent folder{ok()}", "bold") return True else: log("E: run.sh does not exist under the parent folder") return False except: - breakpoint() # DEBUG - log(f"E: run.sh does not exist under the tar={tar_path}") + log(f"E: `run.sh` file does not exist under the tar={tar_path}") return False def check_run_sh(self) -> bool: @@ -349,7 +348,7 @@ def _sbatch_call(self) -> bool: timestamp = p2.communicate()[0].decode("utf-8").strip() log(f"timestamp={timestamp}, ", "bold", end="") write_to_file(self.results_folder_prev / "timestamp.txt", timestamp) - log(f"job_received_block_number={job_block_number}", "bold") + log(f"job_received_bn={job_block_number}", "bold") log("## Adding recevied job into the mongoDB database") self.Ebb.mongo_broker.add_item( job_key, diff --git a/broker/eblocbroker_scripts/Contract.py b/broker/eblocbroker_scripts/Contract.py index 266bbb9f..56923a51 100644 --- a/broker/eblocbroker_scripts/Contract.py +++ b/broker/eblocbroker_scripts/Contract.py @@ -38,8 +38,7 @@ class Contract: get_job_code_hashes, get_job_info, get_job_info_print, - get_job_owner, - set_job_received_block_number, + set_job_received_bn, update_job_cores, ) from broker.eblocbroker_scripts.get_provider_info import get_provider_info @@ -319,6 +318,10 @@ def timeout_wrapper(self, method, *args): try: return self.timeout(method, *args) except ValueError as e: + if "Sequence has incorrect length" in str(e): + print_tb(e) + raise QuietExit from e + if "There is another transaction with same nonce in the queue" in str(e): log(f"warning: Tx: {e}") log("#> sleeping for 15 seconds, will try again") diff --git a/broker/eblocbroker_scripts/abi.json b/broker/eblocbroker_scripts/abi.json index 224f1bb5..3f943cf3 100644 --- a/broker/eblocbroker_scripts/abi.json +++ b/broker/eblocbroker_scripts/abi.json @@ -1 +1 @@ -[{"inputs": [], "stateMutability": "nonpayable", "type": "constructor", "name": "constructor"}, {"anonymous": false, "inputs": [{"indexed": true, "internalType": "address", "name": "provider", "type": "address"}, {"indexed": false, "internalType": "address", "name": "owner", "type": "address"}, {"indexed": false, "internalType": "bytes32", "name": "requestedHash", "type": "bytes32"}], "name": 
"LogDataStorageRequest", "type": "event"}, {"anonymous": false, "inputs": [{"indexed": true, "internalType": "address", "name": "paidAddress", "type": "address"}, {"indexed": false, "internalType": "uint256", "name": "payment", "type": "uint256"}], "name": "LogDepositStorage", "type": "event"}, {"anonymous": false, "inputs": [{"indexed": true, "internalType": "address", "name": "provider", "type": "address"}, {"indexed": true, "internalType": "address", "name": "owner", "type": "address"}, {"indexed": false, "internalType": "string", "name": "jobKey", "type": "string"}, {"indexed": false, "internalType": "uint32", "name": "index", "type": "uint32"}, {"indexed": false, "internalType": "uint8[]", "name": "cloudStorageID", "type": "uint8[]"}, {"indexed": false, "internalType": "bytes32[]", "name": "sourceCodeHash", "type": "bytes32[]"}, {"indexed": false, "internalType": "uint8[]", "name": "cacheType", "type": "uint8[]"}, {"indexed": false, "internalType": "uint16[]", "name": "core", "type": "uint16[]"}, {"indexed": false, "internalType": "uint16[]", "name": "runTime", "type": "uint16[]"}, {"indexed": false, "internalType": "uint256", "name": "received", "type": "uint256"}, {"indexed": false, "internalType": "uint256", "name": "refunded", "type": "uint256"}], "name": "LogJob", "type": "event"}, {"anonymous": false, "inputs": [{"indexed": true, "internalType": "address", "name": "provider", "type": "address"}, {"indexed": false, "internalType": "address", "name": "requester", "type": "address"}, {"indexed": false, "internalType": "string", "name": "jobKey", "type": "string"}, {"indexed": false, "internalType": "string", "name": "jobDesc", "type": "string"}], "name": "LogJobDescription", "type": "event"}, {"anonymous": false, "inputs": [{"indexed": true, "internalType": "address", "name": "provider", "type": "address"}, {"indexed": false, "internalType": "string", "name": "jobKey", "type": "string"}, {"indexed": false, "internalType": "uint32", "name": "index", "type": "uint32"}, {"indexed": false, "internalType": "uint32", "name": "jobID", "type": "uint32"}, {"indexed": false, "internalType": "address", "name": "recipient", "type": "address"}, {"indexed": false, "internalType": "uint256", "name": "receivedGwei", "type": "uint256"}, {"indexed": false, "internalType": "uint256", "name": "refundedGwei", "type": "uint256"}, {"indexed": false, "internalType": "uint256", "name": "completionTime", "type": "uint256"}, {"indexed": false, "internalType": "bytes32", "name": "resultIpfsHash", "type": "bytes32"}, {"indexed": false, "internalType": "uint256", "name": "dataTransferIn", "type": "uint256"}, {"indexed": false, "internalType": "uint256", "name": "dataTransferOut", "type": "uint256"}], "name": "LogProcessPayment", "type": "event"}, {"anonymous": false, "inputs": [{"indexed": true, "internalType": "address", "name": "provider", "type": "address"}, {"indexed": true, "internalType": "bytes32", "name": "gpgFingerprint", "type": "bytes32"}, {"indexed": false, "internalType": "string", "name": "gmail", "type": "string"}, {"indexed": false, "internalType": "string", "name": "fID", "type": "string"}, {"indexed": false, "internalType": "string", "name": "ipfsID", "type": "string"}], "name": "LogProviderInfo", "type": "event"}, {"anonymous": false, "inputs": [{"indexed": true, "internalType": "address", "name": "provider", "type": "address"}, {"indexed": false, "internalType": "string", "name": "jobKey", "type": "string"}, {"indexed": false, "internalType": "uint32", "name": "index", "type": "uint32"}, 
{"indexed": false, "internalType": "uint32", "name": "jobID", "type": "uint32"}, {"indexed": false, "internalType": "uint256", "name": "refundedGwei", "type": "uint256"}], "name": "LogRefundRequest", "type": "event"}, {"anonymous": false, "inputs": [{"indexed": true, "internalType": "address", "name": "provider", "type": "address"}, {"indexed": false, "internalType": "bytes32", "name": "registeredDataHash", "type": "bytes32"}], "name": "LogRegisterData", "type": "event"}, {"anonymous": false, "inputs": [{"indexed": true, "internalType": "address", "name": "provider", "type": "address"}, {"indexed": false, "internalType": "bytes32", "name": "registeredDataHash", "type": "bytes32"}], "name": "LogRegisteredDataRequestToUse", "type": "event"}, {"anonymous": false, "inputs": [{"indexed": true, "internalType": "address", "name": "requester", "type": "address"}, {"indexed": true, "internalType": "bytes32", "name": "gpgFingerprint", "type": "bytes32"}, {"indexed": false, "internalType": "string", "name": "gmail", "type": "string"}, {"indexed": false, "internalType": "string", "name": "fID", "type": "string"}, {"indexed": false, "internalType": "string", "name": "ipfsID", "type": "string"}], "name": "LogRequester", "type": "event"}, {"anonymous": false, "inputs": [{"indexed": true, "internalType": "address", "name": "provider", "type": "address"}, {"indexed": false, "internalType": "string", "name": "jobKey", "type": "string"}, {"indexed": false, "internalType": "uint32", "name": "index", "type": "uint32"}, {"indexed": false, "internalType": "uint32", "name": "jobID", "type": "uint32"}, {"indexed": false, "internalType": "uint8", "name": "stateCodes", "type": "uint8"}], "name": "LogSetJob", "type": "event"}, {"anonymous": false, "inputs": [{"indexed": true, "internalType": "address", "name": "previousOwner", "type": "address"}, {"indexed": true, "internalType": "address", "name": "newOwner", "type": "address"}], "name": "OwnershipTransferred", "type": "event"}, {"inputs": [{"internalType": "address", "name": "user", "type": "address"}, {"internalType": "bytes32", "name": "orcid", "type": "bytes32"}], "name": "authenticateOrcID", "outputs": [{"internalType": "bool", "name": "", "type": "bool"}], "stateMutability": "nonpayable", "type": "function"}, {"inputs": [{"internalType": "address", "name": "owner", "type": "address"}], "name": "balanceOf", "outputs": [{"internalType": "uint256", "name": "", "type": "uint256"}], "stateMutability": "view", "type": "function"}, {"inputs": [{"internalType": "address", "name": "", "type": "address"}], "name": "balances", "outputs": [{"internalType": "uint256", "name": "", "type": "uint256"}], "stateMutability": "view", "type": "function"}, {"inputs": [{"internalType": "address", "name": "dataOwner", "type": "address"}, {"internalType": "bytes32", "name": "sourceCodeHash", "type": "bytes32"}], "name": "depositStorage", "outputs": [{"internalType": "bool", "name": "", "type": "bool"}], "stateMutability": "nonpayable", "type": "function"}, {"inputs": [{"internalType": "address", "name": "provider", "type": "address"}], "name": "doesProviderExist", "outputs": [{"internalType": "bool", "name": "", "type": "bool"}], "stateMutability": "view", "type": "function"}, {"inputs": [{"internalType": "address", "name": "requester", "type": "address"}], "name": "doesRequesterExist", "outputs": [{"internalType": "bool", "name": "", "type": "bool"}], "stateMutability": "view", "type": "function"}, {"inputs": [], "name": "getContractBalance", "outputs": [{"internalType": "uint256", 
"name": "", "type": "uint256"}], "stateMutability": "view", "type": "function"}, {"inputs": [{"internalType": "address", "name": "provider", "type": "address"}, {"internalType": "string", "name": "key", "type": "string"}, {"internalType": "uint32", "name": "index", "type": "uint32"}, {"internalType": "uint256", "name": "jobID", "type": "uint256"}], "name": "getJobInfo", "outputs": [{"components": [{"internalType": "enum Lib.JobStateCodes", "name": "stateCode", "type": "uint8"}, {"internalType": "uint32", "name": "startTime", "type": "uint32"}], "internalType": "struct Lib.Job", "name": "", "type": "tuple"}, {"internalType": "uint256", "name": "", "type": "uint256"}, {"internalType": "address", "name": "", "type": "address"}, {"internalType": "uint256", "name": "", "type": "uint256"}, {"internalType": "uint256", "name": "", "type": "uint256"}], "stateMutability": "view", "type": "function"}, {"inputs": [{"internalType": "address", "name": "provider", "type": "address"}, {"internalType": "string", "name": "key", "type": "string"}], "name": "getJobSize", "outputs": [{"internalType": "uint256", "name": "", "type": "uint256"}], "stateMutability": "view", "type": "function"}, {"inputs": [{"internalType": "address", "name": "user", "type": "address"}], "name": "getOrcID", "outputs": [{"internalType": "bytes32", "name": "", "type": "bytes32"}], "stateMutability": "view", "type": "function"}, {"inputs": [], "name": "getOwner", "outputs": [{"internalType": "address", "name": "", "type": "address"}], "stateMutability": "view", "type": "function"}, {"inputs": [{"internalType": "address", "name": "provider", "type": "address"}, {"internalType": "uint32", "name": "pricesSetBn", "type": "uint32"}], "name": "getProviderInfo", "outputs": [{"internalType": "uint32", "name": "", "type": "uint32"}, {"components": [{"internalType": "uint32", "name": "availableCore", "type": "uint32"}, {"internalType": "uint32", "name": "commitmentBlockDur", "type": "uint32"}, {"internalType": "uint32", "name": "priceCoreMin", "type": "uint32"}, {"internalType": "uint32", "name": "priceDataTransfer", "type": "uint32"}, {"internalType": "uint32", "name": "priceStorage", "type": "uint32"}, {"internalType": "uint32", "name": "priceCache", "type": "uint32"}], "internalType": "struct Lib.ProviderInfo", "name": "", "type": "tuple"}], "stateMutability": "view", "type": "function"}, {"inputs": [{"internalType": "address", "name": "provider", "type": "address"}, {"internalType": "string", "name": "key", "type": "string"}, {"internalType": "uint256", "name": "index", "type": "uint256"}], "name": "getProviderPrices", "outputs": [{"components": [{"internalType": "uint32", "name": "availableCore", "type": "uint32"}, {"internalType": "uint32", "name": "commitmentBlockDur", "type": "uint32"}, {"internalType": "uint32", "name": "priceCoreMin", "type": "uint32"}, {"internalType": "uint32", "name": "priceDataTransfer", "type": "uint32"}, {"internalType": "uint32", "name": "priceStorage", "type": "uint32"}, {"internalType": "uint32", "name": "priceCache", "type": "uint32"}], "internalType": "struct Lib.ProviderInfo", "name": "", "type": "tuple"}], "stateMutability": "view", "type": "function"}, {"inputs": [{"internalType": "address", "name": "provider", "type": "address"}, {"internalType": "uint32", "name": "index", "type": "uint32"}], "name": "getProviderReceiptNode", "outputs": [{"internalType": "uint32", "name": "", "type": "uint32"}, {"internalType": "uint256", "name": "", "type": "uint256"}, {"internalType": "int32", "name": "", "type": 
"int32"}], "stateMutability": "view", "type": "function"}, {"inputs": [{"internalType": "address", "name": "provider", "type": "address"}], "name": "getProviderSetBlockNumbers", "outputs": [{"internalType": "uint32[]", "name": "", "type": "uint32[]"}], "stateMutability": "view", "type": "function"}, {"inputs": [], "name": "getProviders", "outputs": [{"internalType": "address[]", "name": "", "type": "address[]"}], "stateMutability": "view", "type": "function"}, {"inputs": [{"internalType": "address", "name": "provider", "type": "address"}, {"internalType": "bytes32", "name": "sourceCodeHash", "type": "bytes32"}], "name": "getRegisteredDataBlockNumbers", "outputs": [{"internalType": "uint32[]", "name": "", "type": "uint32[]"}], "stateMutability": "view", "type": "function"}, {"inputs": [{"internalType": "address", "name": "provider", "type": "address"}, {"internalType": "bytes32", "name": "sourceCodeHash", "type": "bytes32"}, {"internalType": "uint32", "name": "pricesSetBn", "type": "uint32"}], "name": "getRegisteredDataPrice", "outputs": [{"components": [{"internalType": "uint32", "name": "price", "type": "uint32"}, {"internalType": "uint32", "name": "commitmentBlockDur", "type": "uint32"}], "internalType": "struct Lib.DataInfo", "name": "", "type": "tuple"}], "stateMutability": "view", "type": "function"}, {"inputs": [{"internalType": "address", "name": "requester", "type": "address"}], "name": "getRequesterCommittmedBlock", "outputs": [{"internalType": "uint32", "name": "", "type": "uint32"}], "stateMutability": "view", "type": "function"}, {"inputs": [{"internalType": "address", "name": "provider", "type": "address"}, {"internalType": "address", "name": "requester", "type": "address"}, {"internalType": "bytes32", "name": "codeHash", "type": "bytes32"}], "name": "getStorageInfo", "outputs": [{"internalType": "uint256", "name": "", "type": "uint256"}, {"components": [{"internalType": "uint32", "name": "receivedBlock", "type": "uint32"}, {"internalType": "uint32", "name": "storageDuration", "type": "uint32"}, {"internalType": "bool", "name": "isPrivate", "type": "bool"}, {"internalType": "bool", "name": "isVerifiedUsed", "type": "bool"}], "internalType": "struct Lib.JobStorage", "name": "", "type": "tuple"}], "stateMutability": "view", "type": "function"}, {"inputs": [{"internalType": "address", "name": "provider", "type": "address"}], "name": "getUpdatedProviderPricesBlocks", "outputs": [{"internalType": "uint32[]", "name": "", "type": "uint32[]"}], "stateMutability": "view", "type": "function"}, {"inputs": [{"internalType": "address", "name": "user", "type": "address"}], "name": "isOrcIDVerified", "outputs": [{"internalType": "bool", "name": "", "type": "bool"}], "stateMutability": "view", "type": "function"}, {"inputs": [], "name": "owner", "outputs": [{"internalType": "address", "name": "", "type": "address"}], "stateMutability": "view", "type": "function"}, {"inputs": [{"internalType": "string", "name": "key", "type": "string"}, {"components": [{"internalType": "uint32", "name": "index", "type": "uint32"}, {"internalType": "uint32", "name": "jobID", "type": "uint32"}, {"internalType": "uint32", "name": "completionTime", "type": "uint32"}, {"internalType": "uint32", "name": "dataTransferIn", "type": "uint32"}, {"internalType": "uint32", "name": "dataTransferOut", "type": "uint32"}, {"internalType": "uint256[]", "name": "core", "type": "uint256[]"}, {"internalType": "uint256[]", "name": "runTime", "type": "uint256[]"}, {"internalType": "bool", "name": "endJob", "type": "bool"}], 
"internalType": "struct Lib.JobIndexes", "name": "args", "type": "tuple"}, {"internalType": "uint32", "name": "elapsedTime", "type": "uint32"}, {"internalType": "bytes32", "name": "resultIpfsHash", "type": "bytes32"}], "name": "processPayment", "outputs": [], "stateMutability": "nonpayable", "type": "function"}, {"inputs": [{"internalType": "address", "name": "provider", "type": "address"}, {"internalType": "string", "name": "key", "type": "string"}, {"internalType": "uint32", "name": "index", "type": "uint32"}, {"internalType": "uint32", "name": "jobID", "type": "uint32"}, {"internalType": "uint256[]", "name": "core", "type": "uint256[]"}, {"internalType": "uint256[]", "name": "elapsedTime", "type": "uint256[]"}], "name": "refund", "outputs": [{"internalType": "bool", "name": "", "type": "bool"}], "stateMutability": "nonpayable", "type": "function"}, {"inputs": [{"internalType": "address", "name": "provider", "type": "address"}, {"internalType": "address payable", "name": "requester", "type": "address"}, {"internalType": "bytes32", "name": "sourceCodeHash", "type": "bytes32"}], "name": "refundStorageDeposit", "outputs": [{"internalType": "bool", "name": "", "type": "bool"}], "stateMutability": "nonpayable", "type": "function"}, {"inputs": [{"internalType": "bytes32", "name": "sourceCodeHash", "type": "bytes32"}, {"internalType": "uint32", "name": "price", "type": "uint32"}, {"internalType": "uint32", "name": "commitmentBlockDur", "type": "uint32"}], "name": "registerData", "outputs": [], "stateMutability": "nonpayable", "type": "function"}, {"inputs": [{"internalType": "bytes32", "name": "gpgFingerprint", "type": "bytes32"}, {"internalType": "string", "name": "gmail", "type": "string"}, {"internalType": "string", "name": "fcID", "type": "string"}, {"internalType": "string", "name": "ipfsID", "type": "string"}, {"internalType": "uint32", "name": "availableCore", "type": "uint32"}, {"internalType": "uint32[]", "name": "prices", "type": "uint32[]"}, {"internalType": "uint32", "name": "commitmentBlockDur", "type": "uint32"}], "name": "registerProvider", "outputs": [{"internalType": "bool", "name": "", "type": "bool"}], "stateMutability": "nonpayable", "type": "function"}, {"inputs": [{"internalType": "bytes32", "name": "gpgFingerprint", "type": "bytes32"}, {"internalType": "string", "name": "gmail", "type": "string"}, {"internalType": "string", "name": "fcID", "type": "string"}, {"internalType": "string", "name": "ipfsID", "type": "string"}], "name": "registerRequester", "outputs": [{"internalType": "bool", "name": "", "type": "bool"}], "stateMutability": "nonpayable", "type": "function"}, {"inputs": [{"internalType": "bytes32", "name": "sourceCodeHash", "type": "bytes32"}], "name": "removeRegisteredData", "outputs": [], "stateMutability": "nonpayable", "type": "function"}, {"inputs": [], "name": "resumeProvider", "outputs": [{"internalType": "bool", "name": "", "type": "bool"}], "stateMutability": "nonpayable", "type": "function"}, {"inputs": [{"internalType": "string", "name": "key", "type": "string"}, {"internalType": "uint32", "name": "index", "type": "uint32"}, {"internalType": "bytes32[]", "name": "sourceCodeHash", "type": "bytes32[]"}, {"internalType": "uint8[]", "name": "cacheType", "type": "uint8[]"}], "name": "setDataPublic", "outputs": [], "stateMutability": "nonpayable", "type": "function"}, {"inputs": [{"internalType": "bytes32[]", "name": "sourceCodeHash", "type": "bytes32[]"}], "name": "setDataVerified", "outputs": [{"internalType": "bool", "name": "", "type": "bool"}], 
"stateMutability": "nonpayable", "type": "function"}, {"inputs": [{"internalType": "address", "name": "provider", "type": "address"}, {"internalType": "string", "name": "key", "type": "string"}, {"internalType": "string", "name": "desc", "type": "string"}], "name": "setJobDescription", "outputs": [{"internalType": "bool", "name": "", "type": "bool"}], "stateMutability": "nonpayable", "type": "function"}, {"inputs": [{"internalType": "string", "name": "key", "type": "string"}, {"internalType": "uint32", "name": "index", "type": "uint32"}, {"internalType": "uint32", "name": "jobID", "type": "uint32"}, {"internalType": "uint32", "name": "startTime", "type": "uint32"}], "name": "setJobStateRunning", "outputs": [{"internalType": "bool", "name": "", "type": "bool"}], "stateMutability": "nonpayable", "type": "function"}, {"inputs": [{"internalType": "string", "name": "key", "type": "string"}, {"internalType": "uint32[]", "name": "dataTransferIn", "type": "uint32[]"}, {"components": [{"internalType": "address payable", "name": "provider", "type": "address"}, {"internalType": "uint32", "name": "priceBlockIndex", "type": "uint32"}, {"internalType": "uint8[]", "name": "cloudStorageID", "type": "uint8[]"}, {"internalType": "uint8[]", "name": "cacheType", "type": "uint8[]"}, {"internalType": "uint32[]", "name": "dataPricesSetBlockNum", "type": "uint32[]"}, {"internalType": "uint16[]", "name": "core", "type": "uint16[]"}, {"internalType": "uint16[]", "name": "runTime", "type": "uint16[]"}, {"internalType": "uint32", "name": "dataTransferOut", "type": "uint32"}], "internalType": "struct Lib.JobArgument", "name": "args", "type": "tuple"}, {"internalType": "uint32[]", "name": "storageDuration", "type": "uint32[]"}, {"internalType": "bytes32[]", "name": "sourceCodeHash", "type": "bytes32[]"}], "name": "submitJob", "outputs": [], "stateMutability": "payable", "type": "function"}, {"inputs": [], "name": "suspendProvider", "outputs": [{"internalType": "bool", "name": "", "type": "bool"}], "stateMutability": "nonpayable", "type": "function"}, {"inputs": [{"internalType": "address", "name": "newOwner", "type": "address"}], "name": "transferOwnership", "outputs": [], "stateMutability": "nonpayable", "type": "function"}, {"inputs": [{"internalType": "bytes32", "name": "sourceCodeHash", "type": "bytes32"}, {"internalType": "uint32", "name": "price", "type": "uint32"}, {"internalType": "uint32", "name": "commitmentBlockDur", "type": "uint32"}], "name": "updataDataPrice", "outputs": [], "stateMutability": "nonpayable", "type": "function"}, {"inputs": [{"internalType": "bytes32", "name": "gpgFingerprint", "type": "bytes32"}, {"internalType": "string", "name": "gmail", "type": "string"}, {"internalType": "string", "name": "fcID", "type": "string"}, {"internalType": "string", "name": "ipfsID", "type": "string"}], "name": "updateProviderInfo", "outputs": [{"internalType": "bool", "name": "", "type": "bool"}], "stateMutability": "nonpayable", "type": "function"}, {"inputs": [{"internalType": "uint32", "name": "availableCore", "type": "uint32"}, {"internalType": "uint32", "name": "commitmentBlockDur", "type": "uint32"}, {"internalType": "uint32[]", "name": "prices", "type": "uint32[]"}], "name": "updateProviderPrices", "outputs": [{"internalType": "bool", "name": "", "type": "bool"}], "stateMutability": "nonpayable", "type": "function"}, {"inputs": [], "name": "withdraw", "outputs": [{"internalType": "bool", "name": "", "type": "bool"}], "stateMutability": "nonpayable", "type": "function"}] \ No newline at end of file 
+[{"inputs": [], "stateMutability": "nonpayable", "type": "constructor", "name": "constructor"}, {"anonymous": false, "inputs": [{"indexed": true, "internalType": "address", "name": "provider", "type": "address"}, {"indexed": false, "internalType": "address", "name": "owner", "type": "address"}, {"indexed": false, "internalType": "bytes32", "name": "requestedHash", "type": "bytes32"}], "name": "LogDataStorageRequest", "type": "event"}, {"anonymous": false, "inputs": [{"indexed": true, "internalType": "address", "name": "paidAddress", "type": "address"}, {"indexed": false, "internalType": "uint256", "name": "payment", "type": "uint256"}], "name": "LogDepositStorage", "type": "event"}, {"anonymous": false, "inputs": [{"indexed": true, "internalType": "address", "name": "provider", "type": "address"}, {"indexed": true, "internalType": "address", "name": "owner", "type": "address"}, {"indexed": false, "internalType": "string", "name": "jobKey", "type": "string"}, {"indexed": false, "internalType": "uint32", "name": "index", "type": "uint32"}, {"indexed": false, "internalType": "uint8[]", "name": "cloudStorageID", "type": "uint8[]"}, {"indexed": false, "internalType": "bytes32[]", "name": "sourceCodeHash", "type": "bytes32[]"}, {"indexed": false, "internalType": "uint8[]", "name": "cacheType", "type": "uint8[]"}, {"indexed": false, "internalType": "uint16[]", "name": "core", "type": "uint16[]"}, {"indexed": false, "internalType": "uint16[]", "name": "runTime", "type": "uint16[]"}, {"indexed": false, "internalType": "uint256", "name": "received", "type": "uint256"}, {"indexed": false, "internalType": "uint256", "name": "refunded", "type": "uint256"}], "name": "LogJob", "type": "event"}, {"anonymous": false, "inputs": [{"indexed": true, "internalType": "address", "name": "provider", "type": "address"}, {"indexed": false, "internalType": "address", "name": "requester", "type": "address"}, {"indexed": false, "internalType": "string", "name": "jobKey", "type": "string"}, {"indexed": false, "internalType": "string", "name": "jobDesc", "type": "string"}], "name": "LogJobDescription", "type": "event"}, {"anonymous": false, "inputs": [{"indexed": true, "internalType": "address", "name": "provider", "type": "address"}, {"indexed": false, "internalType": "string", "name": "jobKey", "type": "string"}, {"indexed": false, "internalType": "uint32", "name": "index", "type": "uint32"}, {"indexed": false, "internalType": "uint32", "name": "jobID", "type": "uint32"}, {"indexed": false, "internalType": "uint32", "name": "elapsedTime", "type": "uint32"}, {"indexed": false, "internalType": "address", "name": "recipient", "type": "address"}, {"indexed": false, "internalType": "uint256", "name": "receivedGwei", "type": "uint256"}, {"indexed": false, "internalType": "uint256", "name": "refundedGwei", "type": "uint256"}, {"indexed": false, "internalType": "bytes32", "name": "resultIpfsHash", "type": "bytes32"}, {"indexed": false, "internalType": "uint256", "name": "dataTransferIn", "type": "uint256"}, {"indexed": false, "internalType": "uint256", "name": "dataTransferOut", "type": "uint256"}], "name": "LogProcessPayment", "type": "event"}, {"anonymous": false, "inputs": [{"indexed": true, "internalType": "address", "name": "provider", "type": "address"}, {"indexed": true, "internalType": "bytes32", "name": "gpgFingerprint", "type": "bytes32"}, {"indexed": false, "internalType": "string", "name": "gmail", "type": "string"}, {"indexed": false, "internalType": "string", "name": "fID", "type": "string"}, {"indexed": false, 
"internalType": "string", "name": "ipfsID", "type": "string"}], "name": "LogProviderInfo", "type": "event"}, {"anonymous": false, "inputs": [{"indexed": true, "internalType": "address", "name": "provider", "type": "address"}, {"indexed": false, "internalType": "string", "name": "jobKey", "type": "string"}, {"indexed": false, "internalType": "uint32", "name": "index", "type": "uint32"}, {"indexed": false, "internalType": "uint32", "name": "jobID", "type": "uint32"}, {"indexed": false, "internalType": "uint256", "name": "refundedGwei", "type": "uint256"}], "name": "LogRefundRequest", "type": "event"}, {"anonymous": false, "inputs": [{"indexed": true, "internalType": "address", "name": "provider", "type": "address"}, {"indexed": false, "internalType": "bytes32", "name": "registeredDataHash", "type": "bytes32"}], "name": "LogRegisterData", "type": "event"}, {"anonymous": false, "inputs": [{"indexed": true, "internalType": "address", "name": "provider", "type": "address"}, {"indexed": false, "internalType": "bytes32", "name": "registeredDataHash", "type": "bytes32"}], "name": "LogRegisteredDataRequestToUse", "type": "event"}, {"anonymous": false, "inputs": [{"indexed": true, "internalType": "address", "name": "requester", "type": "address"}, {"indexed": true, "internalType": "bytes32", "name": "gpgFingerprint", "type": "bytes32"}, {"indexed": false, "internalType": "string", "name": "gmail", "type": "string"}, {"indexed": false, "internalType": "string", "name": "fID", "type": "string"}, {"indexed": false, "internalType": "string", "name": "ipfsID", "type": "string"}], "name": "LogRequester", "type": "event"}, {"anonymous": false, "inputs": [{"indexed": true, "internalType": "address", "name": "provider", "type": "address"}, {"indexed": false, "internalType": "string", "name": "jobKey", "type": "string"}, {"indexed": false, "internalType": "uint32", "name": "index", "type": "uint32"}, {"indexed": false, "internalType": "uint32", "name": "jobID", "type": "uint32"}, {"indexed": false, "internalType": "uint8", "name": "stateCodes", "type": "uint8"}], "name": "LogSetJob", "type": "event"}, {"anonymous": false, "inputs": [{"indexed": true, "internalType": "address", "name": "previousOwner", "type": "address"}, {"indexed": true, "internalType": "address", "name": "newOwner", "type": "address"}], "name": "OwnershipTransferred", "type": "event"}, {"inputs": [{"internalType": "address", "name": "user", "type": "address"}, {"internalType": "bytes32", "name": "orcid", "type": "bytes32"}], "name": "authenticateOrcID", "outputs": [{"internalType": "bool", "name": "", "type": "bool"}], "stateMutability": "nonpayable", "type": "function"}, {"inputs": [{"internalType": "address", "name": "owner", "type": "address"}], "name": "balanceOf", "outputs": [{"internalType": "uint256", "name": "", "type": "uint256"}], "stateMutability": "view", "type": "function"}, {"inputs": [{"internalType": "address", "name": "", "type": "address"}], "name": "balances", "outputs": [{"internalType": "uint256", "name": "", "type": "uint256"}], "stateMutability": "view", "type": "function"}, {"inputs": [{"internalType": "address", "name": "dataOwner", "type": "address"}, {"internalType": "bytes32", "name": "sourceCodeHash", "type": "bytes32"}], "name": "depositStorage", "outputs": [{"internalType": "bool", "name": "", "type": "bool"}], "stateMutability": "nonpayable", "type": "function"}, {"inputs": [{"internalType": "address", "name": "provider", "type": "address"}], "name": "doesProviderExist", "outputs": [{"internalType": "bool", 
"name": "", "type": "bool"}], "stateMutability": "view", "type": "function"}, {"inputs": [{"internalType": "address", "name": "requester", "type": "address"}], "name": "doesRequesterExist", "outputs": [{"internalType": "bool", "name": "", "type": "bool"}], "stateMutability": "view", "type": "function"}, {"inputs": [], "name": "getContractBalance", "outputs": [{"internalType": "uint256", "name": "", "type": "uint256"}], "stateMutability": "view", "type": "function"}, {"inputs": [{"internalType": "address", "name": "provider", "type": "address"}, {"internalType": "string", "name": "key", "type": "string"}, {"internalType": "uint32", "name": "index", "type": "uint32"}, {"internalType": "uint256", "name": "jobID", "type": "uint256"}], "name": "getJobInfo", "outputs": [{"components": [{"internalType": "enum Lib.JobStateCodes", "name": "stateCode", "type": "uint8"}, {"internalType": "uint32", "name": "startTimestamp", "type": "uint32"}], "internalType": "struct Lib.Job", "name": "", "type": "tuple"}, {"internalType": "uint256", "name": "", "type": "uint256"}, {"internalType": "address", "name": "", "type": "address"}, {"internalType": "uint256", "name": "", "type": "uint256"}, {"internalType": "uint256", "name": "", "type": "uint256"}], "stateMutability": "view", "type": "function"}, {"inputs": [{"internalType": "address", "name": "provider", "type": "address"}, {"internalType": "string", "name": "key", "type": "string"}], "name": "getJobSize", "outputs": [{"internalType": "uint256", "name": "", "type": "uint256"}], "stateMutability": "view", "type": "function"}, {"inputs": [{"internalType": "address", "name": "user", "type": "address"}], "name": "getOrcID", "outputs": [{"internalType": "bytes32", "name": "", "type": "bytes32"}], "stateMutability": "view", "type": "function"}, {"inputs": [], "name": "getOwner", "outputs": [{"internalType": "address", "name": "", "type": "address"}], "stateMutability": "view", "type": "function"}, {"inputs": [{"internalType": "address", "name": "provider", "type": "address"}, {"internalType": "uint32", "name": "pricesSetBn", "type": "uint32"}], "name": "getProviderInfo", "outputs": [{"internalType": "uint32", "name": "", "type": "uint32"}, {"components": [{"internalType": "uint32", "name": "availableCore", "type": "uint32"}, {"internalType": "uint32", "name": "commitmentBlockDur", "type": "uint32"}, {"internalType": "uint32", "name": "priceCoreMin", "type": "uint32"}, {"internalType": "uint32", "name": "priceDataTransfer", "type": "uint32"}, {"internalType": "uint32", "name": "priceStorage", "type": "uint32"}, {"internalType": "uint32", "name": "priceCache", "type": "uint32"}], "internalType": "struct Lib.ProviderInfo", "name": "", "type": "tuple"}], "stateMutability": "view", "type": "function"}, {"inputs": [{"internalType": "address", "name": "provider", "type": "address"}, {"internalType": "string", "name": "key", "type": "string"}, {"internalType": "uint256", "name": "index", "type": "uint256"}], "name": "getProviderPrices", "outputs": [{"components": [{"internalType": "uint32", "name": "availableCore", "type": "uint32"}, {"internalType": "uint32", "name": "commitmentBlockDur", "type": "uint32"}, {"internalType": "uint32", "name": "priceCoreMin", "type": "uint32"}, {"internalType": "uint32", "name": "priceDataTransfer", "type": "uint32"}, {"internalType": "uint32", "name": "priceStorage", "type": "uint32"}, {"internalType": "uint32", "name": "priceCache", "type": "uint32"}], "internalType": "struct Lib.ProviderInfo", "name": "", "type": "tuple"}], 
"stateMutability": "view", "type": "function"}, {"inputs": [{"internalType": "address", "name": "provider", "type": "address"}, {"internalType": "uint32", "name": "index", "type": "uint32"}], "name": "getProviderReceiptNode", "outputs": [{"internalType": "uint32", "name": "", "type": "uint32"}, {"internalType": "uint256", "name": "", "type": "uint256"}, {"internalType": "int32", "name": "", "type": "int32"}], "stateMutability": "view", "type": "function"}, {"inputs": [{"internalType": "address", "name": "provider", "type": "address"}], "name": "getProviderSetBlockNumbers", "outputs": [{"internalType": "uint32[]", "name": "", "type": "uint32[]"}], "stateMutability": "view", "type": "function"}, {"inputs": [], "name": "getProviders", "outputs": [{"internalType": "address[]", "name": "", "type": "address[]"}], "stateMutability": "view", "type": "function"}, {"inputs": [{"internalType": "address", "name": "provider", "type": "address"}, {"internalType": "bytes32", "name": "sourceCodeHash", "type": "bytes32"}], "name": "getRegisteredDataBlockNumbers", "outputs": [{"internalType": "uint32[]", "name": "", "type": "uint32[]"}], "stateMutability": "view", "type": "function"}, {"inputs": [{"internalType": "address", "name": "provider", "type": "address"}, {"internalType": "bytes32", "name": "sourceCodeHash", "type": "bytes32"}, {"internalType": "uint32", "name": "pricesSetBn", "type": "uint32"}], "name": "getRegisteredDataPrice", "outputs": [{"components": [{"internalType": "uint32", "name": "price", "type": "uint32"}, {"internalType": "uint32", "name": "commitmentBlockDur", "type": "uint32"}], "internalType": "struct Lib.DataInfo", "name": "", "type": "tuple"}], "stateMutability": "view", "type": "function"}, {"inputs": [{"internalType": "address", "name": "requester", "type": "address"}], "name": "getRequesterCommittmedBlock", "outputs": [{"internalType": "uint32", "name": "", "type": "uint32"}], "stateMutability": "view", "type": "function"}, {"inputs": [{"internalType": "address", "name": "provider", "type": "address"}, {"internalType": "address", "name": "requester", "type": "address"}, {"internalType": "bytes32", "name": "codeHash", "type": "bytes32"}], "name": "getStorageInfo", "outputs": [{"internalType": "uint256", "name": "", "type": "uint256"}, {"components": [{"internalType": "uint32", "name": "receivedBlock", "type": "uint32"}, {"internalType": "uint32", "name": "storageDuration", "type": "uint32"}, {"internalType": "bool", "name": "isPrivate", "type": "bool"}, {"internalType": "bool", "name": "isVerifiedUsed", "type": "bool"}], "internalType": "struct Lib.JobStorage", "name": "", "type": "tuple"}], "stateMutability": "view", "type": "function"}, {"inputs": [{"internalType": "address", "name": "provider", "type": "address"}], "name": "getUpdatedProviderPricesBlocks", "outputs": [{"internalType": "uint32[]", "name": "", "type": "uint32[]"}], "stateMutability": "view", "type": "function"}, {"inputs": [{"internalType": "address", "name": "user", "type": "address"}], "name": "isOrcIDVerified", "outputs": [{"internalType": "bool", "name": "", "type": "bool"}], "stateMutability": "view", "type": "function"}, {"inputs": [], "name": "owner", "outputs": [{"internalType": "address", "name": "", "type": "address"}], "stateMutability": "view", "type": "function"}, {"inputs": [{"internalType": "string", "name": "key", "type": "string"}, {"components": [{"internalType": "uint32", "name": "index", "type": "uint32"}, {"internalType": "uint32", "name": "jobID", "type": "uint32"}, {"internalType": 
"uint32", "name": "endTimestamp", "type": "uint32"}, {"internalType": "uint32", "name": "dataTransferIn", "type": "uint32"}, {"internalType": "uint32", "name": "dataTransferOut", "type": "uint32"}, {"internalType": "uint32", "name": "elapsedTime", "type": "uint32"}, {"internalType": "uint256[]", "name": "core", "type": "uint256[]"}, {"internalType": "uint256[]", "name": "runTime", "type": "uint256[]"}, {"internalType": "bool", "name": "endJob", "type": "bool"}], "internalType": "struct Lib.JobIndexes", "name": "args", "type": "tuple"}, {"internalType": "bytes32", "name": "resultIpfsHash", "type": "bytes32"}], "name": "processPayment", "outputs": [], "stateMutability": "nonpayable", "type": "function"}, {"inputs": [{"internalType": "address", "name": "provider", "type": "address"}, {"internalType": "string", "name": "key", "type": "string"}, {"internalType": "uint32", "name": "index", "type": "uint32"}, {"internalType": "uint32", "name": "jobID", "type": "uint32"}, {"internalType": "uint256[]", "name": "core", "type": "uint256[]"}, {"internalType": "uint256[]", "name": "elapsedTime", "type": "uint256[]"}], "name": "refund", "outputs": [{"internalType": "bool", "name": "", "type": "bool"}], "stateMutability": "nonpayable", "type": "function"}, {"inputs": [{"internalType": "address", "name": "provider", "type": "address"}, {"internalType": "address payable", "name": "requester", "type": "address"}, {"internalType": "bytes32", "name": "sourceCodeHash", "type": "bytes32"}], "name": "refundStorageDeposit", "outputs": [{"internalType": "bool", "name": "", "type": "bool"}], "stateMutability": "nonpayable", "type": "function"}, {"inputs": [{"internalType": "bytes32", "name": "sourceCodeHash", "type": "bytes32"}, {"internalType": "uint32", "name": "price", "type": "uint32"}, {"internalType": "uint32", "name": "commitmentBlockDur", "type": "uint32"}], "name": "registerData", "outputs": [], "stateMutability": "nonpayable", "type": "function"}, {"inputs": [{"internalType": "bytes32", "name": "gpgFingerprint", "type": "bytes32"}, {"internalType": "string", "name": "gmail", "type": "string"}, {"internalType": "string", "name": "fcID", "type": "string"}, {"internalType": "string", "name": "ipfsID", "type": "string"}, {"internalType": "uint32", "name": "availableCore", "type": "uint32"}, {"internalType": "uint32[]", "name": "prices", "type": "uint32[]"}, {"internalType": "uint32", "name": "commitmentBlockDur", "type": "uint32"}], "name": "registerProvider", "outputs": [{"internalType": "bool", "name": "", "type": "bool"}], "stateMutability": "nonpayable", "type": "function"}, {"inputs": [{"internalType": "bytes32", "name": "gpgFingerprint", "type": "bytes32"}, {"internalType": "string", "name": "gmail", "type": "string"}, {"internalType": "string", "name": "fcID", "type": "string"}, {"internalType": "string", "name": "ipfsID", "type": "string"}], "name": "registerRequester", "outputs": [{"internalType": "bool", "name": "", "type": "bool"}], "stateMutability": "nonpayable", "type": "function"}, {"inputs": [{"internalType": "bytes32", "name": "sourceCodeHash", "type": "bytes32"}], "name": "removeRegisteredData", "outputs": [], "stateMutability": "nonpayable", "type": "function"}, {"inputs": [], "name": "resumeProvider", "outputs": [{"internalType": "bool", "name": "", "type": "bool"}], "stateMutability": "nonpayable", "type": "function"}, {"inputs": [{"internalType": "string", "name": "key", "type": "string"}, {"internalType": "uint32", "name": "index", "type": "uint32"}, {"internalType": "bytes32[]", 
"name": "sourceCodeHash", "type": "bytes32[]"}, {"internalType": "uint8[]", "name": "cacheType", "type": "uint8[]"}], "name": "setDataPublic", "outputs": [], "stateMutability": "nonpayable", "type": "function"}, {"inputs": [{"internalType": "bytes32[]", "name": "sourceCodeHash", "type": "bytes32[]"}], "name": "setDataVerified", "outputs": [{"internalType": "bool", "name": "", "type": "bool"}], "stateMutability": "nonpayable", "type": "function"}, {"inputs": [{"internalType": "address", "name": "provider", "type": "address"}, {"internalType": "string", "name": "key", "type": "string"}, {"internalType": "string", "name": "desc", "type": "string"}], "name": "setJobDescription", "outputs": [{"internalType": "bool", "name": "", "type": "bool"}], "stateMutability": "nonpayable", "type": "function"}, {"inputs": [{"internalType": "string", "name": "key", "type": "string"}, {"internalType": "uint32", "name": "index", "type": "uint32"}, {"internalType": "uint32", "name": "jobID", "type": "uint32"}, {"internalType": "uint32", "name": "startTimestamp", "type": "uint32"}], "name": "setJobStateRunning", "outputs": [{"internalType": "bool", "name": "", "type": "bool"}], "stateMutability": "nonpayable", "type": "function"}, {"inputs": [{"internalType": "string", "name": "key", "type": "string"}, {"internalType": "uint32[]", "name": "dataTransferIn", "type": "uint32[]"}, {"components": [{"internalType": "address payable", "name": "provider", "type": "address"}, {"internalType": "uint32", "name": "priceBlockIndex", "type": "uint32"}, {"internalType": "uint8[]", "name": "cloudStorageID", "type": "uint8[]"}, {"internalType": "uint8[]", "name": "cacheType", "type": "uint8[]"}, {"internalType": "uint32[]", "name": "dataPricesSetBlockNum", "type": "uint32[]"}, {"internalType": "uint16[]", "name": "core", "type": "uint16[]"}, {"internalType": "uint16[]", "name": "runTime", "type": "uint16[]"}, {"internalType": "uint32", "name": "dataTransferOut", "type": "uint32"}], "internalType": "struct Lib.JobArgument", "name": "args", "type": "tuple"}, {"internalType": "uint32[]", "name": "storageDuration", "type": "uint32[]"}, {"internalType": "bytes32[]", "name": "sourceCodeHash", "type": "bytes32[]"}], "name": "submitJob", "outputs": [], "stateMutability": "payable", "type": "function"}, {"inputs": [], "name": "suspendProvider", "outputs": [{"internalType": "bool", "name": "", "type": "bool"}], "stateMutability": "nonpayable", "type": "function"}, {"inputs": [{"internalType": "address", "name": "newOwner", "type": "address"}], "name": "transferOwnership", "outputs": [], "stateMutability": "nonpayable", "type": "function"}, {"inputs": [{"internalType": "bytes32", "name": "sourceCodeHash", "type": "bytes32"}, {"internalType": "uint32", "name": "price", "type": "uint32"}, {"internalType": "uint32", "name": "commitmentBlockDur", "type": "uint32"}], "name": "updataDataPrice", "outputs": [], "stateMutability": "nonpayable", "type": "function"}, {"inputs": [{"internalType": "bytes32", "name": "gpgFingerprint", "type": "bytes32"}, {"internalType": "string", "name": "gmail", "type": "string"}, {"internalType": "string", "name": "fcID", "type": "string"}, {"internalType": "string", "name": "ipfsID", "type": "string"}], "name": "updateProviderInfo", "outputs": [{"internalType": "bool", "name": "", "type": "bool"}], "stateMutability": "nonpayable", "type": "function"}, {"inputs": [{"internalType": "uint32", "name": "availableCore", "type": "uint32"}, {"internalType": "uint32", "name": "commitmentBlockDur", "type": "uint32"}, 
{"internalType": "uint32[]", "name": "prices", "type": "uint32[]"}], "name": "updateProviderPrices", "outputs": [{"internalType": "bool", "name": "", "type": "bool"}], "stateMutability": "nonpayable", "type": "function"}, {"inputs": [], "name": "withdraw", "outputs": [{"internalType": "bool", "name": "", "type": "bool"}], "stateMutability": "nonpayable", "type": "function"}] \ No newline at end of file diff --git a/broker/eblocbroker_scripts/contract.yaml b/broker/eblocbroker_scripts/contract.yaml index 252f897a..a04b61b1 100644 --- a/broker/eblocbroker_scripts/contract.yaml +++ b/broker/eblocbroker_scripts/contract.yaml @@ -1,8 +1,8 @@ networks: bloxberg: project_dir: ~/ebloc-broker/contract - address: '0xB5698bAFc55812d299c9Cb62a4549550b3176803' - tx_hash: '0xc5bc960f777a41dc76d4a67b8cd4b6eb9893d2a05fffba21ff7e33a21dd84400' + address: '0xa0Fac3232234478E6A0d4d5564ed239c956A21f0' + tx_hash: '0xbf23cc88cee87882452b6a0c484d1144c5ec1503fb85c2451a7e477d39f53593' # eblocpoa: # project_dir: ~/ebloc-broker/contract # address: '0x1B5DD5B6FD259C19767a47CA0cADFecb25786FCC' diff --git a/broker/eblocbroker_scripts/data.py b/broker/eblocbroker_scripts/data.py index 023a2e6b..c960a61f 100755 --- a/broker/eblocbroker_scripts/data.py +++ b/broker/eblocbroker_scripts/data.py @@ -63,7 +63,7 @@ def get_data_info(self, provider) -> None: raise e -if __name__ == "__main__": +def main(): if len(sys.argv) == 2: provider = str(sys.argv[1]) else: @@ -74,3 +74,7 @@ def get_data_info(self, provider) -> None: except Exception as e: print_tb(e) sys.exit(1) + + +if __name__ == "__main__": + main() diff --git a/broker/eblocbroker_scripts/get_data_price.py b/broker/eblocbroker_scripts/get_data_price.py index 02f3fe07..d524a3b5 100755 --- a/broker/eblocbroker_scripts/get_data_price.py +++ b/broker/eblocbroker_scripts/get_data_price.py @@ -6,21 +6,63 @@ Ebb = cfg.Ebb -def get_data_price(provider, source_code_hash): - code_hash_bytes = cfg.w3.toBytes(text=source_code_hash) - data_block_numbers = Ebb.get_registered_data_bn(provider, code_hash_bytes) - (price, commitment_block_duration) = Ebb.get_registered_data_price( - provider, code_hash_bytes, data_block_numbers[-1] +def get_data_price(provider, source_code_hash, is_verbose=True): + bn = Ebb.get_block_number() + note_msg = "" + if isinstance(source_code_hash, bytes): + code_hash_bytes = source_code_hash + else: + code_hash_bytes = cfg.w3.toBytes(text=source_code_hash) + + registered_data_bn_list = Ebb.get_registered_data_bn(provider, code_hash_bytes) + if bn > registered_data_bn_list[-1]: + data_price_set_bn = registered_data_bn_list[-1] + else: + data_price_set_bn = registered_data_bn_list[-2] + if is_verbose: + remaining_min = (registered_data_bn_list[-1] - bn) * 6 / 60 + note_msg = f"{remaining_min} minutes remaining for new price to take place" + + (price, commitment_block_dur) = Ebb.get_registered_data_price(provider, code_hash_bytes, data_price_set_bn) + if is_verbose: + log(f" * price={price}") + log(f" * commitment_block_dur={commitment_block_dur}") + + prices = [] + for _bn in registered_data_bn_list: + (price, commitment_block_dur) = Ebb.get_registered_data_price(provider, code_hash_bytes, _bn) + prices.append(price) + + log(f" * registered_data_bn_list={registered_data_bn_list}") + log(f" * prices={prices}") + if note_msg: + log(f"## {note_msg}") + + return price, commitment_block_dur + + +def get_latest_data_price(provider, source_code_hash, is_verbose=True): + if isinstance(source_code_hash, bytes): + code_hash_bytes = source_code_hash + else: + 
code_hash_bytes = cfg.w3.toBytes(text=source_code_hash) + + registered_data_bn_list = Ebb.get_registered_data_bn(provider, code_hash_bytes) + (price, commitment_block_dur) = Ebb.get_registered_data_price( + provider, code_hash_bytes, registered_data_bn_list[-1] ) - log(f"==> price={price}") - log(f"==> commitment_block_duration={commitment_block_duration}") - log(f"==> data_block_numbers={data_block_numbers}") + if is_verbose: + log(f" * price={price}") + log(f" * commitment_block_dur={commitment_block_dur}") + log(f" * registered_data_bn_list={registered_data_bn_list}") + + return price, commitment_block_dur def main(): - address = "0x29e613b04125c16db3f3613563bfdd0ba24cb629" - code_hash = "9d5d892a63b5758090258300a59eb389" - get_data_price(address, code_hash) + provider_address = "0x29e613b04125c16db3f3613563bfdd0ba24cb629" + code_hash = "050e6cc8dd7e889bf7874689f1e1ead6" + get_data_price(provider_address, code_hash) if __name__ == "__main__": diff --git a/broker/eblocbroker_scripts/get_job_info.py b/broker/eblocbroker_scripts/get_job_info.py index 4cdeaaa0..44018164 100755 --- a/broker/eblocbroker_scripts/get_job_info.py +++ b/broker/eblocbroker_scripts/get_job_info.py @@ -1,8 +1,6 @@ #!/usr/bin/env python3 import sys -import traceback -from math import ceil from broker import cfg from broker._utils._log import br, log @@ -14,7 +12,7 @@ def analyze_data(self, key, provider=None): """Obtain information related to source-code data.""" - current_block_number = cfg.Ebb.get_block_number() + current_bn = cfg.Ebb.get_block_number() self.received_block = [] self.storage_duration = [] self.job_info["is_cached"] = {} @@ -36,10 +34,10 @@ def analyze_data(self, key, provider=None): self.job_info["is_cached"][code_hash_str] = False # FIXME double check # if remaining time to cache is 0, then caching is requested for the # related code_hash - if ds.received_block + ds.storage_duration >= current_block_number: - if ds.received_block < current_block_number: + if ds.received_block + ds.storage_duration >= current_bn: + if ds.received_block < current_bn: self.job_info["is_cached"][code_hash_str] = True - elif ds.received_block == current_block_number: + elif ds.received_block == current_bn: if code_hash in self.job_info["is_cached"]: self.job_info["is_cached"][code_hash_str] = True else: @@ -49,57 +47,57 @@ def analyze_data(self, key, provider=None): log(f" * code_hash{br(idx)}=[green]{code_hash_str}") log(f"==> received_block={ds.received_block}") - log(f"==> storage_duration{br(self.job_info['received_block_number'])}={ds.storage_duration}") + log(f"==> storage_duration{br(self.job_info['received_bn'])}={ds.storage_duration}") log(f"==> cloud_storage_id{br(idx)}={StorageID(self.job_info['cloudStorageID'][idx]).name}") log(f"==> cached_type={CacheType(self.job_info['cacheType'][idx]).name}") log(f"==> is_cached={self.job_info['is_cached'][code_hash_str]}") -def set_job_received_block_number(self, received_block_number): - if not received_block_number: - received_block_number = self.deployed_block_number +def set_job_received_bn(self, received_bn): + if not received_bn: + received_bn = self.deployed_block_number self.to_block = "latest" else: - self.to_block = int(received_block_number) + self.to_block = int(received_bn) - if int(received_block_number) > int(self.job_info["received_block_number"]): - self.job_info["received_block_number"] = received_block_number + if int(received_bn) > int(self.job_info["received_bn"]): + self.job_info["received_bn"] = received_bn def update_job_cores(self, provider, 
job_key, index=0, received_bn=0) -> int: """Update job cores.""" - self.set_job_received_block_number(received_bn) + self.set_job_received_bn(received_bn) try: event_filter = self._eblocbroker.events.LogJob.createFilter( argument_filters={"provider": str(provider)}, - fromBlock=int(self.job_info["received_block_number"]), + fromBlock=int(self.job_info["received_bn"]), toBlock=self.to_block, ) for logged_job in event_filter.get_all_entries(): if logged_job.args["jobKey"] == job_key and logged_job.args["index"] == int(index): - self.job_info["received_block_number"] = received_bn = int(logged_job["blockNumber"]) + self.job_info["received_bn"] = received_bn = int(logged_job["blockNumber"]) self.job_info.update({"core": logged_job.args["core"]}) self.job_info.update({"run_time": logged_job.args["runTime"]}) self.job_info.update({"cloudStorageID": logged_job.args["cloudStorageID"]}) self.job_info.update({"cacheType": logged_job.args["cacheType"]}) break else: - log(f"E: failed to find job({job_key}) to update") + log(f"E: failed to find and update job({job_key})") return received_bn except Exception as e: - print_tb(f"E: Failed to update_job_cores.\n{e}") + print_tb(f"E: Failed to update job cores.\n{e}") raise e -def get_job_code_hashes(self, provider, job_key, index, received_block_number=0): - """code_hashes of the completed job is obtained from its event.""" - # job_info["received_block_number"] - self.set_job_received_block_number(received_block_number) +def get_job_code_hashes(self, provider, job_key, index, received_bn=0): + """Return code hashes of the completed job is obtained from its event.""" + # job_info["received_bn"] + self.set_job_received_bn(received_bn) try: event_filter = self._eblocbroker.events.LogJob.createFilter( argument_filters={"provider": str(provider)}, - fromBlock=int(self.job_info["received_block_number"]), + fromBlock=int(self.job_info["received_bn"]), toBlock=self.to_block, ) for logged_job in event_filter.get_all_entries(): @@ -113,23 +111,19 @@ def get_job_code_hashes(self, provider, job_key, index, received_block_number=0) raise e -def get_job_info_print(self, provider, job_key, index, received_block_number): +def get_job_info_print(self, provider, job_key, index, received_bn): Ebb = cfg.Ebb - elapsed_time = 0 result_ipfs_hash = "" if self.job_info["result_ipfs_hash"] != empty_bytes32 and self.job_info["result_ipfs_hash"] != "": result_ipfs_hash = bytes32_to_ipfs(self.job_info["result_ipfs_hash"]) - if self.job_info["end_timestamp"]: - elapsed_time = int(self.job_info["end_timestamp"]) - int(self.job_info["start_timestamp"]) - if isinstance(self.job_info, dict): log(f"==> state_code={state.inv_code[self.job_info['stateCode']]}({self.job_info['stateCode']})") log(self.job_info) if result_ipfs_hash: log(f"==> result_ipfs_hash={result_ipfs_hash}") - Ebb.get_job_code_hashes(provider, job_key, index, received_block_number) + Ebb.get_job_code_hashes(provider, job_key, index, received_bn) if self.job_info["code_hashes"]: log("code_hashes:", "bold blue") for idx, code_hash in enumerate(self.job_info["code_hashes"]): @@ -152,21 +146,12 @@ def get_job_info_print(self, provider, job_key, index, received_block_number): else: print(self.job_info) - assert elapsed_time >= 0, "elapsed_time is negative" - - -def get_job_owner(self, provider, job_key, index, job_id=0): - job, received, job_owner, data_transfer_in, data_transfer_out = self._get_job_info( - provider, job_key, int(index), int(job_id) - ) - return job_owner - -def get_job_info(self, provider, job_key, index, 
job_id, received_block_number=0, is_print=True, is_log_print=False): +def get_job_info(self, provider, job_key, index, job_id, received_bn=0, is_print=True, is_log_print=False): """Return information of the job.""" if is_print: fn = "~/ebloc-broker/broker/eblocbroker_scripts/get_job_info.py" - log(f"$ {fn} {provider} {job_key} {index} {job_id} {received_block_number}", "bold cyan", is_code=True) + log(f"$ {fn} {provider} {job_key} {index} {job_id} {received_bn}", "bold cyan", is_code=True) try: provider = cfg.w3.toChecksumAddress(provider) @@ -192,62 +177,58 @@ def get_job_info(self, provider, job_key, index, job_id, received_block_number=0 "price_data_transfer": job_prices[3], "price_storage": job_prices[4], "price_cache": job_prices[5], - "received_block_number": received_block_number, + "received_bn": received_bn, "core": None, "run_time": None, - "actual_run_time": None, + "actual_elapsed_time": None, "cloudStorageID": None, "result_ipfs_hash": "", - "end_timestamp": None, - "refundedGwei": None, - "receivedGwei": None, + "refunded_gwei": None, + "received_gwei": None, "code_hashes": None, "data_transfer_in_to_download": None, "data_transfer_out_used": None, "storage_duration": None, } - received_block_number = self.update_job_cores(provider, job_key, index, received_block_number) - if not received_block_number or received_block_number == self.deployed_block_number: + received_bn = self.update_job_cores(provider, job_key, index, received_bn) + if not received_bn or received_bn == self.deployed_block_number: # First reading from the mongoDB, this will increase the speed to fetch from the logged data - received_block_number_temp = self.mongo_broker.get_job_block_number( - self.job_info["job_owner"], job_key, index - ) - if received_block_number == 0 and received_block_number_temp == 0: - received_block_number = self.deployed_block_number - - if received_block_number > self.deployed_block_number: - self.job_info["received_block_number"] = received_block_number + received_bn_temp = self.mongo_broker.get_job_block_number(self.job_info["job_owner"], job_key, index) + if received_bn == 0 and received_bn_temp == 0: + received_bn = self.deployed_block_number + + if received_bn > self.deployed_block_number: + self.job_info["received_bn"] = received_bn # else: - # to_block = int(received_block_number) + # to_block = int(received_bn) event_filter = self._eblocbroker.events.LogProcessPayment.createFilter( argument_filters={"provider": str(provider)}, - fromBlock=int(received_block_number), + fromBlock=int(received_bn), toBlock="latest", ) for logged_receipt in event_filter.get_all_entries(): if logged_receipt.args["jobKey"] == job_key and logged_receipt.args["index"] == int(index): self.job_info.update({"result_ipfs_hash": logged_receipt.args["resultIpfsHash"]}) - # self.job_info.update({"end_timestamp": logged_receipt.args["endTimestamp"]}) - self.job_info.update({"receivedGwei": logged_receipt.args["receivedGwei"]}) - self.job_info.update({"refundedGwei": logged_receipt.args["refundedGwei"]}) + self.job_info.update({"received_gwei": logged_receipt.args["receivedGwei"]}) + self.job_info.update({"refunded_gwei": logged_receipt.args["refundedGwei"]}) self.job_info.update({"data_transfer_in_to_download": logged_receipt.args["dataTransferIn"]}) self.job_info.update({"data_transfer_out_used": logged_receipt.args["dataTransferOut"]}) self.job_info.update({"data_transfer_out_used": logged_receipt.args["dataTransferOut"]}) - # self.job_info["actual_run_time"] = ceil( - # self.job_info["end_timestamp"] - 
self.job_info["start_timestamp"] - # ) + self.job_info.update({"actual_elapsed_time": logged_receipt.args["elapsedTime"]}) + if self.job_info["result_ipfs_hash"] == empty_bytes32: + self.job_info.update({"result_ipfs_hash": b""}) + break except Exception as e: - log(f"E: Failed to get_job_info: {traceback.format_exc()}") raise e if str(self.job_info["core"]) == "0": raise Exception("Failed to get_job_info: Out of index") if is_log_print: - self.get_job_info_print(provider, job_key, index, received_block_number) + self.get_job_info_print(provider, job_key, index, received_bn) - if self.job_info["storage_duration"] is None: + if not self.job_info["storage_duration"]: self.job_info["storage_duration"] = [] for _ in range(len(self.job_info["cacheType"])): self.job_info["storage_duration"].append(0) @@ -256,7 +237,7 @@ def get_job_info(self, provider, job_key, index, job_id, received_block_number=0 def main(): - received_block_number = 0 + received_bn = 0 job_id = 0 if len(sys.argv) > 3: provider = str(sys.argv[1]) @@ -266,16 +247,12 @@ def main(): job_id = int(sys.argv[4]) if len(sys.argv) == 6: - received_block_number = int(sys.argv[5]) + received_bn = int(sys.argv[5]) else: - log("E: Provide as arguments") + log("E: Provide as arguments") sys.exit(1) - try: - Ebb = cfg.Ebb - Ebb.get_job_info(provider, job_key, index, job_id, received_block_number, is_log_print=True) - except Exception as e: - raise e + cfg.Ebb.get_job_info(provider, job_key, index, job_id, received_bn, is_log_print=True) if __name__ == "__main__": diff --git a/broker/eblocbroker_scripts/get_requester_info.py b/broker/eblocbroker_scripts/get_requester_info.py index 885abd90..56c40555 100755 --- a/broker/eblocbroker_scripts/get_requester_info.py +++ b/broker/eblocbroker_scripts/get_requester_info.py @@ -14,8 +14,8 @@ def get_requester_info(self, requester): if not self.does_requester_exist(requester): log( f"E: Requester({requester}) is not registered.\n" - "Please try again with registered Ethereum Address as requester. 
\n" - "You can register your requester using: [blue]./broker/eblocbroker_scripts/register_requester.py", + "Please try again with registered Ethereum Address as requester.\n" + "You can register requester using: [blue]./broker/eblocbroker_scripts/register_requester.py", ) raise QuietExit @@ -46,7 +46,6 @@ def get_requester_info(self, requester): Ebb = cfg.Ebb if len(sys.argv) == 1: requester = "0xD118b6EF83ccF11b34331F1E7285542dDf70Bc49" - # requester = "0x12ba09353d5C8aF8Cb362d6FF1D782C1E195b571" elif len(sys.argv) == 2: requester = str(sys.argv[1]) diff --git a/broker/eblocbroker_scripts/job.py b/broker/eblocbroker_scripts/job.py index 07c50769..7c0d9efc 100755 --- a/broker/eblocbroker_scripts/job.py +++ b/broker/eblocbroker_scripts/job.py @@ -276,7 +276,7 @@ def add_empty_data_item(self): self.storage_hours.append(0) self.storage_ids.append(StorageID.NONE) self.data_transfer_ins.append(0) - self.data_prices_set_block_numbers.append(0) # TODO: calculate from the contract + self.data_prices_set_block_numbers.append(0) def print_before_submit(self): for idx, code_hash in enumerate(self.code_hashes_str): @@ -305,18 +305,17 @@ def _search_best_provider(self, requester, is_verbose=False): selected_provider = provider selected_price = _price - is_all_same = all(x == price_list[0] for x in price_list) - return selected_provider, selected_price, is_all_same + is_all_equal = all(x == price_list[0] for x in price_list) + return selected_provider, selected_price, is_all_equal def search_best_provider(self, requester): - provider_to_share, best_price, is_all_same = self._search_best_provider(requester, is_verbose=True) + provider_to_share, best_price, is_all_equal = self._search_best_provider(requester, is_verbose=True) self.price, *_ = self.cost(provider_to_share, requester) if self.price != best_price: raise Exception(f"job_price={self.price} and best_price={best_price} does not match") - if is_all_same: # force to submit given provider address + if is_all_equal: # force to submit given provider address provider_to_share = self.Ebb.w3.toChecksumAddress(self.provider_addr) - # breakpoint() # DEBUG log(f"[green]##[/green] provider_to_share={provider_to_share} | price={best_price}", "bold") return self.Ebb.w3.toChecksumAddress(provider_to_share) @@ -424,25 +423,24 @@ def set_storage_cost(self, is_verbose=False): and ds.is_verified_used ): if is_verbose: - log(f"==> For {bytes32_to_ipfs(code_hash)} cost of storage is not paid") + log(f"==> for {bytes32_to_ipfs(code_hash)} cost of storage is not paid") else: if self.job.data_prices_set_block_numbers[idx] > 0 or self.job.storage_ids[idx] == StorageID.NONE: if self.job.data_prices_set_block_numbers[idx] == 0: registered_data_bn_list = self.Ebb.get_registered_data_bn(self.job.provider, code_hash) if bn > registered_data_bn_list[-1]: - data_price_set_block_number = registered_data_bn_list[-1] + data_price_set_bn = registered_data_bn_list[-1] else: - data_price_set_block_number = registered_data_bn_list[-2] + data_price_set_bn = registered_data_bn_list[-2] else: - data_price_set_block_number = self.job.data_prices_set_block_numbers[idx] + data_price_set_bn = self.job.data_prices_set_block_numbers[idx] # if true, registered data's price should be considered for storage - output = self.ebb.getRegisteredDataPrice( + (data_price, *_) = self.Ebb.get_registered_data_price( self.job.provider, code_hash, - data_price_set_block_number, + data_price_set_bn, ) - data_price = output[0] self.storage_cost += data_price self.registered_data_cost_list[_code_hash] = 
data_price self.registered_data_cost += data_price diff --git a/broker/eblocbroker_scripts/log_job.py b/broker/eblocbroker_scripts/log_job.py index 1f9e36d9..f9cdfd45 100755 --- a/broker/eblocbroker_scripts/log_job.py +++ b/broker/eblocbroker_scripts/log_job.py @@ -35,7 +35,7 @@ def handle_event(logged_jobs): log(f"received={job.args['received']}") for value in job.args["sourceCodeHash"]: sourceCodeHash = job.args["sourceCodeHash"][value] - log(f"source_code_hash{br(value)} => {bytes32_to_ipfs(sourceCodeHash)}") + log(f"code_hash{br(value)} => {bytes32_to_ipfs(sourceCodeHash)}") console_ruler() @@ -103,8 +103,8 @@ def main(): from_block = int(sys.argv[1]) provider = str(sys.argv[2]) # Only obtains jobs that are submitted to the provider. else: - from_block = 13172386 - provider = "0x57b60037b82154ec7149142c606ba024fbb0f991" + from_block = 15867616 + provider = "0x1926b36af775e1312fdebcc46303ecae50d945af" handle_event(logged_jobs=Ebb.run_log_job(from_block, provider)) diff --git a/broker/eblocbroker_scripts/process_payment.py b/broker/eblocbroker_scripts/process_payment.py index 9411e835..327b8b7a 100755 --- a/broker/eblocbroker_scripts/process_payment.py +++ b/broker/eblocbroker_scripts/process_payment.py @@ -23,12 +23,17 @@ def process_payment( data_transfer_out, core, run_time, - received_block_number=0, + received_bn=0, ): """Process payment of the received job.""" + if not result_ipfs_hash: + _result_ipfs_hash = '""' + else: + _result_ipfs_hash = result_ipfs_hash + log( f"~/ebloc-broker/broker/eblocbroker_scripts/process_payment.py {job_key} {index} {job_id} {elapsed_time}" - f" {result_ipfs_hash} '{cloud_storage_ids}' {ended_timestamp} {data_transfer_in} {data_transfer_out} '{core}'" + f" {_result_ipfs_hash} '{cloud_storage_ids}' {ended_timestamp} {data_transfer_in} {data_transfer_out} '{core}'" f" '{run_time}'", "bold blue", ) @@ -37,7 +42,7 @@ def process_payment( if len(result_ipfs_hash) != 46 and cloud_storage_id in (StorageID.IPFS, StorageID.IPFS_GPG): raise Exception("Result ipfs's length does not match with its original length, check your job_key") - self.get_job_info(env.PROVIDER_ID, job_key, index, job_id, received_block_number, is_print=False) + self.get_job_info(env.PROVIDER_ID, job_key, index, job_id, received_bn, is_print=False) if self.job_info["stateCode"] == state.code["COMPLETED"]: log(f"warning: job ({job_key},{index},{job_id}) is completed and already get paid") sys.exit(1) @@ -60,13 +65,12 @@ def process_payment( int(ended_timestamp), int(data_transfer_in), int(data_transfer_out), - # int(elapsed_time), # TODO + int(elapsed_time), core, run_time, final_job, ] - tx = self._process_payment(job_key, args, int(elapsed_time), result_ipfs_hash) - # tx = self._process_payment(job_key, args, result_ipfs_hash) # TODO + tx = self._process_payment(job_key, args, result_ipfs_hash) except Exception as e: print_tb(e) raise e @@ -74,8 +78,7 @@ def process_payment( return self.tx_id(tx) -if __name__ == "__main__": - Ebb = cfg.Ebb +def main(): if len(sys.argv) == 12: args = sys.argv[1:] my_args = [] # type: Union[Any] @@ -106,7 +109,7 @@ def process_payment( sys.exit(1) try: - tx_hash = Ebb.process_payment( + tx_hash = cfg.Ebb.process_payment( job_key, index, job_id, @@ -119,6 +122,10 @@ def process_payment( core, run_time, ) - log(f"tx_hash={tx_hash}") + log(f"tx_hash={tx_hash}", "bold") except: sys.exit(1) + + +if __name__ == "__main__": + main() diff --git a/broker/eblocbroker_scripts/register_data.py b/broker/eblocbroker_scripts/register_data.py index 4919ebf8..3b4340cc 100755 
--- a/broker/eblocbroker_scripts/register_data.py +++ b/broker/eblocbroker_scripts/register_data.py @@ -1,11 +1,10 @@ #!/usr/bin/env python3 -from contextlib import suppress - from broker import cfg from broker._utils.tools import log from broker._utils.web3_tools import get_tx_status from broker.config import env +from broker.eblocbroker_scripts.get_data_price import get_latest_data_price from broker.errors import QuietExit from broker.utils import print_tb @@ -23,18 +22,26 @@ def _register_data(source_code_hash, data_price, commitment_dur): log(f"warning: provider [green]{env.PROVIDER_ID}[/green]'s orcid id is not authenticated yet") raise QuietExit - source_code_hash_bytes = cfg.w3.toBytes(text=source_code_hash) - with suppress(Exception): - (price, _commitment_dur) = cfg.Ebb.get_registered_data_price(env.PROVIDER_ID, source_code_hash_bytes, 0) + code_hash_bytes = cfg.w3.toBytes(text=source_code_hash) + try: + (price, _commitment_dur) = get_latest_data_price(env.PROVIDER_ID, code_hash_bytes, is_verbose=False) + bn = cfg.Ebb.get_registered_data_bn(env.PROVIDER_ID, code_hash_bytes) + if bn[0] == 0: + log(f"E: registered block number returns zero for {code_hash_bytes}") + is_exit = True + log( - f"## data([green]{source_code_hash}[/green]) is already registerered.\n" - "Use [blue]./update_data_price.py[/blue] to update its price" + f"## data([green]{source_code_hash}[/green]) is already registerered" + # "\nUse [blue]./update_data_price.py[/blue] to update its price" ) if data_price == price: is_exit = True else: log("## Update price") is_update = True + except Exception as e: + print_tb(e) + breakpoint() # DEBUG if is_exit: raise QuietExit @@ -45,9 +52,9 @@ def _register_data(source_code_hash, data_price, commitment_dur): try: if not is_update: - tx = Ebb.register_data(source_code_hash_bytes, data_price, commitment_dur) + tx = Ebb.register_data(code_hash_bytes, data_price, commitment_dur) else: - tx = Ebb.update_data_price(source_code_hash_bytes, data_price, commitment_dur) + tx = Ebb.update_data_price(code_hash_bytes, data_price, commitment_dur) get_tx_status(Ebb.tx_id(tx)) except QuietExit as e: diff --git a/broker/eblocbroker_scripts/register_provider.py b/broker/eblocbroker_scripts/register_provider.py index 19eab9fe..88259d3c 100755 --- a/broker/eblocbroker_scripts/register_provider.py +++ b/broker/eblocbroker_scripts/register_provider.py @@ -61,14 +61,14 @@ def get_ipfs_id() -> str: except ipfshttpclient.exceptions.ConnectionError: log( "E: Failed to establish a new connection to IPFS, please run it on the background.\n" - "Please run [magenta]~/ebloc-broker/broker/_daemons/ipfs.py" + "Please run [m]~/ebloc-broker/broker/_daemons/ipfs.py" ) sys.exit(1) except Exception as e: print_tb(e) log( "E: Failed to establish a new connection to IPFS, please run it on the background.\n" - "Please run [magenta]~/ebloc-broker/broker/_daemons/ipfs.py" + "Please run [m]~/ebloc-broker/broker/_daemons/ipfs.py" ) sys.exit(1) @@ -80,7 +80,7 @@ def get_ipfs_id() -> str: def error_msg(key, yaml_fn): - log(f"E: [blue]{key}[/blue] is empty in [magenta]{yaml_fn}") + log(f"E: [blue]{key}[/blue] is empty in [m]{yaml_fn}") def register_provider_wrapper(yaml_fn): diff --git a/broker/eblocbroker_scripts/update_data_price.py b/broker/eblocbroker_scripts/update_data_price.py index 66a4b93b..206a5f7a 100755 --- a/broker/eblocbroker_scripts/update_data_price.py +++ b/broker/eblocbroker_scripts/update_data_price.py @@ -14,22 +14,20 @@ def _update_data_price(): log(f"warning: Provider {env.PROVIDER_ID} is not 
registered.\n") raise QuietExit - source_code_hash = "b6aaf03752dc68d625fc57b451faa2bf" - new_data_price = 21 + code_hash = "050e6cc8dd7e889bf7874689f1e1ead6" + new_data_price = 20 commitment_block_duration = 600 - source_code_hash_bytes = cfg.w3.toBytes(text=source_code_hash) + code_hash_bytes = cfg.w3.toBytes(text=code_hash) try: - (price, _commitment_block_duration) = cfg.Ebb.get_registered_data_price( - env.PROVIDER_ID, source_code_hash_bytes, 0 - ) + (price, _commitment_block_duration) = cfg.Ebb.get_registered_data_price(env.PROVIDER_ID, code_hash_bytes, 0) if price == new_data_price and _commitment_block_duration == commitment_block_duration: - log(f"## data([green]{source_code_hash}[/green]) already registerered with the given values") + log(f"## data([green]{code_hash}[/green]) already registerered with the given values") raise QuietExit except: raise QuietExit try: - tx = Ebb.update_data_price(source_code_hash_bytes, new_data_price, commitment_block_duration) + tx = Ebb.update_data_price(code_hash_bytes, new_data_price, commitment_block_duration) get_tx_status(Ebb.tx_id(tx)) except QuietExit: pass diff --git a/broker/eblocbroker_scripts/update_provider_info.py b/broker/eblocbroker_scripts/update_provider_info.py index 643d3fe0..3f52468e 100755 --- a/broker/eblocbroker_scripts/update_provider_info.py +++ b/broker/eblocbroker_scripts/update_provider_info.py @@ -20,7 +20,7 @@ def is_provider_info_match(self, gmail, ipfs_id, gpg_fingerprint, f_id): and provider_info["ipfs_id"] == ipfs_id ): log(provider_info) - raise QuietExit("warning: Given information is same with the cluster's saved info. Nothing to do.") + raise QuietExit("warning: Given information is same as the cluster's saved info. Nothing to do.") tx = self._update_provider_info(f"0x{gpg_fingerprint}", gmail, f_id, ipfs_id) return self.tx_id(tx) @@ -29,9 +29,9 @@ def is_provider_info_match(self, gmail, ipfs_id, gpg_fingerprint, f_id): def update_provider_info(self, gpg_fingerprint, gmail, f_id, ipfs_id): - """Update provider info.""" + """Update provider information.""" if len(f_id) >= 128: - raise Exception("federation_cloud_id could be lesser than 128") + raise Exception("federation_cloud_id should be less than 128") if len(gmail) >= 128: raise Exception("e-mail should be less than 128") @@ -57,10 +57,10 @@ def update_provider_info(self, gpg_fingerprint, gmail, f_id, ipfs_id): gpg_fingerprint = cfg.ipfs.get_gpg_fingerprint(env.GMAIL) f_id = env.OC_USER - log(f"## gmail=[magenta]{env.GMAIL}") + log(f"## gmail=[m]{env.GMAIL}") log(f"## gpg_fingerprint={gpg_fingerprint}") - log(f"## ipfs_id=[magenta]{ipfs_id}") - log(f"## fid=[magenta]{f_id}") + log(f"## ipfs_id=[m]{ipfs_id}") + log(f"## fid=[m]{f_id}") try: cfg.ipfs.is_gpg_published(gpg_fingerprint) tx_hash = Ebb.update_provider_info(gpg_fingerprint, env.GMAIL, f_id, ipfs_id) diff --git a/broker/end_code.py b/broker/end_code.py index 07d9f221..7ebf43e1 100755 --- a/broker/end_code.py +++ b/broker/end_code.py @@ -136,10 +136,10 @@ def upload(self, key, is_job_key): cmd = [env.GDRIVE, "info", "--bytes", key, "-c", env.GDRIVE_METADATA] gdrive_info = subprocess_call(cmd, 5, sleep_time=30) except Exception as e: - raise Exception(f"{WHERE(1)} E: {key} does not have a match. meta_data={meta_data}. {e}") from e + raise Exception(f"{WHERE(1)} E: {key} does not have a match, meta_data={meta_data}. 
{e}") from e mime_type = gdrive.get_file_info(gdrive_info, "Mime") - log(f"mime_type=[magenta]{mime_type}", "bold") + log(f"mime_type=[m]{mime_type}", "bold") self.data_transfer_out += calculate_size(self.patch_file) log(f"data_transfer_out={self.data_transfer_out} MB =>" f" rounded={int(self.data_transfer_out)} MB", "bold") if "folder" in mime_type: @@ -218,17 +218,17 @@ def __init__(self, **kwargs) -> None: self.patch_dir_ipfs = Path(self.results_folder_prev) / "patch_ipfs" mkdirs([self.patch_dir, self.patch_dir_ipfs]) remove_empty_files_and_folders(self.results_folder) - log(f"==> whoami={getpass.getuser()} | id={os.getegid()}") - log(f"==> home={env.HOME}") - log(f"==> pwd={os.getcwd()}") - log(f"==> results_folder={self.results_folder}") - log(f"==> provider_id={env.PROVIDER_ID}") - log(f"==> job_key={self.job_key}") - log(f"==> index={self.index}") - log(f"==> storage_ids={self.storage_ids}") - log(f"==> folder_name=[white]{self.folder_name}") - log(f"==> requester_id_address={self.requester_id_address}") - log(f"==> received={self.job_info['received']}") + log(f" * whoami={getpass.getuser()} | id={os.getegid()}") + log(f" * home={env.HOME}") + log(f" * pwd={os.getcwd()}") + log(f" * results_folder={self.results_folder}") + log(f" * provider_id={env.PROVIDER_ID}") + log(f" * job_key={self.job_key}") + log(f" * index={self.index}") + log(f" * storage_ids={self.storage_ids}") + log(f" * folder_name=[white]{self.folder_name}") + log(f" * requester_id_address={self.requester_id_address}") + log(f" * received={self.job_info['received']}") self.job_state_running_pid = Ebb.mongo_broker.get_job_state_running_pid(self.job_key, self.index) with suppress(Exception): p = psutil.Process(int(self.job_state_running_pid)) @@ -237,7 +237,7 @@ def __init__(self, **kwargs) -> None: if not pid_exists(self.job_state_running_pid): break else: - log("#> job_state_running() is still running, sleeping for 15 seconds") + log("#> job_state_running() is still running; sleeping for 15 seconds") sleep(15) self.job_state_running_tx = Ebb.mongo_broker.get_job_state_running_tx(self.job_key, self.index) @@ -350,7 +350,7 @@ def remove_source_code(self) -> None: run(["find", self.results_folder, "-type", "f", "!", "-newer", timestamp_fn, "-delete"]) - def git_diff_patch_and_upload(self, source_fn: Path, name, storage_class, is_job_key): + def git_diff_patch_and_upload(self, source_fn: Path, name, storage_class, is_job_key) -> None: if is_job_key: log(f"==> base_patch={self.patch_dir}") log(f"==> source_code_patch={name}") @@ -534,7 +534,7 @@ def run(self): print_tb(e) raise e - log("## Receive state of the running job [ OK ]", "bold green") + log("## receive state of the running job [ OK ]", "bold green") try: self.job_info = eblocbroker_function_call( lambda: Ebb.get_job_code_hashes( @@ -559,7 +559,7 @@ def run(self): self.elapsed_time = run_time[self.job_id] log(f"finalized_elapsed_time={self.elapsed_time}", "bold") - log("## job_info=", "bold magenta", end="") + log("## job_info=", "info", end="") log(pprint.pformat(self.job_info), "bold") try: self.get_cloud_storage_class(0).initialize(self) diff --git a/broker/env.py b/broker/env.py index 14bf6f8d..d9ef4e12 100644 --- a/broker/env.py +++ b/broker/env.py @@ -1,6 +1,7 @@ #!/usr/bin/env python3 import os +import sys from pathlib import Path from sys import platform @@ -28,15 +29,16 @@ def __init__(self) -> None: self._HOME = Path("/Users") / self.WHOAMI elif platform == "win32": print("E: does not work in windows") - exit(1) + sys.exit(1) self.EBLOCPATH = 
Path(self.cfg["ebloc_path"]) - self.CONTRACT_PROJECT_PATH = self._HOME / "ebloc-broker" / "contract" + self.CONTRACT_PROJECT_PATH = self.EBLOCPATH / "contract" self.IS_BLOXBERG = True if self.IS_BLOXBERG: self.IS_EBLOCPOA = False # eblocpoa is not in use self.IS_GETH_TUNNEL = False + self.EBB_SCRIPTS = self.EBLOCPATH / "broker" / "eblocbroker_scripts" self.CONTRACT_YAML_FILE = self.EBLOCPATH / "broker" / "eblocbroker_scripts" / "contract.yaml" try: _yaml = Yaml(self.CONTRACT_YAML_FILE) diff --git a/broker/gdrive/README.org b/broker/gdrive/README.org index e528627b..d4e33b36 100644 --- a/broker/gdrive/README.org +++ b/broker/gdrive/README.org @@ -1,7 +1,7 @@ * Installation -link: -https://stackoverflow.com/questions/65396850/how-to-handle-app-is-temporarily-blocked-from-logging-in-with-your-google-accou/65507155#65507155 +- [[https://console.cloud.google.com/apis/credentials]] +- https://stackoverflow.com/questions/65396850/how-to-handle-app-is-temporarily-blocked-from-logging-in-with-your-google-accou/65507155#65507155 ~@tellowkrinkle's [comment][1] help me to solve the issue~ diff --git a/broker/gdrive/submit.py b/broker/gdrive/submit.py index 21f4b49e..a41876a9 100755 --- a/broker/gdrive/submit.py +++ b/broker/gdrive/submit.py @@ -48,7 +48,7 @@ def _submit(job, provider, key, requester, required_confs): def _share_folders(folder_ids_to_share, provider_gmail): for folder_id in folder_ids_to_share: cmd = ["gdrive", "share", folder_id, "--role", "writer", "--type", "user", "--email", provider_gmail] - log(f"share_output=[magenta]{run(cmd)}", "bold") + log(f"share_output=[m]{run(cmd)}", "bold") def submit_gdrive(job: Job, is_pass=False, required_confs=1): @@ -78,7 +78,7 @@ def submit_gdrive(job: Job, is_pass=False, required_confs=1): try: job.Ebb.is_provider_valid(provider_addr_to_submit) provider_info = job.Ebb.get_provider_info(provider_addr_to_submit) - # log(f"## provider_addr_to_submit=[magenta]{provider_addr_to_submit}") + # log(f"## provider_addr_to_submit=[m]{provider_addr_to_submit}") log(f"==> provider's available_core_num={provider_info['available_core_num']}") log(f"==> provider's price_core_min={provider_info['price_core_min']}") provider_gmail = provider_info["gmail"] diff --git a/broker/imports.py b/broker/imports.py index 4202d0ba..a5491f1f 100755 --- a/broker/imports.py +++ b/broker/imports.py @@ -43,20 +43,20 @@ def _connect_into_web3() -> None: else: cfg.w3 = Web3(HTTPProvider(f"http://localhost:{env.RPC_PORT}")) else: - web3_ipc_path = env.DATADIR.joinpath("geth.ipc") - cfg.w3 = Web3(IPCProvider(web3_ipc_path)) + web3_ipc_fn = env.DATADIR.joinpath("geth.ipc") + cfg.w3 = Web3(IPCProvider(web3_ipc_fn)) # inject the poa compatibility middleware to the innermost layer cfg.w3.middleware_onion.inject(geth_poa_middleware, layer=0) def connect_into_web3() -> None: - """Connect into private network using web3. + """Connect into private ethereum network using web3. 
Note that you should create only one RPC Provider per process, as it recycles underlying TCP/IP network connections between your process and Ethereum node """ - web3_ipc_path = env.DATADIR.joinpath("geth.ipc") + web3_ipc_fn = env.DATADIR.joinpath("geth.ipc") for _ in range(5): _connect_into_web3() if not cfg.w3.isConnected(): @@ -80,17 +80,25 @@ def connect_into_web3() -> None: "to /private/geth.ipc file doing: ", end="", ) - log(f"sudo chown $(logname) {web3_ipc_path}", "green") - log(f"#> Running `sudo chown $(whoami) {web3_ipc_path}`") - run(["sudo", "chown", env.WHOAMI, web3_ipc_path]) + log(f"sudo chown $(logname) {web3_ipc_fn}", "green") + log(f"#> Running `sudo chown $(whoami) {web3_ipc_fn}`") + run(["sudo", "chown", env.WHOAMI, web3_ipc_fn]) else: break else: terminate(is_traceback=False) +def read_abi_file(): + abi_file = env.EBLOCPATH / "broker" / "eblocbroker_scripts" / "abi.json" + try: + return read_json(abi_file, is_dict=False) + except Exception as e: + raise Exception(f"unable to read the abi.json file: {abi_file}") from e + + def connect_into_eblocbroker() -> None: - """Connect into ebloc-broker contract in the given blockchain.""" + """Connect into ebloc-broker smart contract in the given private blockchain.""" if config.ebb: return @@ -102,40 +110,31 @@ def connect_into_eblocbroker() -> None: raise QuietExit try: - abi_file = env.EBLOCPATH / "broker" / "eblocbroker_scripts" / "abi.json" - abi = read_json(abi_file, is_dict=False) - except Exception as e: - raise Exception(f"could not read the abi.json file: {abi_file}") from e - - try: - if env.IS_BLOXBERG: - if not cfg.IS_BROWNIE_TEST: - from brownie import network, project + if env.IS_EBLOCPOA: + config.ebb = cfg.w3.eth.contract(env.CONTRACT_ADDRESS, abi=read_abi_file()) + config._eblocbroker = config.ebb + config.ebb.contract_address = cfg.w3.toChecksumAddress(env.CONTRACT_ADDRESS) + elif env.IS_BLOXBERG and not cfg.IS_BROWNIE_TEST: + from brownie import network, project + try: + network.connect("bloxberg") + except: + add_bloxberg_into_network_config.main() try: + log( + "warning: [green]bloxberg[/green] key is added into the " + "[m]~/.brownie/network-config.yaml[/m] file. Please try again." + ) network.connect("bloxberg") - except Exception as e: - print_tb(e) - add_bloxberg_into_network_config.main() - # network.connect("bloxberg") - try: - log( - "warning: [green]bloxberg[/green] key is added into the " - "[magenta]~/.brownie/network-config.yaml[/magenta] yaml file. Please try again." 
- ) - network.connect("bloxberg") - except KeyError: - sys.exit(1) - - project = project.load(env.CONTRACT_PROJECT_PATH) - config.ebb = project.eBlocBroker.at(env.CONTRACT_ADDRESS) - config.ebb.contract_address = cfg.w3.toChecksumAddress(env.CONTRACT_ADDRESS) - #: for the contract's events - config._eblocbroker = cfg.w3.eth.contract(env.CONTRACT_ADDRESS, abi=abi) - elif env.IS_EBLOCPOA: - config.ebb = cfg.w3.eth.contract(env.CONTRACT_ADDRESS, abi=abi) - config._eblocbroker = config.ebb + except KeyError: + sys.exit(1) + + project = project.load(env.CONTRACT_PROJECT_PATH) + config.ebb = project.eBlocBroker.at(env.CONTRACT_ADDRESS) config.ebb.contract_address = cfg.w3.toChecksumAddress(env.CONTRACT_ADDRESS) + #: required to fetch the contract's events + config._eblocbroker = cfg.w3.eth.contract(env.CONTRACT_ADDRESS, abi=read_abi_file()) except Exception as e: print_tb(e) raise e diff --git a/broker/ipfs/submit.py b/broker/ipfs/submit.py index e6d36299..2a33417b 100755 --- a/broker/ipfs/submit.py +++ b/broker/ipfs/submit.py @@ -59,7 +59,7 @@ def pre_check(job: Job, requester): sys.exit() if not os.path.isfile(env.GPG_PASS_FILE): - log(f"E: Please store your gpg password in the [magenta]{env.GPG_PASS_FILE}[/magenta]\nfile for decrypting") + log(f"E: Please store your gpg password in the [m]{env.GPG_PASS_FILE}[/m]\nfile for decrypting") raise QuietExit start_ipfs_daemon() diff --git a/broker/lib.py b/broker/lib.py index ad34eb41..6f095feb 100755 --- a/broker/lib.py +++ b/broker/lib.py @@ -30,7 +30,7 @@ def enum(*sequential, **named): class State: - """State code of the Slurm jobs, add keys into the hashmap. + """Set state code of the Slurm jobs and add their keys into the hashmap. Hashmap keys: - SUBMITTED: Initial state. @@ -80,13 +80,13 @@ def session_start_msg(slurm_user, block_number, pid): else: PROVIDER_ID = env.PROVIDER_ID - log(f"==> driver process has the PID={pid}") - log(f"==> provider_address={PROVIDER_ID}") - log(f"==> slurm_user={slurm_user}") - log(f"==> left_of_block_number={block_number}") - log(f"==> latest__block_number={cfg.Ebb.get_block_number()}") + log(f" * driver_process_PID={pid}") + log(f" * provider_address={PROVIDER_ID}") + log(f" * slurm_user={slurm_user}") + log(f" * left_of_block_number={block_number}") + log(f" * latest__block_number={cfg.Ebb.get_block_number()}") if PROVIDER_ID == cfg.ZERO_ADDRESS: - raise QuietExit(f"provider_address={cfg.ZERO_ADDRESS}") + raise QuietExit(f"provider_address={cfg.ZERO_ADDRESS} is invalid") def run_driver_cancel(): diff --git a/broker/libs/_git.py b/broker/libs/_git.py index db30d9a1..425f86d1 100644 --- a/broker/libs/_git.py +++ b/broker/libs/_git.py @@ -127,7 +127,7 @@ def diff_patch(path: Path, source_code_hash, index, target_path, home_dir): patch_upload_fn = f"{patch_name}.gz" # file to be uploaded as zip patch_file = f"{target_path}/{patch_upload_fn}" - log(f"patch_path=[magenta]{patch_upload_fn}", "bold") + log(f"patch_path=[m]{patch_upload_fn}", "bold") try: run(["env", f"HOME={home_dir}", "git", "add", "-A"]) diff_and_gzip(patch_file, home_dir) @@ -242,7 +242,7 @@ def apply_patch(git_folder, patch_file, is_gpg=False): with cd(git_folder): base_name = path_leaf(patch_file) # folder_name = base_name_split[2] - log(f"==> [magenta]{base_name}") + log(f"==> [m]{base_name}") # # base_name_split = base_name.split("_") diff --git a/broker/libs/eudat.py b/broker/libs/eudat.py index e0f73b2b..c2dd6396 100755 --- a/broker/libs/eudat.py +++ b/broker/libs/eudat.py @@ -130,9 +130,7 @@ def login(user, password_path: Path, fn: str) 
-> None: f = open(fn, "rb") config.oc = pickle.load(f) try: - status_str = ( - f"[bold]Login into owncloud from the dumped_object=[magenta]{fn}[/magenta] [yellow]...[/yellow]" - ) + status_str = f"[bold]Login into owncloud from the dumped_object=[m]{fn}[/m] [yellow]...[/yellow]" with cfg.console.status(status_str): config.oc.get_config() @@ -308,7 +306,7 @@ def _submit(provider, requester, job, required_confs=1): # provider_addr_to_submit = provider provider_addr_to_submit = job.search_best_provider(requester) provider_info = job.Ebb.get_provider_info(provider_addr_to_submit) - log(f"==> provider_fid=[magenta]{provider_info['f_id']}") + log(f"==> provider_fid=[m]{provider_info['f_id']}") _share_folders(provider_info, requester_name, folders_hash) # print(job.code_hashes) try: diff --git a/broker/libs/gdrive.py b/broker/libs/gdrive.py index 088f98ca..7e50f140 100755 --- a/broker/libs/gdrive.py +++ b/broker/libs/gdrive.py @@ -298,7 +298,7 @@ def size(key, mime_type, folder_name, gdrive_info, results_folder_prev, code_has try: output = get_file_id(key) - log(f"==> data_id=[magenta]{key}") + log(f"==> data_id=[m]{key}") log(output, "bold green") data_files_id = fetch_grive_output(output, "meta_data.json") if not data_files_id: diff --git a/broker/libs/ipfs.py b/broker/libs/ipfs.py index 271e73aa..04e09e80 100755 --- a/broker/libs/ipfs.py +++ b/broker/libs/ipfs.py @@ -168,7 +168,7 @@ def gpg_encrypt(self, from_gpg_fingerprint, recipient_gpg_fingerprint, target): for attempt in range(5): try: cmd = ["gpg", "--keyserver", "hkps://keyserver.ubuntu.com", "--recv-key", recipient_gpg_fingerprint] - log(f"{br(attempt)} cmd: [magenta]{' '.join(cmd)}", "bold") + log(f"{br(attempt)} cmd: [m]{' '.join(cmd)}", "bold") run(cmd, suppress_stderr=True) # this may not work if it is requested too much in short time break except Exception as e: @@ -189,7 +189,7 @@ def gpg_encrypt(self, from_gpg_fingerprint, recipient_gpg_fingerprint, target): encrypt_target, ] run(cmd) - log(f"==> gpg_file=[magenta]{encrypted_file_target}") + log(f"==> gpg_file=[m]{encrypted_file_target}") return encrypted_file_target except Exception as e: print_tb(e) diff --git a/broker/libs/mongodb.py b/broker/libs/mongodb.py index b497e43e..a3b4751f 100755 --- a/broker/libs/mongodb.py +++ b/broker/libs/mongodb.py @@ -28,15 +28,18 @@ def add_item(self, key, item): res = self.collection.replace_one({"key": key}, item, True) return res.acknowledged - def find_all(self, sort_str=""): + def find_all(self, sort_str="", is_return=False): """Find all records.""" if not sort_str: cursor = self.collection.find({}, {"_id": False}) else: cursor = self.collection.find({}, {"_id": False}).sort(sort_str) - for document in cursor: - log(document) + if not is_return: + for document in cursor: + log(document) + else: + return cursor class MongoBroker(BaseMongoClass): @@ -54,7 +57,7 @@ def add_item(self, job_key, index, source_code_hash_list, requester_id, timestam "requester_addr": job_info["job_owner"], "requester_id": requester_id, "source_code_hash": source_code_hash_list, - "received_block_number": job_info["received_block_number"], + "received_bn": job_info["received_bn"], "timestamp": timestamp, "cloudStorageID": cloud_storage_id, "storage_duration": job_info["storage_duration"], @@ -121,7 +124,7 @@ def add_item_share_id(self, key, share_id, share_token): def get_job_block_number(self, requester_addr, key, index) -> int: cursor = self.collection.find({"requester_addr": requester_addr.lower(), "job_key": key, "index": index}) for document in cursor: - 
return document["received_block_number"] + return document["received_bn"] return 0 diff --git a/broker/libs/user_setup.py b/broker/libs/user_setup.py index 7e6dae9e..db70791f 100755 --- a/broker/libs/user_setup.py +++ b/broker/libs/user_setup.py @@ -62,7 +62,7 @@ def set_folder_permission(path, user_name, slurm_user): def user_add(user_address, basedir, slurm_user): user_address = user_address.lower() - log(f"#> adding user=[magenta]{user_address}[/magenta]", end="") + log(f"#> adding user=[m]{user_address}[/m]", end="") try: # convert ethereum user address into 32-bits user_name = hashlib.md5(user_address.encode("utf-8")).hexdigest() log(f" | user_name={user_name}", "bold") @@ -92,7 +92,7 @@ def user_add(user_address, basedir, slurm_user): add_user_to_slurm(user_name) # force to add user to slurm mkdir(f"{user_dir}/cache") else: - log(f"## [magenta]{user_address}[/magenta] => [blue]{user_name}[/blue] has already been created") + log(f"## [m]{user_address}[/m] => [blue]{user_name}[/blue] has already been created") def main(): diff --git a/broker/link.py b/broker/link.py index 53f90e99..f2528148 100755 --- a/broker/link.py +++ b/broker/link.py @@ -30,7 +30,7 @@ def umount(self, data_hashes): run(["sudo", "umount", "-f", dest], is_quiet=True) def link(self, path, dest, is_read_only=False): - """Create links between folders. + """Create link between folders. You can create a read-only bind-mount(https://lwn.net/Articles/281157/). mount --bind /path/to/source/ /path/to/dest/ @@ -89,7 +89,7 @@ def link_folders(self, paths=None): log() folder_new_hash = generate_md5sum(dest) - assert folder_hash == folder_new_hash, "hash of original and linked folder does not match" + assert folder_hash == folder_new_hash, "hash of the original and the linked folder does not match" def check_link_folders(folders_to_share, registered_data_files, source_code_path, is_pass=False): @@ -114,7 +114,7 @@ def check_link_folders(folders_to_share, registered_data_files, source_code_path print("") if not is_pass: question_yes_no( - "#> Would you like to continue with linked folder path in your `[magenta]run.sh[/magenta]` file?\n" + "#> Would you like to continue with linked folder path in your `[m]run.sh[/m]` file?\n" "If no, please feel free to update your run.sh file and continue", is_exit=True, ) @@ -123,11 +123,11 @@ def check_link_folders(folders_to_share, registered_data_files, source_code_path def test_with_small_dataset(value): fn = os.path.expanduser("~/test_eblocbroker/run_cppr/run.sh") with open(fn, "r+") as file: - filedata = file.read() + file_data = file.read() - changed_filedata = filedata.replace("DATA_HASH='change_folder_hash'", f"DATA_HASH='{value}'") + changed_file_data = file_data.replace("DATA_HASH='change_folder_hash'", f"DATA_HASH='{value}'") with open(fn, "w+") as file: - file.write(changed_filedata) + file.write(changed_file_data) def check_linked_data(folders_target, folder_link, source_code_path="", is_pass=False): @@ -147,7 +147,7 @@ def check_linked_data(folders_target, folder_link, source_code_path="", is_pass= if not is_pass: print("") question_yes_no( - "#> Would you like to continue with linked folder path in your `[magenta]run.sh[/magenta]` file?\n" + "#> Would you like to continue with the linked folder path in your `[m]run.sh[/m]` file?\n" "If no, feel free to update your run.sh file and continue", is_exit=True, ) diff --git a/broker/python_scripts/apply_patch.py b/broker/python_scripts/apply_patch.py index 7f06b668..2cb44e6f 100755 --- a/broker/python_scripts/apply_patch.py +++ 
b/broker/python_scripts/apply_patch.py @@ -26,7 +26,7 @@ def appy_patch(base_dir, patch_fn): if patch_file.endswith(".diff.gz"): extract_gzip(patch_file) else: - log(f"==> [magenta]{diff_file_name}[/magenta] exists") + log(f"==> [m]{diff_file_name}[/m] exists") try: git.apply_patch(base_dir, patch_file.replace(".gz", ""), is_gpg=False) diff --git a/broker/python_scripts/get_transaction_log.py b/broker/python_scripts/get_transaction_log.py old mode 100644 new mode 100755 index 937815de..8e5fb975 --- a/broker/python_scripts/get_transaction_log.py +++ b/broker/python_scripts/get_transaction_log.py @@ -7,7 +7,8 @@ from broker import cfg from broker.utils import log -if __name__ == "__main__": + +def main(): if len(sys.argv) == 2: tx_hash = str(sys.argv[1]) event = "LogJob" @@ -25,3 +26,7 @@ processed_logs = cfg.Ebb.eBlocBroker.events.LogReceipt().processReceipt(tx_receipt, errors=DISCARD) log(vars(processed_logs[0].args)) log("-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-") + + +if __name__ == "__main__": + main() diff --git a/broker/python_scripts/init.py b/broker/python_scripts/init.py index 878e4a28..17ce5281 100755 --- a/broker/python_scripts/init.py +++ b/broker/python_scripts/init.py @@ -11,6 +11,11 @@ from broker._utils.yaml import Yaml +def is_docker() -> bool: + path = "/proc/self/cgroup" + return os.path.exists("/.dockerenv") or os.path.isfile(path) and any("docker" in line for line in open(path)) + + def get_git_root(path): git_repo = git.Repo(path, search_parent_directories=True) git_root = git_repo.git.rev_parse("--show-toplevel") @@ -31,6 +36,8 @@ def main(): yaml_file["datadir"] = str(home_dir / ".eblocpoa") yaml_file["log_path"] = str(home_dir / ".ebloc-broker") yaml_file["ebloc_path"] = git_root + if is_docker(): + yaml_file["provider"]["slurm_user"] = "slurm" if __name__ == "__main__": diff --git a/broker/start_code.py b/broker/start_code.py index 5f2c23ea..3c47601a 100755 --- a/broker/start_code.py +++ b/broker/start_code.py @@ -29,7 +29,7 @@ def start_call(job_key, index, slurm_job_id) -> None: Ebb.mongo_broker.set_job_state_pid(str(job_key), int(index), pid) _log.ll.LOG_FILENAME = env.LOG_PATH / "transactions" / env.PROVIDER_ID.lower() / f"{job_key}_{index}.txt" # _log.ll.IS_PRINT = False - log(f"~/ebloc-broker/broker/start_code.py {job_key} {index} {slurm_job_id}", "bold magenta") + log(f"~/ebloc-broker/broker/start_code.py {job_key} {index} {slurm_job_id}", "info") job_id = 0 # TODO: should be obtained from the user's input _, _, error = popen_communicate(["scontrol", "show", "job", slurm_job_id]) if "slurm_load_jobs error: Invalid job id specified" in str(error): @@ -46,11 +46,8 @@ def start_call(job_key, index, slurm_job_id) -> None: ) p2.stdout.close() # type: ignore date = p3.communicate()[0].decode("utf-8").strip() - start_timestamp = check_output(["date", "-d", date, "+'%s'"]).strip().decode("utf-8").strip("'") - log( - f"{env.EBLOCPATH}/broker/eblocbroker_scripts/set_job_state_running.py {job_key} {index} {job_id} {start_timestamp}", - "bold white", - ) + start_ts = check_output(["date", "-d", date, "+'%s'"]).strip().decode("utf-8").strip("'") + log(f"{env.EBB_SCRIPTS}/set_job_state_running.py {job_key} {index} {job_id} {start_ts}", "bold white") log(f"#> pid={pid}") for attempt in range(10): if attempt > 0: @@ -64,11 +61,11 @@ def start_call(job_key, index, slurm_job_id) -> None: sys.exit(1) try: - tx = Ebb.set_job_state_running(job_key, index, job_id, start_timestamp) + tx = Ebb.set_job_state_running(job_key, index, job_id, start_ts) tx_hash = 
Ebb.tx_id(tx) log(f"tx_hash={tx_hash}", "bold") d = datetime.now().strftime("%Y-%m-%d %H:%M:%S") - log(f"==> set_job_state_running_started {start_timestamp} | attempt_date={d}") + log(f"==> set_job_state_running_started {start_ts} | attempt_date={d}") log("## mongo.set_job_state_running_tx", end="") if Ebb.mongo_broker.set_job_state_running_tx(str(job_key), int(index), str(tx_hash)): log(ok()) @@ -86,7 +83,7 @@ def start_call(job_key, index, slurm_job_id) -> None: log(f"warning: {e}") sys.exit(1) - log("E: All start_code() function call attempts failed, ABORT") + log("E: All of the start_code() function call attempts failed, ABORT") sys.exit(1) diff --git a/broker/test_setup/README.org b/broker/test_setup/README.org index 79d8d1c0..d8bd6192 100644 --- a/broker/test_setup/README.org +++ b/broker/test_setup/README.org @@ -2,8 +2,55 @@ ** gdrive - - *For providers:* Download from ~My Driver~ as folders. -:IMPORTANT: DO NOT download from ~Storage~ section, it download ~meta_data.json~ separately. +:IMPORTANT: DO NOT download from ~Storage~ section, it downloads ~meta_data.json~ separately + +- *For requesters:* Download from ~Storage~ zipped patches + +---------------------------------------------------------------------------- + +* NAS Parallel Benchmarks + +site: [[https://www.nas.nasa.gov/software/npb.html]] + +#+BEGIN_SRC bash +wget https://www.nas.nasa.gov/assets/npb/NPB3.3.1.tar.gz +tar -xvf NPB3.3.1.tar.gz +cd NPB3.3.1/NPB3.3-SER +cp config/make.def.template config/make.def + +make bt CLASS=B +/usr/bin/time -v bin/bt.B.x inputbt.data + +make sp CLASS=B +/usr/bin/time -v bin/sp.B.x inputsp.data + +make ua CLASS=B +/usr/bin/time -v bin/ua.B.x inputua.data + +make lu CLASS=B +/usr/bin/time -v bin/lu.B.x inputlu.data +#+END_SRC + +~(/usr/bin/time -v bin/lu.B.x inputlu.data) >> output_.txt 2>&1~ + +~CLASSES: [A and B], [C]~ + +#+BEGIN_SRC markdown +To make a NAS benchmark type + + make <benchmark> CLASS=<class> + +where <benchmark> is "bt", "cg", "ep", "ft", "is", "lu", + "lu-hp", "mg", "sp", or "ua" + and <class> is "S", "W", "A", "B", "C" or "D" +#+END_SRC + +** pre-requirements -- *For requesters:* Download from ~Storage~ zipped patches. 
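+The itemized installs below can equally be pulled in with one ~apt-get~ call; an equivalent one-liner (a sketch that simply combines the packages from the block that follows):
+
+#+begin_src bash
+sudo apt-get install -y openmpi-bin libopenmpi-dev fort77 gfortran
+#+end_src
+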
+#+begin_src bash +sudo apt-get install -y openmpi-bin +sudo apt-get install -y libopenmpi-dev +sudo apt-get install -y fort77 +sudo apt-get install -y gfortran +#+end_src diff --git a/broker/test_setup/authenticate_requesters.py b/broker/test_setup/authenticate_requesters.py index 1ac9f451..2a763a83 100755 --- a/broker/test_setup/authenticate_requesters.py +++ b/broker/test_setup/authenticate_requesters.py @@ -12,11 +12,11 @@ def main(): Ebb = cfg.Ebb - owner_address = Ebb.get_owner() + owner = Ebb.get_owner() for idx, requester in enumerate(requesters): try: - log(br(idx)) - tx_hash = Ebb.authenticate_orc_id(requester, "0000-0001-7642-0552", owner_address) + log(f"{br(idx)} ", end="") + tx_hash = Ebb.authenticate_orc_id(requester, "0000-0001-7642-0552", owner) if tx_hash: get_tx_status(tx_hash) time.sleep(1) diff --git a/broker/test_setup/check_list.org b/broker/test_setup/check_list.org index 366935c5..ca855695 100644 --- a/broker/test_setup/check_list.org +++ b/broker/test_setup/check_list.org @@ -2,20 +2,20 @@ * TASKS -- [X] Download test_eblocbroker -- [X] contract transactions +- [X] download ~test_eblocbroker~ +- [X] contract's transactions from bloxberg | tasks | req | goo0 | goo1 | goo2 | goo3 | |------------------+-----+------+------+------+------| -| tx_from_bloxberg | [X] | [X] | [X] | [X] | [X] | -| ebb.tar.gz | | | | | | -| mongo_fetch | | | | | | +| tx_from_bloxberg | [-] | [] | [] | [] | [] | +| ebb.tar.gz | [X] | [] | [] | [] | [] | +| mongo_fetch | [] | [] | [] | [] | [] | |------------------+-----+------+------+------+------| | patches | req | goo0 | goo1 | goo2 | goo3 | |----------+-----+------+------+------+------| | gdrive | [X] | [X] | [X] | [X] | [X] | -| eudat | [] | [] | [] | [] | [] | +| eudat | [X] | [-] | [-] | [-] | [-] | | ipfs | [] | [] | [] | [] | [] | | ipfs_gpg | [] | [] | [] | [] | [] | |----------+-----+------+------+------+------| diff --git a/broker/test_setup/datasets.org b/broker/test_setup/datasets.org index e0bf2e99..117f2144 100644 --- a/broker/test_setup/datasets.org +++ b/broker/test_setup/datasets.org @@ -2,7 +2,6 @@ #+begin_src bash b6aaf03752dc68d625fc57b451faa2bf BL06-gargoyle-sml.tbz2 -f1de03edab51f281815c3c1e5ecb88c6 babyface.n6c100.tbz2 082d2a71d86a64250f06be14c55ca27e babyface.n6c10.tbz2 03919732a417cb1d14049844b9de0f47 BL06-camel-med.tbz2 983b9fe8a85b543dd5a4a75d031f1091 BL06-camel-sml.tbz2 @@ -10,7 +9,6 @@ f71df9d36cd519d80a3302114779741d BL06-gargoyle-med.tbz2 c0fee5472f3c956ba759fd54f1fe843e BVZ-sawtooth.tbz2 63ffd1da6122e3fe9f63b1e7fcac1ff5 BVZ-tsukuba.tbz2 9e8918ff9903e3314451bf2943296d31 BVZ-venus.tbz2 -bea2685fa4977823bdfd6d92e4dcc2fd doo.sh eaf488aea87a13a0bea5b83a41f3d49a KZ2-sawtooth.tbz2 e62593609805db0cd3a028194afb43b1 KZ2-tsukuba.tbz2 3b0f75445e662dc87e28d60a5b13cd43 KZ2-venus.tbz2 @@ -18,8 +16,27 @@ ebe53bd498a9f6446cd77d9252a9847c LB07-bunny-med.tbz2 f82aa511f8631bfc9a82fe6fa30f4b52 LB07-bunny-sml.tbz2 761691119cedfb9836a78a08742b14cc liver.n6c100.tbz2 f93b9a9f63447e0e086322b8416d4a39 liver.n6c10.tbz2 +bfc83d9f6d5c3d68ca09499190851e86 bone.n26c10.tbz2 #+end_src +#+begin_src bash +wget -bqc https://vision.cs.uwaterloo.ca/files/BL06-gargoyle-sml.tbz2 +wget -bqc https://vision.cs.uwaterloo.ca/files/babyface.n6c10.tbz2 +wget -bqc https://vision.cs.uwaterloo.ca/files/BL06-camel-sml.tbz2 +wget -bqc https://vision.cs.uwaterloo.ca/files/LB07-bunny-sml.tbz2 +wget -bqc https://vision.cs.uwaterloo.ca/files/BVZ-sawtooth.tbz2 +wget -bqc https://vision.cs.uwaterloo.ca/files/BVZ-tsukuba.tbz2 +wget -bqc 
https://vision.cs.uwaterloo.ca/files/BVZ-venus.tbz2 +wget -bqc https://vision.cs.uwaterloo.ca/files/KZ2-tsukuba.tbz2 +wget -bqc https://vision.cs.uwaterloo.ca/files/LB07-bunny-sml.tbz2 +wget -bqc https://vision.cs.uwaterloo.ca/files/KZ2-sawtooth.tbz2 +wget -bqc https://vision.cs.uwaterloo.ca/files/KZ2-venus.tbz2 +wget -bqc https://vision.cs.uwaterloo.ca/files/liver.n6c10.tbz2 +wget -bqc https://vision.cs.uwaterloo.ca/files/liver.n6c100.tbz2 +#+end_src + +----------------------------------------------------------------------- + * Medium size data-sets #+begin_src bash @@ -31,7 +48,42 @@ fe801973c5b22ef6861f2ea79dc1eb9c babyface.n26c10.tbz2 4613abc322e8f2fdeae9a5dd10f17540 BL06-gargoyle-lrg.tbz2 dd0fbccccf7a198681ab838c67b68fbf bone.n6c100.tbz2 45281dfec4618e5d20570812dea38760 bone.n6c10.tbz2 -fa64e96bcee96dbc480a1495bddbf53c LB07-bunny-lrg.tbz2 8f6faf6cfd245cae1b5feb11ae9eb3cf liver.n26c100.tbz2 1bfca57fe54bc46ba948023f754521d6 liver.n26c10.tbz2 #+end_src + +* Download all Medium data files using ~wget~ + +#+begin_src bash +wget -bqc https://vision.cs.uwaterloo.ca/files/adhead.n6c100.tbz2 +wget -bqc https://vision.cs.uwaterloo.ca/files/adhead.n6c10.tbz2 +wget -bqc https://vision.cs.uwaterloo.ca/files/babyface.n26c10.tbz2 +wget -bqc https://vision.cs.uwaterloo.ca/files/babyface.n26c100.tbz2 +wget -bqc https://vision.cs.uwaterloo.ca/files/BL06-camel-lrg.tbz2 +wget -bqc https://vision.cs.uwaterloo.ca/files/BL06-gargoyle-lrg.tbz2 +wget -bqc https://vision.cs.uwaterloo.ca/files/bone.n6c100.tbz2 +wget -bqc https://vision.cs.uwaterloo.ca/files/bone.n6c10.tbz2 +wget -bqc https://vision.cs.uwaterloo.ca/files/liver.n26c100.tbz2 +wget -bqc https://vision.cs.uwaterloo.ca/files/liver.n26c10.tbz2 +wget -bqc https://vision.cs.uwaterloo.ca/files/babyface.n6c100.tbz2 +wget -bqc https://vision.cs.uwaterloo.ca/files/babyface.n6c10.tbz2 +wget -bqc https://vision.cs.uwaterloo.ca/files/BL06-camel-sml.tbz2 +wget -bqc https://vision.cs.uwaterloo.ca/files/BL06-camel-med.tbz2 +wget -bqc https://vision.cs.uwaterloo.ca/files/BL06-gargoyle-med.tbz2 +wget -bqc https://vision.cs.uwaterloo.ca/files/BL06-gargoyle-sml.tbz2 +wget -bqc https://vision.cs.uwaterloo.ca/files/BVZ-tsukuba.tbz2 +wget -bqc https://vision.cs.uwaterloo.ca/files/BVZ-sawtooth.tbz2 +wget -bqc https://vision.cs.uwaterloo.ca/files/BVZ-venus.tbz2 +wget -bqc https://vision.cs.uwaterloo.ca/files/KZ2-tsukuba.tbz2 +wget -bqc https://vision.cs.uwaterloo.ca/files/KZ2-sawtooth.tbz2 +wget -bqc https://vision.cs.uwaterloo.ca/files/KZ2-venus.tbz2 +wget -bqc https://vision.cs.uwaterloo.ca/files/LB07-bunny-sml.tbz2 +wget -bqc https://vision.cs.uwaterloo.ca/files/LB07-bunny-med.tbz2 +wget -bqc https://vision.cs.uwaterloo.ca/files/liver.n6c10.tbz2 +wget -bqc https://vision.cs.uwaterloo.ca/files/liver.n6c100.tbz2 +wget -bqc https://vision.cs.uwaterloo.ca/files/bone.n26c10.tbz2 +#+end_src + +#+begin_src bash +for i in `command ls *.tbz2` ; do tarx "$i" ; done +#+end_src diff --git a/broker/test_setup/nas/job_cppr.yaml b/broker/test_setup/job_cppr.yaml similarity index 51% rename from broker/test_setup/nas/job_cppr.yaml rename to broker/test_setup/job_cppr.yaml index 161b1dfd..1ff51e03 100644 --- a/broker/test_setup/nas/job_cppr.yaml +++ b/broker/test_setup/job_cppr.yaml @@ -1,23 +1,24 @@ config: - requester_address: '0x64b570f0e7c019dc750c4a75c33dca55bdc51845' - provider_address: '0x29e613b04125c16db3f3613563bfdd0ba24cb629' + requester_address: '0x8b7356a95c8ba1846eb963fd127741730f666ba8' + provider_address: '0x51e2b36469cdbf58863db70cc38652da84d20c67' source_code: - 
storage_id: ipfs_gpg + storage_id: eudat cache_type: public path: ~/test_eblocbroker/run_cppr storage_hours: 0 data: data1: - hash: 050e6cc8dd7e889bf7874689f1e1ead6 + hash: f71df9d36cd519d80a3302114779741d data2: - hash: 779745f315060d1bc0cd44b7266fb4da + hash: 1bfca57fe54bc46ba948023f754521d6 data3: cache_type: public - path: /home/alper/test_eblocbroker/dataset_zip/small/BVZ-tsukuba + path: /home/alper/test_eblocbroker/dataset_zip/small/BL06-gargoyle-sml storage_hours: 1 - storage_id: ipfs_gpg + storage_id: eudat data_transfer_out: 5 jobs: job1: cores: 1 run_time: 60 + provider_addr: '0x4934a70ba8c1c3acfa72e809118bdd9048563a24' diff --git a/broker/test_setup/nas/job_nas.yaml b/broker/test_setup/job_nas.yaml similarity index 56% rename from broker/test_setup/nas/job_nas.yaml rename to broker/test_setup/job_nas.yaml index 7a322634..267ed983 100644 --- a/broker/test_setup/nas/job_nas.yaml +++ b/broker/test_setup/job_nas.yaml @@ -1,6 +1,6 @@ config: - requester_address: '0xee516025bf14f7118aa9ced5eb2adacfd5827d14' - provider_address: '0x1926b36af775e1312fdebcc46303ecae50d945af' + requester_address: '0xe2969f599cb904e9a808ec7218bc14fcfa346965' + provider_address: '0x4934a70ba8c1c3acfa72e809118bdd9048563a24' source_code: storage_id: ipfs_gpg cache_type: public @@ -11,3 +11,4 @@ config: job1: cores: 1 run_time: 60 + provider_addr: '0x1926b36af775e1312fdebcc46303ecae50d945af' diff --git a/broker/test_setup/nas/README.org b/broker/test_setup/nas/README.org deleted file mode 100644 index dff51397..00000000 --- a/broker/test_setup/nas/README.org +++ /dev/null @@ -1,45 +0,0 @@ -* NAS Parallel Benchmarks - -site: [[https://www.nas.nasa.gov/software/npb.html]] - -#+BEGIN_SRC bash -wget https://www.nas.nasa.gov/assets/npb/NPB3.3.1.tar.gz -tar -xvf NPB3.3.1.tar.gz -cd NPB3.3.1/NPB3.3-SER -cp config/make.def.template config/make.def - -make bt CLASS=B -/usr/bin/time -v bin/bt.B.x inputbt.data - -make sp CLASS=B -/usr/bin/time -v bin/sp.B.x inputsp.data - -make ua CLASS=B -/usr/bin/time -v bin/ua.B.x inputua.data - -make lu CLASS=B -/usr/bin/time -v bin/lu.B.x inputlu.data -#+END_SRC - -~(/usr/bin/time -v bin/lu.B.x inputlu.data) >> output_.txt 2>&1~ - -~CLASSES: [A and B], [C]~ - -#+BEGIN_SRC markdown -To make a NAS benchmark type - - make CLASS= - -where is "bt", "cg", "ep", "ft", "is", "lu", - "lu-hp", "mg", "sp", or "ua" - is "S", "W", "A", "B", "C" or "D" -#+END_SRC - -** pre-requirements - -#+begin_src bash -sudo apt-get install -y openmpi-bin -sudo apt-get install -y libopenmpi-dev -sudo apt-get install -y fort77 -sudo apt-get install -y gfortran -#+end_src diff --git a/broker/test_setup/prepare_data.sh b/broker/test_setup/prepare_data.sh index 84c50eb8..285afb05 100755 --- a/broker/test_setup/prepare_data.sh +++ b/broker/test_setup/prepare_data.sh @@ -9,7 +9,6 @@ rename_all() { mv -v BL06-gargoyle-lrg 4613abc322e8f2fdeae9a5dd10f17540 mv -v bone.n6c100 dd0fbccccf7a198681ab838c67b68fbf mv -v bone.n6c10 45281dfec4618e5d20570812dea38760 - mv -v LB07-bunny-lrg fa64e96bcee96dbc480a1495bddbf53c mv -v liver.n26c100 8f6faf6cfd245cae1b5feb11ae9eb3cf mv -v liver.n26c10 1bfca57fe54bc46ba948023f754521d6 mv -v babyface.n6c100 f1de03edab51f281815c3c1e5ecb88c6 @@ -28,6 +27,7 @@ rename_all() { mv -v LB07-bunny-sml f82aa511f8631bfc9a82fe6fa30f4b52 mv -v liver.n6c100 761691119cedfb9836a78a08742b14cc mv -v liver.n6c10 f93b9a9f63447e0e086322b8416d4a39 + mv -v bone.n26c10 bfc83d9f6d5c3d68ca09499190851e86 } extract () { diff --git a/broker/test_setup/register_data_files.py b/broker/test_setup/register_data_files.py 
index 3bc097ef..74d8b97e 100755 --- a/broker/test_setup/register_data_files.py +++ b/broker/test_setup/register_data_files.py @@ -3,7 +3,9 @@ import time from contextlib import suppress -from broker._utils._log import log, ok +from broker._utils._log import log +from broker.config import env +from broker.eblocbroker_scripts.get_data_price import get_data_price from broker.eblocbroker_scripts.register_data import _register_data hashes_small = [ @@ -24,40 +26,48 @@ "761691119cedfb9836a78a08742b14cc", ] -hashes_medium = [ - "050e6cc8dd7e889bf7874689f1e1ead6", # A - "9d5d892a63b5758090258300a59eb389", # A - "779745f315060d1bc0cd44b7266fb4da", # A +hashes_medium_1 = [ + "fe801973c5b22ef6861f2ea79dc1eb9c", # A + "0d6c3288ef71d89fb93734972d4eb903", # A + "4613abc322e8f2fdeae9a5dd10f17540", # A ] hashes_medium_2 = [ - "fe801973c5b22ef6861f2ea79dc1eb9c", # B - "0d6c3288ef71d89fb93734972d4eb903", # B - "4613abc322e8f2fdeae9a5dd10f17540", # B + "050e6cc8dd7e889bf7874689f1e1ead6", # B + "9d5d892a63b5758090258300a59eb389", # B + "779745f315060d1bc0cd44b7266fb4da", # B "dd0fbccccf7a198681ab838c67b68fbf", # C "45281dfec4618e5d20570812dea38760", # C - "fa64e96bcee96dbc480a1495bddbf53c", # C + "bfc83d9f6d5c3d68ca09499190851e86", # C "8f6faf6cfd245cae1b5feb11ae9eb3cf", # D "1bfca57fe54bc46ba948023f754521d6", # D "f71df9d36cd519d80a3302114779741d", # D ] -def register_data_files(data_price, accounts): - commitment_dur = 600 - for code_hash in accounts: +def print_prices(hashes): + for code_hash in hashes: + (price, _commitment_dur) = get_data_price(env.PROVIDER_ID, code_hash, is_verbose=False) + log(f"{code_hash}={price}") + + +def register_data_files(data_price, hashes): + log(f"#> registering {len(hashes)} data files") + for code_hash in hashes: with suppress(Exception): - _register_data(code_hash, data_price, commitment_dur) + _register_data(code_hash, data_price, commitment_dur=600) time.sleep(1) - log() - log(f"#> registering data {len(hashes_small)} files{ok()}") - def main(): - # register_data_files(data_price=1, accounts=hashes_small) - register_data_files(data_price=20, accounts=hashes_medium) - register_data_files(data_price=30, accounts=hashes_medium_2) + # register_data_files(data_price=1, hashes=hashes_small) + register_data_files(data_price=20, hashes=hashes_medium_1) + log() + register_data_files(data_price=30, hashes=hashes_medium_2) + log() + print_prices(hashes_medium_1) + log() + print_prices(hashes_medium_2) if __name__ == "__main__": diff --git a/broker/test_setup/nas/submit_jobs.py b/broker/test_setup/submit_jobs.py similarity index 89% rename from broker/test_setup/nas/submit_jobs.py rename to broker/test_setup/submit_jobs.py index bf6b853b..08d69e5d 100755 --- a/broker/test_setup/nas/submit_jobs.py +++ b/broker/test_setup/submit_jobs.py @@ -22,7 +22,6 @@ from broker.test_setup.user_set import providers, requesters from broker.utils import print_tb -# yaml_files = ["job_nas.yaml"] Ebb = cfg.Ebb cfg.IS_FULL_TEST = True is_mini_test = True @@ -35,8 +34,8 @@ storage_ids = ["eudat", "gdrive", "ipfs"] ipfs_ids = ["ipfs", "ipfs_gpg"] -# for provider_address in providers: -# mini_tests_submit(storage_ids, provider_address) +# for provider_addr in providers: +# mini_tests_submit(storage_ids, provider_addr) # if is_mini_test: # benchmarks = ["cppr"] # ipfs_ids = ["ipfs"] # providers = ["0x29e613b04125c16db3f3613563bfdd0ba24cb629"] -test_dir = Path.home() / "ebloc-broker" / "broker" / "test_setup" / "nas" +test_dir = Path.home() / "ebloc-broker" / "broker" / "test_setup" 
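+# A minimal sketch of how the yaml files below are consumed (the Yaml edits and
+# SubmitBase(yaml_cfg.path) appear verbatim later in this script; the exact
+# submit() signature is an assumption, it is not shown in this diff):
+#     yaml_cfg = Yaml(nas_yaml_fn)
+#     yaml_cfg["config"]["provider_address"] = provider_addr
+#     submit_base = SubmitBase(yaml_cfg.path)
+#     tx_hash = submit_base.submit()  # assumed entry point returning the tx hash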
nas_yaml_fn = test_dir / "job_nas.yaml" cppr_yam_fn = test_dir / "job_cppr.yaml" @@ -85,19 +84,19 @@ def create_cppr_job_script(idx): # ] registered_data_hashes_medium = {} registered_data_hashes_medium[0] = [ - "050e6cc8dd7e889bf7874689f1e1ead6", # A - "9d5d892a63b5758090258300a59eb389", # A - "779745f315060d1bc0cd44b7266fb4da", # A + "fe801973c5b22ef6861f2ea79dc1eb9c", # A + "0d6c3288ef71d89fb93734972d4eb903", # A + "4613abc322e8f2fdeae9a5dd10f17540", # A ] registered_data_hashes_medium[1] = [ - "fe801973c5b22ef6861f2ea79dc1eb9c", # B - "0d6c3288ef71d89fb93734972d4eb903", # B - "4613abc322e8f2fdeae9a5dd10f17540", # B + "050e6cc8dd7e889bf7874689f1e1ead6", # B + "9d5d892a63b5758090258300a59eb389", # B + "779745f315060d1bc0cd44b7266fb4da", # B ] registered_data_hashes_medium[2] = [ "dd0fbccccf7a198681ab838c67b68fbf", # C "45281dfec4618e5d20570812dea38760", # C - "fa64e96bcee96dbc480a1495bddbf53c", # C + "bfc83d9f6d5c3d68ca09499190851e86", # C ] registered_data_hashes_medium[3] = [ "8f6faf6cfd245cae1b5feb11ae9eb3cf", # D @@ -138,6 +137,7 @@ def create_cppr_job_script(idx): f.write(" echo $file >> output.log\n") f.write(" (/usr/bin/time -v cppr -a pr $file) >> output.log 2>&1\n") f.write("done\n") + # adding cppr to run with data hashes f.write("DATA_HASH='change_folder_hash'\n") f.write("if [[ '$DATA_HASH' != 'change_folder_hash' ]]; then\n") @@ -187,12 +187,12 @@ def create_nas_job_script(is_small=False): return benchmark_name -def mini_tests_submit(storage_ids, provider_address): +def mini_tests_submit(storage_ids, provider_addr): is_pass = True required_confs = 0 - yaml_fn = Path.home() / "ebloc-broker" / "broker" / "test_setup" / "nas" / "job_nas.yaml" + yaml_fn = Path.home() / "ebloc-broker" / "broker" / "test_setup" / "job_nas.yaml" yaml_cfg = Yaml(yaml_fn) - yaml_cfg["config"]["provider_address"] = provider_address + yaml_cfg["config"]["provider_address"] = provider_addr for storage_id in storage_ids: yaml_cfg["config"]["source_code"]["storage_id"] = storage_id benchmark_name = create_nas_job_script(is_small=True) @@ -206,7 +206,7 @@ def mini_tests_submit(storage_ids, provider_address): if processed_logs: job_result = vars(processed_logs[0].args) job_result["tx_hash"] = tx_hash - job_result["submitted_job_kind"] = f"nas_{benchmark_name}" + job_result["job_kind"] = f"nas_{benchmark_name}" log(job_result) except IndexError: log(f"E: Tx({tx_hash}) is reverted") @@ -217,7 +217,7 @@ def run_job(counter) -> None: :param counter: counter index to keep track of submitted job number """ - for idx, provider_address in enumerate(providers): + for idx, provider_addr in enumerate(providers): # yaml_cfg["config"]["data"]["data3"]["storage_id"] = random.choice(storage_ids) storage_id = (idx + counter) % len(storage_ids) selected_benchmark = random.choice(benchmarks) @@ -226,12 +226,13 @@ def run_job(counter) -> None: storage = random.choice(ipfs_ids) if selected_benchmark == "nas": - log(f" * Submitting job from [cyan]NAS Benchmark[/cyan] to [green]{provider_address}", "bold blue") + log(f" * Submitting job from [cyan]NAS Benchmark[/cyan] to [green]{provider_addr}", "bold blue") yaml_cfg = Yaml(nas_yaml_fn) benchmark_name = create_nas_job_script() elif selected_benchmark == "cppr": - log(f" * Submitting [cyan]job with cppr datasets[/cyan] to provider=[green]{provider_address}", "bold blue") + log(f" * Submitting [cyan]job with cppr datasets[/cyan] to provider=[green]{provider_addr}", "bold blue") yaml_cfg = Yaml(cppr_yam_fn) + log(f"data_set_idx={idx}") hash_medium_data_0, hash_medium_data = 
create_cppr_job_script(idx) yaml_cfg["config"]["data"]["data1"]["hash"] = hash_medium_data_0 yaml_cfg["config"]["data"]["data2"]["hash"] = hash_medium_data @@ -242,7 +243,7 @@ def run_job(counter) -> None: yaml_cfg["config"]["data"]["data3"]["path"] = str(small_datasets / dir_name) yaml_cfg["config"]["source_code"]["storage_id"] = storage - yaml_cfg["config"]["provider_address"] = provider_address + yaml_cfg["config"]["provider_address"] = provider_addr try: submit_base = SubmitBase(yaml_cfg.path) submission_date = _date() @@ -260,9 +261,9 @@ def run_job(counter) -> None: job_result["submit_timestamp"] = submission_timestamp job_result["tx_hash"] = tx_hash if selected_benchmark == "nas": - job_result["submitted_job_kind"] = f"{selected_benchmark}_{benchmark_name}" + job_result["job_kind"] = f"{selected_benchmark}_{benchmark_name}" elif selected_benchmark == "cppr": - job_result["submitted_job_kind"] = f"{selected_benchmark}_{hash_medium_data_0}_{hash_medium_data}" + job_result["job_kind"] = f"{selected_benchmark}_{hash_medium_data_0}_{hash_medium_data}" ebb_mongo.add_item(tx_hash, job_result) log(job_result) @@ -270,7 +271,6 @@ def run_job(counter) -> None: countdown(seconds=5, is_verbose=True) except Exception as e: print_tb(e) - breakpoint() # DEBUG def main(): @@ -279,17 +279,19 @@ def main(): console_ruler(f"NEW_TEST {Ebb.get_block_number()}") log(f" * {datetime.now().strftime('%Y-%m-%d %H:%M')}") - if not is_process_on("mongod", "mongod"): + if not is_process_on("mongod"): raise Exception("mongodb is not running in the background") counter = 0 - for _ in range(60): + for _ in range(80): for _ in range(2): # submitted as batch is faster run_job(counter) counter += 1 - sleep_time = randint(200, 400) - countdown(sleep_time) + sleep_duration = randint(250, 450) + countdown(sleep_duration) + + log(f"#> number_of_submitted_jobs={counter}") if __name__ == "__main__": diff --git a/broker/test_setup/users.py b/broker/test_setup/users.py index 2c6073e0..f66fa6b4 100755 --- a/broker/test_setup/users.py +++ b/broker/test_setup/users.py @@ -15,7 +15,7 @@ _collect_account = collect_account.replace("0x", "") fn = str(Path(expanduser("~/.brownie/accounts")) / _collect_account) _collect_account = Ebb.brownie_load_account(fn, "alper") -log(f"collect_account={Ebb._get_balance(collect_account)}", "bold") +log(f"## collect_account={Ebb._get_balance(collect_account)}") def balances(accounts, is_verbose=False): @@ -27,7 +27,7 @@ def balances(accounts, is_verbose=False): print(fn) account = Ebb.brownie_load_account(str(fn), "alper") - log(Ebb._get_balance(account), "magenta") + log(Ebb._get_balance(account), "m") def collect_all_into_base(): diff --git a/broker/test_setup/watch_tests.sh b/broker/test_setup/watch_tests.sh index 7c895221..569730d0 100755 --- a/broker/test_setup/watch_tests.sh +++ b/broker/test_setup/watch_tests.sh @@ -1,22 +1,19 @@ #!/bin/bash -VENV=$HOME/venv -source $VENV/bin/activate -num=$(ps aux | grep -E "[w]atch.py" | grep -v -e "grep" -e "emacsclient" -e "flycheck_" | wc -l) provider_1="0x29e613b04125c16db3f3613563bfdd0ba24cb629" provider_2="0x1926b36af775e1312fdebcc46303ecae50d945af" provider_3="0x4934a70ba8c1c3acfa72e809118bdd9048563a24" provider_4="0x51e2b36469cdbf58863db70cc38652da84d20c67" +num=$(ps aux | grep -E "[w]atch.py" | grep -v -e "grep" -e "emacsclient" -e "flycheck_" | wc -l) if [ $num -ge 1 ]; then - echo "warning: watch.py is already running, count="$num + echo "warning: watch.py is already running" else - rm -f ~/.ebloc-broker/watch_*.out - 
~/ebloc-broker/broker/eblocbroker_scripts/watch.py $provider_1 >/dev/null & - ~/ebloc-broker/broker/eblocbroker_scripts/watch.py $provider_2 >/dev/null & - ~/ebloc-broker/broker/eblocbroker_scripts/watch.py $provider_3 >/dev/null & - ~/ebloc-broker/broker/eblocbroker_scripts/watch.py $provider_4 >/dev/null & + rm -f ~/.ebloc-broker/watch.out ~/.ebloc-broker/watch_*.out + ~/ebloc-broker/broker/watch/watch.py $provider_1 >/dev/null & + ~/ebloc-broker/broker/watch/watch.py $provider_2 >/dev/null & + ~/ebloc-broker/broker/watch/watch.py $provider_3 >/dev/null & + ~/ebloc-broker/broker/watch/watch.py $provider_4 >/dev/null & fi - watch --color head -n 15 \ ~/.ebloc-broker/watch_$provider_1.out \ ~/.ebloc-broker/watch_$provider_2.out \ diff --git a/broker/utils.py b/broker/utils.py index 451885a0..5368eeac 100755 --- a/broker/utils.py +++ b/broker/utils.py @@ -15,7 +15,7 @@ import traceback from contextlib import suppress from enum import IntEnum -from subprocess import PIPE, CalledProcessError, Popen, check_output +from subprocess import PIPE, Popen, check_output import base58 @@ -258,6 +258,9 @@ def string_to_bytes32(hash_str: str): def bytes32_to_ipfs(bytes_array): """Convert bytes_array into IPFS hash format.""" + if bytes_array in (b"", ""): + return "" + if isinstance(bytes_array, bytes): merge = Qm + bytes_array return base58.b58encode(merge).decode("utf-8") @@ -337,7 +340,7 @@ def is_gzip_file_empty(fn): if bool(int(size)): return False - log(f"==> Created gzip file is empty:\n [magenta]{fn}[/magenta]") + log(f"==> Created gzip file is empty:\n [m]{fn}[/m]") return True @@ -514,7 +517,7 @@ def question_yes_no(message, is_exit=False): if "[Y/n]:" not in message: message = f"{message} [Y/n]: " - log(text=message, end="", flush=True) + log(text=message, end="") getch = _Getch() while True: choice = getch().lower() diff --git a/broker/eblocbroker_scripts/watch.py b/broker/watch/watch.py similarity index 69% rename from broker/eblocbroker_scripts/watch.py rename to broker/watch/watch.py index 96c7712f..9085440c 100755 --- a/broker/eblocbroker_scripts/watch.py +++ b/broker/watch/watch.py @@ -1,5 +1,6 @@ #!/usr/bin/env python3 +import os import sys import time from pathlib import Path @@ -8,6 +9,8 @@ from broker._utils import _log from broker._utils._log import _console_clear from broker._utils.tools import _date, log, print_tb +from broker._utils.yaml import Yaml +from broker.errors import QuietExit from broker.lib import state # from broker.test_setup.user_set import providers @@ -16,19 +19,32 @@ watch_only_jobs = True -def watch(eth_address="", from_block=None): - from_block = 15394725 - # if not eth_address: - # # TODO: pull from cfg - # eth_address = "0xeab50158e8e51de21616307a99c9604c1c453a02" +def get_eth_address_from_cfg(): + hidden_base_dir = Path.home() / ".ebloc-broker" + fn = hidden_base_dir / "cfg.yaml" + if not os.path.isfile(fn): + if not os.path.isdir(hidden_base_dir): + raise QuietExit(f"E: {hidden_base_dir} is not initialized") - if not eth_address: - log("E: eth_address is empty, run as: ./watch.py <eth_address>") - sys.exit(1) + raise QuietExit(f"E: {fn} is not created") + cfg_yaml = Yaml(fn) + _cfg = cfg_yaml["cfg"] + return cfg.w3.toChecksumAddress(_cfg["eth_address"].lower()) + + +def watch(eth_address="", from_block=None): if not from_block: from_block = Ebb.get_block_number() - cfg.ONE_DAY_BLOCK_DURATION + from_block = 15867616 + if not eth_address: + try: + eth_address = get_eth_address_from_cfg() + except Exception as e: + log(f"E: {e}\neth_address is empty, run as: ./watch.py <eth_address>") + 
sys.exit(1) + is_provider = True watch_fn = Path.home() / ".ebloc-broker" / f"watch_{eth_address}.out" _log.ll.LOG_FILENAME = watch_fn @@ -58,6 +74,7 @@ def watch(eth_address="", from_block=None): columns = 80 columns_size = int(int(columns) / 2 - 9) + header = f" [bold yellow]{'{:<44}'.format('KEY')} INDEX STATUS[/bold yellow]" job_full = "" job_count = 0 completed_count = 0 @@ -78,23 +95,23 @@ def watch(eth_address="", from_block=None): is_print=False, ) state_val = state.inv_code[_job["stateCode"]] - _color = "magenta" + c = "magenta" if state_val == "COMPLETED": - _color = "green" + c = "green" completed_count += 1 job_full = ( - f" [bold blue]*[/bold blue] [bold]{_job['job_key']}[/bold] {_job['index']} {_job['provider']} " - f"[bold {_color}]{state_val}[/bold {_color}]\n{job_full}" + f" [bold blue]*[/bold blue] [bold white]{'{:<48}'.format(_job['job_key'])}[/bold white] " + f"{_job['index']} [bold {c}]{state_val}[/bold {c}]\n{job_full}" ) if not watch_only_jobs: job_ruler = ( "[green]" + "=" * columns_size + "[bold cyan] jobs [/bold cyan]" + "=" * columns_size + "[/green]" ) - job_full = f"{job_ruler}\n{job_full}".rstrip() + job_full = f"{job_ruler}\n{header}\n{job_full}".rstrip() else: - job_full = job_full.rstrip() + job_full = f"{header}\n{job_full}".rstrip() is_connected = Ebb.is_web3_connected() _console_clear() @@ -106,7 +123,7 @@ def watch(eth_address="", from_block=None): if not watch_only_jobs: providers = Ebb.get_providers() columns_size = int(int(columns) / 2 - 12) - log("\r" + "=" * columns_size + "[bold cyan] providers [/bold cyan]" + "=" * columns_size, "green") + log("\r" + "=" * columns_size + "[bold] providers [/bold]" + "=" * columns_size, "green") for k, v in providers_info.items(): log(f"** provider_address={k}", end="\r") log(v, end="\r") @@ -116,13 +133,17 @@ def watch(eth_address="", from_block=None): time.sleep(2) +def main(): + eth_address = None + if len(sys.argv) == 2: + eth_address = sys.argv[1] + + watch(eth_address) + + if __name__ == "__main__": try: - eth_address = None - if len(sys.argv) == 2: - eth_address = sys.argv[1] - - watch(eth_address) + main() except KeyboardInterrupt: sys.exit(1) except Exception as e: diff --git a/broker/eblocbroker_scripts/watch.sh b/broker/watch/watch.sh similarity index 61% rename from broker/eblocbroker_scripts/watch.sh rename to broker/watch/watch.sh index b1e5f728..3692e8b9 100755 --- a/broker/eblocbroker_scripts/watch.sh +++ b/broker/watch/watch.sh @@ -1,14 +1,11 @@ #!/bin/bash -VENV=$HOME/venv -source $VENV/bin/activate -num=$(ps aux | grep -E "[w]atch.py" | grep -v -e "grep" -e "emacsclient" -e "flycheck_" | wc -l) address="0x378181ce7b07e8dd749c6f42772574441b20e35f" +num=$(ps aux | grep -E "[w]atch.py" | grep -v -e "grep" -e "emacsclient" -e "flycheck_" | wc -l) if [ $num -ge 1 ]; then - echo "warning: watch.py is already running, count="$num + echo "warning: watch.py is already running" else - rm -f ~/.ebloc-broker/watch.out - rm -f ~/.ebloc-broker/watch_*.out + rm -f ~/.ebloc-broker/watch.out ~/.ebloc-broker/watch_*.out ./watch.py $address >/dev/null & fi watch --color cat ~/.ebloc-broker/watch_$address.out diff --git a/broker/eblocbroker_scripts/watch_jobs.py b/broker/watch/watch_jobs.py similarity index 87% rename from broker/eblocbroker_scripts/watch_jobs.py rename to broker/watch/watch_jobs.py index a2f11c2f..69ba7ac9 100755 --- a/broker/eblocbroker_scripts/watch_jobs.py +++ b/broker/watch/watch_jobs.py @@ -14,7 +14,7 @@ def watch(eth_address="", from_block=None): - from_block = 
15867616 # if not eth_address: # # TODO: pull from cfg # eth_address = "0xeab50158e8e51de21616307a99c9604c1c453a02" @@ -61,21 +61,24 @@ def watch(eth_address="", from_block=None): is_print=False, ) if print_only_ipfs_result_hashes: - if _job["result_ipfs_hash"] != empty_bytes32 and _job["result_ipfs_hash"] != "": - result_ipfs_hash = bytes32_to_ipfs(_job["result_ipfs_hash"]) - log(result_ipfs_hash) + if _job["result_ipfs_hash"] != empty_bytes32 and _job["result_ipfs_hash"] not in (b"", ""): + log(bytes32_to_ipfs(_job["result_ipfs_hash"])) # log(f"{_job['job_key']} {_job['index']} {result_ipfs_hash}") else: log(_job) +def main(): + eth_address = None + if len(sys.argv) == 2: + eth_address = sys.argv[1] + + watch(eth_address) + + if __name__ == "__main__": try: - eth_address = None - if len(sys.argv) == 2: - eth_address = sys.argv[1] - - watch(eth_address) + main() except KeyboardInterrupt: sys.exit(1) except Exception as e: diff --git a/contract/migrate_to_new_brownie.sh b/contract/migrate_to_new_brownie.sh deleted file mode 100644 index 7c8ea5a6..00000000 --- a/contract/migrate_to_new_brownie.sh +++ /dev/null @@ -1,17 +0,0 @@ -#!/bin/bash - -source $HOME/venv/bin/activate - -cd $HOME/eBlocBroker -rm -rf $HOME/eBlocBroker/token -brownie bake token -rm $HOME/eBlocBroker/token/contracts/* -rm $HOME/eBlocBroker/token/tests/* -mv token/brownie-config.yaml token/brownie-config_.yaml - -cp -a $HOME/eBlocBroker/contract/contracts/* $HOME/eBlocBroker/token/contracts -cp -a $HOME/eBlocBroker/contract/scripts/* $HOME/eBlocBroker/token/scripts -cp -a $HOME/eBlocBroker/contract/files $HOME/eBlocBroker/token -cp -a $HOME/eBlocBroker/contract/scripts/* $HOME/eBlocBroker/token/scripts -cp -a $HOME/eBlocBroker/contract/tests/* $HOME/eBlocBroker/token/tests -cp -a $HOME/eBlocBroker/contract/* $HOME/eBlocBroker/token/ diff --git a/contract/tests/test_job_overlap.py b/contract/tests/test_job_overlap.py index 6e5b07ae..2479f002 100644 --- a/contract/tests/test_job_overlap.py +++ b/contract/tests/test_job_overlap.py @@ -53,7 +53,7 @@ def my_own_session_run_at_beginning(_Ebb): cfg.IS_BROWNIE_TEST = True config.Ebb = Ebb = Contract.Contract(is_brownie=True) config.ebb = _Ebb - Contract.eblocbroker.eBlocBroker = _Ebb + cfg.Ebb.eBlocBroker = Contract.eblocbroker.eBlocBroker = _Ebb ebb = _Ebb Ebb.w3 = web3 if not config.chain: diff --git a/contract/tests/test_overall_eblocbroker.py b/contract/tests/test_overall_eblocbroker.py index d6ff5715..2bf228f9 100644 --- a/contract/tests/test_overall_eblocbroker.py +++ b/contract/tests/test_overall_eblocbroker.py @@ -747,212 +747,6 @@ def test_multiple_data(): withdraw(requester, refunded_sum) -def test_workflow(): - job = Job() - provider = accounts[0] - requester = accounts[1] - register_provider() - register_requester(requester) - job_key = "QmQv4AAL8DZNxZeK3jfJGJi63v1msLMZGan7vSsCDXzZud" - code_hash = ipfs_to_bytes32(job_key) - with brownie.reverts(): - ebb.updataDataPrice(code_hash, 20, 100, {"from": provider}) - - tx = ebb.registerData(code_hash, 20, cfg.ONE_HOUR_BLOCK_DURATION, {"from": provider}) - gas_costs["registerData"].append(tx.__dict__["gas_used"]) - - ebb.removeRegisteredData(code_hash, {"from": provider}) # should submitJob fail if it is not removed - code_hash1 = "0x68b8d8218e730fc2957bcb12119cb204" - ebb.registerData(code_hash1, 20, cfg.ONE_HOUR_BLOCK_DURATION, {"from": provider}) - gas_costs["registerData"].append(tx.__dict__["gas_used"]) - mine(6) - with brownie.reverts(): - ebb.registerData(code_hash1, 21, 1000, {"from": provider}) - - tx = 
ebb.updataDataPrice(code_hash1, 250, cfg.ONE_HOUR_BLOCK_DURATION, {"from": provider}) - gas_costs["updataDataPrice"].append(tx.__dict__["gas_used"]) - tx = ebb.updataDataPrice(code_hash1, 251, cfg.ONE_HOUR_BLOCK_DURATION + 1, {"from": provider}) - gas_costs["updataDataPrice"].append(tx.__dict__["gas_used"]) - data_block_numbers = ebb.getRegisteredDataBlockNumbers(provider, code_hash1) - log(f"get_registered_data_block_numbers={data_block_numbers[1]}", "bold") - get_block_number() - data_prices = ebb.getRegisteredDataPrice(provider, code_hash1, 0) - assert data_prices[0] == 20 - output = ebb.getRegisteredDataPrice(provider, code_hash1, data_block_numbers[-1]) - assert output[0] == 251 - mine(cfg.ONE_HOUR_BLOCK_DURATION - 10) - output = ebb.getRegisteredDataPrice(provider, code_hash1, 0) - log(f"register_data_price={output}", "bold") - assert output[0] == 20 - mine(1) - output = ebb.getRegisteredDataPrice(provider, code_hash1, 0) - log(f"register_data_price={output}", "bold") - assert output[0] == 251 - - job.code_hashes = [code_hash, code_hash1] # Hashed of the data file in array - job.storage_hours = [0, 0] - job.data_transfer_ins = [100, 0] - job.data_transfer_out = 100 - - # job.data_prices_set_block_numbers = [0, 253] # TODO: check this ex 253 exists or not - job.data_prices_set_block_numbers = [0, data_block_numbers[1]] # TODO: check this ex 253 exists or not - check_price_keys(job.data_prices_set_block_numbers, provider, code_hash1) - job.cores = [2, 4, 2] - job.run_time = [10, 15, 20] - job.storage_ids = [StorageID.IPFS.value, StorageID.NONE.value] - job.cache_types = [CacheType.PUBLIC.value, CacheType.PUBLIC.value] - args = [ - provider, - ebb.getProviderSetBlockNumbers(accounts[0])[-1], - job.storage_ids, - job.cache_types, - job.data_prices_set_block_numbers, - job.cores, - job.run_time, - job.data_transfer_out, - ] - - job_price, cost = job.cost(provider, requester) - tx = ebb.submitJob( # first submit - job_key, - job.data_transfer_ins, - args, - job.storage_hours, - job.code_hashes, - {"from": requester, "value": to_gwei(job_price)}, - ) - for idx in range(0, 3): - log(ebb.getJobInfo(provider, job_key, 0, idx)) - - console_ruler(character="-=") - assert ( - tx.events["LogRegisteredDataRequestToUse"][0]["registeredDataHash"] - == "0x0000000000000000000000000000000068b8d8218e730fc2957bcb12119cb204" - ), "registered data should be used" - - with brownie.reverts(): - log(ebb.getJobInfo(provider, job_key, 1, 2)) - log(ebb.getJobInfo(provider, job_key, 0, 3)) - - # setJobState for the workflow: - index = 0 - job_id = 0 - start_timestamp = 10 - tx = ebb.setJobStateRunning(job_key, index, job_id, start_timestamp, {"from": accounts[0]}) - gas_costs["setJobStateRunning"].append(tx.__dict__["gas_used"]) - index = 0 - job_id = 1 - start_timestamp = 20 - tx = ebb.setJobStateRunning(job_key, index, job_id, start_timestamp, {"from": accounts[0]}) - gas_costs["setJobStateRunning"].append(tx.__dict__["gas_used"]) - # process_payment for the workflow - index = 0 - job_id = 0 - elapsed_time = 10 - data_transfer = [100, 0] - ended_timestamp = 20 - result_ipfs_hash = ipfs_to_bytes32("QmWmyoMoctfbAaiEs2G46gpeUmhqFRDW6KWo64y5r581Ve") - received_sums = [] - refunded_sums = [] - received_sum = 0 - refunded_sum = 0 - args = [ - index, - job_id, - ended_timestamp, - data_transfer[0], - data_transfer[1], - elapsed_time, - job.cores, - job.run_time, - False, - ] - tx = ebb.processPayment(job_key, args, result_ipfs_hash, {"from": accounts[0]}) - gas_costs["processPayment"].append(tx.__dict__["gas_used"]) - 
# log(tx.events['LogProcessPayment']) - received_sums.append(tx.events["LogProcessPayment"]["receivedGwei"]) - refunded_sums.append(tx.events["LogProcessPayment"]["refundedGwei"]) - received_sum += tx.events["LogProcessPayment"]["receivedGwei"] - refunded_sum += tx.events["LogProcessPayment"]["refundedGwei"] - log(f"received_sum={received_sum} | refunded_sum={refunded_sum} | job_price={job_price}", "bold") - index = 0 - job_id = 1 - elapsed_time = 15 - data_transfer = [0, 0] - ended_timestamp = 39 - result_ipfs_hash = ipfs_to_bytes32("QmWmyoMoctfbAaiEs2G46gpeUmhqFRDW6KWo64y5r581Ve") - args = [ - index, - job_id, - ended_timestamp, - data_transfer[0], - data_transfer[1], - elapsed_time, - job.cores, - job.run_time, - False, - ] - tx = ebb.processPayment(job_key, args, result_ipfs_hash, {"from": accounts[0]}) - assert tx.events["LogProcessPayment"]["elapsedTime"] == elapsed_time - gas_costs["processPayment"].append(tx.__dict__["gas_used"]) - received_sums.append(tx.events["LogProcessPayment"]["receivedGwei"]) - refunded_sums.append(tx.events["LogProcessPayment"]["refundedGwei"]) - received_sum += tx.events["LogProcessPayment"]["receivedGwei"] - refunded_sum += tx.events["LogProcessPayment"]["refundedGwei"] - log(f"received_sum={received_sum} | refunded_sum={refunded_sum} | job_price={job_price}", "bold") - index = 0 - job_id = 2 - elapsed_time = 20 - data_transfer = [0, 100] - ended_timestamp = 39 - result_ipfs_hash = ipfs_to_bytes32("QmWmyoMoctfbAaiEs2G46gpeUmhqFRDW6KWo64y5r581Ve") - with brownie.reverts(): # processPayment should revert, setRunning is not called for the job=2 - args = [ - index, - job_id, - ended_timestamp, - data_transfer[0], - data_transfer[1], - elapsed_time, - job.cores, - job.run_time, - False, - ] - tx = ebb.processPayment(job_key, args, result_ipfs_hash, {"from": accounts[0]}) - gas_costs["processPayment"].append(tx.__dict__["gas_used"]) - - index = 0 - job_id = 2 - start_timestamp = 20 - tx = ebb.setJobStateRunning(job_key, index, job_id, start_timestamp, {"from": accounts[0]}) - gas_costs["setJobStateRunning"].append(tx.__dict__["gas_used"]) - args = [ - index, - job_id, - ended_timestamp, - data_transfer[0], - data_transfer[1], - elapsed_time, - job.cores, - job.run_time, - True, - ] - tx = ebb.processPayment(job_key, args, result_ipfs_hash, {"from": accounts[0]}) - assert tx.events["LogProcessPayment"]["elapsedTime"] == elapsed_time - gas_costs["processPayment"].append(tx.__dict__["gas_used"]) - # log(tx.events['LogProcessPayment']) - received_sums.append(tx.events["LogProcessPayment"]["receivedGwei"]) - refunded_sums.append(tx.events["LogProcessPayment"]["refundedGwei"]) - received_sum += tx.events["LogProcessPayment"]["receivedGwei"] - refunded_sum += tx.events["LogProcessPayment"]["refundedGwei"] - log(f"received_sum={received_sum} | refunded_sum={refunded_sum} | job_price={job_price}", "bold") - log(f"received_sums={received_sums}", "bold") - log(f"refunded_sums={refunded_sums}", "bold") - assert job_price - cost["storage"] == received_sum + refunded_sum - withdraw(accounts[0], received_sum) - withdraw(requester, refunded_sum) - - def test_simple_submit(): job = Job() provider = accounts[0] diff --git a/contract/tests/test_workflow.py b/contract/tests/test_workflow.py new file mode 100644 index 00000000..16b02679 --- /dev/null +++ b/contract/tests/test_workflow.py @@ -0,0 +1,320 @@ +#!/usr/bin/python3 + +import os +import sys +from os import path + +import pytest + +import brownie +from broker import cfg, config +from broker._utils._log import console_ruler 
+from broker.config import setup_logger +from broker.eblocbroker_scripts import Contract +from broker.eblocbroker_scripts.job import Job +from broker.utils import CacheType, StorageID, ipfs_to_bytes32, log +from brownie import accounts, web3 +from brownie.network.state import Chain +from contract.scripts.lib import mine, new_test +from contract.tests.test_overall_eblocbroker import register_provider, register_requester + +# from brownie.test import given, strategy + +COMMITMENT_BLOCK_NUM = 600 +Contract.eblocbroker = Contract.Contract(is_brownie=True) + +setup_logger("", True) +sys.path.append(path.dirname(path.dirname(path.abspath(__file__)))) +cwd = os.getcwd() +provider_gmail = "provider_test@gmail.com" +fid = "ee14ea28-b869-1036-8080-9dbd8c6b1579@b2drop.eudat.eu" + +available_core = 128 +price_core_min = 1 +price_data_transfer = 1 +price_storage = 1 +price_cache = 1 +prices = [price_core_min, price_data_transfer, price_storage, price_cache] + +GPG_FINGERPRINT = "0359190A05DF2B72729344221D522F92EFA2F330" +ipfs_address = "/ip4/79.123.177.145/tcp/4001/ipfs/QmWmZQnb8xh3gHf9ZFmVQC4mLEav3Uht5kHJxZtixG3rsf" +Ebb = None +chain = None +ebb = None + +gas_costs = {} +gas_costs["registerRequester"] = [] +gas_costs["registerProvider"] = [] +gas_costs["setJobStateRunning"] = [] +gas_costs["refund"] = [] +gas_costs["setDataVerified"] = [] +gas_costs["processPayment"] = [] +gas_costs["withdraw"] = [] +gas_costs["authenticateOrcID"] = [] +gas_costs["depositStorage"] = [] +gas_costs["updateProviderInfo"] = [] +gas_costs["updataDataPrice"] = [] +gas_costs["updateProviderPrices"] = [] +gas_costs["registerData"] = [] + + +def to_gwei(value): + return web3.toWei(value, "gwei") + + +@pytest.fixture(scope="module", autouse=True) +def my_own_session_run_at_beginning(_Ebb): + global Ebb # noqa + global chain # noqa + global ebb # noqa + + cfg.IS_BROWNIE_TEST = True + config.Ebb = Ebb = Contract.Contract(is_brownie=True) + config.ebb = _Ebb + cfg.Ebb.eBlocBroker = Contract.eblocbroker.eBlocBroker = _Ebb + ebb = _Ebb + Ebb.w3 = web3 + if not config.chain: + config.chain = Chain() + + chain = config.chain + + +@pytest.fixture(autouse=True) +def run_around_tests(): + new_test() + + +def check_price_keys(price_keys, provider, code_hash): + res = ebb.getRegisteredDataBlockNumbers(provider, code_hash) + for key in price_keys: + if key > 0: + assert key in res, f"{key} does not exist in price keys ({res}) for the registered data {code_hash}" + + +def remove_zeros_gpg_fingerprint(_gpg_fingerprint): + return str(_gpg_fingerprint).replace("0x000000000000000000000000", "").upper() + + +def get_block_number(): + log(f"block_number={web3.eth.blockNumber} | contract_bn={web3.eth.blockNumber + 1}", "bold") + return web3.eth.blockNumber + + +def get_block_timestamp(): + return web3.eth.getBlock(get_block_number()).timestamp + + +def withdraw(address, amount): + temp = address.balance() + assert ebb.balanceOf(address) == amount + tx = ebb.withdraw({"from": address, "gas_price": 0}) + gas_costs["withdraw"].append(tx.__dict__["gas_used"]) + received = address.balance() - temp + assert to_gwei(amount) == received + assert ebb.balanceOf(address) == 0 + + +def test_workflow(): + job = Job() + provider = accounts[0] + requester = accounts[1] + register_provider() + register_requester(requester) + job_key = "QmQv4AAL8DZNxZeK3jfJGJi63v1msLMZGan7vSsCDXzZud" + code_hash = ipfs_to_bytes32(job_key) + with brownie.reverts(): + ebb.updataDataPrice(code_hash, 20, 100, {"from": provider}) + + tx = ebb.registerData(code_hash, 20, 
cfg.ONE_HOUR_BLOCK_DURATION, {"from": provider}) + gas_costs["registerData"].append(tx.__dict__["gas_used"]) + + ebb.removeRegisteredData(code_hash, {"from": provider}) # submitJob should fail if it is not removed + code_hash1 = "0x68b8d8218e730fc2957bcb12119cb204" + tx = ebb.registerData(code_hash1, 20, cfg.ONE_HOUR_BLOCK_DURATION, {"from": provider}) + gas_costs["registerData"].append(tx.__dict__["gas_used"]) + mine(6) + with brownie.reverts(): + ebb.registerData(code_hash1, 21, 1000, {"from": provider}) + + tx = ebb.updataDataPrice(code_hash1, 250, cfg.ONE_HOUR_BLOCK_DURATION, {"from": provider}) + gas_costs["updataDataPrice"].append(tx.__dict__["gas_used"]) + tx = ebb.updataDataPrice(code_hash1, 251, cfg.ONE_HOUR_BLOCK_DURATION + 1, {"from": provider}) + gas_costs["updataDataPrice"].append(tx.__dict__["gas_used"]) + data_block_numbers = ebb.getRegisteredDataBlockNumbers(provider, code_hash1) + log(f"get_registered_data_block_numbers={data_block_numbers[1]}", "bold") + get_block_number() + data_prices = ebb.getRegisteredDataPrice(provider, code_hash1, 0) + assert data_prices[0] == 20 + output = ebb.getRegisteredDataPrice(provider, code_hash1, data_block_numbers[-1]) + assert output[0] == 251 + mine(cfg.ONE_HOUR_BLOCK_DURATION - 10) + output = ebb.getRegisteredDataPrice(provider, code_hash1, 0) + log(f"register_data_price={output}", "bold") + assert output[0] == 20 + mine(1) + output = ebb.getRegisteredDataPrice(provider, code_hash1, 0) + log(f"register_data_price={output}", "bold") + assert output[0] == 251 + + job.code_hashes = [code_hash, code_hash1] # Hashes of the data files in an array + job.storage_hours = [0, 0] + job.data_transfer_ins = [100, 0] + job.data_transfer_out = 100 + + # job.data_prices_set_block_numbers = [0, 253] # TODO: check whether this (e.g. 253) exists or not + job.data_prices_set_block_numbers = [0, data_block_numbers[1]] # TODO: check whether this (e.g. 253) exists or not + check_price_keys(job.data_prices_set_block_numbers, provider, code_hash1) + job.cores = [2, 4, 2] + job.run_time = [10, 15, 20] + job.storage_ids = [StorageID.IPFS.value, StorageID.NONE.value] + job.cache_types = [CacheType.PUBLIC.value, CacheType.PUBLIC.value] + args = [ + provider, + ebb.getProviderSetBlockNumbers(accounts[0])[-1], + job.storage_ids, + job.cache_types, + job.data_prices_set_block_numbers, + job.cores, + job.run_time, + job.data_transfer_out, + ] + job_price, cost = job.cost(provider, requester) + tx = ebb.submitJob( # first submit + job_key, + job.data_transfer_ins, + args, + job.storage_hours, + job.code_hashes, + {"from": requester, "value": to_gwei(job_price)}, + ) + for idx in range(0, 3): + log(ebb.getJobInfo(provider, job_key, 0, idx)) + + console_ruler(character="-=") + assert ( + tx.events["LogRegisteredDataRequestToUse"][0]["registeredDataHash"] + == "0x0000000000000000000000000000000068b8d8218e730fc2957bcb12119cb204" + ), "registered data should be used" + + with brownie.reverts(): + log(ebb.getJobInfo(provider, job_key, 1, 2)) + log(ebb.getJobInfo(provider, job_key, 0, 3)) + + # setJobState for the workflow: + index = 0 + job_id = 0 + start_timestamp = 10 + tx = ebb.setJobStateRunning(job_key, index, job_id, start_timestamp, {"from": accounts[0]}) + gas_costs["setJobStateRunning"].append(tx.__dict__["gas_used"]) + index = 0 + job_id = 1 + start_timestamp = 20 + tx = ebb.setJobStateRunning(job_key, index, job_id, start_timestamp, {"from": accounts[0]}) + gas_costs["setJobStateRunning"].append(tx.__dict__["gas_used"]) + # process_payment for the workflow + index = 0 + job_id = 0 + elapsed_time = 
10 + data_transfer = [100, 0] + ended_timestamp = 20 + result_ipfs_hash = ipfs_to_bytes32("QmWmyoMoctfbAaiEs2G46gpeUmhqFRDW6KWo64y5r581Ve") + received_sums = [] + refunded_sums = [] + received_sum = 0 + refunded_sum = 0 + args = [ + index, + job_id, + ended_timestamp, + data_transfer[0], + data_transfer[1], + elapsed_time, + job.cores, + job.run_time, + False, + ] + tx = ebb.processPayment(job_key, args, result_ipfs_hash, {"from": accounts[0]}) + gas_costs["processPayment"].append(tx.__dict__["gas_used"]) + # log(tx.events['LogProcessPayment']) + received_sums.append(tx.events["LogProcessPayment"]["receivedGwei"]) + refunded_sums.append(tx.events["LogProcessPayment"]["refundedGwei"]) + received_sum += tx.events["LogProcessPayment"]["receivedGwei"] + refunded_sum += tx.events["LogProcessPayment"]["refundedGwei"] + log(f"received_sum={received_sum} | refunded_sum={refunded_sum} | job_price={job_price}", "bold") + index = 0 + job_id = 1 + elapsed_time = 15 + data_transfer = [0, 0] + ended_timestamp = 39 + result_ipfs_hash = ipfs_to_bytes32("QmWmyoMoctfbAaiEs2G46gpeUmhqFRDW6KWo64y5r581Ve") + args = [ + index, + job_id, + ended_timestamp, + data_transfer[0], + data_transfer[1], + elapsed_time, + job.cores, + job.run_time, + False, + ] + tx = ebb.processPayment(job_key, args, result_ipfs_hash, {"from": accounts[0]}) + assert tx.events["LogProcessPayment"]["elapsedTime"] == elapsed_time + gas_costs["processPayment"].append(tx.__dict__["gas_used"]) + received_sums.append(tx.events["LogProcessPayment"]["receivedGwei"]) + refunded_sums.append(tx.events["LogProcessPayment"]["refundedGwei"]) + received_sum += tx.events["LogProcessPayment"]["receivedGwei"] + refunded_sum += tx.events["LogProcessPayment"]["refundedGwei"] + log(f"received_sum={received_sum} | refunded_sum={refunded_sum} | job_price={job_price}", "bold") + index = 0 + job_id = 2 + elapsed_time = 20 + data_transfer = [0, 100] + ended_timestamp = 39 + result_ipfs_hash = ipfs_to_bytes32("QmWmyoMoctfbAaiEs2G46gpeUmhqFRDW6KWo64y5r581Ve") + with brownie.reverts(): # processPayment should revert since setJobStateRunning is not called for job_id=2 + args = [ + index, + job_id, + ended_timestamp, + data_transfer[0], + data_transfer[1], + elapsed_time, + job.cores, + job.run_time, + False, + ] + tx = ebb.processPayment(job_key, args, result_ipfs_hash, {"from": accounts[0]}) + gas_costs["processPayment"].append(tx.__dict__["gas_used"]) + + index = 0 + job_id = 2 + start_timestamp = 20 + tx = ebb.setJobStateRunning(job_key, index, job_id, start_timestamp, {"from": accounts[0]}) + gas_costs["setJobStateRunning"].append(tx.__dict__["gas_used"]) + args = [ + index, + job_id, + ended_timestamp, + data_transfer[0], + data_transfer[1], + elapsed_time, + job.cores, + job.run_time, + True, + ] + tx = ebb.processPayment(job_key, args, result_ipfs_hash, {"from": accounts[0]}) + assert tx.events["LogProcessPayment"]["elapsedTime"] == elapsed_time + gas_costs["processPayment"].append(tx.__dict__["gas_used"]) + # log(tx.events['LogProcessPayment']) + received_sums.append(tx.events["LogProcessPayment"]["receivedGwei"]) + refunded_sums.append(tx.events["LogProcessPayment"]["refundedGwei"]) + received_sum += tx.events["LogProcessPayment"]["receivedGwei"] + refunded_sum += tx.events["LogProcessPayment"]["refundedGwei"] + log(f"received_sum={received_sum} | refunded_sum={refunded_sum} | job_price={job_price}", "bold") + log(f"received_sums={received_sums}", "bold") + log(f"refunded_sums={refunded_sums}", "bold") + assert job_price - cost["storage"] == received_sum + refunded_sum 
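+    # note: the assert above checks that the requester's payment is conserved:
+    # job_price == received_sum + refunded_sum + cost["storage"], i.e. what the
+    # provider earned plus what is refunded to the requester plus the storage
+    # payment; the two withdraw() calls below then pay out and verify both sums.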
+ withdraw(accounts[0], received_sum) + withdraw(requester, refunded_sum) diff --git a/docker-compose.yml b/docker-compose.yml index caec72db..82a17a6c 100644 --- a/docker-compose.yml +++ b/docker-compose.yml @@ -9,7 +9,29 @@ services: ports: - "4001:4001" # Public IPFS Port volumes: - - /tmp/volumes/lib:/var/lib/slurmd - - /tmp/volumes/spool:/var/spool/slurm - - /tmp/volumes/log:/var/log/slurm - - /tmp/volumes/db:/var/lib/mysql + - ~/docker/volumes/lib:/var/lib/slurmd + - ~/docker/volumes/spool:/var/spool/slurm + - ~/docker/volumes/log:/var/log/slurm + - ~/docker/volumes/db:/var/lib/mysql + # slurm: + # build: + # dockerfile: Dockerfile + # context: ./docker/provider + # hostname: slurmctl + # stdin_open: true + # tty: true + # ports: + # - "4001:4001" # Public IPFS Port + # volumes: + # - ./volumes/lib:/var/lib/slurmd + # - ./volumes/spool:/var/spool/slurm + # - ./volumes/log:/var/log/slurm + # - ./volumes/db:/var/lib/mysql + # requester: + # build: + # dockerfile: Dockerfile + # context: ./docker/requester + # stdin_open: true + # tty: true + # ports: + # - "4002:4002" # Public IPFS Port diff --git a/broker/docker/README.org b/docker/README.org similarity index 54% rename from broker/docker/README.org rename to docker/README.org index 5708a31e..020490ed 100644 --- a/broker/docker/README.org +++ b/docker/README.org @@ -7,15 +7,15 @@ #+begin_src bash remove_dangling () { - docker rmi $(docker images -qa -f 'dangling=true') --force + docker images -qa -f 'dangling=true' | ifne docker rmi $(docker images -qa -f 'dangling=true') --force docker image ls } -sudo systemctl start docker -sudo docker run eblocbroker +# sudo systemctl start docker +# sudo docker run eblocbroker -# docker run --name test -it eblocbroker bash -docker run -it eblocbroker bash +# docker run --name test -it eblocbroker bash +docker run --hostname slurmctl -it ebb bash #+end_src ** Clean complete docker cache @@ -33,12 +33,17 @@ NOTE: In order to push the image, make sure the tag you are using includes the username. #+begin_src bash -docker login -u eblocbroker -p -docker tag eblocbroker:latest eblocbroker/app +cat ~/.docker_password.txt | docker login --username eblocbroker --password-stdin +docker build -t ebb:latest . --progress plain +docker tag ebb:latest eblocbroker/app docker push eblocbroker/app #+end_src -~docker history --human --format "{{.CreatedBy}}: {{.Size}}" eblocbroker~ +~docker history --human --format "{{.CreatedBy}}: {{.Size}}" ebb~ + +#+begin_src bash +docker images ebb:latest --format "{{.Repository}}:{{.Tag}} -> {{.Size}}" +#+end_src ** Build #+begin_src bash docker-compose build --no-cache #+end_src ** Run #+begin_src bash docker-compose up -d # or docker-compose up --remove-orphans #+end_src + +** Image for service slurm was built because it did not already exist. 
To rebuild this image you must use: + +#+begin_src bash +docker-compose up --build +#+end_src diff --git a/docker/provider/Dockerfile b/docker/provider/Dockerfile new file mode 100644 index 00000000..94bed176 --- /dev/null +++ b/docker/provider/Dockerfile @@ -0,0 +1 @@ +FROM eblocbroker/app:latest diff --git a/docker/requester/Dockerfile b/docker/requester/Dockerfile new file mode 100644 index 00000000..7c2dc06d --- /dev/null +++ b/docker/requester/Dockerfile @@ -0,0 +1,6 @@ +FROM eblocbroker/app:latest +COPY docker-entrypoint.sh /usr/local/bin/docker-entrypoint.sh +COPY supervisord.conf /etc/ +# RUN unlink /var/run/supervisor/supervisor.sock + +# ENTRYPOINT ["/tini", "--", "/usr/local/bin/docker-entrypoint.sh"] diff --git a/docker/requester/docker-compose.yml b/docker/requester/docker-compose.yml new file mode 100644 index 00000000..d190d748 --- /dev/null +++ b/docker/requester/docker-compose.yml @@ -0,0 +1,9 @@ +version: '3' + +services: + requester: + build: . + stdin_open: true + tty: true + ports: + - "4002:4002" # Public IPFS Port diff --git a/docker/requester/docker-entrypoint.sh b/docker/requester/docker-entrypoint.sh new file mode 100755 index 00000000..c84428b1 --- /dev/null +++ b/docker/requester/docker-entrypoint.sh @@ -0,0 +1,29 @@ +#!/bin/bash + +function check_running_status { + for count in {2..0}; do + STATUS=$(/usr/bin/supervisorctl status $1 | awk '{print $2}') + echo "#> $1 is in the $STATUS state." + if [[ "$STATUS" = "RUNNING" ]] + then + break + else + sleep 1 + fi + done +} + + +function start_service { + echo "## Starting $1" + /usr/bin/supervisorctl start $1 + check_running_status $1 +} + +echo "#> Starting supervisord process manager" +/usr/bin/supervisord --configuration /etc/supervisord.conf +for service in mongod_r +do + start_service $service +done +exec "$@" diff --git a/broker/_slurm/files/supervisord.conf b/docker/requester/supervisord.conf similarity index 89% rename from broker/_slurm/files/supervisord.conf rename to docker/requester/supervisord.conf index 94d1165d..093ab876 100644 --- a/broker/_slurm/files/supervisord.conf +++ b/docker/requester/supervisord.conf @@ -29,6 +29,20 @@ autostart=false autorestart=false priority=2 +[program:mongod_r] +user=mongodb +command=/usr/bin/mongod --bind_ip 127.0.0.1 --port 27019 +stdout_logfile=/var/log/supervisor/mongod.log +stdout_logfile_maxbytes=1MB +stdout_logfile_backups=5 +stderr_logfile=/var/log/supervisor/mongod.log +stderr_logfile_maxbytes=1MB +stderr_logfile_backups=5 +exitcodes=0,1,2 +autostart=false +autorestart=false +priority=2 + [program:munged] user=munge command=/usr/sbin/munged -F diff --git a/broker/_slurm/Dockerfile b/docker/slurm/Dockerfile similarity index 100% rename from broker/_slurm/Dockerfile rename to docker/slurm/Dockerfile diff --git a/broker/_slurm/docker-compose.yml b/docker/slurm/docker-compose.yml similarity index 100% rename from broker/_slurm/docker-compose.yml rename to docker/slurm/docker-compose.yml diff --git a/broker/_slurm/docker-entrypoint.sh b/docker/slurm/docker-entrypoint.sh similarity index 100% rename from broker/_slurm/docker-entrypoint.sh rename to docker/slurm/docker-entrypoint.sh diff --git a/broker/_slurm/files/slurm/gres.conf b/docker/slurm/files/slurm/gres.conf similarity index 100% rename from broker/_slurm/files/slurm/gres.conf rename to docker/slurm/files/slurm/gres.conf diff --git a/broker/_slurm/files/slurm/slurm.conf b/docker/slurm/files/slurm/slurm.conf similarity index 100% rename from broker/_slurm/files/slurm/slurm.conf rename to 
docker/slurm/files/slurm/slurm.conf diff --git a/broker/_slurm/files/slurm/slurmdbd.conf b/docker/slurm/files/slurm/slurmdbd.conf similarity index 100% rename from broker/_slurm/files/slurm/slurmdbd.conf rename to docker/slurm/files/slurm/slurmdbd.conf diff --git a/docker/slurm/files/supervisord.conf b/docker/slurm/files/supervisord.conf new file mode 100644 index 00000000..94faaa78 --- /dev/null +++ b/docker/slurm/files/supervisord.conf @@ -0,0 +1,119 @@ +[unix_http_server] +file=/var/run/supervisor/supervisor.sock + +[supervisord] +logfile=/var/log/supervisor/supervisord.log +logfile_maxbytes=5MB +logfile_backups=10 +loglevel=info +pidfile=/var/run/supervisord.pid +nodaemon=false + +[rpcinterface:supervisor] +supervisor.rpcinterface_factory = supervisor.rpcinterface:make_main_rpcinterface + +[supervisorctl] +serverurl=unix:///var/run/supervisor/supervisor.sock + +[program:mongod] +user=mongodb +command=/usr/bin/mongod --bind_ip 127.0.0.1 +stdout_logfile=/var/log/supervisor/mongod.log +stdout_logfile_maxbytes=1MB +stdout_logfile_backups=5 +stderr_logfile=/var/log/supervisor/mongod.log +stderr_logfile_maxbytes=1MB +stderr_logfile_backups=5 +exitcodes=0,1,2 +autostart=false +autorestart=false +priority=2 + +[program:mongod_r] +user=mongodb +command=/usr/bin/mongod --bind_ip 127.0.0.1 --port 27019 +stdout_logfile=/var/log/supervisor/mongod.log +stdout_logfile_maxbytes=1MB +stdout_logfile_backups=5 +stderr_logfile=/var/log/supervisor/mongod.log +stderr_logfile_maxbytes=1MB +stderr_logfile_backups=5 +exitcodes=0,1,2 +autostart=false +autorestart=false +priority=2 + +[program:munged] +user=munge +command=/usr/sbin/munged -F +autostart=false +autorestart=true +startsecs=5 +startretries=2 +exitcodes=0,1,2 +stdout_logfile=/var/log/supervisor/munged.log +stdout_logfile_maxbytes=1MB +stdout_logfile_backups=5 +stderr_logfile=/var/log/supervisor/munged.log +stderr_logfile_maxbytes=1MB +stderr_logfile_backups=5 +priority=1 +loglevel=trace + +[program:mysqld] +command=/usr/bin/pidproxy /var/run/mariadb/mariadb.pid /usr/bin/mysqld_safe +stdout_logfile=/var/log/supervisor/mysqld.log +stdout_logfile_maxbytes=1MB +stdout_logfile_backups=5 +stderr_logfile=/var/log/supervisor/mysqld.log +stderr_logfile_maxbytes=1MB +stderr_logfile_backups=5 +exitcodes=0,1,2 +autostart=false +autorestart=false +priority=2 + +[program:slurmdbd] +user=root +#command=/bin/bash -c "until echo 'SELECT 1' | mysql -h localhost -uslurm -ppassword &> /dev/null; do echo 'Waiting for DB'; sleep 1; done && /usr/sbin/slurmdbd -Dvvv" +command=/usr/sbin/slurmdbd -Dvvv +autostart=false +autorestart=false +exitcodes=0,1,2 +stdout_logfile=/var/log/supervisor/slurmdbd.log +stdout_logfile_maxbytes=1MB +stdout_logfile_backups=5 +stderr_logfile=/var/log/supervisor/slurmdbd.log +stderr_logfile_maxbytes=1MB +stderr_logfile_backups=5 +priority=10 + +[program:slurmctld] +user=root +#command=/bin/bash -c "until 2>/dev/null >/dev/tcp/localhost/6819; do echo 'Waiting for port 6819'; sleep 1; done && /usr/sbin/slurmctld -Dvvv" +command=/usr/sbin/slurmctld -Dvvv +autostart=false +autorestart=false +exitcodes=0,1,2 +stdout_logfile=/var/log/supervisor/slurmctld.log +stdout_logfile_maxbytes=1MB +stdout_logfile_backups=5 +stderr_logfile=/var/log/supervisor/slurmctld.log +stderr_logfile_maxbytes=1MB +stderr_logfile_backups=5 +priority=50 + +[program:slurmd] +user=root +# command=/bin/bash -c "until 2>/dev/null >/dev/tcp/localhost/6817; do echo 'Waiting for port 6817'; sleep 1; done && /usr/sbin/slurmd -Dvvv" +command=/usr/sbin/slurmd -Dvvv +autostart=false 
+autorestart=false +exitcodes=0,1,2 +stdout_logfile=/var/log/supervisor/slurmd.log +stdout_logfile_maxbytes=1MB +stdout_logfile_backups=5 +stderr_logfile=/var/log/supervisor/slurmd.log +stderr_logfile_maxbytes=1MB +stderr_logfile_backups=5 +priority=100 \ No newline at end of file diff --git a/docs/NOTES.org b/docs/NOTES.org new file mode 100755 index 00000000..6743254e --- /dev/null +++ b/docs/NOTES.org @@ -0,0 +1,12 @@ +* info + +- sphinx config is taken from Ethereum Alarm Clock documentation. +[[https://github.com/ethereum-alarm-clock/ethereum-alarm-clock]] +[[https://ethereum-alarm-clock.readthedocs.io/en/latest/]] + +* Convert + +#+begin_src bash +$HOME/ebloc-broker/docs/convert/run_convert.sh +./run +#+end_src diff --git a/docs/README.md b/docs/README.md deleted file mode 100755 index 30913001..00000000 --- a/docs/README.md +++ /dev/null @@ -1,6 +0,0 @@ -# Convert - -``` -$HOME/ebloc-broker/docs/convert/run_convert.sh -./run -``` diff --git a/docs/README.rst b/docs/README.rst new file mode 100755 index 00000000..8eea9372 --- /dev/null +++ b/docs/README.rst @@ -0,0 +1,147 @@ +eBlocBroker +=========== + +About +----- + +eBlocBroker is a blockchain-based autonomous computational resource broker. + +Website: `http://ebloc.cmpe.boun.edu.tr `_ +~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ + +Prerequisites +~~~~~~~~~~~~~ + +- `Slurm `_, `Geth `_, + `IPFS `_, + `prasmussen/gdrive `_, + `owncloud/pyocclient `_, + `eth-brownie `_, + `ganache-cli `_ + +Using Docker +~~~~~~~~~~~~ + +You can use a sandbox container provided in the `./docker-compose.yml <./docker-compose.yml>`_ file for testing inside a Docker +environment. + +This container provides everything you need to test using a Python 3.7 interpreter. + +Start the test environment: + +.. code:: bash + + docker build -t ebb:latest . --progress plain + docker-compose up -d + +To enter the shell of the running container in interactive mode, run: + +.. code:: bash + + docker exec -it ebloc-broker_slurm_1 /bin/bash + +To stop the cluster container, run: + +.. code:: bash + + docker-compose down + +Cloud Storages +~~~~~~~~~~~~~~ + +EUDAT +^^^^^ + +Create a B2ACCESS user account and log in to B2DROP: +::::::::::::::::::::::::::::::::::::::::::::::::::::: + +First, from `B2ACCESS home page `_ + +``No account? Signup`` => ``Create B2ACCESS user account (username) only`` + +- `B2DROP login site `_ + +1.4.1.2 Create app password +::::::::::::::::::::::::::: + +``Settings`` => ``Security`` => ``Create new app password`` and save it. + +1.5 How to install required packages +~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ + +We have a helper script, which you can use to install all required external dependencies: + +.. code:: bash + + source ./scripts/setup.sh + +Next, type ``eblocbroker --help`` for basic usage information. + +1.6 Requester +~~~~~~~~~~~~~ + +1.6.1 Submit Job +^^^^^^^^^^^^^^^^ + +In order to submit your job, each user should already be registered with eBlocBroker. +You can use `./broker/eblocbroker/register_requester.py <./broker/eblocbroker/register_requester.py>`_ to register. +Please update the following arguments inside ``register.yaml``. + +After registration is done, each user should authenticate their ORCID iD using `http://eblocbroker.duckdns.org/ `_. + +``$ ./eblocbroker.py submit job.yaml`` + +1.6.1.1 Example yaml file that defines a job to submit. +::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::: + +``job.yaml``: + +.. 
code:: yaml + + config: + provider_address: '0x3e6ffc5ede9ee6d782303b2dc5f13afeee277aea' + source_code: + cache_type: public + path: ~/test_eblocbroker/source_code + storage_hours: 0 + storage_id: ipfs + base_data_path: ~/test_eblocbroker/test_data/base/ + data: + data1: + cache_type: public + path: ~/test_eblocbroker/dataset_zip/small/KZ2-tsukuba + storage_hours: 1 + storage_id: ipfs + data2: + cache_type: public + path: ~/test_eblocbroker/test_data/base/data/data1 + storage_hours: 0 + storage_id: ipfs + data3: + hash: f13d75bc60898f0823566347e380a34b + data_transfer_out: 1 + jobs: + job1: + cores: 1 + run_time: 1 + +- ``path`` should be represented as the full path of the corresponding folder. + +- ``cache_type`` should be a value from [ ``public``, ``private`` ] + +- ``storage_id`` should be a value from [ ``ipfs``, ``ipfs_gpg``, ``none``, ``eudat``, ``gdrive`` ] + + +------------ + +1.7 Provider +~~~~~~~~~~~~ + +The provider should run the `./eblocbroker.py <./eblocbroker.py>`_ driver Python script. + +``$ ./eblocbroker.py driver`` + +1.7.1 Screenshot of provider GUI: +^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ + +.. image:: /docs/gui1.png diff --git a/docs/conf.py b/docs/conf.py index 18885927..8cd76384 100755 --- a/docs/conf.py +++ b/docs/conf.py @@ -1,6 +1,6 @@ # -*- coding: utf-8 -*- # -# Ethereum Alarm Clock documentation build configuration file, created by +# eBlocBroker documentation build configuration file, created by # sphinx-quickstart on Sun Sep 13 11:14:18 2015. # # This file is execfile()d with the current directory set to its @@ -58,16 +58,16 @@ # built documents. # # The short X.Y version. -version = "1.0.12" +version = "2.0.1" # The full version, including alpha/beta/rc tags. -release = "1.0.12" +release = "2.0.1" # The language for content autogenerated by Sphinx. Refer to documentation # for a list of supported languages. # # This is also used if you do content translation via gettext catalogs. # Usually you set "language" from the command line for these cases. -language = None +language = "en" # There are two options for replacing |today|: either, you set today to some # non-false value, then it is used: @@ -225,7 +225,7 @@ def setup(sphinx): # html_search_scorer = 'scorer.js' # Output file base name for HTML help builder. -htmlhelp_basename = "EblocBroker" +htmlhelp_basename = "eBlocBroker" # -- Doctest configuration ---------------------------------------- diff --git a/docs/connect.rst b/docs/connect.rst deleted file mode 100755 index 3dc9215f..00000000 --- a/docs/connect.rst +++ /dev/null @@ -1,406 +0,0 @@ -**Proof-of-Authority Private Ethereum Network (eBlocPOA)** -========================================================== - -Dashboard: http://ebloc.cmpe.boun.edu.tr:3015/ - -Explorer: http://ebloc.cmpe.boun.edu.tr:8000/ - -Chat on Gitter: https://gitter.im/eBloc/eBlocPOA - -**Preinstallations** -------------------- - -**Installation Instructions for Mac** -~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ - -Pre-requirements -^^^^^^^^^^^^^^^^ - -- If you don’t have Homebrew, `install it first `__. - -- From following link: https://nodejs.org/en/, download - ``10.10.0 Current``. - -.. code:: bash - - sudo npm install npm pm2 -g - brew install go - -.. raw:: html - - - -**Installation Instructions for Linux** -~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ - -Node.js and Node Package Manager (``npm``) installation -^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ - -.. 
code:: bash - - sudo apt-get install nodejs - curl -sL https://deb.nodesource.com/setup_8.x | sudo -E bash - - sudo apt-get install -y nodejs - sudo npm install pm2 -g - sudo ln -s /usr/bin/nodejs /usr/bin/node - -`Go Installation `__ -^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ - -.. code:: bash - - sudo apt-get update - wget https://dl.google.com/go/go1.14.linux-amd64.tar.gz - sudo tar -xvf go1.14.linux-amd64.tar.gz - rm -f go1.14.linux-amd64.tar.gz - sudo rm -rf /usr/local/go - sudo mv go /usr/local - export GOROOT=/usr/local/go - -- Put this line ``export PATH=$PATH:/usr/local/go/bin`` into - ``$HOME/.profile`` file and do ``source $HOME/.profile`` - -Go Ethereum (``geth``) Pre-requirements -^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ - -.. code:: bash - - sudo apt-get install git - sudo apt-get install -y build-essential libgmp3-dev golang - -.. raw:: html - - - --------------- - -Do following for both Linux and Mac -~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ - -`Go Ethereum `__ (``geth``) building from source -^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ - -It is recommended to install ``geth`` version ``1.9.13``. - -.. code:: bash - - git clone https://github.com/ethereum/go-ethereum - cd go-ethereum/ - git pull - git checkout tags/v1.9.13 # update it with the latest version of geth - make geth - -After ``go-ethereum`` is installed, copy ``geth`` located under -``go-ethereum/build/bin`` into\ ``/usr/local/bin``: - -.. code:: bash - - $ ls go-ethereum/build/bin - geth - $ sudo cp build/bin/geth /usr/local/bin/ - $ which geth - /usr/local/bin/geth - -Please note that ``Geth`` version should be greater or equal than -``1.9.13``. - -.. code:: bash - - $ geth version | grep "Version: 1" - Version: 1.9.13-unstable - -Now you can jump to `eBloc Setup on Linux and -macOS `__. - -.. raw:: html - - - --------------- - -**eBloc Setup on Linux and macOS** ----------------------------------- - -Downloading -~~~~~~~~~~~ - -.. code:: bash - - cd $HOME - git clone https://github.com/ebloc/eBlocPOA.git - - cd eBlocPOA - git clone https://github.com/cubedro/eth-net-intelligence-api - - cd eth-net-intelligence-api - npm install - -Create private folder -~~~~~~~~~~~~~~~~~~~~~ - -.. code:: bash - - sudo mkdir -p /private - -Initialises a new genesis block and definition for the network -~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ - -Navigate into ``eBlocPOA`` directory. - -.. warning:: Do ``/init_custom.sh`` only once. You do not need to do it -again - -.. code:: bash - - sudo ./init_custom.sh - ./initialize.sh - -Server run (Always run with ``sudo``) -~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ - -.. code:: bash - - sudo ./server.sh - -- If you want to kill your server please do: ``sudo killall geth`` -- You can keep track of output of your ``geth-server`` by running - following: ``sudo tail -f gethServer.out`` - -.. code:: bash - - $ sudo tail -f gethServer.out - Password: - INFO [02-12|16:22:34] Imported new chain segment blocks=1 txs=0 mgas=0.000 elapsed=503.882µs mgasps=0.000 number=111203 hash=582a44…6e15dd - INFO [02-12|16:22:49] Imported new chain segment blocks=1 txs=0 mgas=0.000 elapsed=491.377µs mgasps=0.000 number=111204 hash=b752ec…a0725d - -Client run (geth console) -~~~~~~~~~~~~~~~~~~~~~~~~~ - -.. code:: bash - - ./client.sh - -If you are successfully connected into ``eBlocPOA`` network inside -``geth`` console; ``peerCount`` should return 1 or more, after running -``net`` command. 
- --------------- - -Create an Ethereum Account -~~~~~~~~~~~~~~~~~~~~~~~~~~ - -**Creating an account:** - -.. code:: bash - - $ cd eBlocPOA - $ eblocPath="$PWD" - $ geth --datadir="$eblocPath" account new - Your new account is locked with a password. Please give a password. Do not forget this password. - Passphrase: - Repeat passphrase: - Address: {a0a50a64cac0744dea5287d1025b8ef28aeff36e} - -Your new account is locked with a password. Please give a password. Do -not forget this password. Please enter a difficult passphrase for your -account. - -You should see your ``Keystore File (UTC / JSON)``\ under ``keystore`` -directory. - -.. code:: bash - - [~/eBlocPOA]$ ls keystore - UTC--2018-02-14T10-46-54.423218000Z--a0a50a64cac0744dea5287d1025b8ef28aeff36e - -**Using Console:** - -You can also create your Ethereum account inside your ``geth-client``. -Here your ``Keystore File`` will be created with root permission, -``eBlocWallet`` will not able to unlock it. - -.. code:: bash - - > personal.newAccount() - Passphrase: - Repeat passphrase: - "0x7d334606c71417f944ff8ba5c09e3672066244f8" - > eth.accounts - ["0x7d334606c71417f944ff8ba5c09e3672066244f8"] - -Now you should see your ``Keystore File (UTC / JSON)``\ under -``private/keystore`` directory. - -.. code:: bash - - [~/eBlocPOA]$ ls private/keystore - UTC--2018-02-14T11-00-59.995395000Z--7d334606c71417f944ff8ba5c09e3672066244f8 - -To give open acccess to the keystore file: - -.. code:: bash - - sudo chown -R $(whoami) private/keystore/UTC--... - --------------- - -**How to attach to eBloc Network Status** -~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ - -You can see your node on eBloc Network Status -(http://ebloc.cmpe.boun.edu.tr:3015). Setup is done when you run -``./initialize.sh``. If you face with any issue please see the -`guide `__. - -To Run -^^^^^^ - -- Please open ``stats.sh`` file under ``eBlocPOA``\ directory. Write - your unique name instead of ``mynameis``. - -- .. warning:: Change ``DATADIR`` variable with path for - ``eth-net-intelligence-api`` directory - -- .. warning:: ``geth-server`` should be running on the background - -Finally you should run following command -^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ - -.. code:: bash - - ./stats.sh - -- ``sudo pm2 show app`` should return some output starting with - ``"status │ online"``. - -Now, you should see your node on http://ebloc.cmpe.boun.edu.tr:3015. - --------------- - -**Helpful commands on geth client** -~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ - -Please try following commands on your ``geth-client`` console. - -.. code:: bash - - Welcome to the Geth JavaScript console! 
- - instance: Geth/v1.7.3-stable/darwin-amd64/go1.9.2 - modules: admin:1.0 clique:1.0 debug:1.0 eth:1.0 miner:1.0 net:1.0 personal:1.0 rpc:1.0 txpool:1.0 web3:1.0 - - > net - { - listening: true, - peerCount: 1, - version: "23422", - getListening: function(callback), - getPeerCount: function(callback), - getVersion: function(callback) - } - - # How to check list of accounts - > eth.accounts - ["0x3b027ff2d229dd1c7918910dee32048f5f65b70d", "0x472eea7de6a43b6e55d8be84d5d29879df42a46c"] - - > sender=eth.accounts[0] - "0x3b027ff2d229dd1c7918910dee32048f5f65b70d" - - > reciever=eth.accounts[1] - "0x472eea7de6a43b6e55d8be84d5d29879df42a46c" - - # How to check your balance - > web3.fromWei(eth.getBalance(sender)) - 100 - - # How to unlock your Ethereum account - > personal.unlockAccount(sender) - Unlock account 0x3b027ff2d229dd1c7918910dee32048f5f65b70d - Passphrase: - true - - # How to send ether to another account - > eth.sendTransaction({from:sender, to:reciever, value: web3.toWei(0.00001, "ether")}) - "0xf92c11b6bd80ab12d5d63f7c6909ac7fc45a6b8052c29256dd28bd97b6375f1b" #This is your transaction receipt. - - # How to get receipt of your transaction - > eth.getTransactionReceipt("0xf92c11b6bd80ab12d5d63f7c6909ac7fc45a6b8052c29256dd28bd97b6375f1b") - { - blockHash: "0x17325837f38ff84c0337db87f13b9496f546645366ebd94c7e78c6a4c0cb5a87", - blockNumber: 111178, - contractAddress: null, - cumulativeGasUsed: 21000, - from: "0x3b027ff2d229dd1c7918910dee32048f5f65b70d", - gasUsed: 21000, - logs: [], - logsBloom: "0x00000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000", - status: "0x1", - to: "0x472eea7de6a43b6e55d8be84d5d29879df42a46c", - transactionHash: "0xf92c11b6bd80ab12d5d63f7c6909ac7fc45a6b8052c29256dd28bd97b6375f1b", - transactionIndex: 0 - } - -**Some helpful links** -~~~~~~~~~~~~~~~~~~~~~~ - -- `Managing your - accounts `__ -- `Sending Ether on - geth-client `__ diff --git a/docs/convert.sh b/docs/convert.sh new file mode 100644 index 00000000..8975695a --- /dev/null +++ b/docs/convert.sh @@ -0,0 +1,4 @@ +#!/bin/bash + +# convert +# "image:: /docs/" => "image:: " diff --git a/docs/convert/convert_md_2_rst.py b/docs/convert/convert_md_2_rst.py index a7e571f8..ecf1568b 100755 --- a/docs/convert/convert_md_2_rst.py +++ b/docs/convert/convert_md_2_rst.py @@ -56,5 +56,4 @@ def convert_md_2_rst_process(fn_root): file_target.close() -home = expanduser("~") -setup(f"{home}/eBlocBroker/docs/convert") +setup(f"{expanduser('~')}/ebloc-broker/docs/convert") diff --git a/docs/convert/run_convert.sh b/docs/convert/run_convert.sh index f12040ce..d72b3c59 100755 --- a/docs/convert/run_convert.sh +++ b/docs/convert/run_convert.sh @@ -1,8 +1,5 @@ #!/bin/bash -# git fetch -# git checkout origin/master -- README.md - wget -O geth.md https://raw.githubusercontent.com/ebloc/eBlocPOA/master/README.md mv geth.md source/geth.md cp $HOME/ebloc-broker/README.md /home/alper/ebloc-broker/docs/convert/source/readme.md diff --git a/docs/index.rst b/docs/index.rst index b3ab1a5b..6fd3c656 100755 --- a/docs/index.rst +++ b/docs/index.rst @@ -1,15 +1,18 @@ -.. 
Ethereum EBloc documentation master file, created by +.. Ethereum eBloc documentation master file, created by sphinx-quickstart on Sun Sep 13 11:14:18 2015. You can adapt this file completely to your liking, but it should at least contain the root `toctree` directive. Welcome to eBlocBroker's documentation! ================================================ -Recently, peer-to-peer based blockchain infrastructures have emerged as disruptive technologies and have lead to the realization of crypto-currencies and smart contracts that can used in a globally trustless manner. The Ethereum eBlocBroker is a blockchain based autonomous computational resource broker. +Recently, peer-to-peer based blockchain infrastructures have emerged as +disruptive technologies and have led to the realization of crypto-currencies +and smart contracts that can be used in a globally trustless manner. The Ethereum +eBlocBroker is a blockchain-based autonomous computational resource broker. -The service is completely trustless, meaning that the entire broke service operates -as smart contracts on the Ethereum private blockchain, with no priviledged access given -to any party. +The service is completely trustless, meaning that the entire broker service +operates as smart contracts on the Ethereum private blockchain, with no +privileged access given to any party. Code can be viewed on the `github repository`_. @@ -20,7 +23,7 @@ Contents: .. toctree:: - connect + README quickstart -.. _github repository: https://github.com/ebloc/eBlocBroker +.. _github repository: https://github.com/ebloc/ebloc-broker \ No newline at end of file diff --git a/docs/run.sh b/docs/run.sh index a7507f52..76d259b1 100755 --- a/docs/run.sh +++ b/docs/run.sh @@ -1,5 +1,5 @@ #!/bin/bash -IP=79.123.177.145 +IP=127.0.0.1 PORT=3003 -sphinx-autobuild . _build_html -H $IP --port $PORT +/usr/local/bin/sphinx-autobuild . 
_build_html -H $IP --port $PORT diff --git a/requirements.txt b/requirements.txt index 83ac2be2..138ae355 100755 --- a/requirements.txt +++ b/requirements.txt @@ -5,7 +5,6 @@ # pip-compile # accesscontrol==4.2 -atomicwrites==1.4.0 acquisition==4.6 # via # accesscontrol @@ -24,11 +23,9 @@ alabaster==0.7.12 # sphinx apipkg==1.5 appdirs==1.4.4 - # via - # virtualenv apscheduler==3.8.0 argcomplete==2.0.0 -aspy.refactor-imports==2.1.1 +aspy-refactor-imports==2.1.1 # via # seed-isort-config astroid==2.11.5 @@ -43,6 +40,7 @@ async-timeout==4.0.2 # aiohttp # eth-brownie asynctest==0.13.0 +atomicwrites==1.4.0 attrdict==2.0.1 attrs==21.4.0 # via @@ -101,9 +99,9 @@ btrees==4.7.2 # zodb cached-property==1.5.2 # via - # aspy.refactor-imports + # aspy-refactor-imports # vulcano -certifi==2021.10.8 +certifi==2022.5.18.1 # via # eth-brownie # pipenv @@ -120,7 +118,7 @@ cfgv==3.1.0 # pre-commit chameleon==3.8.1 # via - # z3c.pt + # z3c-pt chardet==4.0.0 # via # python-debian @@ -141,7 +139,7 @@ click==8.1.3 colorama==0.4.4 # via # crayons - # rich + # sphinx-autobuild colored==1.4.2 colored-traceback==0.3.0 coloredlogs==14.0 @@ -194,12 +192,14 @@ decorator==4.4.2 # ipython # traitlets dill==0.3.4 + # via + # pylint distlib==0.3.1 # via # virtualenv distro==1.4.0 # via - # lazr.restfulclient + # lazr-restfulclient # selinux # ssh-import-id docopt==0.6.2 @@ -228,7 +228,7 @@ eth-account==0.5.7 # via # eth-brownie # web3 -eth-brownie==1.18.2 +eth-brownie==1.19.0 eth-event==1.2.3 # via # eth-brownie @@ -294,7 +294,7 @@ flake8==4.0.1 # flake8-noqa flake8-mypy==17.8.0 flake8-noqa==1.2.1 -Flask==2.1.1 +flask==2.1.1 # via # grip fonttools==4.28.3 @@ -328,7 +328,7 @@ hexbytes==0.2.2 httplib2==0.14.0 # via # launchpadlib - # lazr.restfulclient + # lazr-restfulclient humanfriendly==8.1 # via # coloredlogs @@ -355,6 +355,8 @@ imagesize==1.3.0 # sphinx importchecker==2.0 importlib-metadata==4.2.0 + # via + # flask importlib-resources==5.1.2 importmagic==0.1.7 incremental==17.5.0 @@ -419,10 +421,10 @@ kiwisolver==1.3.1 # via # matplotlib launchpadlib==1.10.13 -lazr.restfulclient==0.14.2 +lazr-restfulclient==0.14.2 # via # launchpadlib -lazr.uri==1.0.3 +lazr-uri==1.0.3 # via # launchpadlib # wadllib @@ -430,6 +432,9 @@ lazy-object-proxy==1.7.1 # via # astroid # eth-brownie +livereload==2.6.3 + # via + # sphinx-autobuild lmdb==1.2.0 lockfile==0.12.2 # via @@ -484,11 +489,11 @@ multihash==0.1.1 multimapping==4.1 multipart==0.2.4 # via - # zope.publisher + # zope-publisher multitasking==0.0.9 # via # yfinance -mypy==0.942 +mypy==0.961 # via # flake8-mypy mypy-extensions==0.4.3 @@ -522,7 +527,7 @@ numpy==1.19.4 # yfinance oauthlib==3.1.0 # via - # lazr.restfulclient + # lazr-restfulclient packaging==21.3 # via # bleach @@ -580,7 +585,7 @@ pillow==8.0.1 # imageio # matplotlib # reportlab -pip-tools==6.5.1 +pip-tools==6.6.2 pipenv==2020.11.15 pipupgrade==1.10.1 pkginfo==1.5.0.1 @@ -589,6 +594,7 @@ platformdirs==2.5.2 # black # eth-brownie # pylint + # virtualenv pluggy==1.0.0 # via # eth-brownie @@ -607,7 +613,7 @@ protobuf==3.20.1 # via # eth-brownie # web3 -psutil==5.9.0 +psutil==5.9.1 # via # eth-brownie ptyprocess==0.7.0 @@ -640,7 +646,8 @@ pyasn1-modules==0.2.8 # via # service-identity pycairo==1.21.0 - # via pygobject + # via + # pygobject pycares==4.0.0 # via # aiodns @@ -748,7 +755,7 @@ python-dotenv==0.16.0 # eth-brownie python-gettext==4.0 # via - # zope.i18n + # zope-i18n python-jsonrpc-server==0.4.0 # via # python-language-server @@ -769,7 +776,7 @@ pytz==2020.1 # datetime # pandas # tzlocal - # zope.i18n + # 
zope-i18n pyxdg==0.26 pyyaml==5.4.1 # via @@ -807,18 +814,17 @@ restrictedpython==5.0 # via # accesscontrol rfc3986==1.4.0 -rich==12.3.0 +rich==12.4.4 rlp==2.0.1 # via # eth-account # eth-brownie # eth-rlp roman==3.3 -rope==0.22.0 -ruamel.yaml==0.17.20 -ruamel.yaml.clib==0.2.6 +ruamel-yaml==0.17.20 +ruamel-yaml-clib==0.2.6 # via - # ruamel.yaml + # ruamel-yaml secretstorage==3.3.1 # via # keyring @@ -859,7 +865,8 @@ six==1.16.0 # gdown # jsonschema # launchpadlib - # lazr.restfulclient + # lazr-restfulclient + # livereload # multiaddr # parsimonious # persistence @@ -879,12 +886,12 @@ six==1.16.0 # virtualenv # webtest # wsgiproxy2 - # z3c.pt + # z3c-pt # zodb - # zope.i18nmessageid - # zope.publisher - # zope.tales - # zope.traversing + # zope-i18nmessageid + # zope-publisher + # zope-tales + # zope-traversing slip==0.3.8 smmap==3.0.1 # via @@ -902,7 +909,9 @@ soupsieve==2.2.1 # beautifulsoup4 sphinx==4.3.1 # via + # sphinx-autobuild # sphinx-rtd-theme +sphinx-autobuild==2021.3.14 sphinx-rtd-dark-mode==1.2.4 sphinx-rtd-theme==1.0.0 # via @@ -948,7 +957,6 @@ toml==0.10.2 # ipdb # pep517 # pre-commit - # pylint # pytest # tox tomli==2.0.1 @@ -956,11 +964,15 @@ tomli==2.0.1 # black # eth-brownie # mypy + # pylint # setuptools-scm toolz==0.11.2 # via # cytoolz # eth-brownie +tornado==6.1 + # via + # livereload tox==3.24.3 tqdm==4.64.0 # via @@ -974,7 +986,7 @@ transaction==3.0.0 # via # accesscontrol # zodb - # zope.traversing + # zope-traversing twisted==20.3.0 txaio==20.12.1 # via @@ -983,15 +995,19 @@ typed-ast==1.5.1 types-filelock==0.1.5 types-pytz==2021.1.2 types-pyyaml==6.0.0 +types-requests==2.27.14 types-termcolor==1.1.1 +types-urllib3==1.26.15 + # via + # types-requests typing-extensions==4.0.1 # via # astroid # black - # eth-brownie # flake8-noqa # mypy # pylint + # rich tzlocal==2.1 # via # apscheduler @@ -1037,7 +1053,7 @@ vyper==0.3.3 wadllib==1.3.3 # via # launchpadlib - # lazr.restfulclient + # lazr-restfulclient waitress==1.4.4 # via # webtest @@ -1045,7 +1061,7 @@ wcwidth==0.2.5 # via # eth-brownie # prompt-toolkit -web3==5.29.0 +web3==5.29.1 # via # eth-brownie webdriver-manager==3.4.2 @@ -1062,13 +1078,15 @@ websockets==9.1 # python-binance # web3 webtest==2.0.35 -Werkzeug==2.1.1 +werkzeug==2.1.1 # via # flask wheel==0.37.1 # via + # eth-brownie # pip-tools # pypandoc + # vyper wrapt==1.14.1 # via # astroid @@ -1082,8 +1100,8 @@ yarl==1.7.2 # aiohttp # eth-brownie yfinance==0.1.63 -z3c.pt==3.3.0 -zc.lockfile==2.0 +z3c-pt==3.3.0 +zc-lockfile==2.0 # via # zodb zconfig==3.5.0 @@ -1099,60 +1117,60 @@ zodb==5.6.0 zodbpickle==2.0.0 # via # zodb -zope.browser==2.3 +zope-browser==2.3 # via - # zope.publisher -zope.component==4.6.2 + # zope-publisher +zope-component==4.6.2 # via # accesscontrol - # z3c.pt - # zope.contentprovider - # zope.i18n - # zope.publisher - # zope.security - # zope.traversing -zope.configuration==4.4.0 + # z3c-pt + # zope-contentprovider + # zope-i18n + # zope-publisher + # zope-security + # zope-traversing +zope-configuration==4.4.0 # via # accesscontrol - # zope.publisher -zope.contentprovider==4.2.1 + # zope-publisher +zope-contentprovider==4.2.1 # via - # z3c.pt -zope.contenttype==4.5.0 + # z3c-pt +zope-contenttype==4.5.0 # via - # zope.publisher -zope.deferredimport==4.3.1 + # zope-publisher +zope-deferredimport==4.3.1 # via # accesscontrol - # zope.component -zope.deprecation==4.4.0 + # zope-component +zope-deprecation==4.4.0 # via - # zope.component - # zope.i18n -zope.event==4.5.0 + # zope-component + # zope-i18n +zope-event==4.5.0 # via - # 
zope.component - # zope.contentprovider - # zope.publisher - # zope.schema -zope.exceptions==4.4 + # zope-component + # zope-contentprovider + # zope-publisher + # zope-schema +zope-exceptions==4.4 # via - # zope.publisher -zope.hookable==5.1.0 + # zope-publisher +zope-hookable==5.1.0 # via - # zope.component -zope.i18n==4.7.0 + # zope-component +zope-i18n==4.7.0 # via - # z3c.pt - # zope.publisher - # zope.traversing -zope.i18nmessageid==5.0.1 + # z3c-pt + # zope-publisher + # zope-traversing +zope-i18nmessageid==5.0.1 # via - # zope.configuration - # zope.i18n - # zope.security - # zope.traversing -zope.interface==5.4.0 + # zope-configuration + # zope-i18n + # zope-security + # zope-traversing +zope-interface==5.4.0 # via # accesscontrol # acquisition @@ -1161,64 +1179,64 @@ zope.interface==5.4.0 # persistent # transaction # twisted - # z3c.pt + # z3c-pt # zexceptions # zodb - # zope.browser - # zope.component - # zope.configuration - # zope.contentprovider - # zope.exceptions - # zope.location - # zope.proxy - # zope.publisher - # zope.schema - # zope.security - # zope.tales - # zope.traversing -zope.location==4.2 - # via - # zope.contentprovider - # zope.publisher - # zope.security - # zope.traversing -zope.proxy==4.4.0 - # via - # zope.deferredimport - # zope.location - # zope.publisher - # zope.security - # zope.traversing -zope.publisher==6.0.2 + # zope-browser + # zope-component + # zope-configuration + # zope-contentprovider + # zope-exceptions + # zope-location + # zope-proxy + # zope-publisher + # zope-schema + # zope-security + # zope-tales + # zope-traversing +zope-location==4.2 + # via + # zope-contentprovider + # zope-publisher + # zope-security + # zope-traversing +zope-proxy==4.4.0 + # via + # zope-deferredimport + # zope-location + # zope-publisher + # zope-security + # zope-traversing +zope-publisher==6.0.2 # via # accesscontrol # zexceptions - # zope.contentprovider - # zope.traversing -zope.schema==6.1.0 + # zope-contentprovider + # zope-traversing +zope-schema==6.1.0 # via # accesscontrol - # zope.configuration - # zope.contentprovider - # zope.i18n - # zope.location - # zope.security -zope.security==5.1.1 + # zope-configuration + # zope-contentprovider + # zope-i18n + # zope-location + # zope-security +zope-security==5.1.1 # via # accesscontrol # zexceptions - # zope.publisher - # zope.traversing -zope.tales==5.1 + # zope-publisher + # zope-traversing +zope-tales==5.1 # via - # zope.contentprovider -zope.testing==4.9 + # zope-contentprovider +zope-testing==4.9 # via # accesscontrol -zope.traversing==4.4.1 +zope-traversing==4.4.1 # via - # z3c.pt -types-requests==2.27.14 + # z3c-pt + # The following packages are considered to be unsafe in a requirements file: # pip # setuptools diff --git a/scripts/install_slurm.sh b/scripts/install_slurm.sh index 239854ac..928f713f 100755 --- a/scripts/install_slurm.sh +++ b/scripts/install_slurm.sh @@ -1,13 +1,13 @@ #!/bin/bash sudo apt-get update +sudo apt --fix-broken install -y xargs -a <(awk '! 
/^ *(#|$)/' ~/ebloc-broker/scripts/package_slurm.list) -r -- sudo apt install -yf -sudo apt install libmariadbclient-dev -y -sudo apt install libmariadb-dev -y #: https://askubuntu.com/a/556387/660555 -sudo DEBIAN_FRONTEND=noninteractive apt-get install mailutils # postfix +sudo DEBIAN_FRONTEND=noninteractive apt-get install mailutils -y # postfix sudo apt autoremove -y +sudo apt --fix-broken install -y # munge # ===== @@ -16,6 +16,15 @@ sudo chmod 400 /etc/munge/munge.key sudo systemctl enable munge sudo systemctl start munge +# configurations +# ============== +sudo groupadd eblocbroker +sudo cp ~/ebloc-broker/broker/_slurm/confs/slurm.conf /usr/local/etc/slurm.conf +sudo cp ~/ebloc-broker/broker/_slurm/confs/slurmdbd.conf /usr/local/etc/slurmdbd.conf +sudo chmod 664 /usr/local/etc/slurm.conf # 0600 , 755 , 660 , 755 +sudo chmod 755 /usr/local/etc/slurmdbd.conf +mkdir -p /tmp/run + # slurm # ===== sudo mkdir -p /var/log/slurm @@ -24,21 +33,21 @@ git clone --depth 1 --branch slurm-19-05-8-1 https://github.com/SchedMD/slurm.gi cd ~/slurm sudo rm -rf /usr/local/lib/slurm/ /tmp/slurmstate/ make clean -./configure --enable-debug --enable-front-end # --enable-multiple-slurmd # seems like this also works -sudo make && sudo make install +./configure --enable-debug --enable-front-end # --enable-multiple-slurmd : seems like this also works +sudo make +sudo make install scontrol --version if [ $? -ne 0 ]; then - echo scontrol [ FAIL ] + echo "scontrol [ FAIL ]" exit 1 fi -# configurations -# ============== -sudo groupadd eblocbroker -sudo cp ~/ebloc-broker/broker/_slurm/confs/slurm.conf /usr/local/etc/slurm.conf -sudo cp ~/ebloc-broker/broker/_slurm/confs/slurmdbd.conf /usr/local/etc/slurmdbd.conf -sudo chmod 664 /usr/local/etc/slurm.conf # 0600 , 755 , 660 , 755 -sudo chmod 755 /usr/local/etc/slurmdbd.conf -mkdir -p /tmp/run - -# apt-cache search mysql | grep "dev" +# sudo sed -i 's/^root:.*$/root:*:16231:0:99999:7:::/' /etc/shadow +sudo slurmdbd +sudo /etc/init.d/mysql start +sudo su -c "mysql -u root < slurm_mysql.sql" +sudo slurmdbd && sleep 1 +user_name=$(whoami) +sacctmgr add cluster eblocbroker --immediate +sacctmgr add account $user_name --immediate +sacctmgr create user $user_name defaultaccount=$user_name adminlevel=None --immediate diff --git a/scripts/setup.sh b/scripts/setup.sh index cd179874..6be94621 100755 --- a/scripts/setup.sh +++ b/scripts/setup.sh @@ -31,9 +31,7 @@ git pull --rebase -v # nodejs # ====== -output=$(node -v) -if [ "$output" == "" ];then - # curl -sL https://deb.nodesource.com/setup_14.x | sudo bash - +if [ "$(node -v)" == "" ];then curl -fsSL https://deb.nodesource.com/setup_17.x | sudo -E bash - sudo apt-get install -y nodejs node -v @@ -83,7 +81,7 @@ install_ipfs () { echo ipfs_current_version=v$ipfs_current_version fi cd /tmp - version="0.11.0" + version="0.13.0" echo "version_to_download=v"$version if [[ "$ipfs_current_version" == "$version" ]]; then echo "$GREEN##$NC Latest version is already downloaded" @@ -110,20 +108,19 @@ install_ipfs () { ipfs config Routing.Type none open_port_4001 } -install_ipfs - # echo "vm.max_map_count=262144" >> /etc/sysctl.conf # sudo sysctl -p - -# go-geth -# ======= +install_ipfs sudo add-apt-repository -y ppa:ethereum/ethereum sudo apt-get -y install ethereum # python # ====== -sudo apt install software-properties-common -y +sudo apt install libgirepository1.0-dev -y +sudo apt install libcairo2-dev -y + sudo add-apt-repository ppa:deadsnakes/ppa -y +sudo apt install software-properties-common -y sudo apt-get update sudo apt 
install python-dev -y sudo apt install python2 -y @@ -133,17 +130,13 @@ sudo apt install python3-pip -y sudo apt install python3-venv -y sudo apt install python3-virtualenv -y sudo apt install python3.7 -y -sudo apt install python3.8-dev -y -sudo apt install python3.8-venv -y -sudo apt install libgirepository1.0-dev -y -sudo apt install libcairo2-dev # mongodb # ======= curl -fsSL https://www.mongodb.org/static/pgp/server-4.4.asc | sudo apt-key add - echo "deb [ arch=amd64,arm64 ] https://repo.mongodb.org/apt/ubuntu focal/mongodb-org/4.4 multiverse" | \ sudo tee /etc/apt/sources.list.d/mongodb-org-4.4.list -sudo apt update +sudo apt-get update sudo apt-get install -y mongodb-org sudo chown -R mongodb. /var/log/mongodb sudo chown -R mongodb. /var/lib/mongodb @@ -157,11 +150,11 @@ install_ebb_pip_packages () { VENV=$HOME/venv [ ! -d $VENV ] && python3 -m venv $VENV source $VENV/bin/activate - $VENV/bin/python3.8 -m pip install --upgrade pip + $VENV/bin/python3 -m pip install --upgrade pip python3 -m pip install --no-use-pep517 cm-rgb - pip install wheel + $VENV/bin/python3 -m pip install wheel cd ~/ebloc-broker - pip install -e . --use-deprecated=legacy-resolver + $VENV/bin/python3 -m pip install -e . --use-deprecated=legacy-resolver mkdir -p $HOME/.cache/black sudo chown $(logname) -R $HOME/.cache/black black_version=$(pip freeze | grep black | sed 's|black==||g') @@ -223,7 +216,7 @@ install_brownie gpg --gen-key gpg --list-keys -mkdir -p ~/git +mkdir -p ~/docker ~/git git clone https://github.com/prasmussen/gdrive.git ~/git/gdrive go env -w GO111MODULE=auto go get github.com/prasmussen/gdrive diff --git a/scripts/slurm_mysql.sql b/scripts/slurm_mysql.sql new file mode 100644 index 00000000..5baf91fd --- /dev/null +++ b/scripts/slurm_mysql.sql @@ -0,0 +1,7 @@ +SET GLOBAL innodb_buffer_pool_size=402653184; +create database slurm_acct_db; +CREATE USER 'alper'@'localhost' IDENTIFIED BY '12345'; -- 'alper'=> $(whoami) +grant usage on *.* to 'alper'@'localhost'; +grant all privileges on slurm_acct_db.* to 'alper'@'localhost'; +flush privileges; +exit; diff --git a/setup.py b/setup.py index b14624eb..985f7606 100755 --- a/setup.py +++ b/setup.py @@ -12,7 +12,7 @@ name="ebloc-broker", packages=find_packages(), setup_requires=["wheel", "eth-brownie"], - version="2.0.0", # don't change this manually, use bumpversion instead + version="2.1.0", # don't change this manually, use bumpversion instead license="MIT", description=( # noqa: E501 "A Python framework to communicate with ebloc-broker, which is "