diff --git a/.DS_Store b/.DS_Store deleted file mode 100644 index 859816c..0000000 Binary files a/.DS_Store and /dev/null differ diff --git a/404.html b/404.html index 8975928..1777693 100644 --- a/404.html +++ b/404.html @@ -9,13 +9,13 @@ - - + +
-
Skip to main content

Page Not Found

We could not find what you were looking for.

Please contact the owner of the site that linked you to the original URL and let them know their link is broken.

- - +
Skip to main content

Page Not Found

We could not find what you were looking for.

Please contact the owner of the site that linked you to the original URL and let them know their link is broken.

+ + \ No newline at end of file diff --git a/CNAME b/CNAME deleted file mode 100644 index bf8bfe3..0000000 --- a/CNAME +++ /dev/null @@ -1 +0,0 @@ -vishalgandhi.in \ No newline at end of file diff --git a/about/index.html b/about/index.html index 14b7936..b54801c 100644 --- a/about/index.html +++ b/about/index.html @@ -9,13 +9,13 @@ - - + +
-
Skip to main content

The Data Column

Vishal Gandhi profile picture

FOSS.NoSQL.Databases.Containers - Blog by Vishal Gandhi

What is this?

The Data Column is an attempt to share my learnings with the world and with technology enthusiasts.

As I attempt to explore FOSS and new technologies, I would also like to help those who are on a similar journey.

Learn about NoSQL, Containers, Reverse Proxy, Docker, Kubernetes, and more here on my blog.

What do I do?

I have worked across multiple areas of software engineering, software support, and database technologies for two decades.

I started my journey in traditional software. In recent years I have become fascinated by FOSS and the FOSS way of software development.

I believe that behind every piece of code someone has written, there was a business problem that triggered the need. In my free time, I love to reproduce complex business problems and solve them using FOSS.

Who am I?

Born and brought up in India, I enjoy reading books and finding ways to be more productive and creative. I believe in the DRY (Don't Repeat Yourself) principle and love to automate routine tasks.

My other passions include cycling and running. My life changed when I transformed myself. On weekends, I love to cook and try new cuisines.

I have two lovely children who add joy to living each day.

- - +
Skip to main content

The Data Column

Vishal Gandhi profile picture

FOSS.NoSQL.Databases.Containers - Blog by Vishal Gandhi

What is this?

The Data Column is an attempt to share my learnings with the world and with technology enthusiasts.

As I attempt to explore FOSS and new technologies, I would also like to help those who are on a similar journey.

Learn about NoSQL, Containers, Reverse Proxy, Docker, Kubernetes, and more here on my blog.

What do I do?

I have worked across multiple areas of software engineering, software support, and database technologies for two decades.

I started my journey in traditional software. In recent years I have become fascinated by FOSS and the FOSS way of software development.

I believe that behind every piece of code someone has written, there was a business problem that triggered the need. In my free time, I love to reproduce complex business problems and solve them using FOSS.

Who am I?

Born and brought up in India, I enjoy reading books and finding ways to be more productive and creative. I believe in the DRY (Don't Repeat Yourself) principle and love to automate routine tasks.

My other passions include cycling and running. My life changed when I transformed myself. On weekends, I love to cook and try new cuisines.

I have two lovely children who add joy to living each day.

+ + \ No newline at end of file diff --git a/archive/index.html b/archive/index.html index 55e4056..3e4e298 100644 --- a/archive/index.html +++ b/archive/index.html @@ -9,13 +9,13 @@ - - + +
-
Skip to main content

Archive

Archive

- - +
Skip to main content

Archive

Archive

+ + \ No newline at end of file diff --git a/assets/js/0c071de2.f74097c7.js b/assets/js/0c071de2.f74097c7.js new file mode 100644 index 0000000..5d88dd8 --- /dev/null +++ b/assets/js/0c071de2.f74097c7.js @@ -0,0 +1 @@ +"use strict";(self.webpackChunk=self.webpackChunk||[]).push([[321],{3125:e=>{e.exports=JSON.parse('{"permalink":"/page/2","page":2,"postsPerPage":10,"totalPages":2,"totalCount":11,"previousPage":"/","blogDescription":"Blog","blogTitle":"Blog"}')}}]); \ No newline at end of file diff --git a/assets/js/0fdf99ec.f4b3b904.js b/assets/js/0fdf99ec.f4b3b904.js new file mode 100644 index 0000000..5456f05 --- /dev/null +++ b/assets/js/0fdf99ec.f4b3b904.js @@ -0,0 +1 @@ +"use strict";(self.webpackChunk=self.webpackChunk||[]).push([[5100],{2776:s=>{s.exports=JSON.parse('{"name":"@easyops-cn/docusaurus-search-local","id":"default"}')}}]); \ No newline at end of file diff --git a/assets/js/100d8997.7a2a85ab.js b/assets/js/100d8997.7a2a85ab.js deleted file mode 100644 index 841ee48..0000000 --- a/assets/js/100d8997.7a2a85ab.js +++ /dev/null @@ -1 +0,0 @@ -"use strict";(self.webpackChunk=self.webpackChunk||[]).push([[5403],{5745:e=>{e.exports=JSON.parse('{"name":"docusaurus-plugin-content-pages","id":"default"}')}}]); \ No newline at end of file diff --git a/assets/js/13816aa6.96b71fa4.js b/assets/js/13816aa6.96b71fa4.js new file mode 100644 index 0000000..ea029de --- /dev/null +++ b/assets/js/13816aa6.96b71fa4.js @@ -0,0 +1 @@ +"use strict";(self.webpackChunk=self.webpackChunk||[]).push([[6410],{4469:e=>{e.exports=JSON.parse('{"name":"docusaurus-plugin-content-blog","id":"default"}')}}]); \ No newline at end of file diff --git a/assets/js/1a22fee9.1c799c27.js b/assets/js/1a22fee9.1c799c27.js deleted file mode 100644 index 967a607..0000000 --- a/assets/js/1a22fee9.1c799c27.js +++ /dev/null @@ -1 +0,0 @@ -"use strict";(self.webpackChunk=self.webpackChunk||[]).push([[6561],{4469:e=>{e.exports=JSON.parse('{"name":"docusaurus-plugin-content-blog","id":"default"}')}}]); \ No newline at end of file diff --git a/assets/js/25e6df96.fdd6840c.js b/assets/js/25e6df96.fdd6840c.js new file mode 100644 index 0000000..5cc56ca --- /dev/null +++ b/assets/js/25e6df96.fdd6840c.js @@ -0,0 +1 @@ +"use strict";(self.webpackChunk=self.webpackChunk||[]).push([[2107],{1115:e=>{e.exports=JSON.parse('{"permalink":"/tags/essential-skills","page":1,"postsPerPage":10,"totalPages":1,"totalCount":1,"blogDescription":"Blog","blogTitle":"Blog"}')}}]); \ No newline at end of file diff --git a/assets/js/2e801cce.7f35c828.js b/assets/js/2e801cce.7f35c828.js new file mode 100644 index 0000000..c4f9ee8 --- /dev/null +++ b/assets/js/2e801cce.7f35c828.js @@ -0,0 +1 @@ +"use strict";(self.webpackChunk=self.webpackChunk||[]).push([[9450],{6029:e=>{e.exports=JSON.parse('{"blogPosts":[{"id":"essential-skills","metadata":{"permalink":"/essential-skills","source":"@site/blog/2024-01-21-essentials-software-developer.md","title":"Essential Skills and Tools for Aspiring Software Developers and Database Engineers","description":"Introduction","date":"2024-01-21T00:00:00.000Z","formattedDate":"January 21, 2024","tags":[{"label":"career","permalink":"/tags/career"},{"label":"fundamentals","permalink":"/tags/fundamentals"},{"label":"essential-skills","permalink":"/tags/essential-skills"}],"readingTime":14.235,"hasTruncateMarker":true,"authors":[{"name":"Vishal 
Gandhi","url":"https://github.com/ivishalgandhi","image_url":"https://github.com/ivishalgandhi.png","imageURL":"https://github.com/ivishalgandhi.png"}],"frontMatter":{"slug":"essential-skills","title":"Essential Skills and Tools for Aspiring Software Developers and Database Engineers","authors":{"name":"Vishal Gandhi","url":"https://github.com/ivishalgandhi","image_url":"https://github.com/ivishalgandhi.png","imageURL":"https://github.com/ivishalgandhi.png"},"tags":["career","fundamentals","essential-skills"]},"nextItem":{"title":"My favorite Open Source Projects in 2022","permalink":"/fav-open-source-repo"}},"content":"## Introduction\\n\\nStarting a career as a software developer in the fast-paced and ever-evolving IT industry can be both exciting and challenging. To thrive in this field, it is essential to equip yourself with the right skills, tools, and technologies. In this article, we will highlight key areas of focus that will help you establish a solid foundation as a software developer.\\n\\n\x3c!--truncate--\x3e\\n\\n## Data Structures and Algorithms\\n\\nData structures and algorithms are fundamental concepts in computer science. They are used to organize and manipulate data efficiently. As a software developer, you will need to understand how data structures and algorithms work to solve problems and build applications. Learn about common data structures such as arrays, linked lists, stacks, queues, trees, and graphs. Familiarize yourself with common algorithms such as sorting, searching, and graph traversal. This knowledge will help you write efficient code and optimize your applications.\\n\\nResources to learn Data Structures and Algorithms:\\n\\n- [Data Structures and Algorithms Specialization](https://www.coursera.org/specializations/data-structures-algorithms) by UC San Diego\\n- [Algorithmic Thinking: A Problem-Based Introduction](https://www.amazon.com/Algorithmic-Thinking-Problem-Based-Daniel-Zingaro/dp/1718500807/ref=sr_1_1?keywords=Algorithmic+thinking&qid=1705894084&sr=8-1) by Daniel Zingaro\\n- [Introduction to Algorithms](https://www.amazon.com/Introduction-Algorithms-3rd-MIT-Press/dp/0262033844) by Thomas H. Cormen\\n- [Algorithms, Part I](https://www.coursera.org/learn/algorithms-part1) by Princeton University on Coursera\\n- [Algorithms, Part II](https://www.coursera.org/learn/algorithms-part2) by Princeton University on Coursera\\n- [Data Structures and Algorithms in Python](https://www.amazon.com/Structures-Algorithms-Python-Michael-Goodrich/dp/1118290275) by Michael T. Goodrich, Roberto Tamassia, and Michael H. Goldwasser\\n- [GeeksforGeeks](https://www.geeksforgeeks.org/data-structures/) - A computer science portal with resources on various topics including data structures and algorithms\\n- [LeetCode](https://leetcode.com/) - A platform for preparing technical coding interviews with a focus on algorithms and data structures problems\\n\\n## Programming Languages\\n\\nProgramming languages are the building blocks of software development. As a software developer, you will need to learn multiple programming languages to build applications and solve problems. While there are many programming languages to choose from, it is essential to focus on the ones that are in demand and align with your career goals. Learn about popular programming languages such as Go, Python, Rust, and Javascript. Familiarize yourself with their syntax, features, and use cases. 
This knowledge will enable you to write clean, maintainable, and extensible code.\\n\\n[Stack Overflow Most Admired and Desired Programming Languages 2023](https://survey.stackoverflow.co/2023/#section-admired-and-desired-programming-scripting-and-markup-languages)\\n\\nResources to learn Go:\\n\\n- [The Go Programming Language](https://www.amazon.com/Go-Programming-Language-Addison-Wesley-Professional/dp/0134190440) by Alan A. A. Donovan and Brian W. Kernighan\\n- [Go by Example](https://gobyexample.com/)\\n- [A Tour of Go](https://tour.golang.org/welcome/1)\\n\\nResources to learn Rust:\\n\\n- [The Rust Programming Language](https://www.amazon.com/Rust-Programming-Language-Steve-Klabnik/dp/1593278284) by Steve Klabnik and Carol Nichols\\n- [Rust by Example](https://doc.rust-lang.org/rust-by-example/)\\n- [Rustlings](https://github.com/rust-lang/rustlings)\\n\\nResources to learn Python:\\n\\n- [Python Crash Course](https://www.amazon.com/Python-Crash-Course-2nd-Edition/dp/1593279280) by Eric Matthes\\n- [Automate the Boring Stuff with Python](https://automatetheboringstuff.com/)\\n- [Learn Python the Hard Way](https://learnpythonthehardway.org/)\\n\\nRoadmaps to learn Go, Rust, and Python:\\n\\n- [Go Developer Roadmap](https://roadmap.sh/golang)\\n- [Rust Developer Roadmap](https://roadmap.sh/rust)\\n- [Python Developer Roadmap](https://roadmap.sh/python)\\n\\nYou can find an extensive range of developer roadmaps at [Roadmap.sh](http://roadmap.sh).\\n\\n## Design Patterns\\n\\nDesign patterns are reusable solutions to common problems in software development. They are used to solve problems and improve the quality of code. As a software developer, you will need to understand how design patterns work to build robust and scalable applications. Learn about common design patterns such as creational, structural, and behavioral patterns. Familiarize yourself with the SOLID principles of object-oriented design. This knowledge will enable you to write clean, maintainable, and extensible code.\\n\\nResources to learn Design Patterns:\\n\\n- [Design Patterns: Elements of Reusable Object-Oriented Software](https://www.amazon.com/Design-Patterns-Elements-Reusable-Object-Oriented/dp/0201633612) by Erich Gamma, Richard Helm, Ralph Johnson, and John Vlissides\\n- [Head First Design Patterns](https://www.amazon.com/Head-First-Design-Patterns-Brain-Friendly/dp/0596007124) by Eric Freeman and Elisabeth Robson\\n- [Refactoring.Guru](https://refactoring.guru/design-patterns) - A website that explains 22 design patterns and 8 principles of object-oriented design in a simple and intuitive way\\n- [SourceMaking](https://sourcemaking.com/design_patterns) - A resource for learning about design patterns, anti-patterns, and refactoring\\n\\n## Cloud Native Development\\n\\nCloud native development is a software development approach that leverages cloud computing to build and deploy applications. It enables developers to focus on writing code instead of managing infrastructure. Learn about cloud computing concepts such as virtualization, containers, and serverless computing. Familiarize yourself with cloud platforms such as Amazon Web Services (AWS), Microsoft Azure, and Google Cloud Platform (GCP). 
This knowledge will enable you to build scalable and resilient applications.\\n\\nResources to learn Cloud Native Development:\\n\\n- [Cloud Native: Using Containers, Functions, and Data to Build Next-Generation Applications](https://www.amazon.com/Cloud-Native-Containers-Functions-Next-Generation/dp/1492040762) by Boris Scholl, Trent Swanson, and Peter Jausovec\\n- [Kubernetes: Up and Running: Dive into the Future of Infrastructure](https://www.amazon.com/Kubernetes-Running-Dive-Future-Infrastructure/dp/1492046531) by Brendan Burns, Joe Beda, and Kelsey Hightower\\n- [Cloud Native DevOps with Kubernetes: Building, Deploying, and Scaling Modern Applications in the Cloud](https://www.amazon.com/Cloud-Native-DevOps-Kubernetes-Applications/dp/1492040762) by John Arundel and Justin Domingus\\n- [The Cloud Native Learning Resources](https://learnk8s.io/) by Learnk8s\\n- [Cloud Native Computing Foundation](https://www.cncf.io/) - The foundation hosts critical components of the global technology infrastructure, including Kubernetes, Prometheus, and Envoy\\n\\n## Databases\\n\\nDatabases are used to store and retrieve data. As a software developer, you will need to understand how databases work to build applications that interact with them. Learn about relational databases such as MySQL and PostgreSQL, NoSQL databases such as MongoDB and Redis, and cloud databases such as Amazon DynamoDB and Google Cloud Firestore. Familiarize yourself with database design, data modeling, and query languages such as SQL. This knowledge will enable you to build robust and scalable applications.\\n\\n[StackOverflow Most Admired and Desired Databases 2023](https://survey.stackoverflow.co/2023/#section-admired-and-desired-databases)\\n\\nResources to learn Databases:\\n\\n- [Database Internals: A Deep Dive into How Distributed Data Systems Work](https://www.amazon.com/Database-Internals-Deep-Distributed-Systems/dp/1492040347) by Alex Petrov\\n- [Designing Data-Intensive Applications: The Big Ideas Behind Reliable, Scalable, and Maintainable Systems](https://www.amazon.com/Designing-Data-Intensive-Applications-Reliable-Maintainable/dp/1449373321) by Martin Kleppmann\\n- [Database Design for Mere Mortals: A Hands-On Guide to Relational Database Design](https://www.amazon.com/Database-Design-Mere-Mortals-Hands/dp/0321884493) by Michael J. Hernandez\\n- [SQLZOO](https://sqlzoo.net/) - A website that provides interactive SQL tutorials and exercises\\n- [SQLBolt](https://sqlbolt.com/) - A website that provides interactive SQL tutorials and exercises\\n\\nDon\'t underestimate the power of SQLite. It\'s a lightweight, file-based database that\'s great for small applications and prototyping. Also, consider exploring open-source databases like [rqlite](https://github.com/rqlite/rqlite), which is a lightweight, distributed relational database built on SQLite.\\n\\n## Distributed Systems\\n\\nDistributed systems are groups of networked computers that interact with each other to achieve a common goal. 
Key concepts in distributed systems include consensus algorithms like [RAFT](https://raft.github.io/), and principles like the CAP Theorem.\\n\\nResources to learn Distributed Systems:\\n\\n- [Designing Data-Intensive Applications: The Big Ideas Behind Reliable, Scalable, and Maintainable Systems](https://www.amazon.com/Designing-Data-Intensive-Applications-Reliable-Maintainable/dp/1449373321) by Martin Kleppmann\\n- [Distributed Systems for Fun and Profit](http://book.mixu.net/distsys/single-page.html) by Mikito Takada\\n- [RAFT](https://raft.github.io/)\\n\\nUnderstanding the CAP Theorem is crucial when working with distributed databases. It states that it\'s impossible for a distributed data store to simultaneously provide more than two out of the following three guarantees: Consistency, Availability, and Partition tolerance.\\n\\n- [CAP Theorem: Explained](https://www.ibm.com/cloud/learn/cap-theorem) by IBM Cloud Education\\n\\n## CI/CD Pipelines\\n\\nContinuous Integration (CI) and Continuous Delivery (CD) are software development practices that enable developers to build, test, and deploy code frequently and reliably. Learn about CI/CD pipelines and how they work. Familiarize yourself with popular CI/CD tools such as Jenkins, CircleCI, and Travis CI. This knowledge will enable you to automate the software development lifecycle and deliver high-quality software faster.\\n\\nResources to learn CI/CD:\\n\\n- [Continuous Delivery: Reliable Software Releases through Build, Test, and Deployment Automation](https://www.amazon.com/Continuous-Delivery-Deployment-Automation-Addison-Wesley/dp/0321601912) by Jez Humble and David Farley\\n- [Travis CI Documentation](https://docs.travis-ci.com/) - Comprehensive guide on how to use Travis CI\\n- [GitHub Actions Documentation](https://docs.github.com/en/actions) - Learn how to automate, customize, and execute your software development workflows right in your repository with GitHub Actions\\n\\nResources to learn Git:\\n\\n- [Pro Git](https://git-scm.com/book/en/v2) by Scott Chacon and Ben Straub\\n- [GitHub Learning Lab](https://lab.github.com/) - Interactive courses on Git and GitHub\\n- [Learn Git Branching](https://learngitbranching.js.org/) - An interactive Git visualization tool to educate and challenge\\n- [Git Immersion](http://gitimmersion.com/) - A guided tour that walks through the fundamentals of Git\\n\\n## Testing frameworks\\n\\nTesting frameworks are used to automate the testing process. They enable developers to write tests that can be executed automatically. Learn about popular testing frameworks such as JUnit, TestNG, and Selenium. Familiarize yourself with unit testing, integration testing, and end-to-end testing. 
This knowledge will enable you to write robust and reliable code.\\n\\nResources to learn Testing Frameworks:\\n\\n- [JUnit 5 User Guide](https://junit.org/junit5/docs/current/user-guide/) - The official guide for JUnit 5, a modern testing framework for Java\\n- [PyTest Documentation](https://docs.pytest.org/en/latest/) - Comprehensive guide on how to use PyTest, a popular testing framework for Python\\n- [Jest Documentation](https://jestjs.io/docs/getting-started) - Learn how to use Jest, a delightful JavaScript Testing Framework with a focus on simplicity\\n- [Cypress Documentation](https://docs.cypress.io/guides/overview/why-cypress) - Learn how to use Cypress, a next generation front end testing tool built for the modern web\\n- [Mocha Documentation](https://mochajs.org/) - Mocha is a feature-rich JavaScript test framework running on Node.js and in the browser, making asynchronous testing simple and fun\\n\\n## Documentation Skills\\n\\n- Clear and concise documentation is crucial in software development. This includes not only code comments and README files, but also blog posts and articles that share your knowledge with others. \\n\\nHere are some key skills and tools to master:\\n\\n- [Markdown](https://www.markdownguide.org/) - A lightweight markup language that you can use to write easy-to-read and easy-to-write plain text format, which then converts to structurally valid HTML. It\'s widely used for README files, documentation, and writing articles on platforms like GitHub and Jekyll blogs.\\n\\n- [Mermaid.js](https://mermaid.js.org/) - A JavaScript library that allows you to create diagrams and flowcharts using text. It integrates well with Markdown, making it great for blog posts that need to explain complex ideas visually.\\n\\n- [Language Server Protocol](https://microsoft.github.io/language-server-protocol/) - A protocol developed by Microsoft that allows code editing tools to provide features like auto-complete, go to definition, find all references and alike without each tool needing to understand each programming language.\\n\\n- [Diagram as a Code](https://diagrams.mingrammer.com/) - A Python library that allows you to create diagrams using code, which is great for prototyping a new system architecture design or explaining system design in your blog posts.\\n\\nRemember, good documentation skills involve clear communication, good organization, and the ability to explain complex ideas in a simple, understandable way. Practice writing blog posts and articles to improve these skills. You can also contribute to open-source projects by writing documentation for them. Maintain documentation in tools like [Dendron](http://dendron.so), [Logseq](http://logseq.com) which is a powerful note-taking system that allows you to organize and navigate your knowledge effectively.\\n\\nOnce a big knowledge base is built, you can use it to capture coding patterns, solutions to common problems, and snippets of code. Combine it with Markdown files and notebook tools like Jupyter Notebook (NB) to create well-structured and executable documentation. This approach will streamline your workflow and serve as a valuable resource for future reference which can be used with Large Language Models to gain insights.\\n\\nConsider all documentation as a code. Refer to my blog post [Documentation as Code](https://vishalgandhi.in/doc-as-code) for more information.\\n\\n## REST API Knowledge\\n\\nREST (Representational State Transfer) is an architectural style commonly used for web services. 
Understanding how REST APIs work and being able to design, build, and consume them is a fundamental skill for software developers. Learn about HTTP methods (GET, POST, PUT, DELETE), URL structure, request/response formats (JSON, XML), authentication, and common best practices for building RESTful APIs. This knowledge will empower you to interact with various web services and build robust and scalable applications.\\n\\nIn today\'s software development landscape, REST (Representational State Transfer) APIs have become a fundamental building block. They allow different software systems to communicate and exchange data over the internet. Most interfaces in software organizations are now through REST APIs, and it\'s nearly impossible to start any product without an \\"API First\\" approach.\\n\\nUnderstanding how REST APIs work and being able to design, build, and consume them is a crucial skill for software developers. Learn about HTTP methods (GET, POST, PUT, DELETE), URL structure, request/response formats (JSON, XML), and authentication.\\n\\nHere are some resources to get you started:\\n\\n- [RESTful API Design](https://restfulapi.net/) - A comprehensive resource for understanding and designing RESTful APIs.\\n- [Building a RESTful API with Node.js](https://www.youtube.com/watch?v=pKd0Rpw7O48) - A YouTube tutorial by Academind.\\n- [My Blog Post on REST API](https://vishalgandhi.in/rest-api-design-rules) - A deep dive into REST API from my personal experience and understanding.\\n\\nIn addition to understanding the principles of REST APIs, it\'s also important to familiarize yourself with some of the popular frameworks that can help you build REST APIs more efficiently:\\n\\n- [FastAPI](https://fastapi.tiangolo.com/) - A modern, fast (high-performance), web framework for building APIs with Python 3.6+ based on standard Python type hints.\\n- [Gin Web Framework](https://gin-gonic.com/) - Gin is a web framework written in Go. It features a martini-like API with performance that is up to 40 times faster thanks to httprouter.\\n\\nThese frameworks provide a set of tools and libraries that simplify the process of building robust and scalable APIs. They handle a lot of the boilerplate code and allow you to focus on the business logic of your application.\\n\\nRemember, a well-designed API can be a powerful tool for an organization, enabling it to expose its services to a variety of different clients and create new digital products and services.\\n\\n## Linux knowledge\\n\\nFamiliarizing yourself with Linux is valuable because many development environments, servers, and cloud services are based on Linux. Gain proficiency in the command line interface (CLI), file navigation, package management, and shell scripting. Understanding Linux will enhance your ability to work with open-source solutions, deploy applications, and troubleshoot issues efficiently.\\n\\nTo gain a deeper understanding of Linux, consider the following resources:\\n\\n- [Linux Journey](https://linuxjourney.com/) - A free, self-guided tour to help you learn Linux.\\n- [The Linux Command Line: A Complete Introduction](https://www.amazon.com/Linux-Command-Line-Complete-Introduction/dp/1593273894) by William E. Shotts Jr. 
- This book is a comprehensive guide to using the command line to perform various tasks in Linux.\\n- [How Linux Works, 3rd Edition: What Every Superuser Should Know](https://www.amazon.com/How-Linux-Works-3rd-Superuser/dp/1718500408) by Brian Ward - This book offers a comprehensive, updated guide to understanding how Linux operates.\\n- \\nShell scripting is a powerful tool that can automate tasks and manage system configurations. Here are some of the best books to learn shell scripting:\\n\\n- [Learning the bash Shell: Unix Shell Programming](https://www.amazon.com/Learning-bash-Shell-Programming-Third/dp/0596009658) by Cameron Newham - This book is a complete guide to bash, the default shell for Linux.\\n- [Shell Scripting: Expert Recipes for Linux, Bash, and More](https://www.amazon.com/Shell-Scripting-Expert-Recipes-Linux/dp/1118024486) by Steve Parker - A compendium of shell scripting recipes that can immediately be used, adjusted, and applied.\\n\\n## Contribute to Open Source Solutions\\n\\nEmbrace the open-source community as a software developer. Contributing to open-source projects not only allows you to collaborate with experienced developers but also helps you refine your coding skills. By studying open-source projects, you can learn about software architecture, coding standards, best practices, and gain exposure to different programming languages and frameworks. Explore popular open-source repositories such as GitHub and start contributing to projects aligned with your interests.\\n\\nHere are some resources to get you started:\\n\\n- [First Timers Only](https://www.firsttimersonly.com/) - A site dedicated to helping newcomers get started with contributing to open-source projects.\\n- [GitHub \\"Good First Issue\\" Label](https://github.com/topics/good-first-issue) - GitHub\'s \\"Good First Issue\\" label is a great way to find projects that are beginner-friendly. These issues are specifically marked by project maintainers as good entry points for new contributors.\\n- [Open Source Guide](https://opensource.guide/) - This guide provides resources for contributing to open source, from finding a project to making a contribution.\\n- [Up For Grabs](https://up-for-grabs.net/#/) - This site aggregates (groups together) projects that have tasks specifically for new contributors.\\n\\nRemember, contributing to open source is not just about coding. You can contribute in many ways, including documentation, design, teaching, and more.\\n\\n## Learn Standard Way of Writing Solutions\\n\\nDevelopers often encounter challenges in writing clean, maintainable, and well-documented code. Embrace standard practices and methodologies to overcome these challenges.\\n\\nFor example, when building command-line interfaces (CLI), consider using libraries like docopt to create command-line interfaces with ease. \\n\\nResources for CLI:\\n- [docopt](http://docopt.org/) - A command-line interface description language that will make you smile.\\n- [Command Line Interface Guidelines](https://clig.dev/) - A guide to help you write command-line programs that respect the conventions of the Unix philosophy.\\n\\nWhen developing REST APIs, adhere to established standards such as the Richardson Maturity Model or OpenAPI specifications to ensure consistency and interoperability. 
\\n\\nResources for REST APIs:\\n- [Richardson Maturity Model](https://martinfowler.com/articles/richardsonMaturityModel.html) - A model (developed by Leonard Richardson) that breaks down the principal elements of a REST approach into three steps.\\n- [OpenAPI Specification](https://swagger.io/specification/) - A standard, language-agnostic interface to RESTful APIs which allows both humans and computers to discover and understand the capabilities of the service without access to source code, documentation, or through network traffic inspection.\\n\\nThese practices will make your code more robust, readable, and easier to maintain.\\n\\nResources for Clean Code:\\n- [Clean Code: A Handbook of Agile Software Craftsmanship](https://www.amazon.com/Clean-Code-Handbook-Software-Craftsmanship/dp/0132350882) by Robert C. Martin - A book that describes the principles, patterns, and practices of writing clean code.\\n- [Refactoring: Improving the Design of Existing Code](https://www.amazon.com/Refactoring-Improving-Design-Existing-Code/dp/0201485672) by Martin Fowler - A book about how to clean up code to minimize the chance of introducing bugs.\\n\\n## Conclusion\\n\\nEmbarking on a career as a software developer requires continuous learning and adaptation. By focusing on the key skills, tools, and technologies mentioned in this article, you will be able to establish a solid foundation and thrive in this field. Remember, it\'s not about how much you know but how well you can apply what you know. So, keep learning and practicing!"},{"id":"fav-open-source-repo","metadata":{"permalink":"/fav-open-source-repo","source":"@site/blog/2022-12-30-fav-open-source-repo.md","title":"My favorite Open Source Projects in 2022","description":"Open Source is a great way to learn and contribute to the community. With a nearly endless array of open source projects available to consume and contribute to these days, Need some inspiration? Whether you\u2019re new to the world of open source, are gearing up for Open Source in the new year, or just want to see what other folks are excited about, check out a few of my favorite open source projects i discovered during 2022.","date":"2022-12-30T00:00:00.000Z","formattedDate":"December 30, 2022","tags":[{"label":"open-source","permalink":"/tags/open-source"},{"label":"2022","permalink":"/tags/2022"},{"label":"favourite-repo","permalink":"/tags/favourite-repo"}],"readingTime":6.49,"hasTruncateMarker":true,"authors":[{"name":"Vishal Gandhi","url":"https://github.com/ivishalgandhi","image_url":"https://github.com/ivishalgandhi.png","imageURL":"https://github.com/ivishalgandhi.png"}],"frontMatter":{"slug":"fav-open-source-repo","title":"My favorite Open Source Projects in 2022","authors":{"name":"Vishal Gandhi","url":"https://github.com/ivishalgandhi","image_url":"https://github.com/ivishalgandhi.png","imageURL":"https://github.com/ivishalgandhi.png"},"tags":["open-source","2022","favourite-repo"]},"prevItem":{"title":"Essential Skills and Tools for Aspiring Software Developers and Database Engineers","permalink":"/essential-skills"},"nextItem":{"title":"Documentation as a code","permalink":"/doc-as-code"}},"content":"Open Source is a great way to learn and contribute to the community. With a nearly endless array of open source projects available to consume and contribute to these days, Need some inspiration? 
Whether you\u2019re new to the world of open source, are gearing up for Open Source in the new year, or just want to see what other folks are excited about, check out a few of my favorite open source projects i discovered during 2022.\xa0 \\n\\n\x3c!--truncate--\x3e\\n\\n|Category|Repository|Site URL|Description|\\n| --- | --- | --- | -- |\\n| **Browser** | [**LibreWolf**](https://gitlab.com/librewolf-community) | https://librewolf.net/ | LibreWolf is a privacy-oriented web browser based on Firefox | \\n| **Browser** | [**Brave**](https://github.com/brave/brave-browser) | https://brave.com/ | Brave is a privacy-oriented web browser based on Chromium |\\n|**Task Runner** | [**go-task**](https://github.com/go-task/task) | https://taskfile.dev/ | Task is a task runner / simpler Make alternative written in Go |\\n| **Data Analysis** | [**Pandas**](https://github.com/pandas-dev/pandas) | https://pandas.pydata.org/ | Pandas is a data analysis library for Python |\\n| **Object Storage** | [**Minio**](https://github.com/minio/minio) | https://min.io/ | Minio is an object storage server that is compatible with Amazon S3 |\\n| **Terminal** | [**Tabby**](https://github.com/Eugeny/tabby) | https://tabby.sh/ | A Terminal for the modern age |\\n| **Terminal** | [**Tmux**](https://github.com/tmux/tmux) | https://github.com/tmux/tmux | tmux is a terminal multiplexer: it enables a number of terminals to be created, accessed, and controlled from a single screen. tmux may be detached from a screen and continue running in the background, then later reattached.\\n| **Terminal** | [**Tmuxinator**](https://github.com/tmuxinator/tmuxinator) | https://github.com/tmuxinator/tmuxinator | Tmuxinator is a tool for managing complex tmux sessions easily. |\\n| **Code Editor** | [**Visual Studio Code**](http://https://github.com/microsoft/vscode) | https://code.visualstudio.com/ | Visual Studio Code is a code editor redefined and optimized for building and debugging modern web and cloud applications. Built on top of [Electron](https://github.com/electron/electron) |\\n|**Code Editor** | [**neovim**](https://github.com/neovim/neovim) | https://neovim.io/ | Neovim is a text editor based on Vim. Last few months, i have been using `neovim` more often compared to `Visual Studio Code` |\\n|**Productivity - Note Taking** | [**Dendron**](https://github.com/dendronhq/dendron) | https://dendron.so/ | Dendron is my goto note taking tool. Its available as a plugin for VSCode and allows hirearchy and graph based note taking. Awesome solution to mantain a `second-brain` |\\n|**Productivity - Bookmark Manager** | [**Buku**](https://github.com/jarun/buku) | https://github.com/jarun/buku#quickstart | Buku is a command-line bookmark manager. |\\n|**Private Network VPN** | [**Tailscale**](https://github.com/tailscale/tailscale) | https://tailscale.com/ | Tailscale is a private network VPN. Helps me create a private network for all my home lab machines and devices |\\n|**Tunnelling** | [**ngrok**](https://github.com/inconshreveable/ngrok) | https://ngrok.com/ | ngrok exposes local servers behind NATs and firewalls to the public internet over secure tunnels. | \\n|**Tunnelling** | [**localtunnel**](https://github.com/localtunnel/localtunnel) | https://localtunnel.github.io/www/ | localtunnel exposes your localhost to the world for easy testing and sharing! No need to mess with DNS or deploy just to have others test out your changes. 
|\\n|**Containers** | [**Podman**](https://github.com/containers/podman) | https://podman.io/ | Podman is a daemonless, open source, Linux native tool designed to make it easy to find, run, build, share and deploy applications using Open Containers Initiative (OCI) Containers and Container Images. |\\n|**Container Scheduling and Management** | [**Kubernetes**](https://github.com/kubernetes/kubernetes) | https://kubernetes.io/ | Kubernetes is an open-source system for automating deployment, scaling, and management of containerized applications. |\\n|**Static Site Generator** | [**Docusaurus**](https://github.com/facebook/docusaurus) | https://docusaurus.io/ | Docusaurus is a static site generator written in JavaScript. Build optimized websites quickly, focus on your content. |\\n|**Static Site Generator** | [**Hugo**](https://github.com/gohugoio/hugo) | https://gohugo.io/ | Hugo is a static site generator written in `Go`.|\\n| **Diagram as a Code** | [**Mermaid**](https://github.com/mermaid-js/mermaid) | https://mermaid.js.org/ | Generation of diagrams like flowcharts or sequence diagrams from text in a similar manner as markdown |\\n| **Diagram as a Code** | [**PlantUML**](https://github.com/plantuml/plantuml) | https://plantuml.com/ | Generate diagrams from textual description \\n| **Diagram as a Code** | [**Draw.io**](https://github.com/jgraph/drawio) | https://app.diagrams.net/ | draw.io, this project, is a configurable diagramming/whiteboarding visualization application. draw.io is jointly owned and developed by JGraph Ltd and draw.io AG. |\\n| **Diagram as a Code** | [**Excalidraw**](https://github.com/excalidraw/excalidraw) | https://excalidraw.com/ | Excalidraw is a free software that offers a whiteboard tool that lets you easily sketch diagrams with a hand-drawn feel. Another features are the collaborative mode, and the ability to export the diagrams to PNG or SVG formats, and to save them locally in a JSON format |\\n| **Diagram as a Code** | [**mingrammer**](https://github.com/mingrammer/diagrams) | https://diagrams.mingrammer.com/ | Diagrams as code for prototyping cloud system architecture. |\\n|**Web Framework** | [**Gin**](https://github.com/gin-gonic/gin) | https://gin-gonic.com/ | Gin is a HTTP web framework written in Go (Golang). It features a Martini-like API, but with performance up to 40 times faster than Martini. If you need smashing performance, get yourself some Gin.|\\n|**Web Framework** | [**FAST API**](https://github.com/tiangolo/fastapi) | https://fastapi.tiangolo.com/ | FastAPI is a Web framework for developing RESTful APIs in Python. FastAPI is based on Pydantic and type hints to validate, serialize, and deserialize data, and automatically auto-generate OpenAPI documents. It fully supports asynchronous programming and can run with Gunicorn and ASGI servers for production such as Uvicorn and Hypercorn. To improve developer-friendliness, editor support was considered since the earliest days of the project. |\\n|**Web Framework** | [**Astro**](https://github.com/withastro/astro) | https://astro.build/ | Astro works with your favorite content sources. Pull content from the filesystem or fetch it remotely from your favorite CMS, database, or API. Astro supports both static output\xa0(SSG) and live server output\xa0(SSR) that can render your content on-demand. 
|\\n|**Search Engine** | [**Typesense**](https://github.com/typesense/typesense) | https://typesense.org/ | Typesense is a modern, privacy-friendly, open source search engine built from the ground up using cutting-edge search algorithms, that take advantage of the latest advances in hardware capabilities. |\\n| **Nocode Platform** | [**NOCODB**](https://github.com/nocodb/nocodb) | https://nocodb.com/ | NocoDB is an open-source low-code platform for building and managing internal tools and turning your SQL Databases into a smart spreadsheet. It is a self-hosted alternative to Airtable, Notion, and Airtable. |\\n| **Distributed Database** | [**rqlite**](https://github.com/rqlite/rqlite) | https://rqlite.com/ | rqlite is an easy-to-use, lightweight, distributed relational database, which uses SQLite as its storage engine. rqlite is simple to deploy, operating it is very straightforward, and its clustering capabilities provide you with fault-tolerance and high-availability. \\n|**Multi-modal Database** | [**SurrealDB**](https://github.com/surrealdb/surrealdb) | https://surrealdb.com/ | SurrealDB combines the database layer, the querying layer, and the API and authentication layer into one platform. Advanced table-based and row-based customisable access permissions allow for granular data access patterns for different types of users. There\'s no need for custom backend code and security rules with complicated database development. |\\n|**Multi-modal Database** | [**ArangoDB**](https://github.com/arangodb/arangodb) | https://www.arangodb.com/ | ArangoDB is a free and open-source native graph database system developed by ArangoDB Inc. ArangoDB is a multi-model database system since it supports three data models with one database core and a unified query language AQL. AQL is mainly a declarative language and allows the combination of different data access patterns in a single query. |\\n| **Git for Data** | [**Dolt**](https://github.com/dolthub/dolt) | https://dolthub.com/ | Dolt is a SQL database that you can fork, clone, branch, merge, push and pull just like a Git repository. Connect to Dolt just like any MySQL database to run queries or update the data using SQL commands. Use the command line interface to import CSV files, commit your changes, push them to a remote, or merge your teammate\'s changes.|\\n|**Personal Finance** | [**Firefly III**](https://github.com/firefly-iii/firefly-iii) | https://firefly-iii.org/ | \\"Firefly III\\" is a (self-hosted) manager for your personal finances. It can help you keep track of your expenses and income, so you can spend less and save more. Firefly III supports the use of budgets, categories and tags. Using a bunch of external tools, you can import data. It also has many neat financial reports available. |\\n| **Monitoring and TSDB** | [Prometheus](https://github.com/prometheus/prometheus) | https://prometheus.io/ | Prometheus is a systems and service monitoring system. It collects metrics from configured targets at given intervals, evaluates rule expressions, displays the results, and can trigger alerts if some condition is observed to be true. |"},{"id":"doc-as-code","metadata":{"permalink":"/doc-as-code","source":"@site/blog/2022-10-22-doc-as-code.md","title":"Documentation as a code","description":"Documentation is the most critical activity of any product development. The engineer and user experience improve when there is up-to-date Documentation. 
Most often, in organizations and products, Documentation is an afterthought, and this is not a good practice. If we want more engineers to contribute to the product, Documentation should be considered as code and part of the product development. Engineers should be encouraged to write Documentation before writing the source code.","date":"2022-10-22T00:00:00.000Z","formattedDate":"October 22, 2022","tags":[{"label":"docops","permalink":"/tags/docops"},{"label":"doc-as-code","permalink":"/tags/doc-as-code"}],"readingTime":4.225,"hasTruncateMarker":true,"authors":[{"name":"Vishal Gandhi","url":"https://github.com/ivishalgandhi","image_url":"https://github.com/ivishalgandhi.png","imageURL":"https://github.com/ivishalgandhi.png"}],"frontMatter":{"slug":"doc-as-code","title":"Documentation as a code","authors":{"name":"Vishal Gandhi","url":"https://github.com/ivishalgandhi","image_url":"https://github.com/ivishalgandhi.png","imageURL":"https://github.com/ivishalgandhi.png"},"tags":["docops","doc-as-code"]},"prevItem":{"title":"My favorite Open Source Projects in 2022","permalink":"/fav-open-source-repo"},"nextItem":{"title":"Understanding REST API Design Rules","permalink":"/rest-api-design-rules"}},"content":"Documentation is the most critical activity of any product development. The engineer and user experience improve when there is up-to-date Documentation. Most often, in organizations and products, Documentation is an afterthought, and this is not a good practice. If we want more engineers to contribute to the product, Documentation should be considered as code and part of the product development. Engineers should be encouraged to write Documentation before writing the source code.\\n\\n\x3c!--truncate--\x3e\\n\\n In many organizations, Documentation is everywhere, but it can be challenging to find. It is often written in various formats, and it is sometimes unclear who is responsible for it. It also needs to be clarified how to contribute to it. Confidence in Documentation could be higher if engineers spent more time writing; there is more incentive to write, and setting up a culture to write docs as part of engineering workflow contributes to Engineer Productivity which is a crucial metric for any organization.\\n\\nThe product engineering teams must identify workflows to integrate Documentation into the existing process to solve the challenges listed below.\\n* The Documentation is not part of the codebase\\n* The Documentation is not part of the CI/CD pipeline\\n* The method of writing Documentation is not integrated into the engineering workflow\\n* The Documentation is not reviewed and tested\\n* The Documentation is written in a separate tool and is not version controlled\\n\\nDocumentation will never be part of engineering culture unless integrated into the codebase and workflow.\\n\\n## What is Docs as Code?\\n\\n* Store the source file version of Documentation in a version control system like Git\\n* Automatically build doc artifacts \\n* Publish artifacts without human intervention\\n\\n## Why Docs as Code?\\n\\n* The Documentation evolves with the code. The flowchart, System Architecture and other diagrams will be up-to-date as the code changes\\n\\n* Long release cycles may result in logic or flowchart being forgotten or outdated\\n\\n* Consistency is critical for the adoption of Docs as a code. 
Teams can collaborate on the Documentation and can ensure that the Documentation is consistent across the product\\n\\n* Collaboration across product teams is the critical piece of why Documentation should be considered a code\\n\\n* Documentation can be reviewed and approved by the team members\\n\\n* Centralized Internal Documentation framework and familiar structured Documentation for all the products \\n\\n* Track Documentation mistakes as bugs\\n\\n* Documentation can be versioned, tested, and tracked\\n\\n* Manage the complexity around the documentation process\\n\\n* Visualize the Documentation in the form of diagrams, flowcharts, and images\\n\\n* Engineer can use other tools to model dependencies. For example, the Product team can use Mermaid to model the flowchart, system architecture, class diagram, and sequence diagrams\\n\\n* Avoid effort to redo the Documentation when a team member leaves the organization. \\n\\n* The product team can automate Workflows can be automated to generate the Documentation\\n \\n* Makes Documentation standout with [Markdown](https://www.markdownguide.org/)\\n\\n :::info\\n Markdown is a simple, lightweight markup language that is easy to learn and use for plain text formatting and conversion to HTML and many other formats using a tool. Markdown is often used to format readme files, write messages in online discussion forums, and create rich text using a plain text editor.\\n :::\\n\\n\\n```mermaid\\n\\nflowchart\\n\\nA[Start] --\x3e B[Engineer writes Documentation and Code]\\n B --\x3eC[Engineer Commits Documentation and Code]\\n C --\x3eD[Code Review and Testing]\\n D --\x3eE[Documentation Review and Testing]\\n E --\x3eF{Release}\\n F --\x3e|Yes|G[Documentation is published]\\n F --\x3e|No|B\\n G --\x3eH[End]\\n```\\n \\n## Types of Documentation \\n\\nThe most common types of Documentation for every product are:\\n\\n- Long-form \\n - FAQs, User Guides, Tutorials, How-to Guides, etc.\\n\\n- Functional \\n - REST API Documentation, SDK Documentation, etc.\\n\\n## How to do Docs as a Code?\\n\\n* Version your Documentation. Just as you version your code, you should version your Documentation. Versioning allows tracking changes and rollbacks to previous versions if necessary.\\n* Integrate Documentation with CI/CD pipeline. CI/CD Integration will allow you to automate the process of generating Documentation and publishing it to a central location\\n* Start with Proof of Concept and extend to all the products gradually \\n* Choose a static site generator (Documentation Tool) that can be integrated with the CI/CD pipeline\\n\\n## Docs As Code Tools \\n\\n- Static Site Generators \\n They are used for Long form documentation. Allows integration of diagrams, flowcharts, images, etc.\\n\\n - [Docusaurus](http://docusaurus.io), [Hugo](https://gohugo.io), [Gatsby](https://www.gatsbyjs.com), [Jekyll](https://jekyllrb.com), [MkDocs](https://www.mkdocs.org) etc.\\n\\n- Diagram as a code\\n \\n Allows creating diagrams, flowcharts, etc., in a code format. 
Think of documenting and visualizing a complex system architecture in a code format.\\n\\n * [Mermaid](https://mermaid-js.github.io/mermaid/#/), [PlantUML](https://plantuml.com/), [Graphviz](https://graphviz.org/), [Draw.io](https://www.draw.io/), [mingrammer/Diagrams](https://diagrams.mingrammer.com)\\n- Source code-based document generators \\n * [Sphinx](https://www.sphinx-doc.org/en/master/)\\n- System documentation generators\\n * [ronn](https://github.com/rtomayko/ronn)\\n\\n\\n## Final Thoughts\\n\\n\\nEverything(Infrastructure, Monitoring, Code, Containers, Documentation) as a code is already a reality. For some organizations, the shift to treating Documentation as a code is a complex overhaul of expectations, attitudes, processes, and toolsets. Once implemented, it will vastly improve the engineer and user experience. For open-source projects, it is even more essential to have good Documentation. It is a great way to attract new contributors and users. \\n\\n\\n## References\\n\\n- [DocOps](https://www.writethedocs.org/guide/doc-ops/#what-is-docops-anyway)"},{"id":"rest-api-design-rules","metadata":{"permalink":"/rest-api-design-rules","source":"@site/blog/2022-10-02-rest-api-design-rules.md","title":"Understanding REST API Design Rules","description":"Introduction to REST API","date":"2022-10-02T00:00:00.000Z","formattedDate":"October 2, 2022","tags":[{"label":"rest","permalink":"/tags/rest"},{"label":"restapi","permalink":"/tags/restapi"},{"label":"rest-api-design-rules","permalink":"/tags/rest-api-design-rules"}],"readingTime":10.195,"hasTruncateMarker":true,"authors":[{"name":"Vishal Gandhi","url":"https://github.com/ivishalgandhi","image_url":"https://github.com/ivishalgandhi.png","imageURL":"https://github.com/ivishalgandhi.png"}],"frontMatter":{"slug":"rest-api-design-rules","title":"Understanding REST API Design Rules","authors":{"name":"Vishal Gandhi","url":"https://github.com/ivishalgandhi","image_url":"https://github.com/ivishalgandhi.png","imageURL":"https://github.com/ivishalgandhi.png"},"tags":["rest","restapi","rest-api-design-rules"]},"prevItem":{"title":"Documentation as a code","permalink":"/doc-as-code"},"nextItem":{"title":"Understanding MongoDB Replicasets and Write Concern - Part 1","permalink":"/mongodb-replicaset-write-concern-read-pref"}},"content":"## Introduction to REST API \\n\\nREST (Representational State Transfer) is an architectural style for building distributed systems. A Web API conforms to the REST architectural style, called RESTful Web API. \\n\\nREST APIs are stateless, client-server, cacheable, layered systems designed around resources. The set of resources is known as the REST API\'s _resource model_\\n\\n\x3c!--truncate--\x3e\\n\\n```mermaid\\n\\nsequenceDiagram\\n\\nparticipant Client\\nparticipant Web API \\nParticipant Web Service \\nClient->>Web API: Request\\nWeb API->>Web Service: Request\\nWeb Service->>Web API: Response\\nWeb API->>Client: Response\\n\\n```\\n\\nREST APIs are one of the most common and fundamental ways to expose data and functionality as web services. REST APIs use HTTP requests to GET, PUT, POST, and DELETE data. \\n\\nAn adequately designed REST API should be easy to understand, use, and evolve over time. It will help clients and browser apps consume the API more efficiently. \\n\\nBefore designing and developing a REST API, we need to seek answers to the following questions:\\n\\n- What are URI Paths? Structure of URI Path segments? 
\\n- When to use plural nouns or verbs for URI Path segments?\\n- What is the HTTP response status code, and how to use it in a specific scenario? \\n- How to map non-CRUD operations to HTTP methods? \\n\\n## Understanding Uniform Resource Identifier (URI)\\n\\nREST APIs use Uniform Resource Identifiers (URIs) to identify resources. A resource is any information that can be named. Resources are separated by forward slashes (/). A good URI should be short, easy to remember, and should give the user an idea about the resource.\\n\\n### URI Format\\n\\nThe URI format is as follows:\\n\\nURI = scheme \\"://\\" host [ \\":\\" port ] [ \\"/\\" path ] [ \\"?\\" query ] [ \\"#\\" fragment ]\\n\\n```http request\\nhttp:////\\n```\\n\\n### URI Resource Model \\n\\nHeader|Description\\n------|-----------\\nDocument | A document resource is similar to database record or instance of an object. It is a single resource that can be retrieved, created, updated, or deleted.
For example, information about a blog author is a document resource.
`http://api.blog.com/authors/vishal-gandhi`\\nCollection | A collection resource is a server-managed directory of resources.
For example, a list of blog authors is a collection resource.
`http://api.blog.com/authors`\\nStore | A store is a repository which is managed by client. Using store resource client can create, update, delete and retrieve documents.
`http://api.blog.com/store/authors/vishal-gandhi`\\nController | A controller resource models a procedure concept. It is a resource that represents a procedure that can be invoked. A controller resource is a collection resource that supports the POST method. The POST method is used to invoke the controller resource. The controller resource can be used to model a procedure that can be invoked. For example, the following URI models a controller resource that represents a procedure that can be invoked to send an email:
```POST /api.blog.com/email/email/send```
``` {Collection}/{Store}/{Document}/{Controller} ```\\n\\n## REST API Design Rules \\n\\n### URI \\n\\n- **Rule : Forward Slash (/) is used to separate resources in the URI and indicate a hierarchical relationship**\\n\\nA trailing forward slash (/) is not required as the last character of a URI. Many web servers automatically redirect requests with a trailing forward slash to the same URI without the trailing forward slash.\\n\\n\\n- **Rule : Use plural nouns for URI Path segments that represent collections or resources**\\n\\n- **Rule : Use HTTP Methods to Perform Operations on Resources**\\n\\nHTTP methods are used to perform operations on resources. The following table lists the HTTP methods and their corresponding operations:\\n\\n| HTTP Method | Operation |\\n| ----------- | --------- |\\n| GET | Retrieve a resource |\\n| POST | Create a resource |\\n| PUT | Update a resource |\\n| DELETE | Delete a resource |\\n| PATCH | Update a resource with Partial data |\\n\\n\\n```JS\\nconst express = require(\'express\');\\nconst bodyParser = require(\'body-parser\');\\nconst app = express();\\nconst port = 3000;\\n\\napp.use(bodyParser.json());\\n\\napp.get(\'/authors\', (req, res) => {\\n\\nres.send(\'Authors List\'); \\n\\n//get author list from Sql lite backend \\n\\nres.json(authors);\\n\\n});\\n \\napp.post(\'/authors\', (req, res) => {\\n \\n res.send(\'Add Author\');\\n\\n //add author to Sql lite backend\\n\\n res.json(author);\\n\\n});\\n\\n\\n//update an author\\n\\napp.put(\'/authors/:id\', (req, res) => {\\n\\n res.send(\'Update Author\');\\n\\n res.json(author);\\n\\n});\\n\\n//delete an author\\n\\napp.delete(\'/authors/:id\', (req, res) => {\\n\\n res.send(\'Delete Author\');\\n\\n res.json(author);\\n\\n});\\n\\napp.patch(\'/authors/:id\', (req, res) => {\\n\\n res.send(\'Update Author Email\');\\n\\n res.json(author);\\n\\n}); \\n\\n\\napp.listen(port, () => {\\n console.log(`Blog Example app listening at http://localhost:${port}`);\\n});\\n```\\n\\n- **Rule : Hyphen (-) is used to separate words in URI Path**\\n\\nHyphens (-) are used to separate words in URI path. For example, the URI path for a resource named _user-profile_ is _/user-profile_.\\n\\n- **Rule : Underscore (_) is not used in URI**\\n\\nUnderscores (_) are not used in URI path due to text editors and browsers depending on the font hide the underscore by underlining the text.\\n\\n- **Rule : File Extensions are not used in URI**\\n\\nA REST API should not use file extensions in the URI. For example, the URI path for a resource named _user-profile_ is _/user-profile_ and not _/user-profile.json_.\\n\\n- **Rule : If API Provides a developer portal then it should be accessible via a consistent subdomain**\\n\\nIf an API provides a developer portal, then the developer portal should be accessible via a consistent subdomain. For example, the developer portal for the weather API is accessible via _developer.blog.api.com_.\\n\\n- **Rule : Lowercase letters are preferred in URI**\\n\\nLowercase letters are preferred in URI. 
For example, the URI path for a resource named _user-profile_ is _/user-profile_ and not _/User-Profile_.\\n\\n- **Rule: Use a Verb or verb phrase for Controller Names**\\n\\n```http request\\nPOST /api.blog.com/email/email/send\\n```\\n\\n- **Rule: CRUD function names should not be used in the URI**\\n\\nThe following table lists the CRUD functions and their corresponding HTTP methods:\\n\\n| CRUD Function | HTTP Method |\\n| ------------- | ----------- |\\n| Create | POST |\\n| Read | GET |\\n| Update | PUT |\\n| Delete | DELETE |\\n\\ne.g. Preferred API Interface\\n\\n```http request\\nPUT /api.blog.com/authors/vishal-gandhi\\n```\\n\\nAnti pattern \\n\\n```http request\\nDELETE /deleteusers/abc/\\n```\\n\\n- **Rule: New URIs should be introduced new concepts**\\n\\nA REST API should introduce new URIs for new concepts. For example, the following table lists the URIs for a user resource:\\n\\n| URI | Description |\\n| --- | ----------- |\\n| /authors | Returns a list of authors |\\n| /authors/vishalgandhi | Returns the author details |\\n| /authors/vishalgandhi/books | Returns a list of articles written by the author \\n\\n- **Rule: JSON should be well formed and supported for resource representation**\\n\\n- **Rule: Add Versioning at the start of the URI**\\n\\n```http request\\n\\nhttp://api.blog.com/v1/authors/vishal-gandhi\\n\\n```\\n\\n\\n### HTTP Methods\\n\\n- **Rule: GET must be used to retrieve representation of a resource**\\n\\n- **Rule: Head must be used to retrieve metadata of a resource and response headers**\\n\\n- **Rule: PUT must be used to both insert and update a resource**\\n\\n- **Rule: POST must be used to create a resource**\\n\\n- **Rule: POST must be used to execute a controller**\\n\\n- **Rule: DELETE must be used to delete a resource**\\n\\n- **Rule: OPTIONS must be used to retrieve supported HTTP methods**\\n\\n- **Rule : Use HTTP Status Codes to Indicate Response Status**\\n\\n\\n\\nHTTP status codes are used to indicate the response status of an HTTP request. The following table lists the HTTP status codes and their corresponding meanings:\\n\\n| HTTP Status Code | Meaning | Information |\\n| ---------------- | ------- | ----------- |\\n100 | 100 and above are information | 100 and above are for \\"Information\\". You rarely use them directly. Responses with these status codes cannot have a body.\\n| 200 OK | The request was successful | 200 and above are for \\"Successful\\" responses. These are the ones you would use the most. 200 is the default status code for a successful response.\\n| 201 Created | The request was successful and a resource was created | 201 is \\"Created\\". This is used when a new resource is created. The response will contain a Location header with the URI of the new resource.\\n| 204 No Content | The request was successful but there is no representation to return | A special case is 204, \\"No Content\\". This response is used when there is no content to return to the client, and so the response must not have a body.\\n| 300 Multiple Choices | The requested resource corresponds to any one of a set of representations, each with its own specific location | 300 and above are for \\"Redirection\\". These are used when the client needs to take some additional action in order to complete the request. For example, if you request a resource that has been moved to a different location, the response will be 301, \\"Moved Permanently\\", and the response will contain a Location header with the new location of the resource. 
The client can then make a new request to that location.\\n| 400 Bad Request | The request could not be understood by the server | 400 and above are for \\"Client Error\\" responses. These are used when the client has made a mistake in its request. For example, if you request a resource that doesn\'t exist, the response will be 404, \\"Not Found\\". \\n| 401 Unauthorized | The request requires user authentication | 401 is \\"Unauthorized\\". This is used when the client needs to authenticate itself to get the requested response.\\n| 403 Forbidden | The server understood the request, but is refusing to fulfill it | 403 is \\"Forbidden\\". This is used when the client is not allowed to access the resource. For example, if you try to access a resource that you don\'t have permission to access, the response will be 403, \\"Forbidden\\".\\n| 404 Not Found | The server has not found anything matching the Request-URI | 404 is \\"Not Found\\". This is used when the client requests a resource that doesn\'t exist. For example, if you request a resource that doesn\'t exist, the response will be 404, \\"Not Found\\".\\n| 405 Method Not Allowed | The method specified in the Request-Line is not allowed for the resource identified by the Request-URI | 405 is \\"Method Not Allowed\\". This is used when the client requests a resource using a method that isn\'t allowed. For example, if you try to access a resource using the POST method, but the resource only supports the GET method, the response will be 405, \\"Method Not Allowed\\".\\n| 500 Internal Server Error | The server encountered an unexpected condition which prevented it from fulfilling the request | 500 and above are for \\"Server Error\\" responses. These are used when the server encounters an error while fulfilling the request. For example, if the server runs out of memory while fulfilling the request, the response will be 500, \\"Internal Server Error\\".\\n\\nThe approaches and best practices of REST API outlined in this blog article will help anyone follow consistent guidelines for designing and developing REST APIs. \\n\\n## References\\n\\n- [Roy Fielding\'s Dissertation](https://www.ics.uci.edu/~fielding/pubs/dissertation/rest_arch_style.htm)\\n- [What is REST](https://restfulapi.net/)\\n- [REST API Design Rulebook](https://www.amazon.in/REST-API-Design-Rulebook-Consistent-ebook/dp/B005XE5A7Q/ref=sr_1_1?keywords=rest+api+design+rulebook&qid=1665926194&qu=eyJxc2MiOiIwLjQzIiwicXNhIjoiMC41NCIsInFzcCI6IjAuMDAifQ%3D%3D&sprefix=REST+API+D%2Caps%2C194&sr=8-1)\\n- [Hands-on RESTful API Design Patterns](https://www.amazon.in/Hands-RESTful-Design-Patterns-Practices-ebook/dp/B07BJL399D/ref=sr_1_2?keywords=rest+api+design+rulebook&qid=1665926194&qu=eyJxc2MiOiIwLjQzIiwicXNhIjoiMC41NCIsInFzcCI6IjAuMDAifQ%3D%3D&sprefix=REST+API+D%2Caps%2C194&sr=8-2)\\n\\n\\n\\n\\n\x3c!-- Change HTTP Codes to below \\n\\n100 and above are for \\"Information\\". You rarely use them directly. Responses with these status codes cannot have a body.\\n200 and above are for \\"Successful\\" responses. These are the ones you would use the most.\\n200 is the default status code, which means everything was \\"OK\\".\\nAnother example would be 201, \\"Created\\". It is commonly used after creating a new record in the database.\\nA special case is 204, \\"No Content\\". This response is used when there is no content to return to the client, and so the response must not have a body.\\n300 and above are for \\"Redirection\\". 
Responses with these status codes may or may not have a body, except for 304, \\"Not Modified\\", which must not have one.\\n400 and above are for \\"Client error\\" responses. These are the second type you would probably use the most.\\nAn example is 404, for a \\"Not Found\\" response.\\nFor generic errors from the client, you can just use 400.\\n500 and above are for server errors. You almost never use them directly. When something goes wrong at some part in your application code, or server, it will automatically return one of these status codes. --\x3e"},{"id":"mongodb-replicaset-write-concern-read-pref","metadata":{"permalink":"/mongodb-replicaset-write-concern-read-pref","source":"@site/blog/2022-08-21-mongodb-replicaset-write-concern-read-pref.md","title":"Understanding MongoDB Replicasets and Write Concern - Part 1","description":"Introducing Replicasets","date":"2022-08-21T00:00:00.000Z","formattedDate":"August 21, 2022","tags":[{"label":"mongodb","permalink":"/tags/mongodb"},{"label":"replicaset","permalink":"/tags/replicaset"},{"label":"write-concern","permalink":"/tags/write-concern"}],"readingTime":6.25,"hasTruncateMarker":true,"authors":[{"name":"Vishal Gandhi","url":"https://github.com/ivishalgandhi","image_url":"https://github.com/ivishalgandhi.png","imageURL":"https://github.com/ivishalgandhi.png"}],"frontMatter":{"slug":"mongodb-replicaset-write-concern-read-pref","title":"Understanding MongoDB Replicasets and Write Concern - Part 1","authors":{"name":"Vishal Gandhi","url":"https://github.com/ivishalgandhi","image_url":"https://github.com/ivishalgandhi.png","imageURL":"https://github.com/ivishalgandhi.png"},"tags":["mongodb","replicaset","write-concern"]},"prevItem":{"title":"Understanding REST API Design Rules","permalink":"/rest-api-design-rules"},"nextItem":{"title":"Guide to Software Bill of Materials(SBoM) and Docker SBOM CLI","permalink":"/docker-sbom"}},"content":"## Introducing Replicasets\\n\\nThe way to achieve fault tolerance in MongoDB is through the use of `replica sets`. \\n\\n```mermaid\\nstateDiagram-v2\\n [*] --\x3e Application\\n direction LR\\n state Application\\n Application --\x3e replicaset \\n state replicaset\\n {\\n direction RL\\n Primary:primary\\n Secondary1:secondary \\n Secondary2:secondary\\n Secondary1--\x3ePrimary : Fetch Oplog\\n Secondary2--\x3ePrimary : Fetch Oplog\\n \\n }\\n```\\n\\nTwo or more `secondary` nodes along with a `primary` node forms a replica set. Application makes all the read/write calls to the primary node which propagate all the write requests synchronously or asynchronously to the secondary nodes. \\n\\nThe Secondary nodes fetches the data via Oplog pull from Primary or other nodes. \\n\\n\x3c!--truncate--\x3e\\n\\nThe Primary node is responsible for all the writes and reads. The secondary nodes can be utilized for reads via [`setSecondaryOk`](https://docs.mongodb.com/manual/reference/method/Mongo.setSecondaryOk/) or [`readPreference`](https://docs.mongodb.com/manual/reference/read-preference/). \\n\\n## Understanding Oplog\\n\\nWhen the application performs a write, the primary node applies the write to the database like a standalone. \\n\\nThe difference between Replicaset write and standalone write is that replica set nodes have an `OpObserver` that inserts a document to the **oplog** whenever a write to the database happens, describing the write. The **oplog** is a capped collection called `oplog.rs` in the `local` database. 
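\\n\\nAs a minimal illustration (the query is standard mongo shell/mongosh syntax; only the deployment it runs against is assumed), the newest oplog entry can be inspected directly:\\n\\n```javascript\\n// Switch to the local database, where the oplog lives.\\nuse local\\n\\n// Natural order equals insertion order in a capped collection, so this returns the newest entry.\\ndb.oplog.rs.find().sort({ $natural: -1 }).limit(1)\\n```\\n\\nEach entry records the operation type (`op`), the namespace it applied to (`ns`), and the timestamp (`ts`) used for replication ordering.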
\\n\\nFor every operation performed in a write, the primary node inserts a document into the oplog. The oplog is a capped collection, which means that it has a maximum size. When the oplog reaches its maximum size, MongoDB removes the oldest entries to make room for new entries. \\n\\nFor a write which performs create collection and insert, there are two oplog entries created one for `create` collection and another for `insert`.\\n\\n```cpp\\n// mongod_main.cpp\\nsetUpObservers(service);\\n```\\n\\n```cpp\\n\\n//op_observer_registry.h\\nvoid onCreateCollection(OperationContext* const opCtx,\\n const CollectionPtr& coll,\\n const NamespaceString& collectionName,\\n const CollectionOptions& options,\\n const BSONObj& idIndex,\\n const OplogSlot& createOpTime,\\n bool fromMigrate) override {\\n ReservedTimes times{opCtx};\\n for (auto& o : _observers)\\n o->onCreateCollection(\\n opCtx, coll, collectionName, options, idIndex, createOpTime, fromMigrate);\\n }\\n\\n using OpObserver::onInserts;\\n void onInserts(OperationContext* const opCtx,\\n const NamespaceString& nss,\\n const UUID& uuid,\\n std::vector::const_iterator begin,\\n std::vector::const_iterator end,\\n bool fromMigrate) override {\\n ReservedTimes times{opCtx};\\n for (auto& o : _observers)\\n o->onInserts(opCtx, nss, uuid, begin, end, fromMigrate);\\n }\\n```\\n\\n## Understanding Write Concern\\n\\nWrite concern is a way to ensure that the write operations are propagated to the secondary nodes.\\n\\n### Default Write Concern\\n\\nIf a write operation does not explicitly specify a write concern, the server will use a default\\nwrite concern. \\n\\nThis default write concern will be defined by either the\\n\\n- **Cluster-Wide write concern**, explicitly set by the user \\nor\\n- **Implicit Default write concern**, implicitly set by the server based on replica set configuration.\\n\\n#### Cluster-Wide Write Concern\\n\\nThe cluster-wide write concern is set by the user using the [`setDefaultRWConcern`](https://docs.mongodb.com/manual/reference/command/setDefaultRWConcern/) command. Setting the cluster-wide write concern will cause the implicit default write concern not to take effect.\\n\\nOn a sharded cluster, the cluster-wide write concern is set on the config server. On a replica set, the cluster-wide write concern is set on the primary node. The below code snippets shows how the cluster-wide write concern is set on the primary node and stored on the config node. 
\\n\\n```javascript\\ndb.adminCommand(\\n {\\n setDefaultRWConcern : 1,\\n defaultReadConcern: { },\\n defaultWriteConcern: { },\\n writeConcern: { },\\n comment: \\n }\\n)\\n```\\n\\n```cpp\\n//cluster_rwc_defaults_commands.cpp \\nclass ClusterSetDefaultRWConcernCommand : public BasicCommand {\\npublic:\\n ClusterSetDefaultRWConcernCommand() : BasicCommand(\\"setDefaultRWConcern\\") {}\\n\\n bool run(OperationContext* opCtx,\\n const DatabaseName&,\\n const BSONObj& cmdObj,\\n BSONObjBuilder& result) override {\\n auto configShard = Grid::get(opCtx)->shardRegistry()->getConfigShard();\\n auto cmdResponse = uassertStatusOK(configShard->runCommandWithFixedRetryAttempts(\\n opCtx,\\n ReadPreferenceSetting(ReadPreference::PrimaryOnly),\\n NamespaceString::kAdminDb.toString(),\\n CommandHelpers::appendMajorityWriteConcern(\\n CommandHelpers::filterCommandRequestForPassthrough(cmdObj),\\n opCtx->getWriteConcern()),\\n Shard::RetryPolicy::kNotIdempotent));\\n\\n uassertStatusOK(cmdResponse.commandStatus);\\n uassertStatusOK(cmdResponse.writeConcernStatus);\\n\\n // Quickly pick up the new defaults by setting them in the cache.\\n auto newDefaults = RWConcernDefault::parse(IDLParserContext(\\"ClusterSetDefaultRWConcern\\"),\\n cmdResponse.response);\\n if (auto optWC = newDefaults.getDefaultWriteConcern()) {\\n if (optWC->hasCustomWriteMode()) {\\n LOGV2_WARNING(\\n 6081700,\\n \\"A custom write concern is being set as the default write concern in a sharded \\"\\n \\"cluster. This set is unchecked, but if the custom write concern does not \\"\\n \\"exist on all shards in the cluster, errors will occur upon writes\\",\\n \\"customWriteConcern\\"_attr = stdx::get(optWC->w));\\n }\\n }\\n ReadWriteConcernDefaults::get(opCtx).setDefault(opCtx, std::move(newDefaults));\\n\\n CommandHelpers::filterCommandReplyForPassthrough(cmdResponse.response, &result);\\n return true;\\n }\\n```\\n\\n#### Implicit default write concern\\n\\nThe implicit default write concern is calculated and set on startup by the server based on the replica set configuration. The server will set the implicit default write concern to the following:\\n\\n- If the replica set has a single node, the implicit default write concern is `{ w: 1 }`\\n- For most of the cases the implicit default write concern is `{ w: \\"majority\\" }`\\n\\n##### PSA \\n\\n`implicitDefaultWriteConcern = if ((#arbiters > 0) AND (#non-arbiters <= majority(#voting nodes)) then {w:1} else {w:majority}`\\n\\nImplicit default to a value that the set can satisfy in the event of one data-bearing node\\ngoing down. That is, the number of data-bearing nodes must be strictly greater than the majority\\nof voting nodes for the set to set `{w: \\"majority\\"}`.\\n\\nFor example, if we have a PSA replica set, and the secondary goes down, the primary cannot\\nsuccessfully acknowledge a majority write as the majority for the set is two nodes. However, the\\nprimary will remain primary with the arbiter\'s vote. In this case, the DWCF will have preemptively\\nset the IDWC to `{w: 1}` so the user can still perform writes to the replica set.\\n\\n##### Sharded Cluster \\n\\nFor a sharded cluster, the implicit default write concern is set to `{ w: \\"majority\\" }` if the\\ncluster has a majority of voting nodes. Otherwise, the implicit default write concern is set to\\n`{ w: 1 }`.\\n\\n## Understanding Secondary Nodes Operations \\n\\nThe secondary nodes will choose the node with the highest `lastApplied` timestamp as the** sync source**. 
The secondary nodes will then **pull** the oplog entries from the sync source and apply them to its own oplog.\\n\\nThe Secondary will also keep its **sync source** uptodate with its progress, this helps primary satisfy the read concern. \\n\\nHere are the high level steps performed to select and probe the sync source\\n\\n1. `TopologyCoordinator` checks if user requested a specific sync source using `replSetSyncFrom` command. If so, it will use that sync source. Otherwise, it will use the sync source from the last successful election.\\n2. Check if **chaining** is disabled. If so, the secondary will always use primary as its sync source \\n\\n```cpp\\n if (chainingPreference == ChainingPreference::kUseConfiguration &&\\n !_rsConfig.isChainingAllowed()) {\\n if (_currentPrimaryIndex == -1) {\\n LOG(1) << \\"Cannot select a sync source because chaining is\\"\\n \\" not allowed and primary is unknown/down\\";\\n _syncSource = HostAndPort();\\n return _syncSource;\\n } else if (_memberIsBlacklisted(*_currentPrimaryMember(), now)) {\\n LOG(1) << \\"Cannot select a sync source because chaining is not allowed and primary \\"\\n \\"member is blacklisted: \\"\\n << _currentPrimaryMember()->getHostAndPort();\\n _syncSource = HostAndPort();\\n return _syncSource;\\n\\n```\\n\\n3. Fetch latest opTime. Do not sync from a node where newest oplog is more than `maxSyncSourceLagSecs`\\n\\n```cpp\\n if (_currentPrimaryIndex != -1) {\\n OpTime primaryOpTime = _memberData.at(_currentPrimaryIndex).getHeartbeatAppliedOpTime();\\n\\n // Check if primaryOpTime is still close to 0 because we haven\'t received\\n // our first heartbeat from a new primary yet.\\n unsigned int maxLag =\\n static_cast(durationCount(_options.maxSyncSourceLagSecs));\\n if (primaryOpTime.getSecs() >= maxLag) {\\n oldestSyncOpTime =\\n OpTime(Timestamp(primaryOpTime.getSecs() - maxLag, 0), primaryOpTime.getTerm());\\n }\\n }\\n```\\n4. Loop through all the nodes and find the closest node which satisfies the condition \\n\\n```cpp\\nHostAndPort TopologyCoordinator::chooseNewSyncSource(Date_t now,\\n const OpTime& lastOpTimeFetched,\\n ChainingPreference chainingPreference) {\\n\\n...\\n...\\n...\\n```\\n\\n### Oplog Fetching \\n\\nThe secondary node will fetch the oplog entries from the sync source to keep its data syncronized. The entire implementation of the oplog fetching is in the `OplogFetcher` class which runs in a separate thread and communicates via a dedicated client connection.\\n\\n```cpp\\n\\nvoid OplogFetcher::setConnection(std::unique_ptr&& _connectedClient) {\\n // Can only call this once, before startup.\\n invariant(!_conn);\\n _conn = std::move(_connectedClient);\\n}\\n\\n```"},{"id":"docker-sbom","metadata":{"permalink":"/docker-sbom","source":"@site/blog/2022-07-09-docker-sbom.md","title":"Guide to Software Bill of Materials(SBoM) and Docker SBOM CLI","description":"An Software Bill of Materials (SBoM) is a formal record containing the details and supply chain relationships of various components used in building the software. 
These components, including libraries and modules, can be proprietary or open source,free or paid and the data can be widely available or access-restricted.","date":"2022-07-09T00:00:00.000Z","formattedDate":"July 9, 2022","tags":[{"label":"docker-desktop","permalink":"/tags/docker-desktop"},{"label":"SBOM","permalink":"/tags/sbom"},{"label":"docker","permalink":"/tags/docker"}],"readingTime":2.895,"hasTruncateMarker":true,"authors":[{"name":"Vishal Gandhi","url":"https://github.com/ivishalgandhi","image_url":"https://github.com/ivishalgandhi.png","imageURL":"https://github.com/ivishalgandhi.png"}],"frontMatter":{"slug":"docker-sbom","title":"Guide to Software Bill of Materials(SBoM) and Docker SBOM CLI","authors":{"name":"Vishal Gandhi","url":"https://github.com/ivishalgandhi","image_url":"https://github.com/ivishalgandhi.png","imageURL":"https://github.com/ivishalgandhi.png"},"tags":["docker-desktop","SBOM","docker"]},"prevItem":{"title":"Understanding MongoDB Replicasets and Write Concern - Part 1","permalink":"/mongodb-replicaset-write-concern-read-pref"},"nextItem":{"title":"How to configure Percona MongoDB Replicaset, Percona Backup Manager, Backup Agent using Docker and perform Replicaset backup, restore using S3 compatible object storage \u2013 MINIO","permalink":"/percona-mongo-replicaset-minio"}},"content":"An **Software Bill of Materials (SBoM)** is a formal record containing the details and supply chain relationships of various components used in building the software. These components, including libraries and modules, can be proprietary or open source,free or paid and the data can be widely available or access-restricted.\\n\\n\x3c!--truncate--\x3e\\n\\n![](sbom.webp)\\n\\nSBoM is analogous to a list of ingredients on food packaging. In May 2021, the US President released the [Executive Order](https://www.whitehouse.gov/briefing-room/presidential-actions/2021/05/12/executive-order-on-improving-the-nations-cybersecurity/) on improving the Nation\u2019s Cybersecurity. The Software Bill of Materials (SBoM) directly impacts all developers. The SBoM requires third-party software companies to provide customers with the code equivalent of a \u201cnutrition chart.\u201d\\n\\n## When should SBoM be used \u2013 Use cases ?\\n\\n* **Developing products**\\n * Scan vulnerabilities in the components\\n * Keep codebase to bare minimum, reduce the number of dependencies and size\\n * Generate SBoM for end users\\n\\n* **IT Operations**\\n * Understand operational risk\\n * Understand potential exploitations\\n * Real time asset inventory\\n * Software Selection\\n * Identify known vulnerabilities and compliance\\n\\n* **EOL**\\n * Complete visibility to components before evaluation or deploying in production\\n * Understand the software architecture and the dependencies of the software\\n\\n## Why SBOM ?\\n\\n* Requirement from regulatory bodies to track the components used in the software\\n\\n* Transparency of components getting shipped\\n\\n* Container ecosystem has exploded and the need to track the components getting shipped is a must\\n\\n* Software Vulnerabilities are bugs\\n\\n* Detecting and remediating Vulnerabilities\\n\\n## SBOM Formats\\n\\n* **SPDX (Software Package Data Exchange )**\\n * Open standard for communicating software bill of material information, including components, licenses, copyrights and security references. 
Reduces redundant work by providing a common format for organizations and communities to share and use\\n\\n* **CycloneDX**\\n * Open Web Application Security Project(OWASP) CycloneDX is a lightweight Software Bill of Materials (SBOM) standard designed for use in application security contexts and supply chain component analysis.\\n\\n* **SWID (Software Identification Tags)**\\n * SWID is used primarily to identify installed software and is the preferred format of the NVD. SWID tags are used in the National Vulnerability Database to describe vulnerable components. The CycloneDX specification compliments this work as CycloneDX documents can incorporate SWID tags and other high-level SWID metadata and optionally include entire SWID documents. Use of SWID tag ID\u2019s are useful in determining if a specific component has known vulnerabilities.\\n\\n## Docker Desktop \u2013 SBOM CLI\\n\\nIn Docker Desktop 4.7.0 Docker introduced and included a new experimental docker sbom CLI that is used for displaying SBoM for any container image. docker sbom scans the layer of container images using the Syft Project\\n\\n## Usage\\n\\n* Display SBOM in CyloneDX format\\n\\n\\n```shell\\n\\t\\n$ docker sbom mongo:latest --format cyclonedx-json | more\\n \\n{\\n \\"type\\": \\"library\\",\\n \\"publisher\\": \\"MongoDB Packaging \\\\u003cpackaging@mongodb.com\\\\u003e\\",\\n \\"name\\": \\"mongodb-org-server\\",\\n \\"version\\": \\"5.0.9\\",\\n \\"cpe\\": \\"cpe:2.3:a:mongodb-org-server:mongodb-org-server:5.0.9:*:*:*:*:*:*:*\\",\\n \\"purl\\": \\"pkg:deb/ubuntu/mongodb-org-server@5.0.9?arch=arm64\\\\u0026upstream=mongodb-org\\\\u0026distro=ubuntu-20.04\\",\\n \\"properties\\": [\\n {\\n \\"name\\": \\"syft:package:foundBy\\",\\n \\"value\\": \\"dpkgdb-cataloger\\"\\n },\\n {\\n \\"name\\": \\"syft:package:metadataType\\",\\n \\"value\\": \\"DpkgMetadata\\"\\n }\\n```\\n\\n* Display SBOM summary of packages. e.g. using the below command we can check for the log4j vulnerabilities\\n\\n```shell\\t\\n$ docker sbom neo4j | grep log4j\\n \\nlog4j-api 2.17.1 java-archive\\nlog4j-core 2.17.1 java-archive\\n\\t\\n$ docker sbom neo4j:4.4.1 | grep log4j\\n \\nlog4j-api 2.15.0 java-archive\\nlog4j-core 2.15.0 java-archive\\n\\t\\n$ docker sbom elasticsearch:7.16.3 | grep log4j\\n \\nelasticsearch-log4j 7.16.3 java-archive\\nlog4j-1.2-api 2.17.1 java-archive\\nlog4j-api 2.17.1 java-archive\\nlog4j-core 2.17.1 java-archive\\nlog4j-slf4j-impl 2.17.1 java-archive\\n```\\n\\nThere are many benefits to generate SBOM for compliance and vulnerability analysis. 
Further SBOM can be used for input to open source vulnerability databases like [Snyk](https://github.com/snyk/cli) and open source vulnerability scanning tools like [Grype](https://github.com/anchore/grype)"},{"id":"percona-mongo-replicaset-minio","metadata":{"permalink":"/percona-mongo-replicaset-minio","source":"@site/blog/2022-05-29-percona-mongo-replicaset-minio.md","title":"How to configure Percona MongoDB Replicaset, Percona Backup Manager, Backup Agent using Docker and perform Replicaset backup, restore using S3 compatible object storage \u2013 MINIO","description":"In this blog post, i will walk you through the steps required to containerize Percona Server for MongoDB, Percona Backup Manager, and Agent from source and configure cloud-native S3(Simple Storage Service) compatible distributed object storage MINIO to backup and restore Percona MongoDB snapshot backups.","date":"2022-05-29T00:00:00.000Z","formattedDate":"May 29, 2022","tags":[{"label":"mongodb","permalink":"/tags/mongodb"},{"label":"containers","permalink":"/tags/containers"},{"label":"docker","permalink":"/tags/docker"},{"label":"s3","permalink":"/tags/s-3"},{"label":"minio","permalink":"/tags/minio"},{"label":"pbm","permalink":"/tags/pbm"}],"readingTime":4.205,"hasTruncateMarker":true,"authors":[{"name":"Vishal Gandhi","url":"https://github.com/ivishalgandhi","image_url":"https://github.com/ivishalgandhi.png","imageURL":"https://github.com/ivishalgandhi.png"}],"frontMatter":{"slug":"percona-mongo-replicaset-minio","title":"How to configure Percona MongoDB Replicaset, Percona Backup Manager, Backup Agent using Docker and perform Replicaset backup, restore using S3 compatible object storage \u2013 MINIO","authors":{"name":"Vishal Gandhi","url":"https://github.com/ivishalgandhi","image_url":"https://github.com/ivishalgandhi.png","imageURL":"https://github.com/ivishalgandhi.png"},"tags":["mongodb","containers","docker","s3","minio","pbm"]},"prevItem":{"title":"Guide to Software Bill of Materials(SBoM) and Docker SBOM CLI","permalink":"/docker-sbom"},"nextItem":{"title":"Local Home Lab DNS Setup with DNSMasq and NGINX","permalink":"/local-home-lab"}},"content":"In this blog post, i will walk you through the steps required to containerize [Percona Server for MongoDB](https://github.com/percona/percona-server-mongodb), [Percona Backup Manager](https://github.com/percona/percona-backup-mongodb), and Agent from source and configure cloud-native S3(**S**imple **S**torage **S**ervice) compatible distributed object storage [MINIO](https://min.io/) to backup and restore Percona MongoDB snapshot backups.\\n\\n\x3c!--truncate--\x3e\\n\\n![](minio.webp)\\n\\n## Prerequisites\\nEnsure the below binaries are installed before starting the setup and configuration\\n\\n[Docker](https://www.docker.com/get-started/) or [Podman](https://podman.io/) to containerize Percona MongoDB replicaset and PBM Agent\\n[Docker Compose](https://docs.docker.com/compose/install/)\\n[Golang](https://go.dev/learn/) compiler \u2013 Build Percona Backup Manager binaries\\n[Portainer](https://www.portainer.io/) (Optional) \u2013 Intuitive UI for container configuration and monitoring\\nLet us perform the below steps to set up PSMDB Replicaset; PBM Agent; Minio, S3 compatible bucket, and PBM configuration to perform backups and restores from the bucket.\\n\\n## Steps\\n* Create the Docker environment file with Docker Image, tag, port, and replicaset information. 
Save the file as .env in the working directory\\n\\n```shell \\nMONGODB_IMAGE=percona/percona-server-mongodb\\nMONGODB_VERSION=5.0\\nMONGO1_PORT=0.0.0.0:15000\\nMONGO2_PORT=0.0.0.0:15001\\nMONGO3_PORT=0.0.0.0:15002\\nMONGODB_PORT=27017\\nMONGODB_DOCKER_NETWORK=mongo_net\\nRS_NAME=rs1\\n\\n```\\n* Create keyFile , Dockerfile and download percona-backup-manager source code in the working directory\\n\\n```shell\\n$ git clone https://github.com/percona/percona-backup-mongodb.git\\n\\nARG MONGODB_VERSION\\nARG MONGODB_IMAGE\\nFROM ${MONGODB_IMAGE}:${MONGODB_VERSION}\\nUSER root\\nCOPY keyFile /opt/keyFile\\nRUN chown mongodb /opt/keyFile && chmod 400 /opt/keyFile && mkdir -p /home/mongodb/ && chown mongodb /home/mongodb\\nUSER mongodb\\n```\\n* Create Docker Compose file\\n\\n```YAML\\nversion: \\"3.8\\"\\nservices:\\n rs101:\\n build:\\n dockerfile: Dockerfile\\n context: /home/vishal/dev/psmdb\\n args:\\n - MONGODB_VERSION=${MONGODB_VERSION}\\n - MONGODB_IMAGE=${MONGODB_IMAGE}\\n hostname: rs101\\n labels:\\n - \\"com.percona.pbm.app=mongod\\"\\n environment:\\n - REPLSET_NAME=rs1\\n - MONGO_USER=dba\\n - BACKUP_USER=bcp\\n - MONGO_PASS=test1234\\n ports:\\n - \\"${MONGO1_PORT}:${MONGODB_PORT}\\"\\n # command: mongod --replSet rs1 --port ${MONGO1_PORT}:27017 --storageEngine wiredTiger --keyFile /opt/keyFile --wiredTigerCacheSizeGB 1\\n command: [\\"--replSet\\", \\"${RS_NAME}\\", \\"--bind_ip_all\\", \\"--storageEngine\\", \\"wiredTiger\\" , \\"--keyFile\\", \\"/opt/keyFile\\"]\\n volumes:\\n - data-rs101:/data/db\\n - ./scripts/start.sh:/opt/start.sh\\n rs102:\\n build:\\n dockerfile: Dockerfile\\n context: /home/vishal/dev/psmdb\\n args:\\n - MONGODB_VERSION=${MONGODB_VERSION}\\n - MONGODB_IMAGE=${MONGODB_IMAGE}\\n hostname: rs102\\n labels:\\n - \\"com.percona.pbm.app=mongod\\"\\n # command: mongod --replSet rs1 --port 27017 --storageEngine wiredTiger --keyFile /opt/keyFile --wiredTigerCacheSizeGB 1\\n ports:\\n - \\"${MONGO2_PORT}:${MONGODB_PORT}\\"\\n command: [\\"--replSet\\", \\"${RS_NAME}\\", \\"--bind_ip_all\\", \\"--storageEngine\\", \\"wiredTiger\\" , \\"--keyFile\\", \\"/opt/keyFile\\"]\\n volumes:\\n - data-rs102:/data/db\\n rs103:\\n build:\\n dockerfile: Dockerfile\\n context: /home/vishal/dev/psmdb\\n args:\\n - MONGODB_VERSION=${MONGODB_VERSION}\\n - MONGODB_IMAGE=${MONGODB_IMAGE}\\n hostname: rs103\\n labels:\\n - \\"com.percona.pbm.app=mongod\\"\\n # command: mongod --replSet rs1 --port 27017 --storageEngine wiredTiger --keyFile /opt/keyFile --wiredTigerCacheSizeGB 1\\n ports:\\n - \\"${MONGO3_PORT}:${MONGODB_PORT}\\"\\n command: [\\"--replSet\\", \\"${RS_NAME}\\", \\"--bind_ip_all\\", \\"--storageEngine\\", \\"wiredTiger\\" , \\"--keyFile\\", \\"/opt/keyFile\\"]\\n volumes:\\n - data-rs103:/data/db\\n agent-rs101:\\n container_name: \\"pbmagent_rs101\\"\\n user: \\"1001\\"\\n labels:\\n - \\"com.percona.pbm.app=agent\\"\\n - \\"com.percona.pbm.agent.rs=rs1\\"\\n environment:\\n - \\"PBM_MONGODB_URI=mongodb://${BACKUP_USER:-bcp}:${MONGO_PASS:-test1234}@rs101:27017\\"\\n build:\\n labels:\\n - \\"com.percona.pbm.app=agent\\"\\n dockerfile: /home/vishal/open-source/percona-backup-mongodb/docker/Dockerfile\\n context: /home/vishal/open-source/percona-backup-mongodb/\\n args:\\n - MONGODB_VERSION=${MONGODB_VERSION:-5.0}\\n volumes:\\n - ./conf:/etc/pbm\\n - ./backups:/opt/backups\\n - data-rs101:/data/db\\n command: pbm-agent\\n cap_add:\\n - NET_ADMIN\\n agent-rs102:\\n container_name: \\"pbmagent_rs102\\"\\n user: \\"1001\\"\\n labels:\\n - \\"com.percona.pbm.app=agent\\"\\n 
- \\"com.percona.pbm.agent.rs=rs1\\"\\n environment:\\n - \\"PBM_MONGODB_URI=mongodb://${BACKUP_USER:-bcp}:${MONGO_PASS:-test1234}@rs102:27017\\"\\n build:\\n labels:\\n - \\"com.percona.pbm.app=agent\\"\\n dockerfile: /home/vishal/open-source/percona-backup-mongodb/docker/Dockerfile\\n context: /home/vishal/open-source/percona-backup-mongodb/\\n args:\\n - MONGODB_VERSION=${MONGODB_VERSION:-5.0}\\n volumes:\\n - ./conf:/etc/pbm\\n - ./backups:/opt/backups\\n - data-rs102:/data/db\\n command: pbm-agent\\n cap_add:\\n - NET_ADMIN\\n agent-rs103:\\n container_name: \\"pbmagent_rs103\\"\\n user: \\"1001\\"\\n labels:\\n - \\"com.percona.pbm.app=agent\\"\\n - \\"com.percona.pbm.agent.rs=rs1\\"\\n environment:\\n - \\"PBM_MONGODB_URI=mongodb://${BACKUP_USER:-bcp}:${MONGO_PASS:-test1234}@rs103:27017\\"\\n build:\\n labels:\\n - \\"com.percona.pbm.app=agent\\"\\n dockerfile: /home/vishal/open-source/percona-backup-mongodb/docker/Dockerfile\\n context: /home/vishal/open-source/percona-backup-mongodb/\\n args:\\n - MONGODB_VERSION=${MONGODB_VERSION:-5.0}\\n volumes:\\n - ./conf:/etc/pbm\\n - ./backups:/opt/backups\\n - data-rs103:/data/db\\n command: pbm-agent\\n cap_add:\\n - NET_ADMIN\\nvolumes:\\n backups: null\\n data-rs101: null\\n data-rs102: null\\n data-rs103: null\\n\\n```\\n\\n* Run Docker compose\\nThe below command will build and start the docker container for Percona Server MongoDB Primary Secondary Secondary replicaset and Percona Backup Manager Agent for each replicaset\\n\\n```\\n$ psmdb docker compose -f docker-compose-rs.yaml up -d\\n[+] Running 8/8\\n\u283f Container psmdb-rs102-1 Running 0.0s\\n\u283f Container psmdb-rs103-1 Running 0.0s\\n\u283f Container pbmagent_rs103 Running 0.0s\\n\u283f Container pbmagent_rs102 Running 0.0s\\n\u283f Container psmdb-rs101-1 Running 0.0s\\n\u283f Container pbmagent_rs101 Running 0.0s\\n\\n```\\n\\n* Connect to MongoDB replicaset and ensure replication and containers are working\\n\\n```shell\\n$ mongo \\"mongodb://dba:test1234@192.168.50.113:15000,192.168.50.113:15001,192.168.50.113:15002/admin?replicaSet=rs1\\"\\n```\\n\\n* Setup Minio and Minio CLI\\n\\n```shell\\n\\n$ cd ~/downloads && wget https://dl.min.io/server/minio/release/linux-amd64/minio\\n \\n$ wget https://dl.min.io/client/mc/release/linux-amd64/mc\\nchmod +x mc\\n./mc --help\\n \\n$ downloads ./minio server /home/vishal/data --address=0.0.0.0:7000\\n \\n\\nAPI: http://0.0.0.0:7000 \\nRootUser: minioadmin \\nRootPass: minioadmin \\nFinished loading IAM sub-system (took 0.0s of 0.0s to load data).\\n \\nConsole: http://192.168.50.113:43859 http://192.168.160.1:43859 http://172.18.0.1:43859 http://172.19.0.1:43859 http://172.24.0.1:43859 http://172.26.0.1:43859 http://172.17.0.1:43859 http://127.0.0.1:43859 \\nRootUser: minioadmin \\nRootPass: minioadmin \\n \\nCommand-line: https://docs.min.io/docs/minio-client-quickstart-guide\\n $ mc alias set myminio http://0.0.0.0:7000 minioadmin minioadmin\\n \\nDocumentation: https://docs.min.io\\n\\n\\n```\\n\\n* Setup Minio server alias and List buckets\\n\\n```shell\\n$ mc alias set minio-deb http://192.168.50.113:7000 minioadmin minioadmin\\n$ mc ls minio-deb\\n[2022-05-29 14:59:32 IST] 0B nocodb/\\n[2022-05-29 00:19:41 IST] 0B typesense/\\n\\n```\\n\\n* Create a new bucket and name it `pbm`\\n\\n```shell\\n$ mc alias set minio-deb http://192.168.50.113:7000 minioadmin minioadmin\\n$ mc ls minio-deb\\n [2022-05-29 14:59:32 IST] 0B nocodb/\\n [2022-05-29 00:19:41 IST] 0B typesense/\\n```\\n\\n* Setup PBM or compile PBM from the source 
repository\\n\\n```shell\\n$ sudo apt-get install -y libkrb5-dev\\n$ cd percona-backup-mongodb\\n$ make build\\n$ make install\\n```\\n* create pbm_config.YAML to be used for configuring PBM for using MINIO\\n\\n```YAML\\n\\nstorage:\\n type: s3\\n s3:\\n endpointUrl: http://192.168.50.113:7000\\n bucket: pbm\\n credentials:\\n access-key-id: \\"minioadmin\\"\\n secret-access-key: \\"minioadmin\\"\\n\\n\\n```\\n\\n* Configure PBM\\n\\n```shell\\n$ ./pbm config --file /home/vishal/dev/psmdb/pbm_config.yaml --mongodb-uri=\\"mongodb://bcp:test1234@192.168.50.113:15000/?replSetName=rs1\\"\\n\\n```\\n* Validate agent container logs and run the pbm list command. If MINIO is configured successfully, agent container logs shouldn\u2019t log any errors.\\n\\n```shell\\n2022-05-29T01:31:14.000+0000 D [resync] got backups list: 02022-05-29T01:31:14.000+0000 D [resync] got physical restores list: 0\\n\\n$ bin git:(main) ./pbm list --mongodb-uri=\\"mongodb://bcp:test1234@192.168.50.113:15000/?replSetName=rs1\\"\\nBackup snapshots:\\n2022-05-29T01:29:12Z [complete: 2022-05-29T01:29:16Z]\\n2022-05-29T01:38:38Z [complete: 2022-05-29T01:38:42Z]\\n2022-05-29T04:04:44Z [complete: 2022-05-29T04:04:48Z]\\n```\\n\\n* To run PBM backup and restore execute the below commands\\n\\n```bash \\n$ ./pbm backup --mongodb-uri=\\"mongodb://bcp:test1234@192.168.50.113:15000/?replSetName=rs1\\" \\n$ ./pbm restore 2022-05-29T04:04:44Z --mongodb-uri=\\"mongodb:/\\n```"},{"id":"local-home-lab","metadata":{"permalink":"/local-home-lab","source":"@site/blog/2022-04-10-local-home-lab.md","title":"Local Home Lab DNS Setup with DNSMasq and NGINX","description":"As I explored and set up an increased number of FOSS software using containers(Docker and LXD) and virtual machines(Multipass) in my home lab environment, I realized the difficulty in remembering the different ports the applications and containers are running. 
The solution to address this problem was to have a Domain Name System for the local network, which works to resolve local and external addresses with a reverse proxy to redirect calls based on DNS resolution.","date":"2022-04-10T00:00:00.000Z","formattedDate":"April 10, 2022","tags":[{"label":"reverse-proxy","permalink":"/tags/reverse-proxy"},{"label":"NGINX","permalink":"/tags/nginx"},{"label":"DNSMASQ","permalink":"/tags/dnsmasq"},{"label":"Lab","permalink":"/tags/lab"}],"readingTime":3.34,"hasTruncateMarker":true,"authors":[{"name":"Vishal Gandhi","url":"https://github.com/ivishalgandhi","image_url":"https://github.com/ivishalgandhi.png","imageURL":"https://github.com/ivishalgandhi.png"}],"frontMatter":{"slug":"local-home-lab","title":"Local Home Lab DNS Setup with DNSMasq and NGINX","authors":{"name":"Vishal Gandhi","url":"https://github.com/ivishalgandhi","image_url":"https://github.com/ivishalgandhi.png","imageURL":"https://github.com/ivishalgandhi.png"},"tags":["reverse-proxy","NGINX","DNSMASQ","Lab"]},"prevItem":{"title":"How to configure Percona MongoDB Replicaset, Percona Backup Manager, Backup Agent using Docker and perform Replicaset backup, restore using S3 compatible object storage \u2013 MINIO","permalink":"/percona-mongo-replicaset-minio"},"nextItem":{"title":"Configure Sharding in MongoDB on Docker Containers","permalink":"/sharding-mongo-docker"}},"content":"As I explored and set up an increased number of FOSS software using containers(Docker and LXD) and virtual machines(Multipass) in my home lab environment, I realized the difficulty in remembering the different ports the applications and containers are running. The solution to address this problem was to have a Domain Name System for the local network, which works to resolve local and external addresses with a reverse proxy to redirect calls based on DNS resolution.\\n\\n\x3c!--truncate--\x3e\\n\\n\\nThe below command lists the Docker Containers and ports the container are running on, the requirement is to create a domain for a home setup with domain homelab.net and access the containerized applications with appsmith.homelab.net; typesense.homelab.net; excalidraw.homelab.net\\n\\nLet\u2019s get the list of docker containers with port numbers\\n\\n```shell\\n# get container names and port numbers\\n$ docker container ls --format \\"table {{.ID}}\\\\t{{.Names}}\\\\t{{.Ports}}\\" -a\\n\\n\\nCONTAINER ID NAMES PORTS\\ncbb2ac402270 appsmith 0.0.0.0:9001->9001/tcp, 0.0.0.0:70->80/tcp, 0.0.0.0:444->443/tcp\\nc9875323b989 typesense_typesense-1_1 0.0.0.0:8108->8108/tcp\\nc453288c8496 excalidraw 0.0.0.0:3001->80/tcp\\n5be5d33f1f50 k8s-control-plane 127.0.0.1:34589->6443/tcp\\n4140d2fbf7d5 mysql_nocodb_1 0.0.0.0:8082->8080/tcp\\ne7310461bee9 mysql_root_db_1 3306/tcp, 33060/tcp\\n9b56c33d45d5 meilisearch_ms_1 0.0.0.0:7700->7700/tcp\\n9ac6a0e16b0e mongo2 0.0.0.0:20002->27017/tcp\\n2aaf01d2233f mongo1 0.0.0.0:20001->27017/tcp\\n860b521f97dc mongo3 0.0.0.0:20003->27017/tcp\\nd8ad1ec3cab8 rethinkdb_rethinkdb_1 0.0.0.0:28015->28015/tcp, 0.0.0.0:29015->29015/tcp, 0.0.0.0:8081->8080/tcp\\n```\\n\\nThe containers and applications running on the local home network as shown above do not have a public domain name, the option was to look for setting up a DNS server with [DNSMasq](https://thekelleys.org.uk/dnsmasq/doc.html), and a reverse proxy using [NGINX](https://www.nginx.com/). 
The containers may not be the only use case scenario for local DNS servers with [DNSMasq](https://thekelleys.org.uk/dnsmasq/doc.html), there could be many others like accessing a local file share across devices; accessing applications from a mobile device, and sharing a printer.\\n\\n\\n[DNSMasq](https://thekelleys.org.uk/dnsmasq/doc.html) - Dnsmasq provides network infrastructure for small networks: DNS, DHCP, router advertisement, and network boot. It is designed to be lightweight and has a small footprint, suitable for resource-constrained routers and firewalls.\\n\\n[NGINX](https://www.nginx.com/) - Reverse Proxy \u2013 A reverse proxy provides an additional level of abstraction and control to ensure the smooth flow of network traffic between clients and servers.\\n\\n**Let us get started with the implementation steps for DNSMasq and NGINX. The below steps are performed on Ubuntu 20.04 (Debian-based distro).**\\n\\nBefore starting the installation of DNSMasq, \\n\\n### Step 1: Disable systemd-resolve which binds to port 53, the default port for DNSMasq\\n\\n```shell\\n\\n sudo systemctl stop systemd-resolved\\n sudo systemctl disable systemd-resolved\\n\\n```\\n\\n### Step 2: Install DNSUtils, DNSMasq\\n\\n```shell\\nsudo apt update && sudo apt install dnsmasq && sudo apt install dnsutils\\n```\\n\\n### Step 3: Create the DNSMasq configuration file\\n\\n```shell\\n$ dnsmasq_conf=\\"no-dhcp-interface=enp2s0f0\\nbogus-priv\\ndomain=homelab.net\\nexpand-hosts\\nlocal=/homelab.net/\\ndomain-needed\\nno-resolv\\nno-poll\\nserver=8.8.8.8\\nserver=8.8.4.4\\"\\n\\n$ sudo echo -e \\"$dnsmasq_conf\\" > /etc/dnsmasq.d/home-lab.net \\n\\n$ sudo systemctl restart dnsmasq\\n```\\n\\n\\n### Step 4: Add container DNS records in the file./etc/hosts. The records in the hosts file will be used by DNSMasq for client responses\\n\\n```shell\\n $ sudo nano /etc/hosts \\n # add the below records to the hosts file\\n #Container DNS records\\n # appsmith\\n 192.168.20.113 appsmith\\n # excalidraw\\n 192.168.20.113 excalidraw\\n # typesense\\n 192.168.20.113 typesense\\n```\\n\\n### Step 5: Restart DNSMasq service\\n\\n```shell\\n$ sudo systemctl restart dnsmasq.service\\n```\\n\\n### Step 6: Install NGINX\\n\\n```shell\\n$ sudo apt update && sudo apt install nginx\\n```\\n\\n\\n### Step 6: To enable reverse proxy feature, create a new NGINX configuration file in `sites-enabled` directory\\n\\n```shell\\n $ sudo nano /etc/nginx/sites-enabled/homelab.conf\\n server {\\n listen 80;\\n listen [::]:80;\\n server_name typesense.homelab.net;\\n location / {\\n proxy_bind 192.168.20.113;\\n proxy_pass http://localhost:3000;\\n }\\n }\\n server {\\n listen 80;\\n listen [::]:80;\\n server_name appsmith.homelab.net;\\n location / {\\n proxy_bind 192.168.20.113;\\n proxy_pass http://localhost:70;\\n }\\n\\n }\\n server {\\n listen 80;\\n listen [::]:80;\\n server_name excalidraw.homelab.net;\\n location / {\\n proxy_bind 192.168.20.113;\\n proxy_pass http://localhost:3001;\\n }\\n\\n }\\n```\\n\\nThe `proxy_pass` argument will forward all incoming client requests to app.homelab.net to the respective app. 
The IP address and port number can be easily changed.\\n \\n### Step 7 reload NGINX for the configuration to take into effect\\n```shell \\n$ sudo systemctl reload nginx\\n```\\nAfter a successful implementation, we will be able to access container applications using domain URLs as seen in the below screenshot with three panes first pane is appsmith ; second pane is excalidraw and third pane is typesense.\\n\\n![local-home-lab-snapshot](2022-07-19-23-09-57.png)"},{"id":"sharding-mongo-docker","metadata":{"permalink":"/sharding-mongo-docker","source":"@site/blog/2021-10-02-sharding-mongo-docker.md","title":"Configure Sharding in MongoDB on Docker Containers","description":"In my previous blog post, I posted about configuring Replica Set to meet high availability requirements.","date":"2021-10-02T00:00:00.000Z","formattedDate":"October 2, 2021","tags":[{"label":"mongodb","permalink":"/tags/mongodb"},{"label":"docker","permalink":"/tags/docker"},{"label":"sharding","permalink":"/tags/sharding"}],"readingTime":7.095,"hasTruncateMarker":true,"authors":[{"name":"Vishal Gandhi","url":"https://github.com/ivishalgandhi","image_url":"https://github.com/ivishalgandhi.png","imageURL":"https://github.com/ivishalgandhi.png"}],"frontMatter":{"slug":"sharding-mongo-docker","title":"Configure Sharding in MongoDB on Docker Containers","authors":{"name":"Vishal Gandhi","url":"https://github.com/ivishalgandhi","image_url":"https://github.com/ivishalgandhi.png","imageURL":"https://github.com/ivishalgandhi.png"},"tags":["mongodb","docker","sharding"]},"prevItem":{"title":"Local Home Lab DNS Setup with DNSMasq and NGINX","permalink":"/local-home-lab"},"nextItem":{"title":"MongoDB Replicaset with Persistent Volume using Docker Compose","permalink":"/mongodb-rs-docker-persistent-volume"}},"content":"In my previous [blog](2021-09-18-mongodb-rs-docker-persistent-volume.md) post, I posted about configuring Replica Set to meet high availability requirements. \\n\\nIn this post, i cover \\n\\n* MongoDB Sharded Cluster Components \\n* Steps to create MongoDB Sharded Cluster using Docker Compose\\n* Add Replica Set as a Shard\\n* Sharding Data\\n* Verify Distribution of Data \\n\\n\x3c!--truncate--\x3e\\n\\n## Replica Set vs Sharding \\n\\n**Replica Set** is the way of keeping identical set of data on multiple servers. Sharding refers to the process of splitting data across nodes, also known as horizontal partitioning. \\n\\nA database **shard**, is a horizontal partition of data in a database, each node contains different set of the data. \\n\\nMongoDB supports and implements `auto-sharding` by automating balancing of data across the shards. \\n\\n\\n\\n\\n## MongoDB Sharding Components \\n\\nThe first step in creating a Sharded MongoDB cluster is to understand all the components and processes that constitute a cluster \\n\\n* **Query Router - mongos**\\n\\nmongos is the routing process. The goal of sharding is to make cluster of 100-1000 nodes looks like a single interface for the application and abstract all the complexity of data access from multiple shards. The mongos router is table of contents and knows where the data required by application is located, mongos forwards the application request to appropriate shard(s). \\n\\n* **Config Servers**\\n\\nConfig Servers hold all the metadata about which node is holding which data(chunks). mongos retrieves all the metadata from Config Servers. 
Config Servers are critical and its important to configure and bring the config servers first, backup config servers and setup config servers as Replica Set. \\n\\n## Steps to create MongoDB Sharded Cluster using Docker Compose\\n\\nBelow image show different components required to setup MongoDB sharding with Replica Set. The image also shows how application communicates to MongoDB sharded cluster. As discussed in the sharding components application always connects first to mongos and mongos communicates with config server (cfg1, cfg2, cfg3 are part of replicaset in below image)\\n\\n```mermaid\\n stateDiagram-v2\\n [*] --\x3e Application\\n direction LR\\n state Application\\n state QueryRouter \\n {\\n \\n mongos \\n }\\n Application --\x3e QueryRouter : Read\\n QueryRouter --\x3e Application: Results\\n state cfg: config \\n {\\n \\n cfg1 \\n cfg2\\n cfg3\\n \\n }\\n QueryRouter --\x3e config\\n config --\x3e QueryRouter\\n state Shard1: rs_mongo1\\n {\\n shard1_mongo1\\n shard1_mongo2\\n shard1_mongo3\\n }\\n state Shard2: rs_mongo2\\n {\\n shard2_mongo1\\n shard2_mongo2\\n shard2_mongo3\\n }\\n \\n state Shard3: rs_mongo3 \\n {\\n shard3_mongo1\\n shard3_mongo2\\n shard3_mongo3\\n }\\n\\n \\n QueryRouter --\x3e rs_mongo1\\n QueryRouter --\x3e rs_mongo2\\n QueryRouter --\x3e rs_mongo3\\n rs_mongo1 --\x3e QueryRouter\\n rs_mongo2 --\x3e QueryRouter\\n rs_mongo3 --\x3e QueryRouter\\n \\n```\\n\\nLets setup above MongoDB Sharding Cluster using docker compose\\n\\n### Step 1 - Author Docker Compose file \\n\\n:::note\\nEnsure directory path mentioned in docker compose for persistent volume before the \u201c:\u201d is existing on local host\\n:::\\n\\n```YAML\\nservices:\\n shard1_mongo1:\\n image: mongo_ssh\\n hostname: shard1_mongo1\\n container_name: shard1_mongo1\\n volumes:\\n - ~/db/shard1_mongo1/mongod.conf:/etc/mongod.conf\\n - ~/db/shard1_mongo1/initdb.d/:/docker-entrypoint-initdb.d/\\n - ~/db/shard1_mongo1/data/db/:/data/db/\\n - ~/db/shard1_mongo1/log/:/var/log/mongodb/\\n ports:\\n - 20005:27017\\n command: [\\"-f\\", \\"/etc/mongod.conf\\"]\\n network_mode: mongo_net\\n \\n shard1_mongo2:\\n image: mongo_ssh\\n hostname: shard1_mongo2\\n container_name: shard1_mongo2\\n volumes:\\n - ~/db/shard1_mongo2/mongod.conf:/etc/mongod.conf\\n - ~/db/shard1_mongo2/initdb.d/:/docker-entrypoint-initdb.d/\\n - ~/db/shard1_mongo2/data/db/:/data/db/\\n - ~/db/shard1_mongo2/log/:/var/log/mongodb/\\n ports:\\n - 20006:27017\\n command: [\\"-f\\", \\"/etc/mongod.conf\\"]\\n network_mode: mongo_net\\n \\n shard1_mongo3:\\n image: mongo_ssh\\n hostname: shard1_mongo3\\n container_name: shard1_mongo3\\n volumes:\\n - ~/db/shard1_mongo3/mongod.conf:/etc/mongod.conf\\n - ~/db/shard1_mongo3/initdb.d/:/docker-entrypoint-initdb.d/\\n - ~/db/shard1_mongo3/data/db/:/data/db/\\n - ~/db/shard1_mongo3/log/:/var/log/mongodb/\\n ports:\\n - 20007:27017\\n command: [\\"-f\\", \\"/etc/mongod.conf\\"]\\n network_mode: mongo_net\\n \\n shard2_mongo1:\\n image: mongo_ssh\\n hostname: shard2_mongo1\\n container_name: shard2_mongo1\\n volumes:\\n - ~/db/shard2_mongo1/mongod.conf:/etc/mongod.conf\\n - ~/db/shard2_mongo1/initdb.d/:/docker-entrypoint-initdb.d/\\n - ~/db/shard2_mongo1/data/db/:/data/db/\\n - ~/db/shard2_mongo1/log/:/var/log/mongodb/\\n ports:\\n - 20008:27017\\n command: [\\"-f\\", \\"/etc/mongod.conf\\"]\\n network_mode: mongo_net\\n \\n shard2_mongo2:\\n image: mongo_ssh\\n hostname: shard2_mongo2\\n container_name: shard2_mongo2\\n volumes:\\n - ~/db/shard2_mongo2/mongod.conf:/etc/mongod.conf\\n - 
~/db/shard2_mongo2/initdb.d/:/docker-entrypoint-initdb.d/\\n - ~/db/shard2_mongo2/data/db/:/data/db/\\n - ~/db/shard2_mongo2/log/:/var/log/mongodb/\\n ports:\\n - 20009:27017\\n command: [\\"-f\\", \\"/etc/mongod.conf\\"]\\n network_mode: mongo_net\\n \\n shard2_mongo3:\\n image: mongo_ssh\\n hostname: shard2_mongo3\\n container_name: shard2_mongo3\\n volumes:\\n - ~/db/shard2_mongo3/mongod.conf:/etc/mongod.conf\\n - ~/db/shard2_mongo3/initdb.d/:/docker-entrypoint-initdb.d/\\n - ~/db/shard2_mongo3/data/db/:/data/db/\\n - ~/db/shard2_mongo3/log/:/var/log/mongodb/\\n ports:\\n - 20010:27017\\n command: [\\"-f\\", \\"/etc/mongod.conf\\"]\\n network_mode: mongo_net\\n \\n shard3_mongo1:\\n image: mongo_ssh\\n hostname: shard3_mongo1\\n container_name: shard3_mongo1\\n volumes:\\n - ~/db/shard3_mongo1/mongod.conf:/etc/mongod.conf\\n - ~/db/shard3_mongo1/initdb.d/:/docker-entrypoint-initdb.d/\\n - ~/db/shard3_mongo1/data/db/:/data/db/\\n - ~/db/shard3_mongo1/log/:/var/log/mongodb/\\n ports:\\n - 20011:27017\\n command: [\\"-f\\", \\"/etc/mongod.conf\\"]\\n network_mode: mongo_net\\n \\n shard3_mongo2:\\n image: mongo_ssh\\n hostname: shard3_mongo2\\n container_name: shard3_mongo2\\n volumes:\\n - ~/db/shard3_mongo2/mongod.conf:/etc/mongod.conf\\n - ~/db/shard3_mongo2/initdb.d/:/docker-entrypoint-initdb.d/\\n - ~/db/shard3_mongo2/data/db/:/data/db/\\n - ~/db/shard3_mongo2/log/:/var/log/mongodb/\\n ports:\\n - 20012:27017\\n command: [\\"-f\\", \\"/etc/mongod.conf\\"]\\n network_mode: mongo_net\\n \\n shard3_mongo3:\\n image: mongo_ssh\\n hostname: shard3_mongo3\\n container_name: shard3_mongo3\\n volumes:\\n - ~/db/shard3_mongo3/mongod.conf:/etc/mongod.conf\\n - ~/db/shard3_mongo3/initdb.d/:/docker-entrypoint-initdb.d/\\n - ~/db/shard3_mongo3/data/db/:/data/db/\\n - ~/db/shard3_mongo3/log/:/var/log/mongodb/\\n ports:\\n - 20013:27017\\n command: [\\"-f\\", \\"/etc/mongod.conf\\"]\\n network_mode: mongo_net\\n# MongoDB Confiugration Server \\n cfg1:\\n image: mongo_ssh\\n hostname: cfg1\\n container_name: cfg1\\n volumes:\\n - ~/db/cfg1/mongod.conf:/etc/mongod.conf\\n - ~/db/cfg1/initdb.d/:/docker-entrypoint-initdb.d/\\n - ~/db/cfg1/data/db/:/data/db/\\n - ~/db/cfg1/log/:/var/log/mongodb/\\n ports:\\n - 20014:27017\\n command: [\\"-f\\", \\"/etc/mongod.conf\\"]\\n network_mode: mongo_net\\n \\n cfg2:\\n image: mongo_ssh\\n hostname: cfg2\\n container_name: cfg2\\n volumes:\\n - ~/db/cfg2/mongod.conf:/etc/mongod.conf\\n - ~/db/cfg2/initdb.d/:/docker-entrypoint-initdb.d/\\n - ~/db/cfg2/data/db/:/data/db/\\n - ~/db/cfg2/log/:/var/log/mongodb/\\n ports:\\n - 20015:27017\\n command: [\\"-f\\", \\"/etc/mongod.conf\\"]\\n network_mode: mongo_net\\n \\n cfg3:\\n image: mongo_ssh\\n hostname: cfg3\\n container_name: cfg3\\n volumes:\\n - ~/db/cfg3/mongod.conf:/etc/mongod.conf\\n - ~/db/cfg3/initdb.d/:/docker-entrypoint-initdb.d/\\n - ~/db/cfg3/data/db/:/data/db/\\n - ~/db/cfg3/log/:/var/log/mongodb/\\n ports:\\n - 20016:27017\\n command: [\\"-f\\", \\"/etc/mongod.conf\\"]\\n network_mode: mongo_net\\n \\n mongos:\\n image: mongo_ssh\\n hostname: mongos\\n container_name: mongos\\n volumes:\\n - ~/db/mongos/mongod.conf:/etc/mongod.conf\\n - ~/db/mongos/initdb.d/:/docker-entrypoint-initdb.d/\\n - ~/db/mongos/data/db/:/data/db/\\n - ~/db/mongos/log/:/var/log/mongodb/\\n ports:\\n - 20017:27017\\n command: [\\"mongos\\",\\"-f\\", \\"/etc/mongod.conf\\"]\\n network_mode: mongo_net\\n\\n\\n```\\n\\n### Step 2 - Draft Config Server configuration file (pass clusterRole: configsvr to indicate this server is Config 
Server)\\n\\n```YAML\\nsystemLog:\\n destination: file\\n logAppend: true\\n path: /var/log/mongodb/mongod.log\\nstorage:\\n dbPath: /data/db\\n journal:\\n enabled: true\\n engine: wiredTiger\\nnet:\\n port: 27017\\n bindIp: 127.0.0.1 # Enter 0.0.0.0,:: to bind to all IPv4 and IPv6 addresses or, alternatively, use the net.bindIpAll setting.\\nsharding:\\n clusterRole: configsvr\\nreplication:\\n replSetName: rs_config\\n```\\n\\n### Step 3 - Draft Query Router mongos configuration file (pass configDB:config server list)\\n\\n```YAML\\nsystemLog:\\n destination: file\\n logAppend: true\\n path: /var/log/mongodb/mongod.log\\n \\nnet:\\n port: 27017\\n bindIp: 127.0.0.1 # Enter 0.0.0.0,:: to bind to all IPv4 and IPv6 addresses or, alternatively, use the net.bindIpAll setting.\\n \\nsharding:\\n configDB: rs_config/cfg1:27017,cfg2:27017,cfg3:27017\\n```\\n\\n### Step 4 - Copy mongod.conf and mongos.conf to the path mentioned in step 1 `docker-compose.yaml`\\n\\n### Step 5 - Spin up Config Server, mongos, all mongod nodes\\n\\n```shell \\n$ docker compose up -d\\n```\\n\\n### Step 6 - Connect to config server and add config server in a Replica Set\\n\\n```javascript \\nrs_config:PRIMARY> rs.initiate() \\nrs_config:PRIMARY> rs.add(\\"cfg2:27017\\")\\nrs_config:PRIMARY> rs.add(\\"cfg3:27017\\")\\n```\\n\\n### Step 7 - Add all data nodes to replicaset\\n\\n```javascript \\n# Connect to shard1_mongo1\\n \\nadmin> rs.initiate()\\nrs_mongo1 [direct: primary] admin> rs.add(\\"shard1_mongo2\\")\\nrs_mongo1 [direct: primary] admin> rs.add(\\"shard1_mongo3\\")\\n \\n# Connect to shard2_mongo1\\n \\nadmin> rs.initiate()\\nrs_mongo2 [direct: primary] test> rs.add(\\"shard2_mongo2\\")\\nrs_mongo2 [direct: primary] test> rs.add(\\"shard2_mongo3\\")\\n \\n# Connect to shard3_mongo1\\n \\ntest> rs.initiate()\\nrs_mongo3 [direct: other] test> rs.add(\\"shard3_mongo2\\")\\nrs_mongo3 [direct: primary] test> rs.add(\\"shard3_mongo3\\")\\n\\n```\\n\\n### Step 8 \u2013 Connect to mongos and convert data replicaset nodes to shards\\n\\n```javascript\\n\\nmongos>sh.addShard(\\"rs_mongo1/shard1_mongo1:27017,shard1_mongo2:27017,shard1_mongo3:27017\\")\\n \\nmongos>sh.addShard(\\"rs_mongo2/shard2_mongo1:27017,shard2_mongo2:27017,shard2_mongo3:27017\\")\\n \\nmongos>sh.addShard(\\"rs_mongo3/shard3_mongo1:27017,shard3_mongo2:27017,\\n\\n```\\n\\n### Step 9 \u2013 Connect to mongos and enable sharding on a test database \u201cEmployee\u201d\\n\\n```javascript\\nmongos> db.adminCommand({enableSharding : \\"employee\\"})\\n```\\n\\n### Step 10 \u2013 Generate test data ; Create an index on the key to be sharded and shard the collection\\n\\n```javascript\\nmongos> use employee\\nswitched to db employee\\n \\nmongos> for (var i = 0; i < 100000; i++) { db.emp_list2.insert({ \\"sr_no\\": \\"emp # \\" + i, \\"create_date\\": new Date() }); }\\n \\nmongos> db.emp_list2.ensureIndex({\\"sr_no\\" : \\"hashed\\"})\\n \\nmongos> sh.shardCollection(\\"employee.emp_list2\\", {\\"sr_no\\":\\"hashed\\"})\\n \\n{\\n \\"collectionsharded\\" : \\"employee.emp_list2\\",\\n \\"collectionUUID\\" : UUID(\\"17195baa-fc6c-4c3e-8a2b-58fb1278e40c\\"),\\n \\"ok\\" : 1,\\n \\"operationTime\\" : Timestamp(1633177398, 26),\\n \\"$clusterTime\\" : {\\n \\"clusterTime\\" : Timestamp(1633177398, 26),\\n \\"signature\\" : {\\n \\"hash\\" : BinData(0,\\"AAAAAAAAAAAAAAAAAAAAAAAAAAA=\\"),\\n \\"keyId\\" : NumberLong(0)\\n }\\n }\\n}\\n```\\n\\n### Step 11 \u2013 Validate sharding status\\n\\n```javascript \\n\\n\\nmongos> sh.status()\\n--- Sharding Status 
---\\n sharding version: {\\n \\"_id\\" : 1,\\n \\"minCompatibleVersion\\" : 5,\\n \\"currentVersion\\" : 6,\\n \\"clusterId\\" : ObjectId(\\"6157efd7982782e314f1b651\\")\\n }\\n shards:\\n { \\"_id\\" : \\"rs_mongo1\\", \\"host\\" : \\"rs_mongo1/shard1_mongo1:27017,shard1_mongo2:27017,shard1_mongo3:27017\\", \\"state\\" : 1 }\\n { \\"_id\\" : \\"rs_mongo2\\", \\"host\\" : \\"rs_mongo2/shard2_mongo1:27017,shard2_mongo2:27017,shard2_mongo3:27017\\", \\"state\\" : 1 }\\n { \\"_id\\" : \\"rs_mongo3\\", \\"host\\" : \\"rs_mongo3/shard3_mongo1:27017,shard3_mongo2:27017,shard3_mongo3:27017\\", \\"state\\" : 1 }\\n active mongoses:\\n \\"4.4.8\\" : 1\\n autosplit:\\n Currently enabled: yes\\n balancer:\\n Currently enabled: yes\\n Currently running: no\\n Failed balancer rounds in last 5 attempts: 0\\n Migration Results for the last 24 hours:\\n 682 : Success\\n databases:\\n { \\"_id\\" : \\"config\\", \\"primary\\" : \\"config\\", \\"partitioned\\" : true }\\n config.system.sessions\\n shard key: { \\"_id\\" : 1 }\\n unique: false\\n balancing: true\\n chunks:\\n rs_mongo1 342\\n rs_mongo2 341\\n rs_mongo3 341\\n too many chunks to print, use verbose if you want to force print\\n employee.emp_list2\\n shard key: { \\"sr_no\\" : \\"hashed\\" }\\n unique: false\\n balancing: true\\n chunks:\\n rs_mongo1 2\\n rs_mongo2 2\\n rs_mongo3 \\n```\\n\\n### Step 12 - Validate chunk distribution \\n```javascript\\n\\nmongos> db.getSiblingDB(\\"employee\\").emp_list2.getShardDistribution();\\n \\nShard rs_mongo1 at rs_mongo1/shard1_mongo1:27017,shard1_mongo2:27017,shard1_mongo3:27017\\n data : 2.09MiB docs : 33426 chunks : 2\\n estimated data per chunk : 1.04MiB\\n estimated docs per chunk : 16713\\n \\nShard rs_mongo3 at rs_mongo3/shard3_mongo1:27017,shard3_mongo2:27017,shard3_mongo3:27017\\n data : 2.09MiB docs : 33379 chunks : 2\\n estimated data per chunk : 1.04MiB\\n estimated docs per chunk : 16689\\n \\nShard rs_mongo2 at rs_mongo2/shard2_mongo1:27017,shard2_mongo2:27017,shard2_mongo3:27017\\n data : 2.08MiB docs : 33195 chunks : 2\\n estimated data per chunk : 1.04MiB\\n estimated docs per chunk : 16597\\n \\nTotals\\n data : 6.28MiB docs : 100000 chunks : 6\\n Shard rs_mongo1 contains 33.42% data, 33.42% docs in cluster, avg obj size on shard : 65B\\n Shard rs_mongo3 contains 33.37% data, 33.37% docs in cluster, avg obj size on shard : 65B\\n Shard rs_mongo2 contains 33.19% data, 33.19% docs in cluster, avg \\n\\n```"},{"id":"mongodb-rs-docker-persistent-volume","metadata":{"permalink":"/mongodb-rs-docker-persistent-volume","source":"@site/blog/2021-09-18-mongodb-rs-docker-persistent-volume.md","title":"MongoDB Replicaset with Persistent Volume using Docker Compose","description":"In this article we will see the steps required to create and configure MongoDB replicaset containers on persistent volumes using Docker Compose. Compose was developed to define, configure and spin-up multi-container docker applications with single command, further reducing . 
Extensive usage of Docker with several container management quickly becomes cumbersome, Compose overcomes this problem and allows to easily handle multiple containers at once using YAML configuration docker-compose.yml","date":"2021-09-18T00:00:00.000Z","formattedDate":"September 18, 2021","tags":[{"label":"mongodb","permalink":"/tags/mongodb"},{"label":"docker","permalink":"/tags/docker"},{"label":"replicaset","permalink":"/tags/replicaset"},{"label":"persistent-volume","permalink":"/tags/persistent-volume"}],"readingTime":2.795,"hasTruncateMarker":true,"authors":[{"name":"Vishal Gandhi","url":"https://github.com/ivishalgandhi","image_url":"https://github.com/ivishalgandhi.png","imageURL":"https://github.com/ivishalgandhi.png"}],"frontMatter":{"slug":"mongodb-rs-docker-persistent-volume","title":"MongoDB Replicaset with Persistent Volume using Docker Compose","authors":{"name":"Vishal Gandhi","url":"https://github.com/ivishalgandhi","image_url":"https://github.com/ivishalgandhi.png","imageURL":"https://github.com/ivishalgandhi.png"},"tags":["mongodb","docker","replicaset","persistent-volume"]},"prevItem":{"title":"Configure Sharding in MongoDB on Docker Containers","permalink":"/sharding-mongo-docker"},"nextItem":{"title":"Create MongoDB Standalone and Replica Set containers using Docker","permalink":"/create-mongodb-docker"}},"content":"In this article we will see the steps required to create and configure MongoDB replicaset containers on **persistent volumes** using [Docker Compose](https://docs.docker.com/compose/). Compose was developed to define, configure and spin-up multi-container docker applications with single command, further reducing . Extensive usage of Docker with several container management quickly becomes cumbersome, Compose overcomes this problem and allows to easily handle multiple containers at once using YAML configuration `docker-compose.yml`\\n\\n\x3c!--truncate--\x3e\\n\\n## Docker Compose Steps\\n\\n### Step 1: System Configuration\\n\\nTo run Compose, make sure you have installed Compose on your local system where Docker is installed. The Compose setup and installation instructions can be found here.\\n\\n### Step 2: Ensure mongo_net network bridge is already existing\\n\\n```shell\\n$ docker network create mongo_net\\n$ docker network inspect mongo_net \\n```\\n### Step 3: Lets convert the below command as seen in previous blog post to docker-compose.yml. If you are new to Docker and drafting compose files try using composerize to convert docker run commands into compose YAML output\\n\\n```shell \\n$ docker run -d -p 20003:27017 --name mongo3 --network mongo_net mongo:4.4.9-rc0 mongod --replSet rs_mongo\\n```\\nThere are few additional attributes passed in the `docker-compose.yml`. The difference in the options passed in the command line above and `docker-compose.yml` is as below\\n\\n- image: custom image uploaded to docker hub with additional utilities installed on ubuntu build\\nhostname: container host name\\n- volumes: map directory on the host file system to manage and store container data. In the below YAML i use separate directory for all 3 MongoDB replicaset. This helps in creating persistent data store for docker containers and doesn\u2019t bloat the container runtime instance.\\n- Pass mongod configuration options through file mongod.conf\\n\\nCreate the below YAML compose file in your favourite editor, i have been using Visual Studio Code. 
Save the file as docker-compose.yml\\n\\n\\n```shell\\n$ code .\\n\\n``` \\n\\n```yaml\\n#version: \\"3.3\\"\\nservices:\\n mongo_1:\\n image: ivishalgandhi/mongo-custom:latest\\n hostname: mongo_1\\n container_name: mongo_1\\n volumes:\\n - /Users/vishalgandhi/learning/docker/mongo_replset/mongo_1/mongod.conf:/etc/mongod.conf\\n - /Users/vishalgandhi/learning/docker/mongo_replset/mongo_1/initdb.d/:/docker-entrypoint-initdb.d/\\n - /Users/vishalgandhi/learning/docker/mongo_replset/mongo_1/data/db/:/data/db/\\n - /Users/vishalgandhi/learning/docker/mongo_replset/mongo_1/log/:/var/log/mongodb/\\n ports:\\n - 20003:27017\\n command: [\\"-f\\", \\"/etc/mongod.conf\\",\\"--replSet\\", \\"rs_mongo\\"]\\n network_mode: mongo_net\\n \\n mongo_2:\\n image: ivishalgandhi/mongo-custom:latest\\n hostname: mongo_2\\n container_name: mongo_2\\n volumes:\\n - /Users/vishalgandhi/learning/docker/mongo_replset/mongo_2/mongod.conf:/etc/mongod.conf\\n - /Users/vishalgandhi/learning/docker/mongo_replset/mongo_2/initdb.d/:/docker-entrypoint-initdb.d/\\n - /Users/vishalgandhi/learning/docker/mongo_replset/mongo_2/data/db/:/data/db/\\n - /Users/vishalgandhi/learning/docker/mongo_replset/mongo_2/log/:/var/log/mongodb/\\n ports:\\n - 20004:27017\\n command: [\\"-f\\", \\"/etc/mongod.conf\\",\\"--replSet\\", \\"rs_mongo\\"]\\n network_mode: mongo_net\\n \\n mongo_3:\\n image: ivishalgandhi/mongo-custom:latest\\n hostname: mongo_3\\n container_name: mongo_3\\n volumes:\\n - /Users/vishalgandhi/learning/docker/mongo_replset/mongo_3/mongod.conf:/etc/mongod.conf\\n - /Users/vishalgandhi/learning/docker/mongo_replset/mongo_3/initdb.d/:/docker-entrypoint-initdb.d/\\n - /Users/vishalgandhi/learning/docker/mongo_replset/mongo_3/data/db/:/data/db/\\n - /Users/vishalgandhi/learning/docker/mongo_replset/mongo_3/log/:/var/log/mongodb/\\n ports:\\n - 20005:27017\\n command: [\\"-f\\", \\"/etc/mongod.conf\\",\\"--replSet\\", \\"rs_mongo\\"]\\n network_mode: mongo_net\\n\\n```\\n\\n### Step 4: create mongod.conf\\n\\n```\\n$ code .\\n\\n```\\n\\n```YAML \\n# mongod.conf\\n \\n# for documentation of all options, see:\\n# http://docs.mongodb.org/manual/reference/configuration-options/\\n \\n# where to write logging data.\\nsystemLog:\\n destination: file\\n logAppend: true\\n path: /var/log/mongodb/mongod.log\\n \\n# Where and how to store data.\\nstorage:\\n dbPath: /data/db\\n journal:\\n enabled: true\\n engine: wiredTiger\\n \\n# network interfaces\\nnet:\\n port: 27017\\n bindIp: 127.0.0.1 \\n```\\n\\n### Step 5: Spin-up replicaset containers\\n\\n```shell \\n$ docker compose up -d\\n[+] Running 3/3\\n \u283f Container mongo_2 Created 0.2s\\n \u283f Container mongo_1 Created 0.2s\\n \u283f Container mongo_3 Created\\n```\\n\\n### Step 6: Initiate replicaset\\n\\n```shell\\n$ docker exec -it mongo_1 bash\\n\\nroot@mongo_1:/# mongo\\nrs_mongo:SECONDARY> rs.initiate(\\n {\\n _id: \u201crs_mongo\u201d,\\n version: 1,\\n members: [\\n { _id: 0, host : \u201cmongo_1:27017\u201d },\\n { _id: 1, host : \u201cmongo_2:27017\u201d },\\n { _id: 2, host : \u201cmongo_3:27017\u201d }\\n ]\\n }\\n)\\n \\nrs_mongo:SECONDARY> db.isMaster() \\n{\\n \\"topologyVersion\\" : {\\n \\"processId\\" : ObjectId(\\"614615744d54c08963ef67f6\\"),\\n \\"counter\\" : NumberLong(6)\\n },\\n \\"hosts\\" : [\\n \\"mongo_1:27017\\",\\n \\"mongo_2:27017\\",\\n \\"mongo_3:27017\\"\\n ],\\n \\"setName\\" : \\"rs_mongo\\",\\n \\"setVersion\\" : 1,\\n \\"ismaster\\" : true,\\n \\"secondary\\" : false,\\n \\"primary\\" : \\"mongo_2:27017\\",\\n \\"me\\" : 
\\"mongo_2:27017\\",\\n\\n```"},{"id":"create-mongodb-docker","metadata":{"permalink":"/create-mongodb-docker","source":"@site/blog/2021-09-12-create-mongodb-docker.md","title":"Create MongoDB Standalone and Replica Set containers using Docker","description":"Docker Containers offer easy setup, customization and scalability. In this article, i will walk you through how to use Docker to setup MongoDB standalone and replica set containers within minutes.","date":"2021-09-12T00:00:00.000Z","formattedDate":"September 12, 2021","tags":[{"label":"mongodb","permalink":"/tags/mongodb"},{"label":"containers","permalink":"/tags/containers"},{"label":"docker","permalink":"/tags/docker"},{"label":"mongo-replicaset","permalink":"/tags/mongo-replicaset"}],"readingTime":4.65,"hasTruncateMarker":true,"authors":[{"name":"Vishal Gandhi","url":"https://github.com/ivishalgandhi","image_url":"https://github.com/ivishalgandhi.png","imageURL":"https://github.com/ivishalgandhi.png"}],"frontMatter":{"slug":"create-mongodb-docker","title":"Create MongoDB Standalone and Replica Set containers using Docker","authors":{"name":"Vishal Gandhi","url":"https://github.com/ivishalgandhi","image_url":"https://github.com/ivishalgandhi.png","imageURL":"https://github.com/ivishalgandhi.png"},"tags":["mongodb","containers","docker","mongo-replicaset"]},"prevItem":{"title":"MongoDB Replicaset with Persistent Volume using Docker Compose","permalink":"/mongodb-rs-docker-persistent-volume"}},"content":"Docker Containers offer easy setup, customization and scalability. In this article, i will walk you through how to use Docker to setup MongoDB standalone and replica set containers within minutes.\\n\\nThe article is divided in two parts, the first part is setting up the standalone MongoDB container and second part is setting up and grouping MongoDB containers as member of replica set with Docker.\\n\\nLet\u2019s get started.\\n\\n\x3c!--truncate--\x3e\\n\\n## System Configuration\\n\\nTo run this setup, Docker Engine is required to be installed on the system. Follow the official documentation to setup Docker Engine on your system.\\n\\n:::caution\\n\\nThe steps and configuration for both standalone and replica set is not to be used for production deployment. The intended use is only for setting up a environment to support learning of MongoDB.\\n\\n:::\\n\\n## Standalone MongoDB Setup\\n\\n* Pull the Docker MongoDB official image from Docker Hub. The following code snippet demonstrates pulling the docker MongoDB 4.4.9 release. 
To pull the MongoDB 5.0 latest release replace :4.4.9-rc0 with :latest tag\\n\\n```shell \\n\\n$ docker pull mongo:4.4.9-rc0 \\n\\n```\\n\\n* To check if the the image pull from Docker Hub was successful\\n\\n\\n```\\n\\n$ docker images \\nREPOSITORY TAG IMAGE ID CREATED SIZE\\nmongo 4.4.9-rc0 24599d6cde30 9 days ago 413MB\\nmongo latest 31299b956c79 10 days ago 642MB\\n\\n```\\n\\n* Lets start first standalone container \u2013 the below command starts MongoDB docker container with name mongo_449 in detached mode using the 4.4.9-rc0 image\\n\\n```shell\\n\\n$ docker run --name mongo_449 -d mongo:4.4.9-rc0\\n\\n```\\n\\n* List the container status and health by executing\\n\\n```shell\\n\\n$ docker container ls -a\\n\\nCONTAINER ID IMAGE COMMAND CREATED STATUS PORTS NAMES\\n96e64ec525a2 24599d6cde30 \\"docker-entrypoint.s\u2026\\" 2 hours ago Up 33 minutes 27017/tcp mongo_449\\n\\n```\\n\\n* To run a command inside the container\\n * docker exec: interact with containers (running/up mode)\\n * -i : interactive STDIN open even if not attached to the container\\n * -t: pseudo TTY\\n\\n\\n* Connect to MongoDB daemon\\n\\n```shell\\nroot@96e64ec525a2:/# mongo\\n\\nMongoDB shell version v4.4.9-rc0\\nconnecting to: mongodb://127.0.0.1:27017/?compressors=disabled&gssapiServiceName=mongodb\\nImplicit session: session { \\"id\\" : UUID(\\"ac624a79-908b-4580-90ae-22d0a7aee07a\\") }\\nMongoDB server version: 4.4.9-rc0\\n\\n```\\n\\n* Install utilities. The utilities ping, systemctl, sudo installed in the containers can be used for troubleshooting during the setup of Docker containers.\\n\\n```shell\\n\\nroot@96e64ec525a2:/# apt-get install iputils-ping\u0335\\nroot@96e64ec525a2:/# apt-get install sudo \\nroot@96e64ec525a2:/# apt-get install systemctl\\n\\n```\\n\\nThis finishes the setup of standalone MongoDB Container. Now let\u2019s look at ReplicaSet setup.\\n\\n## Creating MongoDB ReplicaSet using Docker\\n\\nA replica set consists of a primary node together with two or more secondary nodes. It is recommended to group three or more nodes, with an odd number of total nodes. The primary node accepts all the write requests which are propagated synchronously or asynchronously to the secondary nodes. Below are the steps required to complete the replica set setup using Docker.\\n\\nCreate a new network(bridge) within Docker. The replica set containers will be mapped to the new network.\\n\\n```shell \\n$ docker network create mongo_net\\n$ docker network inspect mongo_net \\n[\\n {\\n \\"Name\\": \\"mongo_net\\",\\n \\"Id\\": \\"e2567806642a9245436371a9b9904c71fadae969fbd11a7bb8203e07976b1b2a\\",\\n \\"Created\\": \\"2021-09-11T00:36:33.989688708Z\\",\\n \\"Scope\\": \\"local\\",\\n \\"Driver\\": \\"bridge\\",\\n \\"EnableIPv6\\": false,\\n \\"IPAM\\": {\\n \\"Driver\\": \\"default\\",\\n \\"Options\\": {},\\n \\"Config\\": [\\n {\\n \\"Subnet\\": \\"172.18.0.0/16\\",\\n \\"Gateway\\": \\"172.18.0.1\\"\\n }\\n ]\\n },\\n...\\n]\\n```\\n\\n* Start 3 containers \u2013 Primary Secondary Secondary\\n * Break down of parameters docker run : start a new container\\n * `-d` : run the container in detached mode\\n * `-p 20001:27017` publish container port to the host and bind 27017 to 20001 on the host. 
This is useful if connecting mongo client like mongosh to container\\n * `--name` : name of the mongo container\\n * `-- network` : connect to user created network mongo_net\\n * `mongo:4.4.9-rc0` : Docker MongoDB image\\n * `mongod --replSet rs_mongo` : run the mongod daemon and add the container to replica set name rs_mongo\\n\\n```shell \\n$ docker run -d -p 20001:27017 --name mongo1 --network mongo_net mongo:4.4.9-rc0 mongod --replSet rs_mongo\\n$ docker run -d -p 20002:27017 --name mongo2 --network mongo_net mongo:4.4.9-rc0 mongod --replSet rs_mongo\\n$ docker run -d -p 20003:27017 --name mongo3 --network mongo_net mongo:4.4.9-rc0 mongod --replSet rs_mongo\\n```\\n\\n* Set up Replica set. Connect to one of the containers and run the below commands. The container that receives the initiate will pass on the configuration to other containers assigned as members.\\n\\n```js\\nrs_mongo [direct: primary] test_2> config = {\\n \\"_id\\" : \\"rs_mongo\\",\\n \\"members\\" : [\\n {\\n \\"_id\\" : 0,\\n \\"host\\" : \\"mongo1:27017\\"\\n },\\n {\\n \\"_id\\" : 1,\\n \\"host\\" : \\"mongo2:27017\\"\\n },\\n {\\n \\"_id\\" : 2,\\n \\"host\\" : \\"mongo3:27017\\"\\n }\\n ]\\n }\\n\\nrs_mongo [direct: primary] admin> rs.initiate(config)\\n\\n//Insert test data\\n\\nrs_mongo [direct: primary] admin> use test_2\\nrs_mongo [direct: primary] test_2> db.employees.insert({name: \\"vishal\\")\\n\\n//To read queries on secondary run setReadPref. \\nrs_mongo [direct: secondary] test_2>db.getMongo().setReadPref(\'secondary\')\\n\\nrs_mongo [direct: secondary] test_2> db.employees.find()\\n[\\n { _id: ObjectId(\\"613c99801ea796508e3c73f5\\"), name: \'vishal\' }\\n]\\n\\n```\\n\\n* Validate Replica Set Configuration\\n\\n```js\\nrs_mongo [direct: primary] test_2> db.printReplicationInfo()\\n\\nconfigured oplog size\\n\'557174 MB\'\\n---\\nlog length start to end\\n\'71372 secs (19.83 hrs)\'\\n---\\noplog first event time\\n\'Sat Sep 11 2021 15:47:21 GMT+0530 (India Standard Time)\'\\n---\\noplog last event time\\n\'Sun Sep 12 2021 11:36:53 GMT+0530 (India Standard Time)\'\\n---\\nnow\\n\'Sun Sep 12 2021 11:36:54 GMT+0530 (India Standard Time)\'\\n\\n\\nrs_mongo [direct: primary] test_2> rs.conf()\\n{\\n _id: \'rs_mongo\',\\n version: 1,\\n term: 1,\\n protocolVersion: Long(\\"1\\"),\\n writeConcernMajorityJournalDefault: true,\\n members: [\\n {\\n _id: 0,\\n host: \'mongo1:27017\',\\n arbiterOnly: false,\\n buildIndexes: true,\\n hidden: false,\\n priority: 1,\\n tags: {},\\n slaveDelay: Long(\\"0\\"),\\n votes: 1\\n },\\n {\\n _id: 1,\\n host: \'mongo2:27017\',\\n arbiterOnly: false,\\n buildIndexes: true,\\n hidden: false,\\n priority: 1,\\n tags: {},\\n slaveDelay: Long(\\"0\\"),\\n votes: 1\\n },\\n {\\n _id: 2,\\n host: \'mongo3:27017\',\\n arbiterOnly: false,\\n buildIndexes: true,\\n hidden: false,\\n priority: 1,\\n tags: {},\\n slaveDelay: Long(\\"0\\"),\\n votes: 1\\n }\\n\\n```\\nThat concludes this article."}]}')}}]); \ No newline at end of file diff --git a/assets/js/2e801cce.eb291cdc.js b/assets/js/2e801cce.eb291cdc.js deleted file mode 100644 index c133ae2..0000000 --- a/assets/js/2e801cce.eb291cdc.js +++ /dev/null @@ -1 +0,0 @@ -"use strict";(self.webpackChunk=self.webpackChunk||[]).push([[9450],{6029:e=>{e.exports=JSON.parse('{"blogPosts":[{"id":"fav-open-source-repo","metadata":{"permalink":"/fav-open-source-repo","source":"@site/blog/2022-12-30-fav-open-source-repo.md","title":"My favorite Open Source Projects in 2022","description":"Open Source is a great way to learn and contribute to the 
community. With a nearly endless array of open source projects available to consume and contribute to these days, Need some inspiration? Whether you\u2019re new to the world of open source, are gearing up for Open Source in the new year, or just want to see what other folks are excited about, check out a few of my favorite open source projects i discovered during 2022.","date":"2022-12-30T00:00:00.000Z","formattedDate":"December 30, 2022","tags":[{"label":"open-source","permalink":"/tags/open-source"},{"label":"2022","permalink":"/tags/2022"},{"label":"favourite-repo","permalink":"/tags/favourite-repo"}],"readingTime":6.49,"hasTruncateMarker":true,"authors":[{"name":"Vishal Gandhi","url":"https://github.com/ivishalgandhi","image_url":"https://github.com/ivishalgandhi.png","imageURL":"https://github.com/ivishalgandhi.png"}],"frontMatter":{"slug":"fav-open-source-repo","title":"My favorite Open Source Projects in 2022","authors":{"name":"Vishal Gandhi","url":"https://github.com/ivishalgandhi","image_url":"https://github.com/ivishalgandhi.png","imageURL":"https://github.com/ivishalgandhi.png"},"tags":["open-source","2022","favourite-repo"]},"nextItem":{"title":"Documentation as a code","permalink":"/doc-as-code"}},"content":"Open Source is a great way to learn and contribute to the community. With a nearly endless array of open source projects available to consume and contribute to these days, Need some inspiration? Whether you\u2019re new to the world of open source, are gearing up for Open Source in the new year, or just want to see what other folks are excited about, check out a few of my favorite open source projects i discovered during 2022.\xa0 \\n\\n\x3c!--truncate--\x3e\\n\\n|Category|Repository|Site URL|Description|\\n| --- | --- | --- | -- |\\n| **Browser** | [**LibreWolf**](https://gitlab.com/librewolf-community) | https://librewolf.net/ | LibreWolf is a privacy-oriented web browser based on Firefox | \\n| **Browser** | [**Brave**](https://github.com/brave/brave-browser) | https://brave.com/ | Brave is a privacy-oriented web browser based on Chromium |\\n|**Task Runner** | [**go-task**](https://github.com/go-task/task) | https://taskfile.dev/ | Task is a task runner / simpler Make alternative written in Go |\\n| **Data Analysis** | [**Pandas**](https://github.com/pandas-dev/pandas) | https://pandas.pydata.org/ | Pandas is a data analysis library for Python |\\n| **Object Storage** | [**Minio**](https://github.com/minio/minio) | https://min.io/ | Minio is an object storage server that is compatible with Amazon S3 |\\n| **Terminal** | [**Tabby**](https://github.com/Eugeny/tabby) | https://tabby.sh/ | A Terminal for the modern age |\\n| **Terminal** | [**Tmux**](https://github.com/tmux/tmux) | https://github.com/tmux/tmux | tmux is a terminal multiplexer: it enables a number of terminals to be created, accessed, and controlled from a single screen. tmux may be detached from a screen and continue running in the background, then later reattached.\\n| **Terminal** | [**Tmuxinator**](https://github.com/tmuxinator/tmuxinator) | https://github.com/tmuxinator/tmuxinator | Tmuxinator is a tool for managing complex tmux sessions easily. |\\n| **Code Editor** | [**Visual Studio Code**](http://https://github.com/microsoft/vscode) | https://code.visualstudio.com/ | Visual Studio Code is a code editor redefined and optimized for building and debugging modern web and cloud applications. 
Built on top of [Electron](https://github.com/electron/electron) |\\n|**Code Editor** | [**neovim**](https://github.com/neovim/neovim) | https://neovim.io/ | Neovim is a text editor based on Vim. Last few months, i have been using `neovim` more often compared to `Visual Studio Code` |\\n|**Productivity - Note Taking** | [**Dendron**](https://github.com/dendronhq/dendron) | https://dendron.so/ | Dendron is my goto note taking tool. Its available as a plugin for VSCode and allows hirearchy and graph based note taking. Awesome solution to mantain a `second-brain` |\\n|**Productivity - Bookmark Manager** | [**Buku**](https://github.com/jarun/buku) | https://github.com/jarun/buku#quickstart | Buku is a command-line bookmark manager. |\\n|**Private Network VPN** | [**Tailscale**](https://github.com/tailscale/tailscale) | https://tailscale.com/ | Tailscale is a private network VPN. Helps me create a private network for all my home lab machines and devices |\\n|**Tunnelling** | [**ngrok**](https://github.com/inconshreveable/ngrok) | https://ngrok.com/ | ngrok exposes local servers behind NATs and firewalls to the public internet over secure tunnels. | \\n|**Tunnelling** | [**localtunnel**](https://github.com/localtunnel/localtunnel) | https://localtunnel.github.io/www/ | localtunnel exposes your localhost to the world for easy testing and sharing! No need to mess with DNS or deploy just to have others test out your changes. |\\n|**Containers** | [**Podman**](https://github.com/containers/podman) | https://podman.io/ | Podman is a daemonless, open source, Linux native tool designed to make it easy to find, run, build, share and deploy applications using Open Containers Initiative (OCI) Containers and Container Images. |\\n|**Container Scheduling and Management** | [**Kubernetes**](https://github.com/kubernetes/kubernetes) | https://kubernetes.io/ | Kubernetes is an open-source system for automating deployment, scaling, and management of containerized applications. |\\n|**Static Site Generator** | [**Docusaurus**](https://github.com/facebook/docusaurus) | https://docusaurus.io/ | Docusaurus is a static site generator written in JavaScript. Build optimized websites quickly, focus on your content. |\\n|**Static Site Generator** | [**Hugo**](https://github.com/gohugoio/hugo) | https://gohugo.io/ | Hugo is a static site generator written in `Go`.|\\n| **Diagram as a Code** | [**Mermaid**](https://github.com/mermaid-js/mermaid) | https://mermaid.js.org/ | Generation of diagrams like flowcharts or sequence diagrams from text in a similar manner as markdown |\\n| **Diagram as a Code** | [**PlantUML**](https://github.com/plantuml/plantuml) | https://plantuml.com/ | Generate diagrams from textual description \\n| **Diagram as a Code** | [**Draw.io**](https://github.com/jgraph/drawio) | https://app.diagrams.net/ | draw.io, this project, is a configurable diagramming/whiteboarding visualization application. draw.io is jointly owned and developed by JGraph Ltd and draw.io AG. |\\n| **Diagram as a Code** | [**Excalidraw**](https://github.com/excalidraw/excalidraw) | https://excalidraw.com/ | Excalidraw is a free software that offers a whiteboard tool that lets you easily sketch diagrams with a hand-drawn feel. 
Another features are the collaborative mode, and the ability to export the diagrams to PNG or SVG formats, and to save them locally in a JSON format |\\n| **Diagram as a Code** | [**mingrammer**](https://github.com/mingrammer/diagrams) | https://diagrams.mingrammer.com/ | Diagrams as code for prototyping cloud system architecture. |\\n|**Web Framework** | [**Gin**](https://github.com/gin-gonic/gin) | https://gin-gonic.com/ | Gin is a HTTP web framework written in Go (Golang). It features a Martini-like API, but with performance up to 40 times faster than Martini. If you need smashing performance, get yourself some Gin.|\\n|**Web Framework** | [**FAST API**](https://github.com/tiangolo/fastapi) | https://fastapi.tiangolo.com/ | FastAPI is a Web framework for developing RESTful APIs in Python. FastAPI is based on Pydantic and type hints to validate, serialize, and deserialize data, and automatically auto-generate OpenAPI documents. It fully supports asynchronous programming and can run with Gunicorn and ASGI servers for production such as Uvicorn and Hypercorn. To improve developer-friendliness, editor support was considered since the earliest days of the project. |\\n|**Web Framework** | [**Astro**](https://github.com/withastro/astro) | https://astro.build/ | Astro works with your favorite content sources. Pull content from the filesystem or fetch it remotely from your favorite CMS, database, or API. Astro supports both static output\xa0(SSG) and live server output\xa0(SSR) that can render your content on-demand. |\\n|**Search Engine** | [**Typesense**](https://github.com/typesense/typesense) | https://typesense.org/ | Typesense is a modern, privacy-friendly, open source search engine built from the ground up using cutting-edge search algorithms, that take advantage of the latest advances in hardware capabilities. |\\n| **Nocode Platform** | [**NOCODB**](https://github.com/nocodb/nocodb) | https://nocodb.com/ | NocoDB is an open-source low-code platform for building and managing internal tools and turning your SQL Databases into a smart spreadsheet. It is a self-hosted alternative to Airtable, Notion, and Airtable. |\\n| **Distributed Database** | [**rqlite**](https://github.com/rqlite/rqlite) | https://rqlite.com/ | rqlite is an easy-to-use, lightweight, distributed relational database, which uses SQLite as its storage engine. rqlite is simple to deploy, operating it is very straightforward, and its clustering capabilities provide you with fault-tolerance and high-availability. \\n|**Multi-modal Database** | [**SurrealDB**](https://github.com/surrealdb/surrealdb) | https://surrealdb.com/ | SurrealDB combines the database layer, the querying layer, and the API and authentication layer into one platform. Advanced table-based and row-based customisable access permissions allow for granular data access patterns for different types of users. There\'s no need for custom backend code and security rules with complicated database development. |\\n|**Multi-modal Database** | [**ArangoDB**](https://github.com/arangodb/arangodb) | https://www.arangodb.com/ | ArangoDB is a free and open-source native graph database system developed by ArangoDB Inc. ArangoDB is a multi-model database system since it supports three data models with one database core and a unified query language AQL. AQL is mainly a declarative language and allows the combination of different data access patterns in a single query. 
|\\n| **Git for Data** | [**Dolt**](https://github.com/dolthub/dolt) | https://dolthub.com/ | Dolt is a SQL database that you can fork, clone, branch, merge, push and pull just like a Git repository. Connect to Dolt just like any MySQL database to run queries or update the data using SQL commands. Use the command line interface to import CSV files, commit your changes, push them to a remote, or merge your teammate\'s changes.|\\n|**Personal Finance** | [**Firefly III**](https://github.com/firefly-iii/firefly-iii) | https://firefly-iii.org/ | \\"Firefly III\\" is a (self-hosted) manager for your personal finances. It can help you keep track of your expenses and income, so you can spend less and save more. Firefly III supports the use of budgets, categories and tags. Using a bunch of external tools, you can import data. It also has many neat financial reports available. |\\n| **Monitoring and TSDB** | [Prometheus](https://github.com/prometheus/prometheus) | https://prometheus.io/ | Prometheus is a systems and service monitoring system. It collects metrics from configured targets at given intervals, evaluates rule expressions, displays the results, and can trigger alerts if some condition is observed to be true. |"},{"id":"doc-as-code","metadata":{"permalink":"/doc-as-code","source":"@site/blog/2022-10-22-doc-as-code.md","title":"Documentation as a code","description":"Documentation is the most critical activity of any product development. The engineer and user experience improve when there is up-to-date Documentation. Most often, in organizations and products, Documentation is an afterthought, and this is not a good practice. If we want more engineers to contribute to the product, Documentation should be considered as code and part of the product development. Engineers should be encouraged to write Documentation before writing the source code.","date":"2022-10-22T00:00:00.000Z","formattedDate":"October 22, 2022","tags":[{"label":"docops","permalink":"/tags/docops"},{"label":"doc-as-code","permalink":"/tags/doc-as-code"}],"readingTime":4.225,"hasTruncateMarker":true,"authors":[{"name":"Vishal Gandhi","url":"https://github.com/ivishalgandhi","image_url":"https://github.com/ivishalgandhi.png","imageURL":"https://github.com/ivishalgandhi.png"}],"frontMatter":{"slug":"doc-as-code","title":"Documentation as a code","authors":{"name":"Vishal Gandhi","url":"https://github.com/ivishalgandhi","image_url":"https://github.com/ivishalgandhi.png","imageURL":"https://github.com/ivishalgandhi.png"},"tags":["docops","doc-as-code"]},"prevItem":{"title":"My favorite Open Source Projects in 2022","permalink":"/fav-open-source-repo"},"nextItem":{"title":"Understanding REST API Design Rules","permalink":"/rest-api-design-rules"}},"content":"Documentation is the most critical activity of any product development. The engineer and user experience improve when there is up-to-date Documentation. Most often, in organizations and products, Documentation is an afterthought, and this is not a good practice. If we want more engineers to contribute to the product, Documentation should be considered as code and part of the product development. Engineers should be encouraged to write Documentation before writing the source code.\\n\\n\x3c!--truncate--\x3e\\n\\n In many organizations, Documentation is everywhere, but it can be challenging to find. It is often written in various formats, and it is sometimes unclear who is responsible for it. It also needs to be clarified how to contribute to it. 
Confidence in Documentation could be higher if engineers spent more time writing; there is more incentive to write, and setting up a culture to write docs as part of engineering workflow contributes to Engineer Productivity which is a crucial metric for any organization.\\n\\nThe product engineering teams must identify workflows to integrate Documentation into the existing process to solve the challenges listed below.\\n* The Documentation is not part of the codebase\\n* The Documentation is not part of the CI/CD pipeline\\n* The method of writing Documentation is not integrated into the engineering workflow\\n* The Documentation is not reviewed and tested\\n* The Documentation is written in a separate tool and is not version controlled\\n\\nDocumentation will never be part of engineering culture unless integrated into the codebase and workflow.\\n\\n## What is Docs as Code?\\n\\n* Store the source file version of Documentation in a version control system like Git\\n* Automatically build doc artifacts \\n* Publish artifacts without human intervention\\n\\n## Why Docs as Code?\\n\\n* The Documentation evolves with the code. The flowchart, System Architecture and other diagrams will be up-to-date as the code changes\\n\\n* Long release cycles may result in logic or flowchart being forgotten or outdated\\n\\n* Consistency is critical for the adoption of Docs as a code. Teams can collaborate on the Documentation and can ensure that the Documentation is consistent across the product\\n\\n* Collaboration across product teams is the critical piece of why Documentation should be considered a code\\n\\n* Documentation can be reviewed and approved by the team members\\n\\n* Centralized Internal Documentation framework and familiar structured Documentation for all the products \\n\\n* Track Documentation mistakes as bugs\\n\\n* Documentation can be versioned, tested, and tracked\\n\\n* Manage the complexity around the documentation process\\n\\n* Visualize the Documentation in the form of diagrams, flowcharts, and images\\n\\n* Engineer can use other tools to model dependencies. For example, the Product team can use Mermaid to model the flowchart, system architecture, class diagram, and sequence diagrams\\n\\n* Avoid effort to redo the Documentation when a team member leaves the organization. \\n\\n* The product team can automate Workflows can be automated to generate the Documentation\\n \\n* Makes Documentation standout with [Markdown](https://www.markdownguide.org/)\\n\\n :::info\\n Markdown is a simple, lightweight markup language that is easy to learn and use for plain text formatting and conversion to HTML and many other formats using a tool. Markdown is often used to format readme files, write messages in online discussion forums, and create rich text using a plain text editor.\\n :::\\n\\n\\n```mermaid\\n\\nflowchart\\n\\nA[Start] --\x3e B[Engineer writes Documentation and Code]\\n B --\x3eC[Engineer Commits Documentation and Code]\\n C --\x3eD[Code Review and Testing]\\n D --\x3eE[Documentation Review and Testing]\\n E --\x3eF{Release}\\n F --\x3e|Yes|G[Documentation is published]\\n F --\x3e|No|B\\n G --\x3eH[End]\\n```\\n \\n## Types of Documentation \\n\\nThe most common types of Documentation for every product are:\\n\\n- Long-form \\n - FAQs, User Guides, Tutorials, How-to Guides, etc.\\n\\n- Functional \\n - REST API Documentation, SDK Documentation, etc.\\n\\n## How to do Docs as a Code?\\n\\n* Version your Documentation. 
Just as you version your code, you should version your Documentation. Versioning allows tracking changes and rollbacks to previous versions if necessary.\\n* Integrate Documentation with CI/CD pipeline. CI/CD Integration will allow you to automate the process of generating Documentation and publishing it to a central location\\n* Start with Proof of Concept and extend to all the products gradually \\n* Choose a static site generator (Documentation Tool) that can be integrated with the CI/CD pipeline\\n\\n## Docs As Code Tools \\n\\n- Static Site Generators \\n They are used for Long form documentation. Allows integration of diagrams, flowcharts, images, etc.\\n\\n - [Docusaurus](http://docusaurus.io), [Hugo](https://gohugo.io), [Gatsby](https://www.gatsbyjs.com), [Jekyll](https://jekyllrb.com), [MkDocs](https://www.mkdocs.org) etc.\\n\\n- Diagram as a code\\n \\n Allows creating diagrams, flowcharts, etc., in a code format. Think of documenting and visualizing a complex system architecture in a code format.\\n\\n * [Mermaid](https://mermaid-js.github.io/mermaid/#/), [PlantUML](https://plantuml.com/), [Graphviz](https://graphviz.org/), [Draw.io](https://www.draw.io/), [mingrammer/Diagrams](https://diagrams.mingrammer.com)\\n- Source code-based document generators \\n * [Sphinx](https://www.sphinx-doc.org/en/master/)\\n- System documentation generators\\n * [ronn](https://github.com/rtomayko/ronn)\\n\\n\\n## Final Thoughts\\n\\n\\nEverything(Infrastructure, Monitoring, Code, Containers, Documentation) as a code is already a reality. For some organizations, the shift to treating Documentation as a code is a complex overhaul of expectations, attitudes, processes, and toolsets. Once implemented, it will vastly improve the engineer and user experience. For open-source projects, it is even more essential to have good Documentation. It is a great way to attract new contributors and users. \\n\\n\\n## References\\n\\n- [DocOps](https://www.writethedocs.org/guide/doc-ops/#what-is-docops-anyway)"},{"id":"rest-api-design-rules","metadata":{"permalink":"/rest-api-design-rules","source":"@site/blog/2022-10-02-rest-api-design-rules.md","title":"Understanding REST API Design Rules","description":"Introduction to REST API","date":"2022-10-02T00:00:00.000Z","formattedDate":"October 2, 2022","tags":[{"label":"rest","permalink":"/tags/rest"},{"label":"restapi","permalink":"/tags/restapi"},{"label":"rest-api-design-rules","permalink":"/tags/rest-api-design-rules"}],"readingTime":10.195,"hasTruncateMarker":true,"authors":[{"name":"Vishal Gandhi","url":"https://github.com/ivishalgandhi","image_url":"https://github.com/ivishalgandhi.png","imageURL":"https://github.com/ivishalgandhi.png"}],"frontMatter":{"slug":"rest-api-design-rules","title":"Understanding REST API Design Rules","authors":{"name":"Vishal Gandhi","url":"https://github.com/ivishalgandhi","image_url":"https://github.com/ivishalgandhi.png","imageURL":"https://github.com/ivishalgandhi.png"},"tags":["rest","restapi","rest-api-design-rules"]},"prevItem":{"title":"Documentation as a code","permalink":"/doc-as-code"},"nextItem":{"title":"Understanding MongoDB Replicasets and Write Concern - Part 1","permalink":"/mongodb-replicaset-write-concern-read-pref"}},"content":"## Introduction to REST API \\n\\nREST (Representational State Transfer) is an architectural style for building distributed systems. A Web API conforms to the REST architectural style, called RESTful Web API. 
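\\n\\nAs a small illustration (a sketch only; the host, version prefix and author URI reuse the sample values that appear later in this article), a client retrieves a single author resource with a plain HTTP GET and receives a JSON representation plus a status code:\\n\\n```JS\\n// Illustrative client call; assumes Node 18+ (or a browser) and an async / ES module context for await\\nconst response = await fetch(\'http://api.blog.com/v1/authors/vishal-gandhi\', {\\n  headers: { Accept: \'application/json\' },\\n});\\n\\nconsole.log(response.status); // expected: 200 for an existing resource\\nconst author = await response.json(); // JSON representation of the author resource\\n```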
\\n\\nREST APIs are stateless, client-server, cacheable, layered systems designed around resources. The set of resources is known as the REST API\'s _resource model_\\n\\n\x3c!--truncate--\x3e\\n\\n```mermaid\\n\\nsequenceDiagram\\n\\nparticipant Client\\nparticipant Web API \\nParticipant Web Service \\nClient->>Web API: Request\\nWeb API->>Web Service: Request\\nWeb Service->>Web API: Response\\nWeb API->>Client: Response\\n\\n```\\n\\nREST APIs are one of the most common and fundamental ways to expose data and functionality as web services. REST APIs use HTTP requests to GET, PUT, POST, and DELETE data. \\n\\nAn adequately designed REST API should be easy to understand, use, and evolve over time. It will help clients and browser apps consume the API more efficiently. \\n\\nBefore designing and developing a REST API, we need to seek answers to the following questions:\\n\\n- What are URI Paths? Structure of URI Path segments? \\n- When to use plural nouns or verbs for URI Path segments?\\n- What is the HTTP response status code, and how to use it in a specific scenario? \\n- How to map non-CRUD operations to HTTP methods? \\n\\n## Understanding Uniform Resource Identifier (URI)\\n\\nREST APIs use Uniform Resource Identifiers (URIs) to identify resources. A resource is any information that can be named. Resources are separated by forward slashes (/). A good URI should be short, easy to remember, and should give the user an idea about the resource.\\n\\n### URI Format\\n\\nThe URI format is as follows:\\n\\nURI = scheme \\"://\\" host [ \\":\\" port ] [ \\"/\\" path ] [ \\"?\\" query ] [ \\"#\\" fragment ]\\n\\n```http request\\nhttp:////\\n```\\n\\n### URI Resource Model \\n\\nHeader|Description\\n------|-----------\\nDocument | A document resource is similar to database record or instance of an object. It is a single resource that can be retrieved, created, updated, or deleted.
For example, information about a blog author is a document resource: `http://api.blog.com/authors/vishal-gandhi`\\nCollection | A collection resource is a server-managed directory of resources. For example, a list of blog authors is a collection resource: `http://api.blog.com/authors`\\nStore | A store is a repository managed by the client. Using a store resource, the client can create, update, delete and retrieve documents: `http://api.blog.com/store/authors/vishal-gandhi`\\nController | A controller resource models a procedural concept: a resource that represents a procedure which can be invoked, with POST as the method used to invoke it. For example, the following URI models a controller resource that can be invoked to send an email:
```POST /api.blog.com/email/email/send```
``` {Collection}/{Store}/{Document}/{Controller} ```\\n\\n## REST API Design Rules \\n\\n### URI \\n\\n- **Rule : Forward Slash (/) is used to separate resources in the URI and indicate a hierarchical relationship**\\n\\nA trailing forward slash (/) is not required as the last character of a URI. Many web servers automatically redirect requests with a trailing forward slash to the same URI without the trailing forward slash.\\n\\n\\n- **Rule : Use plural nouns for URI Path segments that represent collections or resources**\\n\\n- **Rule : Use HTTP Methods to Perform Operations on Resources**\\n\\nHTTP methods are used to perform operations on resources. The following table lists the HTTP methods and their corresponding operations:\\n\\n| HTTP Method | Operation |\\n| ----------- | --------- |\\n| GET | Retrieve a resource |\\n| POST | Create a resource |\\n| PUT | Update a resource |\\n| DELETE | Delete a resource |\\n| PATCH | Update a resource with Partial data |\\n\\n\\n```JS\\nconst express = require(\'express\');\\nconst bodyParser = require(\'body-parser\');\\nconst app = express();\\nconst port = 3000;\\n\\napp.use(bodyParser.json());\\n\\napp.get(\'/authors\', (req, res) => {\\n\\nres.send(\'Authors List\'); \\n\\n//get author list from Sql lite backend \\n\\nres.json(authors);\\n\\n});\\n \\napp.post(\'/authors\', (req, res) => {\\n \\n res.send(\'Add Author\');\\n\\n //add author to Sql lite backend\\n\\n res.json(author);\\n\\n});\\n\\n\\n//update an author\\n\\napp.put(\'/authors/:id\', (req, res) => {\\n\\n res.send(\'Update Author\');\\n\\n res.json(author);\\n\\n});\\n\\n//delete an author\\n\\napp.delete(\'/authors/:id\', (req, res) => {\\n\\n res.send(\'Delete Author\');\\n\\n res.json(author);\\n\\n});\\n\\napp.patch(\'/authors/:id\', (req, res) => {\\n\\n res.send(\'Update Author Email\');\\n\\n res.json(author);\\n\\n}); \\n\\n\\napp.listen(port, () => {\\n console.log(`Blog Example app listening at http://localhost:${port}`);\\n});\\n```\\n\\n- **Rule : Hyphen (-) is used to separate words in URI Path**\\n\\nHyphens (-) are used to separate words in URI path. For example, the URI path for a resource named _user-profile_ is _/user-profile_.\\n\\n- **Rule : Underscore (_) is not used in URI**\\n\\nUnderscores (_) are not used in URI path due to text editors and browsers depending on the font hide the underscore by underlining the text.\\n\\n- **Rule : File Extensions are not used in URI**\\n\\nA REST API should not use file extensions in the URI. For example, the URI path for a resource named _user-profile_ is _/user-profile_ and not _/user-profile.json_.\\n\\n- **Rule : If API Provides a developer portal then it should be accessible via a consistent subdomain**\\n\\nIf an API provides a developer portal, then the developer portal should be accessible via a consistent subdomain. For example, the developer portal for the weather API is accessible via _developer.blog.api.com_.\\n\\n- **Rule : Lowercase letters are preferred in URI**\\n\\nLowercase letters are preferred in URI. 
For example, the URI path for a resource named _user-profile_ is _/user-profile_ and not _/User-Profile_.\\n\\n- **Rule: Use a Verb or verb phrase for Controller Names**\\n\\n```http request\\nPOST /api.blog.com/email/email/send\\n```\\n\\n- **Rule: CRUD function names should not be used in the URI**\\n\\nThe following table lists the CRUD functions and their corresponding HTTP methods:\\n\\n| CRUD Function | HTTP Method |\\n| ------------- | ----------- |\\n| Create | POST |\\n| Read | GET |\\n| Update | PUT |\\n| Delete | DELETE |\\n\\ne.g. Preferred API Interface\\n\\n```http request\\nPUT /api.blog.com/authors/vishal-gandhi\\n```\\n\\nAnti pattern \\n\\n```http request\\nDELETE /deleteusers/abc/\\n```\\n\\n- **Rule: New URIs should be introduced new concepts**\\n\\nA REST API should introduce new URIs for new concepts. For example, the following table lists the URIs for a user resource:\\n\\n| URI | Description |\\n| --- | ----------- |\\n| /authors | Returns a list of authors |\\n| /authors/vishalgandhi | Returns the author details |\\n| /authors/vishalgandhi/books | Returns a list of articles written by the author \\n\\n- **Rule: JSON should be well formed and supported for resource representation**\\n\\n- **Rule: Add Versioning at the start of the URI**\\n\\n```http request\\n\\nhttp://api.blog.com/v1/authors/vishal-gandhi\\n\\n```\\n\\n\\n### HTTP Methods\\n\\n- **Rule: GET must be used to retrieve representation of a resource**\\n\\n- **Rule: Head must be used to retrieve metadata of a resource and response headers**\\n\\n- **Rule: PUT must be used to both insert and update a resource**\\n\\n- **Rule: POST must be used to create a resource**\\n\\n- **Rule: POST must be used to execute a controller**\\n\\n- **Rule: DELETE must be used to delete a resource**\\n\\n- **Rule: OPTIONS must be used to retrieve supported HTTP methods**\\n\\n- **Rule : Use HTTP Status Codes to Indicate Response Status**\\n\\n\\n\\nHTTP status codes are used to indicate the response status of an HTTP request. The following table lists the HTTP status codes and their corresponding meanings:\\n\\n| HTTP Status Code | Meaning | Information |\\n| ---------------- | ------- | ----------- |\\n100 | 100 and above are information | 100 and above are for \\"Information\\". You rarely use them directly. Responses with these status codes cannot have a body.\\n| 200 OK | The request was successful | 200 and above are for \\"Successful\\" responses. These are the ones you would use the most. 200 is the default status code for a successful response.\\n| 201 Created | The request was successful and a resource was created | 201 is \\"Created\\". This is used when a new resource is created. The response will contain a Location header with the URI of the new resource.\\n| 204 No Content | The request was successful but there is no representation to return | A special case is 204, \\"No Content\\". This response is used when there is no content to return to the client, and so the response must not have a body.\\n| 300 Multiple Choices | The requested resource corresponds to any one of a set of representations, each with its own specific location | 300 and above are for \\"Redirection\\". These are used when the client needs to take some additional action in order to complete the request. For example, if you request a resource that has been moved to a different location, the response will be 301, \\"Moved Permanently\\", and the response will contain a Location header with the new location of the resource. 
The client can then make a new request to that location.\\n| 400 Bad Request | The request could not be understood by the server | 400 and above are for \\"Client Error\\" responses. These are used when the client has made a mistake in its request. For example, if you request a resource that doesn\'t exist, the response will be 404, \\"Not Found\\". \\n| 401 Unauthorized | The request requires user authentication | 401 is \\"Unauthorized\\". This is used when the client needs to authenticate itself to get the requested response.\\n| 403 Forbidden | The server understood the request, but is refusing to fulfill it | 403 is \\"Forbidden\\". This is used when the client is not allowed to access the resource. For example, if you try to access a resource that you don\'t have permission to access, the response will be 403, \\"Forbidden\\".\\n| 404 Not Found | The server has not found anything matching the Request-URI | 404 is \\"Not Found\\". This is used when the client requests a resource that doesn\'t exist. For example, if you request a resource that doesn\'t exist, the response will be 404, \\"Not Found\\".\\n| 405 Method Not Allowed | The method specified in the Request-Line is not allowed for the resource identified by the Request-URI | 405 is \\"Method Not Allowed\\". This is used when the client requests a resource using a method that isn\'t allowed. For example, if you try to access a resource using the POST method, but the resource only supports the GET method, the response will be 405, \\"Method Not Allowed\\".\\n| 500 Internal Server Error | The server encountered an unexpected condition which prevented it from fulfilling the request | 500 and above are for \\"Server Error\\" responses. These are used when the server encounters an error while fulfilling the request. For example, if the server runs out of memory while fulfilling the request, the response will be 500, \\"Internal Server Error\\".\\n\\nThe approaches and best practices of REST API outlined in this blog article will help anyone follow consistent guidelines for designing and developing REST APIs. \\n\\n## References\\n\\n- [Roy Fielding\'s Dissertation](https://www.ics.uci.edu/~fielding/pubs/dissertation/rest_arch_style.htm)\\n- [What is REST](https://restfulapi.net/)\\n- [REST API Design Rulebook](https://www.amazon.in/REST-API-Design-Rulebook-Consistent-ebook/dp/B005XE5A7Q/ref=sr_1_1?keywords=rest+api+design+rulebook&qid=1665926194&qu=eyJxc2MiOiIwLjQzIiwicXNhIjoiMC41NCIsInFzcCI6IjAuMDAifQ%3D%3D&sprefix=REST+API+D%2Caps%2C194&sr=8-1)\\n- [Hands-on RESTful API Design Patterns](https://www.amazon.in/Hands-RESTful-Design-Patterns-Practices-ebook/dp/B07BJL399D/ref=sr_1_2?keywords=rest+api+design+rulebook&qid=1665926194&qu=eyJxc2MiOiIwLjQzIiwicXNhIjoiMC41NCIsInFzcCI6IjAuMDAifQ%3D%3D&sprefix=REST+API+D%2Caps%2C194&sr=8-2)\\n\\n\\n\\n\\n\x3c!-- Change HTTP Codes to below \\n\\n100 and above are for \\"Information\\". You rarely use them directly. Responses with these status codes cannot have a body.\\n200 and above are for \\"Successful\\" responses. These are the ones you would use the most.\\n200 is the default status code, which means everything was \\"OK\\".\\nAnother example would be 201, \\"Created\\". It is commonly used after creating a new record in the database.\\nA special case is 204, \\"No Content\\". This response is used when there is no content to return to the client, and so the response must not have a body.\\n300 and above are for \\"Redirection\\". 
Responses with these status codes may or may not have a body, except for 304, \\"Not Modified\\", which must not have one.\\n400 and above are for \\"Client error\\" responses. These are the second type you would probably use the most.\\nAn example is 404, for a \\"Not Found\\" response.\\nFor generic errors from the client, you can just use 400.\\n500 and above are for server errors. You almost never use them directly. When something goes wrong at some part in your application code, or server, it will automatically return one of these status codes. --\x3e"},{"id":"mongodb-replicaset-write-concern-read-pref","metadata":{"permalink":"/mongodb-replicaset-write-concern-read-pref","source":"@site/blog/2022-08-21-mongodb-replicaset-write-concern-read-pref.md","title":"Understanding MongoDB Replicasets and Write Concern - Part 1","description":"Introducing Replicasets","date":"2022-08-21T00:00:00.000Z","formattedDate":"August 21, 2022","tags":[{"label":"mongodb","permalink":"/tags/mongodb"},{"label":"replicaset","permalink":"/tags/replicaset"},{"label":"write-concern","permalink":"/tags/write-concern"}],"readingTime":6.25,"hasTruncateMarker":true,"authors":[{"name":"Vishal Gandhi","url":"https://github.com/ivishalgandhi","image_url":"https://github.com/ivishalgandhi.png","imageURL":"https://github.com/ivishalgandhi.png"}],"frontMatter":{"slug":"mongodb-replicaset-write-concern-read-pref","title":"Understanding MongoDB Replicasets and Write Concern - Part 1","authors":{"name":"Vishal Gandhi","url":"https://github.com/ivishalgandhi","image_url":"https://github.com/ivishalgandhi.png","imageURL":"https://github.com/ivishalgandhi.png"},"tags":["mongodb","replicaset","write-concern"]},"prevItem":{"title":"Understanding REST API Design Rules","permalink":"/rest-api-design-rules"},"nextItem":{"title":"Guide to Software Bill of Materials(SBoM) and Docker SBOM CLI","permalink":"/docker-sbom"}},"content":"## Introducing Replicasets\\n\\nThe way to achieve fault tolerance in MongoDB is through the use of `replica sets`. \\n\\n```mermaid\\nstateDiagram-v2\\n [*] --\x3e Application\\n direction LR\\n state Application\\n Application --\x3e replicaset \\n state replicaset\\n {\\n direction RL\\n Primary:primary\\n Secondary1:secondary \\n Secondary2:secondary\\n Secondary1--\x3ePrimary : Fetch Oplog\\n Secondary2--\x3ePrimary : Fetch Oplog\\n \\n }\\n```\\n\\nTwo or more `secondary` nodes along with a `primary` node forms a replica set. Application makes all the read/write calls to the primary node which propagate all the write requests synchronously or asynchronously to the secondary nodes. \\n\\nThe Secondary nodes fetches the data via Oplog pull from Primary or other nodes. \\n\\n\x3c!--truncate--\x3e\\n\\nThe Primary node is responsible for all the writes and reads. The secondary nodes can be utilized for reads via [`setSecondaryOk`](https://docs.mongodb.com/manual/reference/method/Mongo.setSecondaryOk/) or [`readPreference`](https://docs.mongodb.com/manual/reference/read-preference/). \\n\\n## Understanding Oplog\\n\\nWhen the application performs a write, the primary node applies the write to the database like a standalone. \\n\\nThe difference between Replicaset write and standalone write is that replica set nodes have an `OpObserver` that inserts a document to the **oplog** whenever a write to the database happens, describing the write. The **oplog** is a capped collection called `oplog.rs` in the `local` database. 
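\\n\\nAs a quick illustration (a minimal sketch, assuming a running replica set member), the newest oplog entries can be inspected from the mongo shell; each document describes one replicated write:\\n\\n```javascript\\n// switch to the local database, where the capped oplog.rs collection lives\\nuse local\\n\\n// show the two most recent oplog entries, newest first\\ndb.oplog.rs.find().sort({ $natural: -1 }).limit(2).pretty()\\n\\n// summarise the configured oplog size and the time window it currently covers\\ndb.printReplicationInfo()\\n```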
\\n\\nFor every operation performed in a write, the primary node inserts a document into the oplog. The oplog is a capped collection, which means that it has a maximum size. When the oplog reaches its maximum size, MongoDB removes the oldest entries to make room for new entries. \\n\\nFor a write which performs create collection and insert, there are two oplog entries created one for `create` collection and another for `insert`.\\n\\n```cpp\\n// mongod_main.cpp\\nsetUpObservers(service);\\n```\\n\\n```cpp\\n\\n//op_observer_registry.h\\nvoid onCreateCollection(OperationContext* const opCtx,\\n const CollectionPtr& coll,\\n const NamespaceString& collectionName,\\n const CollectionOptions& options,\\n const BSONObj& idIndex,\\n const OplogSlot& createOpTime,\\n bool fromMigrate) override {\\n ReservedTimes times{opCtx};\\n for (auto& o : _observers)\\n o->onCreateCollection(\\n opCtx, coll, collectionName, options, idIndex, createOpTime, fromMigrate);\\n }\\n\\n using OpObserver::onInserts;\\n void onInserts(OperationContext* const opCtx,\\n const NamespaceString& nss,\\n const UUID& uuid,\\n std::vector::const_iterator begin,\\n std::vector::const_iterator end,\\n bool fromMigrate) override {\\n ReservedTimes times{opCtx};\\n for (auto& o : _observers)\\n o->onInserts(opCtx, nss, uuid, begin, end, fromMigrate);\\n }\\n```\\n\\n## Understanding Write Concern\\n\\nWrite concern is a way to ensure that the write operations are propagated to the secondary nodes.\\n\\n### Default Write Concern\\n\\nIf a write operation does not explicitly specify a write concern, the server will use a default\\nwrite concern. \\n\\nThis default write concern will be defined by either the\\n\\n- **Cluster-Wide write concern**, explicitly set by the user \\nor\\n- **Implicit Default write concern**, implicitly set by the server based on replica set configuration.\\n\\n#### Cluster-Wide Write Concern\\n\\nThe cluster-wide write concern is set by the user using the [`setDefaultRWConcern`](https://docs.mongodb.com/manual/reference/command/setDefaultRWConcern/) command. Setting the cluster-wide write concern will cause the implicit default write concern not to take effect.\\n\\nOn a sharded cluster, the cluster-wide write concern is set on the config server. On a replica set, the cluster-wide write concern is set on the primary node. The below code snippets shows how the cluster-wide write concern is set on the primary node and stored on the config node. 
\\n\\n```javascript\\ndb.adminCommand(\\n {\\n setDefaultRWConcern : 1,\\n defaultReadConcern: { },\\n defaultWriteConcern: { },\\n writeConcern: { },\\n comment: \\n }\\n)\\n```\\n\\n```cpp\\n//cluster_rwc_defaults_commands.cpp \\nclass ClusterSetDefaultRWConcernCommand : public BasicCommand {\\npublic:\\n ClusterSetDefaultRWConcernCommand() : BasicCommand(\\"setDefaultRWConcern\\") {}\\n\\n bool run(OperationContext* opCtx,\\n const DatabaseName&,\\n const BSONObj& cmdObj,\\n BSONObjBuilder& result) override {\\n auto configShard = Grid::get(opCtx)->shardRegistry()->getConfigShard();\\n auto cmdResponse = uassertStatusOK(configShard->runCommandWithFixedRetryAttempts(\\n opCtx,\\n ReadPreferenceSetting(ReadPreference::PrimaryOnly),\\n NamespaceString::kAdminDb.toString(),\\n CommandHelpers::appendMajorityWriteConcern(\\n CommandHelpers::filterCommandRequestForPassthrough(cmdObj),\\n opCtx->getWriteConcern()),\\n Shard::RetryPolicy::kNotIdempotent));\\n\\n uassertStatusOK(cmdResponse.commandStatus);\\n uassertStatusOK(cmdResponse.writeConcernStatus);\\n\\n // Quickly pick up the new defaults by setting them in the cache.\\n auto newDefaults = RWConcernDefault::parse(IDLParserContext(\\"ClusterSetDefaultRWConcern\\"),\\n cmdResponse.response);\\n if (auto optWC = newDefaults.getDefaultWriteConcern()) {\\n if (optWC->hasCustomWriteMode()) {\\n LOGV2_WARNING(\\n 6081700,\\n \\"A custom write concern is being set as the default write concern in a sharded \\"\\n \\"cluster. This set is unchecked, but if the custom write concern does not \\"\\n \\"exist on all shards in the cluster, errors will occur upon writes\\",\\n \\"customWriteConcern\\"_attr = stdx::get(optWC->w));\\n }\\n }\\n ReadWriteConcernDefaults::get(opCtx).setDefault(opCtx, std::move(newDefaults));\\n\\n CommandHelpers::filterCommandReplyForPassthrough(cmdResponse.response, &result);\\n return true;\\n }\\n```\\n\\n#### Implicit default write concern\\n\\nThe implicit default write concern is calculated and set on startup by the server based on the replica set configuration. The server will set the implicit default write concern to the following:\\n\\n- If the replica set has a single node, the implicit default write concern is `{ w: 1 }`\\n- For most of the cases the implicit default write concern is `{ w: \\"majority\\" }`\\n\\n##### PSA \\n\\n`implicitDefaultWriteConcern = if ((#arbiters > 0) AND (#non-arbiters <= majority(#voting nodes)) then {w:1} else {w:majority}`\\n\\nImplicit default to a value that the set can satisfy in the event of one data-bearing node\\ngoing down. That is, the number of data-bearing nodes must be strictly greater than the majority\\nof voting nodes for the set to set `{w: \\"majority\\"}`.\\n\\nFor example, if we have a PSA replica set, and the secondary goes down, the primary cannot\\nsuccessfully acknowledge a majority write as the majority for the set is two nodes. However, the\\nprimary will remain primary with the arbiter\'s vote. In this case, the DWCF will have preemptively\\nset the IDWC to `{w: 1}` so the user can still perform writes to the replica set.\\n\\n##### Sharded Cluster \\n\\nFor a sharded cluster, the implicit default write concern is set to `{ w: \\"majority\\" }` if the\\ncluster has a majority of voting nodes. Otherwise, the implicit default write concern is set to\\n`{ w: 1 }`.\\n\\n## Understanding Secondary Nodes Operations \\n\\nThe secondary nodes will choose the node with the highest `lastApplied` timestamp as the** sync source**. 
The secondary nodes will then **pull** the oplog entries from the sync source and apply them to its own oplog.\\n\\nThe Secondary will also keep its **sync source** uptodate with its progress, this helps primary satisfy the read concern. \\n\\nHere are the high level steps performed to select and probe the sync source\\n\\n1. `TopologyCoordinator` checks if user requested a specific sync source using `replSetSyncFrom` command. If so, it will use that sync source. Otherwise, it will use the sync source from the last successful election.\\n2. Check if **chaining** is disabled. If so, the secondary will always use primary as its sync source \\n\\n```cpp\\n if (chainingPreference == ChainingPreference::kUseConfiguration &&\\n !_rsConfig.isChainingAllowed()) {\\n if (_currentPrimaryIndex == -1) {\\n LOG(1) << \\"Cannot select a sync source because chaining is\\"\\n \\" not allowed and primary is unknown/down\\";\\n _syncSource = HostAndPort();\\n return _syncSource;\\n } else if (_memberIsBlacklisted(*_currentPrimaryMember(), now)) {\\n LOG(1) << \\"Cannot select a sync source because chaining is not allowed and primary \\"\\n \\"member is blacklisted: \\"\\n << _currentPrimaryMember()->getHostAndPort();\\n _syncSource = HostAndPort();\\n return _syncSource;\\n\\n```\\n\\n3. Fetch latest opTime. Do not sync from a node where newest oplog is more than `maxSyncSourceLagSecs`\\n\\n```cpp\\n if (_currentPrimaryIndex != -1) {\\n OpTime primaryOpTime = _memberData.at(_currentPrimaryIndex).getHeartbeatAppliedOpTime();\\n\\n // Check if primaryOpTime is still close to 0 because we haven\'t received\\n // our first heartbeat from a new primary yet.\\n unsigned int maxLag =\\n static_cast(durationCount(_options.maxSyncSourceLagSecs));\\n if (primaryOpTime.getSecs() >= maxLag) {\\n oldestSyncOpTime =\\n OpTime(Timestamp(primaryOpTime.getSecs() - maxLag, 0), primaryOpTime.getTerm());\\n }\\n }\\n```\\n4. Loop through all the nodes and find the closest node which satisfies the condition \\n\\n```cpp\\nHostAndPort TopologyCoordinator::chooseNewSyncSource(Date_t now,\\n const OpTime& lastOpTimeFetched,\\n ChainingPreference chainingPreference) {\\n\\n...\\n...\\n...\\n```\\n\\n### Oplog Fetching \\n\\nThe secondary node will fetch the oplog entries from the sync source to keep its data syncronized. The entire implementation of the oplog fetching is in the `OplogFetcher` class which runs in a separate thread and communicates via a dedicated client connection.\\n\\n```cpp\\n\\nvoid OplogFetcher::setConnection(std::unique_ptr&& _connectedClient) {\\n // Can only call this once, before startup.\\n invariant(!_conn);\\n _conn = std::move(_connectedClient);\\n}\\n\\n```"},{"id":"docker-sbom","metadata":{"permalink":"/docker-sbom","source":"@site/blog/2022-07-09-docker-sbom.md","title":"Guide to Software Bill of Materials(SBoM) and Docker SBOM CLI","description":"An Software Bill of Materials (SBoM) is a formal record containing the details and supply chain relationships of various components used in building the software. 
These components, including libraries and modules, can be proprietary or open source,free or paid and the data can be widely available or access-restricted.","date":"2022-07-09T00:00:00.000Z","formattedDate":"July 9, 2022","tags":[{"label":"docker-desktop","permalink":"/tags/docker-desktop"},{"label":"SBOM","permalink":"/tags/sbom"},{"label":"docker","permalink":"/tags/docker"}],"readingTime":2.895,"hasTruncateMarker":true,"authors":[{"name":"Vishal Gandhi","url":"https://github.com/ivishalgandhi","image_url":"https://github.com/ivishalgandhi.png","imageURL":"https://github.com/ivishalgandhi.png"}],"frontMatter":{"slug":"docker-sbom","title":"Guide to Software Bill of Materials(SBoM) and Docker SBOM CLI","authors":{"name":"Vishal Gandhi","url":"https://github.com/ivishalgandhi","image_url":"https://github.com/ivishalgandhi.png","imageURL":"https://github.com/ivishalgandhi.png"},"tags":["docker-desktop","SBOM","docker"]},"prevItem":{"title":"Understanding MongoDB Replicasets and Write Concern - Part 1","permalink":"/mongodb-replicaset-write-concern-read-pref"},"nextItem":{"title":"How to configure Percona MongoDB Replicaset, Percona Backup Manager, Backup Agent using Docker and perform Replicaset backup, restore using S3 compatible object storage \u2013 MINIO","permalink":"/percona-mongo-replicaset-minio"}},"content":"An **Software Bill of Materials (SBoM)** is a formal record containing the details and supply chain relationships of various components used in building the software. These components, including libraries and modules, can be proprietary or open source,free or paid and the data can be widely available or access-restricted.\\n\\n\x3c!--truncate--\x3e\\n\\n![](sbom.webp)\\n\\nSBoM is analogous to a list of ingredients on food packaging. In May 2021, the US President released the [Executive Order](https://www.whitehouse.gov/briefing-room/presidential-actions/2021/05/12/executive-order-on-improving-the-nations-cybersecurity/) on improving the Nation\u2019s Cybersecurity. The Software Bill of Materials (SBoM) directly impacts all developers. The SBoM requires third-party software companies to provide customers with the code equivalent of a \u201cnutrition chart.\u201d\\n\\n## When should SBoM be used \u2013 Use cases ?\\n\\n* **Developing products**\\n * Scan vulnerabilities in the components\\n * Keep codebase to bare minimum, reduce the number of dependencies and size\\n * Generate SBoM for end users\\n\\n* **IT Operations**\\n * Understand operational risk\\n * Understand potential exploitations\\n * Real time asset inventory\\n * Software Selection\\n * Identify known vulnerabilities and compliance\\n\\n* **EOL**\\n * Complete visibility to components before evaluation or deploying in production\\n * Understand the software architecture and the dependencies of the software\\n\\n## Why SBOM ?\\n\\n* Requirement from regulatory bodies to track the components used in the software\\n\\n* Transparency of components getting shipped\\n\\n* Container ecosystem has exploded and the need to track the components getting shipped is a must\\n\\n* Software Vulnerabilities are bugs\\n\\n* Detecting and remediating Vulnerabilities\\n\\n## SBOM Formats\\n\\n* **SPDX (Software Package Data Exchange )**\\n * Open standard for communicating software bill of material information, including components, licenses, copyrights and security references. 
Reduces redundant work by providing a common format for organizations and communities to share and use\\n\\n* **CycloneDX**\\n * Open Web Application Security Project(OWASP) CycloneDX is a lightweight Software Bill of Materials (SBOM) standard designed for use in application security contexts and supply chain component analysis.\\n\\n* **SWID (Software Identification Tags)**\\n * SWID is used primarily to identify installed software and is the preferred format of the NVD. SWID tags are used in the National Vulnerability Database to describe vulnerable components. The CycloneDX specification compliments this work as CycloneDX documents can incorporate SWID tags and other high-level SWID metadata and optionally include entire SWID documents. Use of SWID tag ID\u2019s are useful in determining if a specific component has known vulnerabilities.\\n\\n## Docker Desktop \u2013 SBOM CLI\\n\\nIn Docker Desktop 4.7.0 Docker introduced and included a new experimental docker sbom CLI that is used for displaying SBoM for any container image. docker sbom scans the layer of container images using the Syft Project\\n\\n## Usage\\n\\n* Display SBOM in CyloneDX format\\n\\n\\n```shell\\n\\t\\n$ docker sbom mongo:latest --format cyclonedx-json | more\\n \\n{\\n \\"type\\": \\"library\\",\\n \\"publisher\\": \\"MongoDB Packaging \\\\u003cpackaging@mongodb.com\\\\u003e\\",\\n \\"name\\": \\"mongodb-org-server\\",\\n \\"version\\": \\"5.0.9\\",\\n \\"cpe\\": \\"cpe:2.3:a:mongodb-org-server:mongodb-org-server:5.0.9:*:*:*:*:*:*:*\\",\\n \\"purl\\": \\"pkg:deb/ubuntu/mongodb-org-server@5.0.9?arch=arm64\\\\u0026upstream=mongodb-org\\\\u0026distro=ubuntu-20.04\\",\\n \\"properties\\": [\\n {\\n \\"name\\": \\"syft:package:foundBy\\",\\n \\"value\\": \\"dpkgdb-cataloger\\"\\n },\\n {\\n \\"name\\": \\"syft:package:metadataType\\",\\n \\"value\\": \\"DpkgMetadata\\"\\n }\\n```\\n\\n* Display SBOM summary of packages. e.g. using the below command we can check for the log4j vulnerabilities\\n\\n```shell\\t\\n$ docker sbom neo4j | grep log4j\\n \\nlog4j-api 2.17.1 java-archive\\nlog4j-core 2.17.1 java-archive\\n\\t\\n$ docker sbom neo4j:4.4.1 | grep log4j\\n \\nlog4j-api 2.15.0 java-archive\\nlog4j-core 2.15.0 java-archive\\n\\t\\n$ docker sbom elasticsearch:7.16.3 | grep log4j\\n \\nelasticsearch-log4j 7.16.3 java-archive\\nlog4j-1.2-api 2.17.1 java-archive\\nlog4j-api 2.17.1 java-archive\\nlog4j-core 2.17.1 java-archive\\nlog4j-slf4j-impl 2.17.1 java-archive\\n```\\n\\nThere are many benefits to generate SBOM for compliance and vulnerability analysis. 
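\\n\\nAs a small illustration (a sketch only: the `--format` and `--output` flags belong to the experimental plugin and are worth confirming with `docker sbom --help`, and the file name here is just an example), the SBOM can be written to a file in SPDX format and archived as a compliance artifact:\\n\\n```shell\\n# generate an SPDX JSON SBOM for an image and keep it as a build artifact\\n$ docker sbom mongo:latest --format spdx-json --output mongo-5.0-sbom.spdx.json\\n\\n$ ls -lh mongo-5.0-sbom.spdx.json\\n```\\n\\n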
Further SBOM can be used for input to open source vulnerability databases like [Snyk](https://github.com/snyk/cli) and open source vulnerability scanning tools like [Grype](https://github.com/anchore/grype)"},{"id":"percona-mongo-replicaset-minio","metadata":{"permalink":"/percona-mongo-replicaset-minio","source":"@site/blog/2022-05-29-percona-mongo-replicaset-minio.md","title":"How to configure Percona MongoDB Replicaset, Percona Backup Manager, Backup Agent using Docker and perform Replicaset backup, restore using S3 compatible object storage \u2013 MINIO","description":"In this blog post, i will walk you through the steps required to containerize Percona Server for MongoDB, Percona Backup Manager, and Agent from source and configure cloud-native S3(Simple Storage Service) compatible distributed object storage MINIO to backup and restore Percona MongoDB snapshot backups.","date":"2022-05-29T00:00:00.000Z","formattedDate":"May 29, 2022","tags":[{"label":"mongodb","permalink":"/tags/mongodb"},{"label":"containers","permalink":"/tags/containers"},{"label":"docker","permalink":"/tags/docker"},{"label":"s3","permalink":"/tags/s-3"},{"label":"minio","permalink":"/tags/minio"},{"label":"pbm","permalink":"/tags/pbm"}],"readingTime":4.205,"hasTruncateMarker":true,"authors":[{"name":"Vishal Gandhi","url":"https://github.com/ivishalgandhi","image_url":"https://github.com/ivishalgandhi.png","imageURL":"https://github.com/ivishalgandhi.png"}],"frontMatter":{"slug":"percona-mongo-replicaset-minio","title":"How to configure Percona MongoDB Replicaset, Percona Backup Manager, Backup Agent using Docker and perform Replicaset backup, restore using S3 compatible object storage \u2013 MINIO","authors":{"name":"Vishal Gandhi","url":"https://github.com/ivishalgandhi","image_url":"https://github.com/ivishalgandhi.png","imageURL":"https://github.com/ivishalgandhi.png"},"tags":["mongodb","containers","docker","s3","minio","pbm"]},"prevItem":{"title":"Guide to Software Bill of Materials(SBoM) and Docker SBOM CLI","permalink":"/docker-sbom"},"nextItem":{"title":"Local Home Lab DNS Setup with DNSMasq and NGINX","permalink":"/local-home-lab"}},"content":"In this blog post, i will walk you through the steps required to containerize [Percona Server for MongoDB](https://github.com/percona/percona-server-mongodb), [Percona Backup Manager](https://github.com/percona/percona-backup-mongodb), and Agent from source and configure cloud-native S3(**S**imple **S**torage **S**ervice) compatible distributed object storage [MINIO](https://min.io/) to backup and restore Percona MongoDB snapshot backups.\\n\\n\x3c!--truncate--\x3e\\n\\n![](minio.webp)\\n\\n## Prerequisites\\nEnsure the below binaries are installed before starting the setup and configuration\\n\\n[Docker](https://www.docker.com/get-started/) or [Podman](https://podman.io/) to containerize Percona MongoDB replicaset and PBM Agent\\n[Docker Compose](https://docs.docker.com/compose/install/)\\n[Golang](https://go.dev/learn/) compiler \u2013 Build Percona Backup Manager binaries\\n[Portainer](https://www.portainer.io/) (Optional) \u2013 Intuitive UI for container configuration and monitoring\\nLet us perform the below steps to set up PSMDB Replicaset; PBM Agent; Minio, S3 compatible bucket, and PBM configuration to perform backups and restores from the bucket.\\n\\n## Steps\\n* Create the Docker environment file with Docker Image, tag, port, and replicaset information. 
Save the file as .env in the working directory\\n\\n```shell \\nMONGODB_IMAGE=percona/percona-server-mongodb\\nMONGODB_VERSION=5.0\\nMONGO1_PORT=0.0.0.0:15000\\nMONGO2_PORT=0.0.0.0:15001\\nMONGO3_PORT=0.0.0.0:15002\\nMONGODB_PORT=27017\\nMONGODB_DOCKER_NETWORK=mongo_net\\nRS_NAME=rs1\\n\\n```\\n* Create keyFile , Dockerfile and download percona-backup-manager source code in the working directory\\n\\n```shell\\n$ git clone https://github.com/percona/percona-backup-mongodb.git\\n\\nARG MONGODB_VERSION\\nARG MONGODB_IMAGE\\nFROM ${MONGODB_IMAGE}:${MONGODB_VERSION}\\nUSER root\\nCOPY keyFile /opt/keyFile\\nRUN chown mongodb /opt/keyFile && chmod 400 /opt/keyFile && mkdir -p /home/mongodb/ && chown mongodb /home/mongodb\\nUSER mongodb\\n```\\n* Create Docker Compose file\\n\\n```YAML\\nversion: \\"3.8\\"\\nservices:\\n rs101:\\n build:\\n dockerfile: Dockerfile\\n context: /home/vishal/dev/psmdb\\n args:\\n - MONGODB_VERSION=${MONGODB_VERSION}\\n - MONGODB_IMAGE=${MONGODB_IMAGE}\\n hostname: rs101\\n labels:\\n - \\"com.percona.pbm.app=mongod\\"\\n environment:\\n - REPLSET_NAME=rs1\\n - MONGO_USER=dba\\n - BACKUP_USER=bcp\\n - MONGO_PASS=test1234\\n ports:\\n - \\"${MONGO1_PORT}:${MONGODB_PORT}\\"\\n # command: mongod --replSet rs1 --port ${MONGO1_PORT}:27017 --storageEngine wiredTiger --keyFile /opt/keyFile --wiredTigerCacheSizeGB 1\\n command: [\\"--replSet\\", \\"${RS_NAME}\\", \\"--bind_ip_all\\", \\"--storageEngine\\", \\"wiredTiger\\" , \\"--keyFile\\", \\"/opt/keyFile\\"]\\n volumes:\\n - data-rs101:/data/db\\n - ./scripts/start.sh:/opt/start.sh\\n rs102:\\n build:\\n dockerfile: Dockerfile\\n context: /home/vishal/dev/psmdb\\n args:\\n - MONGODB_VERSION=${MONGODB_VERSION}\\n - MONGODB_IMAGE=${MONGODB_IMAGE}\\n hostname: rs102\\n labels:\\n - \\"com.percona.pbm.app=mongod\\"\\n # command: mongod --replSet rs1 --port 27017 --storageEngine wiredTiger --keyFile /opt/keyFile --wiredTigerCacheSizeGB 1\\n ports:\\n - \\"${MONGO2_PORT}:${MONGODB_PORT}\\"\\n command: [\\"--replSet\\", \\"${RS_NAME}\\", \\"--bind_ip_all\\", \\"--storageEngine\\", \\"wiredTiger\\" , \\"--keyFile\\", \\"/opt/keyFile\\"]\\n volumes:\\n - data-rs102:/data/db\\n rs103:\\n build:\\n dockerfile: Dockerfile\\n context: /home/vishal/dev/psmdb\\n args:\\n - MONGODB_VERSION=${MONGODB_VERSION}\\n - MONGODB_IMAGE=${MONGODB_IMAGE}\\n hostname: rs103\\n labels:\\n - \\"com.percona.pbm.app=mongod\\"\\n # command: mongod --replSet rs1 --port 27017 --storageEngine wiredTiger --keyFile /opt/keyFile --wiredTigerCacheSizeGB 1\\n ports:\\n - \\"${MONGO3_PORT}:${MONGODB_PORT}\\"\\n command: [\\"--replSet\\", \\"${RS_NAME}\\", \\"--bind_ip_all\\", \\"--storageEngine\\", \\"wiredTiger\\" , \\"--keyFile\\", \\"/opt/keyFile\\"]\\n volumes:\\n - data-rs103:/data/db\\n agent-rs101:\\n container_name: \\"pbmagent_rs101\\"\\n user: \\"1001\\"\\n labels:\\n - \\"com.percona.pbm.app=agent\\"\\n - \\"com.percona.pbm.agent.rs=rs1\\"\\n environment:\\n - \\"PBM_MONGODB_URI=mongodb://${BACKUP_USER:-bcp}:${MONGO_PASS:-test1234}@rs101:27017\\"\\n build:\\n labels:\\n - \\"com.percona.pbm.app=agent\\"\\n dockerfile: /home/vishal/open-source/percona-backup-mongodb/docker/Dockerfile\\n context: /home/vishal/open-source/percona-backup-mongodb/\\n args:\\n - MONGODB_VERSION=${MONGODB_VERSION:-5.0}\\n volumes:\\n - ./conf:/etc/pbm\\n - ./backups:/opt/backups\\n - data-rs101:/data/db\\n command: pbm-agent\\n cap_add:\\n - NET_ADMIN\\n agent-rs102:\\n container_name: \\"pbmagent_rs102\\"\\n user: \\"1001\\"\\n labels:\\n - \\"com.percona.pbm.app=agent\\"\\n 
- \\"com.percona.pbm.agent.rs=rs1\\"\\n environment:\\n - \\"PBM_MONGODB_URI=mongodb://${BACKUP_USER:-bcp}:${MONGO_PASS:-test1234}@rs102:27017\\"\\n build:\\n labels:\\n - \\"com.percona.pbm.app=agent\\"\\n dockerfile: /home/vishal/open-source/percona-backup-mongodb/docker/Dockerfile\\n context: /home/vishal/open-source/percona-backup-mongodb/\\n args:\\n - MONGODB_VERSION=${MONGODB_VERSION:-5.0}\\n volumes:\\n - ./conf:/etc/pbm\\n - ./backups:/opt/backups\\n - data-rs102:/data/db\\n command: pbm-agent\\n cap_add:\\n - NET_ADMIN\\n agent-rs103:\\n container_name: \\"pbmagent_rs103\\"\\n user: \\"1001\\"\\n labels:\\n - \\"com.percona.pbm.app=agent\\"\\n - \\"com.percona.pbm.agent.rs=rs1\\"\\n environment:\\n - \\"PBM_MONGODB_URI=mongodb://${BACKUP_USER:-bcp}:${MONGO_PASS:-test1234}@rs103:27017\\"\\n build:\\n labels:\\n - \\"com.percona.pbm.app=agent\\"\\n dockerfile: /home/vishal/open-source/percona-backup-mongodb/docker/Dockerfile\\n context: /home/vishal/open-source/percona-backup-mongodb/\\n args:\\n - MONGODB_VERSION=${MONGODB_VERSION:-5.0}\\n volumes:\\n - ./conf:/etc/pbm\\n - ./backups:/opt/backups\\n - data-rs103:/data/db\\n command: pbm-agent\\n cap_add:\\n - NET_ADMIN\\nvolumes:\\n backups: null\\n data-rs101: null\\n data-rs102: null\\n data-rs103: null\\n\\n```\\n\\n* Run Docker compose\\nThe below command will build and start the docker container for Percona Server MongoDB Primary Secondary Secondary replicaset and Percona Backup Manager Agent for each replicaset\\n\\n```\\n$ psmdb docker compose -f docker-compose-rs.yaml up -d\\n[+] Running 8/8\\n\u283f Container psmdb-rs102-1 Running 0.0s\\n\u283f Container psmdb-rs103-1 Running 0.0s\\n\u283f Container pbmagent_rs103 Running 0.0s\\n\u283f Container pbmagent_rs102 Running 0.0s\\n\u283f Container psmdb-rs101-1 Running 0.0s\\n\u283f Container pbmagent_rs101 Running 0.0s\\n\\n```\\n\\n* Connect to MongoDB replicaset and ensure replication and containers are working\\n\\n```shell\\n$ mongo \\"mongodb://dba:test1234@192.168.50.113:15000,192.168.50.113:15001,192.168.50.113:15002/admin?replicaSet=rs1\\"\\n```\\n\\n* Setup Minio and Minio CLI\\n\\n```shell\\n\\n$ cd ~/downloads && wget https://dl.min.io/server/minio/release/linux-amd64/minio\\n \\n$ wget https://dl.min.io/client/mc/release/linux-amd64/mc\\nchmod +x mc\\n./mc --help\\n \\n$ downloads ./minio server /home/vishal/data --address=0.0.0.0:7000\\n \\n\\nAPI: http://0.0.0.0:7000 \\nRootUser: minioadmin \\nRootPass: minioadmin \\nFinished loading IAM sub-system (took 0.0s of 0.0s to load data).\\n \\nConsole: http://192.168.50.113:43859 http://192.168.160.1:43859 http://172.18.0.1:43859 http://172.19.0.1:43859 http://172.24.0.1:43859 http://172.26.0.1:43859 http://172.17.0.1:43859 http://127.0.0.1:43859 \\nRootUser: minioadmin \\nRootPass: minioadmin \\n \\nCommand-line: https://docs.min.io/docs/minio-client-quickstart-guide\\n $ mc alias set myminio http://0.0.0.0:7000 minioadmin minioadmin\\n \\nDocumentation: https://docs.min.io\\n\\n\\n```\\n\\n* Setup Minio server alias and List buckets\\n\\n```shell\\n$ mc alias set minio-deb http://192.168.50.113:7000 minioadmin minioadmin\\n$ mc ls minio-deb\\n[2022-05-29 14:59:32 IST] 0B nocodb/\\n[2022-05-29 00:19:41 IST] 0B typesense/\\n\\n```\\n\\n* Create a new bucket and name it `pbm`\\n\\n```shell\\n$ mc alias set minio-deb http://192.168.50.113:7000 minioadmin minioadmin\\n$ mc ls minio-deb\\n [2022-05-29 14:59:32 IST] 0B nocodb/\\n [2022-05-29 00:19:41 IST] 0B typesense/\\n```\\n\\n* Setup PBM or compile PBM from the source 
repository\\n\\n```shell\\n$ sudo apt-get install -y libkrb5-dev\\n$ cd percona-backup-mongodb\\n$ make build\\n$ make install\\n```\\n* create pbm_config.YAML to be used for configuring PBM for using MINIO\\n\\n```YAML\\n\\nstorage:\\n type: s3\\n s3:\\n endpointUrl: http://192.168.50.113:7000\\n bucket: pbm\\n credentials:\\n access-key-id: \\"minioadmin\\"\\n secret-access-key: \\"minioadmin\\"\\n\\n\\n```\\n\\n* Configure PBM\\n\\n```shell\\n$ ./pbm config --file /home/vishal/dev/psmdb/pbm_config.yaml --mongodb-uri=\\"mongodb://bcp:test1234@192.168.50.113:15000/?replSetName=rs1\\"\\n\\n```\\n* Validate agent container logs and run the pbm list command. If MINIO is configured successfully, agent container logs shouldn\u2019t log any errors.\\n\\n```shell\\n2022-05-29T01:31:14.000+0000 D [resync] got backups list: 02022-05-29T01:31:14.000+0000 D [resync] got physical restores list: 0\\n\\n$ bin git:(main) ./pbm list --mongodb-uri=\\"mongodb://bcp:test1234@192.168.50.113:15000/?replSetName=rs1\\"\\nBackup snapshots:\\n2022-05-29T01:29:12Z [complete: 2022-05-29T01:29:16Z]\\n2022-05-29T01:38:38Z [complete: 2022-05-29T01:38:42Z]\\n2022-05-29T04:04:44Z [complete: 2022-05-29T04:04:48Z]\\n```\\n\\n* To run PBM backup and restore execute the below commands\\n\\n```bash \\n$ ./pbm backup --mongodb-uri=\\"mongodb://bcp:test1234@192.168.50.113:15000/?replSetName=rs1\\" \\n$ ./pbm restore 2022-05-29T04:04:44Z --mongodb-uri=\\"mongodb:/\\n```"},{"id":"local-home-lab","metadata":{"permalink":"/local-home-lab","source":"@site/blog/2022-04-10-local-home-lab.md","title":"Local Home Lab DNS Setup with DNSMasq and NGINX","description":"As I explored and set up an increased number of FOSS software using containers(Docker and LXD) and virtual machines(Multipass) in my home lab environment, I realized the difficulty in remembering the different ports the applications and containers are running. 
The solution to address this problem was to have a Domain Name System for the local network, which works to resolve local and external addresses with a reverse proxy to redirect calls based on DNS resolution.","date":"2022-04-10T00:00:00.000Z","formattedDate":"April 10, 2022","tags":[{"label":"reverse-proxy","permalink":"/tags/reverse-proxy"},{"label":"NGINX","permalink":"/tags/nginx"},{"label":"DNSMASQ","permalink":"/tags/dnsmasq"},{"label":"Lab","permalink":"/tags/lab"}],"readingTime":3.34,"hasTruncateMarker":true,"authors":[{"name":"Vishal Gandhi","url":"https://github.com/ivishalgandhi","image_url":"https://github.com/ivishalgandhi.png","imageURL":"https://github.com/ivishalgandhi.png"}],"frontMatter":{"slug":"local-home-lab","title":"Local Home Lab DNS Setup with DNSMasq and NGINX","authors":{"name":"Vishal Gandhi","url":"https://github.com/ivishalgandhi","image_url":"https://github.com/ivishalgandhi.png","imageURL":"https://github.com/ivishalgandhi.png"},"tags":["reverse-proxy","NGINX","DNSMASQ","Lab"]},"prevItem":{"title":"How to configure Percona MongoDB Replicaset, Percona Backup Manager, Backup Agent using Docker and perform Replicaset backup, restore using S3 compatible object storage \u2013 MINIO","permalink":"/percona-mongo-replicaset-minio"},"nextItem":{"title":"Configure Sharding in MongoDB on Docker Containers","permalink":"/sharding-mongo-docker"}},"content":"As I explored and set up an increased number of FOSS software using containers(Docker and LXD) and virtual machines(Multipass) in my home lab environment, I realized the difficulty in remembering the different ports the applications and containers are running. The solution to address this problem was to have a Domain Name System for the local network, which works to resolve local and external addresses with a reverse proxy to redirect calls based on DNS resolution.\\n\\n\x3c!--truncate--\x3e\\n\\n\\nThe below command lists the Docker Containers and ports the container are running on, the requirement is to create a domain for a home setup with domain homelab.net and access the containerized applications with appsmith.homelab.net; typesense.homelab.net; excalidraw.homelab.net\\n\\nLet\u2019s get the list of docker containers with port numbers\\n\\n```shell\\n# get container names and port numbers\\n$ docker container ls --format \\"table {{.ID}}\\\\t{{.Names}}\\\\t{{.Ports}}\\" -a\\n\\n\\nCONTAINER ID NAMES PORTS\\ncbb2ac402270 appsmith 0.0.0.0:9001->9001/tcp, 0.0.0.0:70->80/tcp, 0.0.0.0:444->443/tcp\\nc9875323b989 typesense_typesense-1_1 0.0.0.0:8108->8108/tcp\\nc453288c8496 excalidraw 0.0.0.0:3001->80/tcp\\n5be5d33f1f50 k8s-control-plane 127.0.0.1:34589->6443/tcp\\n4140d2fbf7d5 mysql_nocodb_1 0.0.0.0:8082->8080/tcp\\ne7310461bee9 mysql_root_db_1 3306/tcp, 33060/tcp\\n9b56c33d45d5 meilisearch_ms_1 0.0.0.0:7700->7700/tcp\\n9ac6a0e16b0e mongo2 0.0.0.0:20002->27017/tcp\\n2aaf01d2233f mongo1 0.0.0.0:20001->27017/tcp\\n860b521f97dc mongo3 0.0.0.0:20003->27017/tcp\\nd8ad1ec3cab8 rethinkdb_rethinkdb_1 0.0.0.0:28015->28015/tcp, 0.0.0.0:29015->29015/tcp, 0.0.0.0:8081->8080/tcp\\n```\\n\\nThe containers and applications running on the local home network as shown above do not have a public domain name, the option was to look for setting up a DNS server with [DNSMasq](https://thekelleys.org.uk/dnsmasq/doc.html), and a reverse proxy using [NGINX](https://www.nginx.com/). 
The containers may not be the only use case scenario for local DNS servers with [DNSMasq](https://thekelleys.org.uk/dnsmasq/doc.html), there could be many others like accessing a local file share across devices; accessing applications from a mobile device, and sharing a printer.\\n\\n\\n[DNSMasq](https://thekelleys.org.uk/dnsmasq/doc.html) - Dnsmasq provides network infrastructure for small networks: DNS, DHCP, router advertisement, and network boot. It is designed to be lightweight and has a small footprint, suitable for resource-constrained routers and firewalls.\\n\\n[NGINX](https://www.nginx.com/) - Reverse Proxy \u2013 A reverse proxy provides an additional level of abstraction and control to ensure the smooth flow of network traffic between clients and servers.\\n\\n**Let us get started with the implementation steps for DNSMasq and NGINX. The below steps are performed on Ubuntu 20.04 (Debian-based distro).**\\n\\nBefore starting the installation of DNSMasq, \\n\\n### Step 1: Disable systemd-resolve which binds to port 53, the default port for DNSMasq\\n\\n```shell\\n\\n sudo systemctl stop systemd-resolved\\n sudo systemctl disable systemd-resolved\\n\\n```\\n\\n### Step 2: Install DNSUtils, DNSMasq\\n\\n```shell\\nsudo apt update && sudo apt install dnsmasq && sudo apt install dnsutils\\n```\\n\\n### Step 3: Create the DNSMasq configuration file\\n\\n```shell\\n$ dnsmasq_conf=\\"no-dhcp-interface=enp2s0f0\\nbogus-priv\\ndomain=homelab.net\\nexpand-hosts\\nlocal=/homelab.net/\\ndomain-needed\\nno-resolv\\nno-poll\\nserver=8.8.8.8\\nserver=8.8.4.4\\"\\n\\n$ sudo echo -e \\"$dnsmasq_conf\\" > /etc/dnsmasq.d/home-lab.net \\n\\n$ sudo systemctl restart dnsmasq\\n```\\n\\n\\n### Step 4: Add container DNS records in the file./etc/hosts. The records in the hosts file will be used by DNSMasq for client responses\\n\\n```shell\\n $ sudo nano /etc/hosts \\n # add the below records to the hosts file\\n #Container DNS records\\n # appsmith\\n 192.168.20.113 appsmith\\n # excalidraw\\n 192.168.20.113 excalidraw\\n # typesense\\n 192.168.20.113 typesense\\n```\\n\\n### Step 5: Restart DNSMasq service\\n\\n```shell\\n$ sudo systemctl restart dnsmasq.service\\n```\\n\\n### Step 6: Install NGINX\\n\\n```shell\\n$ sudo apt update && sudo apt install nginx\\n```\\n\\n\\n### Step 6: To enable reverse proxy feature, create a new NGINX configuration file in `sites-enabled` directory\\n\\n```shell\\n $ sudo nano /etc/nginx/sites-enabled/homelab.conf\\n server {\\n listen 80;\\n listen [::]:80;\\n server_name typesense.homelab.net;\\n location / {\\n proxy_bind 192.168.20.113;\\n proxy_pass http://localhost:3000;\\n }\\n }\\n server {\\n listen 80;\\n listen [::]:80;\\n server_name appsmith.homelab.net;\\n location / {\\n proxy_bind 192.168.20.113;\\n proxy_pass http://localhost:70;\\n }\\n\\n }\\n server {\\n listen 80;\\n listen [::]:80;\\n server_name excalidraw.homelab.net;\\n location / {\\n proxy_bind 192.168.20.113;\\n proxy_pass http://localhost:3001;\\n }\\n\\n }\\n```\\n\\nThe `proxy_pass` argument will forward all incoming client requests to app.homelab.net to the respective app. 
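\\n\\nOnce NGINX has been reloaded (Step 7 below), resolution and proxying can be verified from any machine that points to the DNSMasq host for DNS. This is only a sketch: the address 192.168.20.113 mirrors the example hosts entries above and should be replaced with the address of your own server:\\n\\n```shell\\n# ask the DNSMasq server directly for one of the local records\\n$ dig +short appsmith.homelab.net @192.168.20.113\\n192.168.20.113\\n\\n# request the app through the NGINX reverse proxy using the name-based virtual host\\n$ curl -I -H \\"Host: appsmith.homelab.net\\" http://192.168.20.113/\\n```\\n\\n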
The IP address and port number can be easily changed.\\n \\n### Step 7 reload NGINX for the configuration to take into effect\\n```shell \\n$ sudo systemctl reload nginx\\n```\\nAfter a successful implementation, we will be able to access container applications using domain URLs as seen in the below screenshot with three panes first pane is appsmith ; second pane is excalidraw and third pane is typesense.\\n\\n![local-home-lab-snapshot](2022-07-19-23-09-57.png)"},{"id":"sharding-mongo-docker","metadata":{"permalink":"/sharding-mongo-docker","source":"@site/blog/2021-10-02-sharding-mongo-docker.md","title":"Configure Sharding in MongoDB on Docker Containers","description":"In my previous blog post, I posted about configuring Replica Set to meet high availability requirements.","date":"2021-10-02T00:00:00.000Z","formattedDate":"October 2, 2021","tags":[{"label":"mongodb","permalink":"/tags/mongodb"},{"label":"docker","permalink":"/tags/docker"},{"label":"sharding","permalink":"/tags/sharding"}],"readingTime":7.095,"hasTruncateMarker":true,"authors":[{"name":"Vishal Gandhi","url":"https://github.com/ivishalgandhi","image_url":"https://github.com/ivishalgandhi.png","imageURL":"https://github.com/ivishalgandhi.png"}],"frontMatter":{"slug":"sharding-mongo-docker","title":"Configure Sharding in MongoDB on Docker Containers","authors":{"name":"Vishal Gandhi","url":"https://github.com/ivishalgandhi","image_url":"https://github.com/ivishalgandhi.png","imageURL":"https://github.com/ivishalgandhi.png"},"tags":["mongodb","docker","sharding"]},"prevItem":{"title":"Local Home Lab DNS Setup with DNSMasq and NGINX","permalink":"/local-home-lab"},"nextItem":{"title":"MongoDB Replicaset with Persistent Volume using Docker Compose","permalink":"/mongodb-rs-docker-persistent-volume"}},"content":"In my previous [blog](2021-09-18-mongodb-rs-docker-persistent-volume.md) post, I posted about configuring Replica Set to meet high availability requirements. \\n\\nIn this post, i cover \\n\\n* MongoDB Sharded Cluster Components \\n* Steps to create MongoDB Sharded Cluster using Docker Compose\\n* Add Replica Set as a Shard\\n* Sharding Data\\n* Verify Distribution of Data \\n\\n\x3c!--truncate--\x3e\\n\\n## Replica Set vs Sharding \\n\\n**Replica Set** is the way of keeping identical set of data on multiple servers. Sharding refers to the process of splitting data across nodes, also known as horizontal partitioning. \\n\\nA database **shard**, is a horizontal partition of data in a database, each node contains different set of the data. \\n\\nMongoDB supports and implements `auto-sharding` by automating balancing of data across the shards. \\n\\n\\n\\n\\n## MongoDB Sharding Components \\n\\nThe first step in creating a Sharded MongoDB cluster is to understand all the components and processes that constitute a cluster \\n\\n* **Query Router - mongos**\\n\\nmongos is the routing process. The goal of sharding is to make cluster of 100-1000 nodes looks like a single interface for the application and abstract all the complexity of data access from multiple shards. The mongos router is table of contents and knows where the data required by application is located, mongos forwards the application request to appropriate shard(s). \\n\\n* **Config Servers**\\n\\nConfig Servers hold all the metadata about which node is holding which data(chunks). mongos retrieves all the metadata from Config Servers. 
Config Servers are critical and its important to configure and bring the config servers first, backup config servers and setup config servers as Replica Set. \\n\\n## Steps to create MongoDB Sharded Cluster using Docker Compose\\n\\nBelow image show different components required to setup MongoDB sharding with Replica Set. The image also shows how application communicates to MongoDB sharded cluster. As discussed in the sharding components application always connects first to mongos and mongos communicates with config server (cfg1, cfg2, cfg3 are part of replicaset in below image)\\n\\n```mermaid\\n stateDiagram-v2\\n [*] --\x3e Application\\n direction LR\\n state Application\\n state QueryRouter \\n {\\n \\n mongos \\n }\\n Application --\x3e QueryRouter : Read\\n QueryRouter --\x3e Application: Results\\n state cfg: config \\n {\\n \\n cfg1 \\n cfg2\\n cfg3\\n \\n }\\n QueryRouter --\x3e config\\n config --\x3e QueryRouter\\n state Shard1: rs_mongo1\\n {\\n shard1_mongo1\\n shard1_mongo2\\n shard1_mongo3\\n }\\n state Shard2: rs_mongo2\\n {\\n shard2_mongo1\\n shard2_mongo2\\n shard2_mongo3\\n }\\n \\n state Shard3: rs_mongo3 \\n {\\n shard3_mongo1\\n shard3_mongo2\\n shard3_mongo3\\n }\\n\\n \\n QueryRouter --\x3e rs_mongo1\\n QueryRouter --\x3e rs_mongo2\\n QueryRouter --\x3e rs_mongo3\\n rs_mongo1 --\x3e QueryRouter\\n rs_mongo2 --\x3e QueryRouter\\n rs_mongo3 --\x3e QueryRouter\\n \\n```\\n\\nLets setup above MongoDB Sharding Cluster using docker compose\\n\\n### Step 1 - Author Docker Compose file \\n\\n:::note\\nEnsure directory path mentioned in docker compose for persistent volume before the \u201c:\u201d is existing on local host\\n:::\\n\\n```YAML\\nservices:\\n shard1_mongo1:\\n image: mongo_ssh\\n hostname: shard1_mongo1\\n container_name: shard1_mongo1\\n volumes:\\n - ~/db/shard1_mongo1/mongod.conf:/etc/mongod.conf\\n - ~/db/shard1_mongo1/initdb.d/:/docker-entrypoint-initdb.d/\\n - ~/db/shard1_mongo1/data/db/:/data/db/\\n - ~/db/shard1_mongo1/log/:/var/log/mongodb/\\n ports:\\n - 20005:27017\\n command: [\\"-f\\", \\"/etc/mongod.conf\\"]\\n network_mode: mongo_net\\n \\n shard1_mongo2:\\n image: mongo_ssh\\n hostname: shard1_mongo2\\n container_name: shard1_mongo2\\n volumes:\\n - ~/db/shard1_mongo2/mongod.conf:/etc/mongod.conf\\n - ~/db/shard1_mongo2/initdb.d/:/docker-entrypoint-initdb.d/\\n - ~/db/shard1_mongo2/data/db/:/data/db/\\n - ~/db/shard1_mongo2/log/:/var/log/mongodb/\\n ports:\\n - 20006:27017\\n command: [\\"-f\\", \\"/etc/mongod.conf\\"]\\n network_mode: mongo_net\\n \\n shard1_mongo3:\\n image: mongo_ssh\\n hostname: shard1_mongo3\\n container_name: shard1_mongo3\\n volumes:\\n - ~/db/shard1_mongo3/mongod.conf:/etc/mongod.conf\\n - ~/db/shard1_mongo3/initdb.d/:/docker-entrypoint-initdb.d/\\n - ~/db/shard1_mongo3/data/db/:/data/db/\\n - ~/db/shard1_mongo3/log/:/var/log/mongodb/\\n ports:\\n - 20007:27017\\n command: [\\"-f\\", \\"/etc/mongod.conf\\"]\\n network_mode: mongo_net\\n \\n shard2_mongo1:\\n image: mongo_ssh\\n hostname: shard2_mongo1\\n container_name: shard2_mongo1\\n volumes:\\n - ~/db/shard2_mongo1/mongod.conf:/etc/mongod.conf\\n - ~/db/shard2_mongo1/initdb.d/:/docker-entrypoint-initdb.d/\\n - ~/db/shard2_mongo1/data/db/:/data/db/\\n - ~/db/shard2_mongo1/log/:/var/log/mongodb/\\n ports:\\n - 20008:27017\\n command: [\\"-f\\", \\"/etc/mongod.conf\\"]\\n network_mode: mongo_net\\n \\n shard2_mongo2:\\n image: mongo_ssh\\n hostname: shard2_mongo2\\n container_name: shard2_mongo2\\n volumes:\\n - ~/db/shard2_mongo2/mongod.conf:/etc/mongod.conf\\n - 
~/db/shard2_mongo2/initdb.d/:/docker-entrypoint-initdb.d/\\n - ~/db/shard2_mongo2/data/db/:/data/db/\\n - ~/db/shard2_mongo2/log/:/var/log/mongodb/\\n ports:\\n - 20009:27017\\n command: [\\"-f\\", \\"/etc/mongod.conf\\"]\\n network_mode: mongo_net\\n \\n shard2_mongo3:\\n image: mongo_ssh\\n hostname: shard2_mongo3\\n container_name: shard2_mongo3\\n volumes:\\n - ~/db/shard2_mongo3/mongod.conf:/etc/mongod.conf\\n - ~/db/shard2_mongo3/initdb.d/:/docker-entrypoint-initdb.d/\\n - ~/db/shard2_mongo3/data/db/:/data/db/\\n - ~/db/shard2_mongo3/log/:/var/log/mongodb/\\n ports:\\n - 20010:27017\\n command: [\\"-f\\", \\"/etc/mongod.conf\\"]\\n network_mode: mongo_net\\n \\n shard3_mongo1:\\n image: mongo_ssh\\n hostname: shard3_mongo1\\n container_name: shard3_mongo1\\n volumes:\\n - ~/db/shard3_mongo1/mongod.conf:/etc/mongod.conf\\n - ~/db/shard3_mongo1/initdb.d/:/docker-entrypoint-initdb.d/\\n - ~/db/shard3_mongo1/data/db/:/data/db/\\n - ~/db/shard3_mongo1/log/:/var/log/mongodb/\\n ports:\\n - 20011:27017\\n command: [\\"-f\\", \\"/etc/mongod.conf\\"]\\n network_mode: mongo_net\\n \\n shard3_mongo2:\\n image: mongo_ssh\\n hostname: shard3_mongo2\\n container_name: shard3_mongo2\\n volumes:\\n - ~/db/shard3_mongo2/mongod.conf:/etc/mongod.conf\\n - ~/db/shard3_mongo2/initdb.d/:/docker-entrypoint-initdb.d/\\n - ~/db/shard3_mongo2/data/db/:/data/db/\\n - ~/db/shard3_mongo2/log/:/var/log/mongodb/\\n ports:\\n - 20012:27017\\n command: [\\"-f\\", \\"/etc/mongod.conf\\"]\\n network_mode: mongo_net\\n \\n shard3_mongo3:\\n image: mongo_ssh\\n hostname: shard3_mongo3\\n container_name: shard3_mongo3\\n volumes:\\n - ~/db/shard3_mongo3/mongod.conf:/etc/mongod.conf\\n - ~/db/shard3_mongo3/initdb.d/:/docker-entrypoint-initdb.d/\\n - ~/db/shard3_mongo3/data/db/:/data/db/\\n - ~/db/shard3_mongo3/log/:/var/log/mongodb/\\n ports:\\n - 20013:27017\\n command: [\\"-f\\", \\"/etc/mongod.conf\\"]\\n network_mode: mongo_net\\n# MongoDB Confiugration Server \\n cfg1:\\n image: mongo_ssh\\n hostname: cfg1\\n container_name: cfg1\\n volumes:\\n - ~/db/cfg1/mongod.conf:/etc/mongod.conf\\n - ~/db/cfg1/initdb.d/:/docker-entrypoint-initdb.d/\\n - ~/db/cfg1/data/db/:/data/db/\\n - ~/db/cfg1/log/:/var/log/mongodb/\\n ports:\\n - 20014:27017\\n command: [\\"-f\\", \\"/etc/mongod.conf\\"]\\n network_mode: mongo_net\\n \\n cfg2:\\n image: mongo_ssh\\n hostname: cfg2\\n container_name: cfg2\\n volumes:\\n - ~/db/cfg2/mongod.conf:/etc/mongod.conf\\n - ~/db/cfg2/initdb.d/:/docker-entrypoint-initdb.d/\\n - ~/db/cfg2/data/db/:/data/db/\\n - ~/db/cfg2/log/:/var/log/mongodb/\\n ports:\\n - 20015:27017\\n command: [\\"-f\\", \\"/etc/mongod.conf\\"]\\n network_mode: mongo_net\\n \\n cfg3:\\n image: mongo_ssh\\n hostname: cfg3\\n container_name: cfg3\\n volumes:\\n - ~/db/cfg3/mongod.conf:/etc/mongod.conf\\n - ~/db/cfg3/initdb.d/:/docker-entrypoint-initdb.d/\\n - ~/db/cfg3/data/db/:/data/db/\\n - ~/db/cfg3/log/:/var/log/mongodb/\\n ports:\\n - 20016:27017\\n command: [\\"-f\\", \\"/etc/mongod.conf\\"]\\n network_mode: mongo_net\\n \\n mongos:\\n image: mongo_ssh\\n hostname: mongos\\n container_name: mongos\\n volumes:\\n - ~/db/mongos/mongod.conf:/etc/mongod.conf\\n - ~/db/mongos/initdb.d/:/docker-entrypoint-initdb.d/\\n - ~/db/mongos/data/db/:/data/db/\\n - ~/db/mongos/log/:/var/log/mongodb/\\n ports:\\n - 20017:27017\\n command: [\\"mongos\\",\\"-f\\", \\"/etc/mongod.conf\\"]\\n network_mode: mongo_net\\n\\n\\n```\\n\\n### Step 2 - Draft Config Server configuration file (pass clusterRole: configsvr to indicate this server is Config 
Server)\\n\\n```YAML\\nsystemLog:\\n destination: file\\n logAppend: true\\n path: /var/log/mongodb/mongod.log\\nstorage:\\n dbPath: /data/db\\n journal:\\n enabled: true\\n engine: wiredTiger\\nnet:\\n port: 27017\\n bindIp: 127.0.0.1 # Enter 0.0.0.0,:: to bind to all IPv4 and IPv6 addresses or, alternatively, use the net.bindIpAll setting.\\nsharding:\\n clusterRole: configsvr\\nreplication:\\n replSetName: rs_config\\n```\\n\\n### Step 3 - Draft Query Router mongos configuration file (pass configDB:config server list)\\n\\n```YAML\\nsystemLog:\\n destination: file\\n logAppend: true\\n path: /var/log/mongodb/mongod.log\\n \\nnet:\\n port: 27017\\n bindIp: 127.0.0.1 # Enter 0.0.0.0,:: to bind to all IPv4 and IPv6 addresses or, alternatively, use the net.bindIpAll setting.\\n \\nsharding:\\n configDB: rs_config/cfg1:27017,cfg2:27017,cfg3:27017\\n```\\n\\n### Step 4 - Copy mongod.conf and mongos.conf to the path mentioned in step 1 `docker-compose.yaml`\\n\\n### Step 5 - Spin up Config Server, mongos, all mongod nodes\\n\\n```shell \\n$ docker compose up -d\\n```\\n\\n### Step 6 - Connect to config server and add config server in a Replica Set\\n\\n```javascript \\nrs_config:PRIMARY> rs.initiate() \\nrs_config:PRIMARY> rs.add(\\"cfg2:27017\\")\\nrs_config:PRIMARY> rs.add(\\"cfg3:27017\\")\\n```\\n\\n### Step 7 - Add all data nodes to replicaset\\n\\n```javascript \\n# Connect to shard1_mongo1\\n \\nadmin> rs.initiate()\\nrs_mongo1 [direct: primary] admin> rs.add(\\"shard1_mongo2\\")\\nrs_mongo1 [direct: primary] admin> rs.add(\\"shard1_mongo3\\")\\n \\n# Connect to shard2_mongo1\\n \\nadmin> rs.initiate()\\nrs_mongo2 [direct: primary] test> rs.add(\\"shard2_mongo2\\")\\nrs_mongo2 [direct: primary] test> rs.add(\\"shard2_mongo3\\")\\n \\n# Connect to shard3_mongo1\\n \\ntest> rs.initiate()\\nrs_mongo3 [direct: other] test> rs.add(\\"shard3_mongo2\\")\\nrs_mongo3 [direct: primary] test> rs.add(\\"shard3_mongo3\\")\\n\\n```\\n\\n### Step 8 \u2013 Connect to mongos and convert data replicaset nodes to shards\\n\\n```javascript\\n\\nmongos>sh.addShard(\\"rs_mongo1/shard1_mongo1:27017,shard1_mongo2:27017,shard1_mongo3:27017\\")\\n \\nmongos>sh.addShard(\\"rs_mongo2/shard2_mongo1:27017,shard2_mongo2:27017,shard2_mongo3:27017\\")\\n \\nmongos>sh.addShard(\\"rs_mongo3/shard3_mongo1:27017,shard3_mongo2:27017,\\n\\n```\\n\\n### Step 9 \u2013 Connect to mongos and enable sharding on a test database \u201cEmployee\u201d\\n\\n```javascript\\nmongos> db.adminCommand({enableSharding : \\"employee\\"})\\n```\\n\\n### Step 10 \u2013 Generate test data ; Create an index on the key to be sharded and shard the collection\\n\\n```javascript\\nmongos> use employee\\nswitched to db employee\\n \\nmongos> for (var i = 0; i < 100000; i++) { db.emp_list2.insert({ \\"sr_no\\": \\"emp # \\" + i, \\"create_date\\": new Date() }); }\\n \\nmongos> db.emp_list2.ensureIndex({\\"sr_no\\" : \\"hashed\\"})\\n \\nmongos> sh.shardCollection(\\"employee.emp_list2\\", {\\"sr_no\\":\\"hashed\\"})\\n \\n{\\n \\"collectionsharded\\" : \\"employee.emp_list2\\",\\n \\"collectionUUID\\" : UUID(\\"17195baa-fc6c-4c3e-8a2b-58fb1278e40c\\"),\\n \\"ok\\" : 1,\\n \\"operationTime\\" : Timestamp(1633177398, 26),\\n \\"$clusterTime\\" : {\\n \\"clusterTime\\" : Timestamp(1633177398, 26),\\n \\"signature\\" : {\\n \\"hash\\" : BinData(0,\\"AAAAAAAAAAAAAAAAAAAAAAAAAAA=\\"),\\n \\"keyId\\" : NumberLong(0)\\n }\\n }\\n}\\n```\\n\\n### Step 11 \u2013 Validate sharding status\\n\\n```javascript \\n\\n\\nmongos> sh.status()\\n--- Sharding Status 
---\\n sharding version: {\\n \\"_id\\" : 1,\\n \\"minCompatibleVersion\\" : 5,\\n \\"currentVersion\\" : 6,\\n \\"clusterId\\" : ObjectId(\\"6157efd7982782e314f1b651\\")\\n }\\n shards:\\n { \\"_id\\" : \\"rs_mongo1\\", \\"host\\" : \\"rs_mongo1/shard1_mongo1:27017,shard1_mongo2:27017,shard1_mongo3:27017\\", \\"state\\" : 1 }\\n { \\"_id\\" : \\"rs_mongo2\\", \\"host\\" : \\"rs_mongo2/shard2_mongo1:27017,shard2_mongo2:27017,shard2_mongo3:27017\\", \\"state\\" : 1 }\\n { \\"_id\\" : \\"rs_mongo3\\", \\"host\\" : \\"rs_mongo3/shard3_mongo1:27017,shard3_mongo2:27017,shard3_mongo3:27017\\", \\"state\\" : 1 }\\n active mongoses:\\n \\"4.4.8\\" : 1\\n autosplit:\\n Currently enabled: yes\\n balancer:\\n Currently enabled: yes\\n Currently running: no\\n Failed balancer rounds in last 5 attempts: 0\\n Migration Results for the last 24 hours:\\n 682 : Success\\n databases:\\n { \\"_id\\" : \\"config\\", \\"primary\\" : \\"config\\", \\"partitioned\\" : true }\\n config.system.sessions\\n shard key: { \\"_id\\" : 1 }\\n unique: false\\n balancing: true\\n chunks:\\n rs_mongo1 342\\n rs_mongo2 341\\n rs_mongo3 341\\n too many chunks to print, use verbose if you want to force print\\n employee.emp_list2\\n shard key: { \\"sr_no\\" : \\"hashed\\" }\\n unique: false\\n balancing: true\\n chunks:\\n rs_mongo1 2\\n rs_mongo2 2\\n rs_mongo3 \\n```\\n\\n### Step 12 - Validate chunk distribution \\n```javascript\\n\\nmongos> db.getSiblingDB(\\"employee\\").emp_list2.getShardDistribution();\\n \\nShard rs_mongo1 at rs_mongo1/shard1_mongo1:27017,shard1_mongo2:27017,shard1_mongo3:27017\\n data : 2.09MiB docs : 33426 chunks : 2\\n estimated data per chunk : 1.04MiB\\n estimated docs per chunk : 16713\\n \\nShard rs_mongo3 at rs_mongo3/shard3_mongo1:27017,shard3_mongo2:27017,shard3_mongo3:27017\\n data : 2.09MiB docs : 33379 chunks : 2\\n estimated data per chunk : 1.04MiB\\n estimated docs per chunk : 16689\\n \\nShard rs_mongo2 at rs_mongo2/shard2_mongo1:27017,shard2_mongo2:27017,shard2_mongo3:27017\\n data : 2.08MiB docs : 33195 chunks : 2\\n estimated data per chunk : 1.04MiB\\n estimated docs per chunk : 16597\\n \\nTotals\\n data : 6.28MiB docs : 100000 chunks : 6\\n Shard rs_mongo1 contains 33.42% data, 33.42% docs in cluster, avg obj size on shard : 65B\\n Shard rs_mongo3 contains 33.37% data, 33.37% docs in cluster, avg obj size on shard : 65B\\n Shard rs_mongo2 contains 33.19% data, 33.19% docs in cluster, avg \\n\\n```"},{"id":"mongodb-rs-docker-persistent-volume","metadata":{"permalink":"/mongodb-rs-docker-persistent-volume","source":"@site/blog/2021-09-18-mongodb-rs-docker-persistent-volume.md","title":"MongoDB Replicaset with Persistent Volume using Docker Compose","description":"In this article we will see the steps required to create and configure MongoDB replicaset containers on persistent volumes using Docker Compose. Compose was developed to define, configure and spin-up multi-container docker applications with single command, further reducing . 
Extensive usage of Docker with several container management quickly becomes cumbersome, Compose overcomes this problem and allows to easily handle multiple containers at once using YAML configuration docker-compose.yml","date":"2021-09-18T00:00:00.000Z","formattedDate":"September 18, 2021","tags":[{"label":"mongodb","permalink":"/tags/mongodb"},{"label":"docker","permalink":"/tags/docker"},{"label":"replicaset","permalink":"/tags/replicaset"},{"label":"persistent-volume","permalink":"/tags/persistent-volume"}],"readingTime":2.795,"hasTruncateMarker":true,"authors":[{"name":"Vishal Gandhi","url":"https://github.com/ivishalgandhi","image_url":"https://github.com/ivishalgandhi.png","imageURL":"https://github.com/ivishalgandhi.png"}],"frontMatter":{"slug":"mongodb-rs-docker-persistent-volume","title":"MongoDB Replicaset with Persistent Volume using Docker Compose","authors":{"name":"Vishal Gandhi","url":"https://github.com/ivishalgandhi","image_url":"https://github.com/ivishalgandhi.png","imageURL":"https://github.com/ivishalgandhi.png"},"tags":["mongodb","docker","replicaset","persistent-volume"]},"prevItem":{"title":"Configure Sharding in MongoDB on Docker Containers","permalink":"/sharding-mongo-docker"},"nextItem":{"title":"Create MongoDB Standalone and Replica Set containers using Docker","permalink":"/create-mongodb-docker"}},"content":"In this article we will see the steps required to create and configure MongoDB replicaset containers on **persistent volumes** using [Docker Compose](https://docs.docker.com/compose/). Compose was developed to define, configure and spin-up multi-container docker applications with single command, further reducing . Extensive usage of Docker with several container management quickly becomes cumbersome, Compose overcomes this problem and allows to easily handle multiple containers at once using YAML configuration `docker-compose.yml`\\n\\n\x3c!--truncate--\x3e\\n\\n## Docker Compose Steps\\n\\n### Step 1: System Configuration\\n\\nTo run Compose, make sure you have installed Compose on your local system where Docker is installed. The Compose setup and installation instructions can be found here.\\n\\n### Step 2: Ensure mongo_net network bridge is already existing\\n\\n```shell\\n$ docker network create mongo_net\\n$ docker network inspect mongo_net \\n```\\n### Step 3: Lets convert the below command as seen in previous blog post to docker-compose.yml. If you are new to Docker and drafting compose files try using composerize to convert docker run commands into compose YAML output\\n\\n```shell \\n$ docker run -d -p 20003:27017 --name mongo3 --network mongo_net mongo:4.4.9-rc0 mongod --replSet rs_mongo\\n```\\nThere are few additional attributes passed in the `docker-compose.yml`. The difference in the options passed in the command line above and `docker-compose.yml` is as below\\n\\n- image: custom image uploaded to docker hub with additional utilities installed on ubuntu build\\nhostname: container host name\\n- volumes: map directory on the host file system to manage and store container data. In the below YAML i use separate directory for all 3 MongoDB replicaset. This helps in creating persistent data store for docker containers and doesn\u2019t bloat the container runtime instance.\\n- Pass mongod configuration options through file mongod.conf\\n\\nCreate the below YAML compose file in your favourite editor, i have been using Visual Studio Code. 
Save the file as docker-compose.yml\\n\\n\\n```shell\\n$ code .\\n\\n``` \\n\\n```yaml\\n#version: \\"3.3\\"\\nservices:\\n mongo_1:\\n image: ivishalgandhi/mongo-custom:latest\\n hostname: mongo_1\\n container_name: mongo_1\\n volumes:\\n - /Users/vishalgandhi/learning/docker/mongo_replset/mongo_1/mongod.conf:/etc/mongod.conf\\n - /Users/vishalgandhi/learning/docker/mongo_replset/mongo_1/initdb.d/:/docker-entrypoint-initdb.d/\\n - /Users/vishalgandhi/learning/docker/mongo_replset/mongo_1/data/db/:/data/db/\\n - /Users/vishalgandhi/learning/docker/mongo_replset/mongo_1/log/:/var/log/mongodb/\\n ports:\\n - 20003:27017\\n command: [\\"-f\\", \\"/etc/mongod.conf\\",\\"--replSet\\", \\"rs_mongo\\"]\\n network_mode: mongo_net\\n \\n mongo_2:\\n image: ivishalgandhi/mongo-custom:latest\\n hostname: mongo_2\\n container_name: mongo_2\\n volumes:\\n - /Users/vishalgandhi/learning/docker/mongo_replset/mongo_2/mongod.conf:/etc/mongod.conf\\n - /Users/vishalgandhi/learning/docker/mongo_replset/mongo_2/initdb.d/:/docker-entrypoint-initdb.d/\\n - /Users/vishalgandhi/learning/docker/mongo_replset/mongo_2/data/db/:/data/db/\\n - /Users/vishalgandhi/learning/docker/mongo_replset/mongo_2/log/:/var/log/mongodb/\\n ports:\\n - 20004:27017\\n command: [\\"-f\\", \\"/etc/mongod.conf\\",\\"--replSet\\", \\"rs_mongo\\"]\\n network_mode: mongo_net\\n \\n mongo_3:\\n image: ivishalgandhi/mongo-custom:latest\\n hostname: mongo_3\\n container_name: mongo_3\\n volumes:\\n - /Users/vishalgandhi/learning/docker/mongo_replset/mongo_3/mongod.conf:/etc/mongod.conf\\n - /Users/vishalgandhi/learning/docker/mongo_replset/mongo_3/initdb.d/:/docker-entrypoint-initdb.d/\\n - /Users/vishalgandhi/learning/docker/mongo_replset/mongo_3/data/db/:/data/db/\\n - /Users/vishalgandhi/learning/docker/mongo_replset/mongo_3/log/:/var/log/mongodb/\\n ports:\\n - 20005:27017\\n command: [\\"-f\\", \\"/etc/mongod.conf\\",\\"--replSet\\", \\"rs_mongo\\"]\\n network_mode: mongo_net\\n\\n```\\n\\n### Step 4: create mongod.conf\\n\\n```\\n$ code .\\n\\n```\\n\\n```YAML \\n# mongod.conf\\n \\n# for documentation of all options, see:\\n# http://docs.mongodb.org/manual/reference/configuration-options/\\n \\n# where to write logging data.\\nsystemLog:\\n destination: file\\n logAppend: true\\n path: /var/log/mongodb/mongod.log\\n \\n# Where and how to store data.\\nstorage:\\n dbPath: /data/db\\n journal:\\n enabled: true\\n engine: wiredTiger\\n \\n# network interfaces\\nnet:\\n port: 27017\\n bindIp: 127.0.0.1 \\n```\\n\\n### Step 5: Spin-up replicaset containers\\n\\n```shell \\n$ docker compose up -d\\n[+] Running 3/3\\n \u283f Container mongo_2 Created 0.2s\\n \u283f Container mongo_1 Created 0.2s\\n \u283f Container mongo_3 Created\\n```\\n\\n### Step 6: Initiate replicaset\\n\\n```shell\\n$ docker exec -it mongo_1 bash\\n\\nroot@mongo_1:/# mongo\\nrs_mongo:SECONDARY> rs.initiate(\\n {\\n _id: \u201crs_mongo\u201d,\\n version: 1,\\n members: [\\n { _id: 0, host : \u201cmongo_1:27017\u201d },\\n { _id: 1, host : \u201cmongo_2:27017\u201d },\\n { _id: 2, host : \u201cmongo_3:27017\u201d }\\n ]\\n }\\n)\\n \\nrs_mongo:SECONDARY> db.isMaster() \\n{\\n \\"topologyVersion\\" : {\\n \\"processId\\" : ObjectId(\\"614615744d54c08963ef67f6\\"),\\n \\"counter\\" : NumberLong(6)\\n },\\n \\"hosts\\" : [\\n \\"mongo_1:27017\\",\\n \\"mongo_2:27017\\",\\n \\"mongo_3:27017\\"\\n ],\\n \\"setName\\" : \\"rs_mongo\\",\\n \\"setVersion\\" : 1,\\n \\"ismaster\\" : true,\\n \\"secondary\\" : false,\\n \\"primary\\" : \\"mongo_2:27017\\",\\n \\"me\\" : 
\\"mongo_2:27017\\",\\n\\n```"},{"id":"create-mongodb-docker","metadata":{"permalink":"/create-mongodb-docker","source":"@site/blog/2021-09-12-create-mongodb-docker.md","title":"Create MongoDB Standalone and Replica Set containers using Docker","description":"Docker Containers offer easy setup, customization and scalability. In this article, i will walk you through how to use Docker to setup MongoDB standalone and replica set containers within minutes.","date":"2021-09-12T00:00:00.000Z","formattedDate":"September 12, 2021","tags":[{"label":"mongodb","permalink":"/tags/mongodb"},{"label":"containers","permalink":"/tags/containers"},{"label":"docker","permalink":"/tags/docker"},{"label":"mongo-replicaset","permalink":"/tags/mongo-replicaset"}],"readingTime":4.65,"hasTruncateMarker":true,"authors":[{"name":"Vishal Gandhi","url":"https://github.com/ivishalgandhi","image_url":"https://github.com/ivishalgandhi.png","imageURL":"https://github.com/ivishalgandhi.png"}],"frontMatter":{"slug":"create-mongodb-docker","title":"Create MongoDB Standalone and Replica Set containers using Docker","authors":{"name":"Vishal Gandhi","url":"https://github.com/ivishalgandhi","image_url":"https://github.com/ivishalgandhi.png","imageURL":"https://github.com/ivishalgandhi.png"},"tags":["mongodb","containers","docker","mongo-replicaset"]},"prevItem":{"title":"MongoDB Replicaset with Persistent Volume using Docker Compose","permalink":"/mongodb-rs-docker-persistent-volume"}},"content":"Docker Containers offer easy setup, customization and scalability. In this article, i will walk you through how to use Docker to setup MongoDB standalone and replica set containers within minutes.\\n\\nThe article is divided in two parts, the first part is setting up the standalone MongoDB container and second part is setting up and grouping MongoDB containers as member of replica set with Docker.\\n\\nLet\u2019s get started.\\n\\n\x3c!--truncate--\x3e\\n\\n## System Configuration\\n\\nTo run this setup, Docker Engine is required to be installed on the system. Follow the official documentation to setup Docker Engine on your system.\\n\\n:::caution\\n\\nThe steps and configuration for both standalone and replica set is not to be used for production deployment. The intended use is only for setting up a environment to support learning of MongoDB.\\n\\n:::\\n\\n## Standalone MongoDB Setup\\n\\n* Pull the Docker MongoDB official image from Docker Hub. The following code snippet demonstrates pulling the docker MongoDB 4.4.9 release. 
To pull the MongoDB 5.0 latest release replace :4.4.9-rc0 with :latest tag\\n\\n```shell \\n\\n$ docker pull mongo:4.4.9-rc0 \\n\\n```\\n\\n* To check if the image pull from Docker Hub was successful\\n\\n\\n```\\n\\n$ docker images \\nREPOSITORY TAG IMAGE ID CREATED SIZE\\nmongo 4.4.9-rc0 24599d6cde30 9 days ago 413MB\\nmongo latest 31299b956c79 10 days ago 642MB\\n\\n```\\n\\n* Let\u2019s start the first standalone container \u2013 the below command starts a MongoDB Docker container named mongo_449 in detached mode using the 4.4.9-rc0 image\\n\\n```shell\\n\\n$ docker run --name mongo_449 -d mongo:4.4.9-rc0\\n\\n```\\n\\n* List the container status and health by executing\\n\\n```shell\\n\\n$ docker container ls -a\\n\\nCONTAINER ID IMAGE COMMAND CREATED STATUS PORTS NAMES\\n96e64ec525a2 24599d6cde30 \\"docker-entrypoint.s\u2026\\" 2 hours ago Up 33 minutes 27017/tcp mongo_449\\n\\n```\\n\\n* To run a command inside the container\\n * `docker exec` : run a command in a running container\\n * `-i` : keep STDIN open even if not attached to the container\\n * `-t` : allocate a pseudo-TTY\\n\\n\\n* Connect to the MongoDB daemon\\n\\n```shell\\nroot@96e64ec525a2:/# mongo\\n\\nMongoDB shell version v4.4.9-rc0\\nconnecting to: mongodb://127.0.0.1:27017/?compressors=disabled&gssapiServiceName=mongodb\\nImplicit session: session { \\"id\\" : UUID(\\"ac624a79-908b-4580-90ae-22d0a7aee07a\\") }\\nMongoDB server version: 4.4.9-rc0\\n\\n```\\n\\n* Install utilities. The utilities ping, systemctl, and sudo can be installed in the container and used for troubleshooting during the setup of Docker containers.\\n\\n```shell\\n\\nroot@96e64ec525a2:/# apt-get install iputils-ping\\nroot@96e64ec525a2:/# apt-get install sudo \\nroot@96e64ec525a2:/# apt-get install systemctl\\n\\n```\\n\\nThis finishes the setup of the standalone MongoDB container. Now let\u2019s look at the replica set setup.\\n\\n## Creating MongoDB ReplicaSet using Docker\\n\\nA replica set consists of a primary node together with two or more secondary nodes. It is recommended to group three or more nodes, with an odd number of total nodes. The primary node accepts all the write requests, which are then replicated asynchronously to the secondary nodes. Below are the steps required to complete the replica set setup using Docker.\\n\\nCreate a new network (bridge) within Docker. The replica set containers will be mapped to the new network.\\n\\n```shell \\n$ docker network create mongo_net\\n$ docker network inspect mongo_net \\n[\\n {\\n \\"Name\\": \\"mongo_net\\",\\n \\"Id\\": \\"e2567806642a9245436371a9b9904c71fadae969fbd11a7bb8203e07976b1b2a\\",\\n \\"Created\\": \\"2021-09-11T00:36:33.989688708Z\\",\\n \\"Scope\\": \\"local\\",\\n \\"Driver\\": \\"bridge\\",\\n \\"EnableIPv6\\": false,\\n \\"IPAM\\": {\\n \\"Driver\\": \\"default\\",\\n \\"Options\\": {},\\n \\"Config\\": [\\n {\\n \\"Subnet\\": \\"172.18.0.0/16\\",\\n \\"Gateway\\": \\"172.18.0.1\\"\\n }\\n ]\\n },\\n...\\n]\\n```\\n\\n* Start 3 containers \u2013 Primary, Secondary, Secondary\\n * Breakdown of parameters \u2013 docker run : start a new container\\n * `-d` : run the container in detached mode\\n * `-p 20001:27017` : publish the container port to the host, binding container port 27017 to port 20001 on the host. 
This is useful when connecting a mongo client like mongosh to the container\\n * `--name` : name of the mongo container\\n * `--network` : connect to the user-created network mongo_net\\n * `mongo:4.4.9-rc0` : Docker MongoDB image\\n * `mongod --replSet rs_mongo` : run the mongod daemon and add the container to the replica set named rs_mongo\\n\\n```shell \\n$ docker run -d -p 20001:27017 --name mongo1 --network mongo_net mongo:4.4.9-rc0 mongod --replSet rs_mongo\\n$ docker run -d -p 20002:27017 --name mongo2 --network mongo_net mongo:4.4.9-rc0 mongod --replSet rs_mongo\\n$ docker run -d -p 20003:27017 --name mongo3 --network mongo_net mongo:4.4.9-rc0 mongod --replSet rs_mongo\\n```\\n\\n* Set up the replica set. Connect to one of the containers and run the below commands. The container that receives the initiate command will pass on the configuration to the other containers assigned as members.\\n\\n```js\\nrs_mongo [direct: primary] test_2> config = {\\n \\"_id\\" : \\"rs_mongo\\",\\n \\"members\\" : [\\n {\\n \\"_id\\" : 0,\\n \\"host\\" : \\"mongo1:27017\\"\\n },\\n {\\n \\"_id\\" : 1,\\n \\"host\\" : \\"mongo2:27017\\"\\n },\\n {\\n \\"_id\\" : 2,\\n \\"host\\" : \\"mongo3:27017\\"\\n }\\n ]\\n }\\n\\nrs_mongo [direct: primary] admin> rs.initiate(config)\\n\\n// Insert test data\\n\\nrs_mongo [direct: primary] admin> use test_2\\nrs_mongo [direct: primary] test_2> db.employees.insert({name: \\"vishal\\"})\\n\\n// To run read queries on a secondary, set the read preference with setReadPref. \\nrs_mongo [direct: secondary] test_2> db.getMongo().setReadPref(\'secondary\')\\n\\nrs_mongo [direct: secondary] test_2> db.employees.find()\\n[\\n { _id: ObjectId(\\"613c99801ea796508e3c73f5\\"), name: \'vishal\' }\\n]\\n\\n```\\n\\n* Validate Replica Set Configuration\\n\\n```js\\nrs_mongo [direct: primary] test_2> db.printReplicationInfo()\\n\\nconfigured oplog size\\n\'557174 MB\'\\n---\\nlog length start to end\\n\'71372 secs (19.83 hrs)\'\\n---\\noplog first event time\\n\'Sat Sep 11 2021 15:47:21 GMT+0530 (India Standard Time)\'\\n---\\noplog last event time\\n\'Sun Sep 12 2021 11:36:53 GMT+0530 (India Standard Time)\'\\n---\\nnow\\n\'Sun Sep 12 2021 11:36:54 GMT+0530 (India Standard Time)\'\\n\\n\\nrs_mongo [direct: primary] test_2> rs.conf()\\n{\\n _id: \'rs_mongo\',\\n version: 1,\\n term: 1,\\n protocolVersion: Long(\\"1\\"),\\n writeConcernMajorityJournalDefault: true,\\n members: [\\n {\\n _id: 0,\\n host: \'mongo1:27017\',\\n arbiterOnly: false,\\n buildIndexes: true,\\n hidden: false,\\n priority: 1,\\n tags: {},\\n slaveDelay: Long(\\"0\\"),\\n votes: 1\\n },\\n {\\n _id: 1,\\n host: \'mongo2:27017\',\\n arbiterOnly: false,\\n buildIndexes: true,\\n hidden: false,\\n priority: 1,\\n tags: {},\\n slaveDelay: Long(\\"0\\"),\\n votes: 1\\n },\\n {\\n _id: 2,\\n host: \'mongo3:27017\',\\n arbiterOnly: false,\\n buildIndexes: true,\\n hidden: false,\\n priority: 1,\\n tags: {},\\n slaveDelay: Long(\\"0\\"),\\n votes: 1\\n }\\n\\n```\\nThat concludes this article."}]}')}}]); \ No newline at end of file diff --git a/assets/js/30e9b9ae.fdc0c8d1.js b/assets/js/30e9b9ae.41596ebf.js similarity index 98% rename from assets/js/30e9b9ae.fdc0c8d1.js rename to assets/js/30e9b9ae.41596ebf.js index 667aa92..c98c29c 100644 --- a/assets/js/30e9b9ae.fdc0c8d1.js +++ b/assets/js/30e9b9ae.41596ebf.js @@ -1 +1 @@ -"use strict";(self.webpackChunk=self.webpackChunk||[]).push([[6982],{5388:(e,t,n)=>{n.r(t),n.d(t,{assets:()=>i,contentTitle:()=>r,default:()=>c,frontMatter:()=>o,metadata:()=>l,toc:()=>p});var a=n(7462),s=(n(7294),n(3905));n(1839);const 
o={slug:"local-home-lab",title:"Local Home Lab DNS Setup with DNSMasq and NGINX",authors:{name:"Vishal Gandhi",url:"https://github.com/ivishalgandhi",image_url:"https://github.com/ivishalgandhi.png"},tags:["reverse-proxy","NGINX","DNSMASQ","Lab"]},r=void 0,l={permalink:"/local-home-lab",source:"@site/blog/2022-04-10-local-home-lab.md",title:"Local Home Lab DNS Setup with DNSMasq and NGINX",description:"As I explored and set up an increased number of FOSS software using containers(Docker and LXD) and virtual machines(Multipass) in my home lab environment, I realized the difficulty in remembering the different ports the applications and containers are running. The solution to address this problem was to have a Domain Name System for the local network, which works to resolve local and external addresses with a reverse proxy to redirect calls based on DNS resolution.",date:"2022-04-10T00:00:00.000Z",formattedDate:"April 10, 2022",tags:[{label:"reverse-proxy",permalink:"/tags/reverse-proxy"},{label:"NGINX",permalink:"/tags/nginx"},{label:"DNSMASQ",permalink:"/tags/dnsmasq"},{label:"Lab",permalink:"/tags/lab"}],readingTime:3.34,hasTruncateMarker:!0,authors:[{name:"Vishal Gandhi",url:"https://github.com/ivishalgandhi",image_url:"https://github.com/ivishalgandhi.png",imageURL:"https://github.com/ivishalgandhi.png"}],frontMatter:{slug:"local-home-lab",title:"Local Home Lab DNS Setup with DNSMasq and NGINX",authors:{name:"Vishal Gandhi",url:"https://github.com/ivishalgandhi",image_url:"https://github.com/ivishalgandhi.png",imageURL:"https://github.com/ivishalgandhi.png"},tags:["reverse-proxy","NGINX","DNSMASQ","Lab"]},prevItem:{title:"How to configure Percona MongoDB Replicaset, Percona Backup Manager, Backup Agent using Docker and perform Replicaset backup, restore using S3 compatible object storage \u2013 MINIO",permalink:"/percona-mongo-replicaset-minio"},nextItem:{title:"Configure Sharding in MongoDB on Docker Containers",permalink:"/sharding-mongo-docker"}},i={authorsImageUrls:[void 0]},p=[{value:"Step 1: Disable systemd-resolve which binds to port 53, the default port for DNSMasq",id:"step-1-disable-systemd-resolve-which-binds-to-port-53-the-default-port-for-dnsmasq",level:3},{value:"Step 2: Install DNSUtils, DNSMasq",id:"step-2-install-dnsutils-dnsmasq",level:3},{value:"Step 3: Create the DNSMasq configuration file",id:"step-3-create-the-dnsmasq-configuration-file",level:3},{value:"Step 4: Add container DNS records in the file./etc/hosts. 
The records in the hosts file will be used by DNSMasq for client responses",id:"step-4-add-container-dns-records-in-the-fileetchosts-the-records-in-the-hosts-file-will-be-used-by-dnsmasq-for-client-responses",level:3},{value:"Step 5: Restart DNSMasq service",id:"step-5-restart-dnsmasq-service",level:3},{value:"Step 6: Install NGINX",id:"step-6-install-nginx",level:3},{value:"Step 6: To enable reverse proxy feature, create a new NGINX configuration file in sites-enabled directory",id:"step-6-to-enable-reverse-proxy-feature-create-a-new-nginx-configuration-file-in-sites-enabled-directory",level:3},{value:"Step 7 reload NGINX for the configuration to take into effect",id:"step-7-reload-nginx-for-the-configuration-to-take-into-effect",level:3}],d={toc:p};function c(e){let{components:t,...o}=e;return(0,s.kt)("wrapper",(0,a.Z)({},d,o,{components:t,mdxType:"MDXLayout"}),(0,s.kt)("p",null,"As I explored and set up an increased number of FOSS software using containers(Docker and LXD) and virtual machines(Multipass) in my home lab environment, I realized the difficulty in remembering the different ports the applications and containers are running. The solution to address this problem was to have a Domain Name System for the local network, which works to resolve local and external addresses with a reverse proxy to redirect calls based on DNS resolution."),(0,s.kt)("p",null,"The below command lists the Docker Containers and ports the container are running on, the requirement is to create a domain for a home setup with domain homelab.net and access the containerized applications with appsmith.homelab.net; typesense.homelab.net; excalidraw.homelab.net"),(0,s.kt)("p",null,"Let\u2019s get the list of docker containers with port numbers"),(0,s.kt)("pre",null,(0,s.kt)("code",{parentName:"pre",className:"language-shell"},'# get container names and port numbers\n$ docker container ls --format "table {{.ID}}\\t{{.Names}}\\t{{.Ports}}" -a\n\n\nCONTAINER ID NAMES PORTS\ncbb2ac402270 appsmith 0.0.0.0:9001->9001/tcp, 0.0.0.0:70->80/tcp, 0.0.0.0:444->443/tcp\nc9875323b989 typesense_typesense-1_1 0.0.0.0:8108->8108/tcp\nc453288c8496 excalidraw 0.0.0.0:3001->80/tcp\n5be5d33f1f50 k8s-control-plane 127.0.0.1:34589->6443/tcp\n4140d2fbf7d5 mysql_nocodb_1 0.0.0.0:8082->8080/tcp\ne7310461bee9 mysql_root_db_1 3306/tcp, 33060/tcp\n9b56c33d45d5 meilisearch_ms_1 0.0.0.0:7700->7700/tcp\n9ac6a0e16b0e mongo2 0.0.0.0:20002->27017/tcp\n2aaf01d2233f mongo1 0.0.0.0:20001->27017/tcp\n860b521f97dc mongo3 0.0.0.0:20003->27017/tcp\nd8ad1ec3cab8 rethinkdb_rethinkdb_1 0.0.0.0:28015->28015/tcp, 0.0.0.0:29015->29015/tcp, 0.0.0.0:8081->8080/tcp\n')),(0,s.kt)("p",null,"The containers and applications running on the local home network as shown above do not have a public domain name, the option was to look for setting up a DNS server with ",(0,s.kt)("a",{parentName:"p",href:"https://thekelleys.org.uk/dnsmasq/doc.html"},"DNSMasq"),", and a reverse proxy using ",(0,s.kt)("a",{parentName:"p",href:"https://www.nginx.com/"},"NGINX"),". 
The containers may not be the only use case scenario for local DNS servers with ",(0,s.kt)("a",{parentName:"p",href:"https://thekelleys.org.uk/dnsmasq/doc.html"},"DNSMasq"),", there could be many others like accessing a local file share across devices; accessing applications from a mobile device, and sharing a printer."),(0,s.kt)("p",null,(0,s.kt)("a",{parentName:"p",href:"https://thekelleys.org.uk/dnsmasq/doc.html"},"DNSMasq")," - Dnsmasq provides network infrastructure for small networks: DNS, DHCP, router advertisement, and network boot. It is designed to be lightweight and has a small footprint, suitable for resource-constrained routers and firewalls."),(0,s.kt)("p",null,(0,s.kt)("a",{parentName:"p",href:"https://www.nginx.com/"},"NGINX")," - Reverse Proxy \u2013 A reverse proxy provides an additional level of abstraction and control to ensure the smooth flow of network traffic between clients and servers."),(0,s.kt)("p",null,(0,s.kt)("strong",{parentName:"p"},"Let us get started with the implementation steps for DNSMasq and NGINX. The below steps are performed on Ubuntu 20.04 (Debian-based distro).")),(0,s.kt)("p",null,"Before starting the installation of DNSMasq, "),(0,s.kt)("h3",{id:"step-1-disable-systemd-resolve-which-binds-to-port-53-the-default-port-for-dnsmasq"},"Step 1: Disable systemd-resolve which binds to port 53, the default port for DNSMasq"),(0,s.kt)("pre",null,(0,s.kt)("code",{parentName:"pre",className:"language-shell"},"\n sudo systemctl stop systemd-resolved\n sudo systemctl disable systemd-resolved\n\n")),(0,s.kt)("h3",{id:"step-2-install-dnsutils-dnsmasq"},"Step 2: Install DNSUtils, DNSMasq"),(0,s.kt)("pre",null,(0,s.kt)("code",{parentName:"pre",className:"language-shell"},"sudo apt update && sudo apt install dnsmasq && sudo apt install dnsutils\n")),(0,s.kt)("h3",{id:"step-3-create-the-dnsmasq-configuration-file"},"Step 3: Create the DNSMasq configuration file"),(0,s.kt)("pre",null,(0,s.kt)("code",{parentName:"pre",className:"language-shell"},'$ dnsmasq_conf="no-dhcp-interface=enp2s0f0\nbogus-priv\ndomain=homelab.net\nexpand-hosts\nlocal=/homelab.net/\ndomain-needed\nno-resolv\nno-poll\nserver=8.8.8.8\nserver=8.8.4.4"\n\n$ sudo echo -e "$dnsmasq_conf" > /etc/dnsmasq.d/home-lab.net \n\n$ sudo systemctl restart dnsmasq\n')),(0,s.kt)("h3",{id:"step-4-add-container-dns-records-in-the-fileetchosts-the-records-in-the-hosts-file-will-be-used-by-dnsmasq-for-client-responses"},"Step 4: Add container DNS records in the file./etc/hosts. 
The records in the hosts file will be used by DNSMasq for client responses"),(0,s.kt)("pre",null,(0,s.kt)("code",{parentName:"pre",className:"language-shell"}," $ sudo nano /etc/hosts \n # add the below records to the hosts file\n #Container DNS records\n # appsmith\n 192.168.20.113 appsmith\n # excalidraw\n 192.168.20.113 excalidraw\n # typesense\n 192.168.20.113 typesense\n")),(0,s.kt)("h3",{id:"step-5-restart-dnsmasq-service"},"Step 5: Restart DNSMasq service"),(0,s.kt)("pre",null,(0,s.kt)("code",{parentName:"pre",className:"language-shell"},"$ sudo systemctl restart dnsmasq.service\n")),(0,s.kt)("h3",{id:"step-6-install-nginx"},"Step 6: Install NGINX"),(0,s.kt)("pre",null,(0,s.kt)("code",{parentName:"pre",className:"language-shell"},"$ sudo apt update && sudo apt install nginx\n")),(0,s.kt)("h3",{id:"step-6-to-enable-reverse-proxy-feature-create-a-new-nginx-configuration-file-in-sites-enabled-directory"},"Step 6: To enable reverse proxy feature, create a new NGINX configuration file in ",(0,s.kt)("inlineCode",{parentName:"h3"},"sites-enabled")," directory"),(0,s.kt)("pre",null,(0,s.kt)("code",{parentName:"pre",className:"language-shell"}," $ sudo nano /etc/nginx/sites-enabled/homelab.conf\n server {\n listen 80;\n listen [::]:80;\n server_name typesense.homelab.net;\n location / {\n proxy_bind 192.168.20.113;\n proxy_pass http://localhost:3000;\n }\n }\n server {\n listen 80;\n listen [::]:80;\n server_name appsmith.homelab.net;\n location / {\n proxy_bind 192.168.20.113;\n proxy_pass http://localhost:70;\n }\n\n }\n server {\n listen 80;\n listen [::]:80;\n server_name excalidraw.homelab.net;\n location / {\n proxy_bind 192.168.20.113;\n proxy_pass http://localhost:3001;\n }\n\n }\n")),(0,s.kt)("p",null,"The ",(0,s.kt)("inlineCode",{parentName:"p"},"proxy_pass")," argument will forward all incoming client requests to app.homelab.net to the respective app. 
The IP address and port number can be easily changed."),(0,s.kt)("h3",{id:"step-7-reload-nginx-for-the-configuration-to-take-into-effect"},"Step 7 reload NGINX for the configuration to take into effect"),(0,s.kt)("pre",null,(0,s.kt)("code",{parentName:"pre",className:"language-shell"},"$ sudo systemctl reload nginx\n")),(0,s.kt)("p",null,"After a successful implementation, we will be able to access container applications using domain URLs as seen in the below screenshot with three panes first pane is appsmith ; second pane is excalidraw and third pane is typesense."),(0,s.kt)("p",null,(0,s.kt)("img",{alt:"local-home-lab-snapshot",src:n(7496).Z,width:"2047",height:"1111"})))}c.isMDXComponent=!0},7496:(e,t,n)=>{n.d(t,{Z:()=>a});const a=n.p+"assets/images/2022-07-19-23-09-57-8b3ba580a484e53a7d25c704b6bccf60.png"}}]); \ No newline at end of file +"use strict";(self.webpackChunk=self.webpackChunk||[]).push([[6982],{5388:(e,t,n)=>{n.r(t),n.d(t,{assets:()=>i,contentTitle:()=>r,default:()=>c,frontMatter:()=>o,metadata:()=>l,toc:()=>p});var a=n(7462),s=(n(7294),n(3905));n(1839);const o={slug:"local-home-lab",title:"Local Home Lab DNS Setup with DNSMasq and NGINX",authors:{name:"Vishal Gandhi",url:"https://github.com/ivishalgandhi",image_url:"https://github.com/ivishalgandhi.png"},tags:["reverse-proxy","NGINX","DNSMASQ","Lab"]},r=void 0,l={permalink:"/local-home-lab",source:"@site/blog/2022-04-10-local-home-lab.md",title:"Local Home Lab DNS Setup with DNSMasq and NGINX",description:"As I explored and set up an increased number of FOSS software using containers(Docker and LXD) and virtual machines(Multipass) in my home lab environment, I realized the difficulty in remembering the different ports the applications and containers are running. The solution to address this problem was to have a Domain Name System for the local network, which works to resolve local and external addresses with a reverse proxy to redirect calls based on DNS resolution.",date:"2022-04-10T00:00:00.000Z",formattedDate:"April 10, 2022",tags:[{label:"reverse-proxy",permalink:"/tags/reverse-proxy"},{label:"NGINX",permalink:"/tags/nginx"},{label:"DNSMASQ",permalink:"/tags/dnsmasq"},{label:"Lab",permalink:"/tags/lab"}],readingTime:3.34,hasTruncateMarker:!0,authors:[{name:"Vishal Gandhi",url:"https://github.com/ivishalgandhi",image_url:"https://github.com/ivishalgandhi.png",imageURL:"https://github.com/ivishalgandhi.png"}],frontMatter:{slug:"local-home-lab",title:"Local Home Lab DNS Setup with DNSMasq and NGINX",authors:{name:"Vishal Gandhi",url:"https://github.com/ivishalgandhi",image_url:"https://github.com/ivishalgandhi.png",imageURL:"https://github.com/ivishalgandhi.png"},tags:["reverse-proxy","NGINX","DNSMASQ","Lab"]},prevItem:{title:"How to configure Percona MongoDB Replicaset, Percona Backup Manager, Backup Agent using Docker and perform Replicaset backup, restore using S3 compatible object storage \u2013 MINIO",permalink:"/percona-mongo-replicaset-minio"},nextItem:{title:"Configure Sharding in MongoDB on Docker Containers",permalink:"/sharding-mongo-docker"}},i={authorsImageUrls:[void 0]},p=[{value:"Step 1: Disable systemd-resolve which binds to port 53, the default port for DNSMasq",id:"step-1-disable-systemd-resolve-which-binds-to-port-53-the-default-port-for-dnsmasq",level:3},{value:"Step 2: Install DNSUtils, DNSMasq",id:"step-2-install-dnsutils-dnsmasq",level:3},{value:"Step 3: Create the DNSMasq configuration file",id:"step-3-create-the-dnsmasq-configuration-file",level:3},{value:"Step 4: Add container DNS records in the 
file./etc/hosts. The records in the hosts file will be used by DNSMasq for client responses",id:"step-4-add-container-dns-records-in-the-fileetchosts-the-records-in-the-hosts-file-will-be-used-by-dnsmasq-for-client-responses",level:3},{value:"Step 5: Restart DNSMasq service",id:"step-5-restart-dnsmasq-service",level:3},{value:"Step 6: Install NGINX",id:"step-6-install-nginx",level:3},{value:"Step 6: To enable reverse proxy feature, create a new NGINX configuration file in sites-enabled directory",id:"step-6-to-enable-reverse-proxy-feature-create-a-new-nginx-configuration-file-in-sites-enabled-directory",level:3},{value:"Step 7 reload NGINX for the configuration to take into effect",id:"step-7-reload-nginx-for-the-configuration-to-take-into-effect",level:3}],d={toc:p};function c(e){let{components:t,...o}=e;return(0,s.kt)("wrapper",(0,a.Z)({},d,o,{components:t,mdxType:"MDXLayout"}),(0,s.kt)("p",null,"As I explored and set up an increased number of FOSS software using containers(Docker and LXD) and virtual machines(Multipass) in my home lab environment, I realized the difficulty in remembering the different ports the applications and containers are running. The solution to address this problem was to have a Domain Name System for the local network, which works to resolve local and external addresses with a reverse proxy to redirect calls based on DNS resolution."),(0,s.kt)("p",null,"The below command lists the Docker Containers and ports the container are running on, the requirement is to create a domain for a home setup with domain homelab.net and access the containerized applications with appsmith.homelab.net; typesense.homelab.net; excalidraw.homelab.net"),(0,s.kt)("p",null,"Let\u2019s get the list of docker containers with port numbers"),(0,s.kt)("pre",null,(0,s.kt)("code",{parentName:"pre",className:"language-shell"},'# get container names and port numbers\n$ docker container ls --format "table {{.ID}}\\t{{.Names}}\\t{{.Ports}}" -a\n\n\nCONTAINER ID NAMES PORTS\ncbb2ac402270 appsmith 0.0.0.0:9001->9001/tcp, 0.0.0.0:70->80/tcp, 0.0.0.0:444->443/tcp\nc9875323b989 typesense_typesense-1_1 0.0.0.0:8108->8108/tcp\nc453288c8496 excalidraw 0.0.0.0:3001->80/tcp\n5be5d33f1f50 k8s-control-plane 127.0.0.1:34589->6443/tcp\n4140d2fbf7d5 mysql_nocodb_1 0.0.0.0:8082->8080/tcp\ne7310461bee9 mysql_root_db_1 3306/tcp, 33060/tcp\n9b56c33d45d5 meilisearch_ms_1 0.0.0.0:7700->7700/tcp\n9ac6a0e16b0e mongo2 0.0.0.0:20002->27017/tcp\n2aaf01d2233f mongo1 0.0.0.0:20001->27017/tcp\n860b521f97dc mongo3 0.0.0.0:20003->27017/tcp\nd8ad1ec3cab8 rethinkdb_rethinkdb_1 0.0.0.0:28015->28015/tcp, 0.0.0.0:29015->29015/tcp, 0.0.0.0:8081->8080/tcp\n')),(0,s.kt)("p",null,"The containers and applications running on the local home network as shown above do not have a public domain name, the option was to look for setting up a DNS server with ",(0,s.kt)("a",{parentName:"p",href:"https://thekelleys.org.uk/dnsmasq/doc.html"},"DNSMasq"),", and a reverse proxy using ",(0,s.kt)("a",{parentName:"p",href:"https://www.nginx.com/"},"NGINX"),". 
The containers may not be the only use case scenario for local DNS servers with ",(0,s.kt)("a",{parentName:"p",href:"https://thekelleys.org.uk/dnsmasq/doc.html"},"DNSMasq"),", there could be many others like accessing a local file share across devices; accessing applications from a mobile device, and sharing a printer."),(0,s.kt)("p",null,(0,s.kt)("a",{parentName:"p",href:"https://thekelleys.org.uk/dnsmasq/doc.html"},"DNSMasq")," - Dnsmasq provides network infrastructure for small networks: DNS, DHCP, router advertisement, and network boot. It is designed to be lightweight and has a small footprint, suitable for resource-constrained routers and firewalls."),(0,s.kt)("p",null,(0,s.kt)("a",{parentName:"p",href:"https://www.nginx.com/"},"NGINX")," - Reverse Proxy \u2013 A reverse proxy provides an additional level of abstraction and control to ensure the smooth flow of network traffic between clients and servers."),(0,s.kt)("p",null,(0,s.kt)("strong",{parentName:"p"},"Let us get started with the implementation steps for DNSMasq and NGINX. The below steps are performed on Ubuntu 20.04 (Debian-based distro).")),(0,s.kt)("p",null,"Before starting the installation of DNSMasq, "),(0,s.kt)("h3",{id:"step-1-disable-systemd-resolve-which-binds-to-port-53-the-default-port-for-dnsmasq"},"Step 1: Disable systemd-resolve which binds to port 53, the default port for DNSMasq"),(0,s.kt)("pre",null,(0,s.kt)("code",{parentName:"pre",className:"language-shell"},"\n sudo systemctl stop systemd-resolved\n sudo systemctl disable systemd-resolved\n\n")),(0,s.kt)("h3",{id:"step-2-install-dnsutils-dnsmasq"},"Step 2: Install DNSUtils, DNSMasq"),(0,s.kt)("pre",null,(0,s.kt)("code",{parentName:"pre",className:"language-shell"},"sudo apt update && sudo apt install dnsmasq && sudo apt install dnsutils\n")),(0,s.kt)("h3",{id:"step-3-create-the-dnsmasq-configuration-file"},"Step 3: Create the DNSMasq configuration file"),(0,s.kt)("pre",null,(0,s.kt)("code",{parentName:"pre",className:"language-shell"},'$ dnsmasq_conf="no-dhcp-interface=enp2s0f0\nbogus-priv\ndomain=homelab.net\nexpand-hosts\nlocal=/homelab.net/\ndomain-needed\nno-resolv\nno-poll\nserver=8.8.8.8\nserver=8.8.4.4"\n\n$ sudo echo -e "$dnsmasq_conf" > /etc/dnsmasq.d/home-lab.net \n\n$ sudo systemctl restart dnsmasq\n')),(0,s.kt)("h3",{id:"step-4-add-container-dns-records-in-the-fileetchosts-the-records-in-the-hosts-file-will-be-used-by-dnsmasq-for-client-responses"},"Step 4: Add container DNS records in the file./etc/hosts. 
The records in the hosts file will be used by DNSMasq for client responses"),(0,s.kt)("pre",null,(0,s.kt)("code",{parentName:"pre",className:"language-shell"}," $ sudo nano /etc/hosts \n # add the below records to the hosts file\n #Container DNS records\n # appsmith\n 192.168.20.113 appsmith\n # excalidraw\n 192.168.20.113 excalidraw\n # typesense\n 192.168.20.113 typesense\n")),(0,s.kt)("h3",{id:"step-5-restart-dnsmasq-service"},"Step 5: Restart DNSMasq service"),(0,s.kt)("pre",null,(0,s.kt)("code",{parentName:"pre",className:"language-shell"},"$ sudo systemctl restart dnsmasq.service\n")),(0,s.kt)("h3",{id:"step-6-install-nginx"},"Step 6: Install NGINX"),(0,s.kt)("pre",null,(0,s.kt)("code",{parentName:"pre",className:"language-shell"},"$ sudo apt update && sudo apt install nginx\n")),(0,s.kt)("h3",{id:"step-6-to-enable-reverse-proxy-feature-create-a-new-nginx-configuration-file-in-sites-enabled-directory"},"Step 6: To enable reverse proxy feature, create a new NGINX configuration file in ",(0,s.kt)("inlineCode",{parentName:"h3"},"sites-enabled")," directory"),(0,s.kt)("pre",null,(0,s.kt)("code",{parentName:"pre",className:"language-shell"}," $ sudo nano /etc/nginx/sites-enabled/homelab.conf\n server {\n listen 80;\n listen [::]:80;\n server_name typesense.homelab.net;\n location / {\n proxy_bind 192.168.20.113;\n proxy_pass http://localhost:3000;\n }\n }\n server {\n listen 80;\n listen [::]:80;\n server_name appsmith.homelab.net;\n location / {\n proxy_bind 192.168.20.113;\n proxy_pass http://localhost:70;\n }\n\n }\n server {\n listen 80;\n listen [::]:80;\n server_name excalidraw.homelab.net;\n location / {\n proxy_bind 192.168.20.113;\n proxy_pass http://localhost:3001;\n }\n\n }\n")),(0,s.kt)("p",null,"The ",(0,s.kt)("inlineCode",{parentName:"p"},"proxy_pass")," argument will forward all incoming client requests to app.homelab.net to the respective app. 
The IP address and port number can be easily changed."),(0,s.kt)("h3",{id:"step-7-reload-nginx-for-the-configuration-to-take-into-effect"},"Step 7 reload NGINX for the configuration to take into effect"),(0,s.kt)("pre",null,(0,s.kt)("code",{parentName:"pre",className:"language-shell"},"$ sudo systemctl reload nginx\n")),(0,s.kt)("p",null,"After a successful implementation, we will be able to access container applications using domain URLs as seen in the below screenshot with three panes first pane is appsmith ; second pane is excalidraw and third pane is typesense."),(0,s.kt)("p",null,(0,s.kt)("img",{alt:"local-home-lab-snapshot",src:n(2268).Z,width:"2047",height:"1111"})))}c.isMDXComponent=!0},2268:(e,t,n)=>{n.d(t,{Z:()=>a});const a=n.p+"assets/images/2022-07-19-23-09-57-8b3ba580a484e53a7d25c704b6bccf60.png"}}]); \ No newline at end of file diff --git a/assets/js/4f978119.1e042cd5.js b/assets/js/4f978119.1e042cd5.js new file mode 100644 index 0000000..66b68a6 --- /dev/null +++ b/assets/js/4f978119.1e042cd5.js @@ -0,0 +1 @@ +"use strict";(self.webpackChunk=self.webpackChunk||[]).push([[7221],{7666:e=>{e.exports=JSON.parse('{"permalink":"/tags/career","page":1,"postsPerPage":10,"totalPages":1,"totalCount":1,"blogDescription":"Blog","blogTitle":"Blog"}')}}]); \ No newline at end of file diff --git a/assets/js/80fa35bf.9349332e.js b/assets/js/80fa35bf.9349332e.js new file mode 100644 index 0000000..35e45c3 --- /dev/null +++ b/assets/js/80fa35bf.9349332e.js @@ -0,0 +1 @@ +"use strict";(self.webpackChunk=self.webpackChunk||[]).push([[9909],{114:s=>{s.exports=JSON.parse('{"label":"essential-skills","permalink":"/tags/essential-skills","allTagsPath":"/tags","count":1}')}}]); \ No newline at end of file diff --git a/assets/js/814f3328.f325de7c.js b/assets/js/814f3328.f325de7c.js new file mode 100644 index 0000000..00798d0 --- /dev/null +++ b/assets/js/814f3328.f325de7c.js @@ -0,0 +1 @@ +"use strict";(self.webpackChunk=self.webpackChunk||[]).push([[2535],{5641:e=>{e.exports=JSON.parse('{"title":"All posts","items":[{"title":"Essential Skills and Tools for Aspiring Software Developers and Database Engineers","permalink":"/essential-skills"},{"title":"My favorite Open Source Projects in 2022","permalink":"/fav-open-source-repo"},{"title":"Documentation as a code","permalink":"/doc-as-code"},{"title":"Understanding REST API Design Rules","permalink":"/rest-api-design-rules"},{"title":"Understanding MongoDB Replicasets and Write Concern - Part 1","permalink":"/mongodb-replicaset-write-concern-read-pref"},{"title":"Guide to Software Bill of Materials(SBoM) and Docker SBOM CLI","permalink":"/docker-sbom"},{"title":"How to configure Percona MongoDB Replicaset, Percona Backup Manager, Backup Agent using Docker and perform Replicaset backup, restore using S3 compatible object storage \u2013 MINIO","permalink":"/percona-mongo-replicaset-minio"},{"title":"Local Home Lab DNS Setup with DNSMasq and NGINX","permalink":"/local-home-lab"},{"title":"Configure Sharding in MongoDB on Docker Containers","permalink":"/sharding-mongo-docker"},{"title":"MongoDB Replicaset with Persistent Volume using Docker Compose","permalink":"/mongodb-rs-docker-persistent-volume"},{"title":"Create MongoDB Standalone and Replica Set containers using Docker","permalink":"/create-mongodb-docker"}]}')}}]); \ No newline at end of file diff --git a/assets/js/814f3328.ff310eb0.js b/assets/js/814f3328.ff310eb0.js deleted file mode 100644 index 7b886fc..0000000 --- a/assets/js/814f3328.ff310eb0.js +++ /dev/null @@ -1 +0,0 @@ -"use 
strict";(self.webpackChunk=self.webpackChunk||[]).push([[2535],{5641:e=>{e.exports=JSON.parse('{"title":"All posts","items":[{"title":"My favorite Open Source Projects in 2022","permalink":"/fav-open-source-repo"},{"title":"Documentation as a code","permalink":"/doc-as-code"},{"title":"Understanding REST API Design Rules","permalink":"/rest-api-design-rules"},{"title":"Understanding MongoDB Replicasets and Write Concern - Part 1","permalink":"/mongodb-replicaset-write-concern-read-pref"},{"title":"Guide to Software Bill of Materials(SBoM) and Docker SBOM CLI","permalink":"/docker-sbom"},{"title":"How to configure Percona MongoDB Replicaset, Percona Backup Manager, Backup Agent using Docker and perform Replicaset backup, restore using S3 compatible object storage \u2013 MINIO","permalink":"/percona-mongo-replicaset-minio"},{"title":"Local Home Lab DNS Setup with DNSMasq and NGINX","permalink":"/local-home-lab"},{"title":"Configure Sharding in MongoDB on Docker Containers","permalink":"/sharding-mongo-docker"},{"title":"MongoDB Replicaset with Persistent Volume using Docker Compose","permalink":"/mongodb-rs-docker-persistent-volume"},{"title":"Create MongoDB Standalone and Replica Set containers using Docker","permalink":"/create-mongodb-docker"}]}')}}]); \ No newline at end of file diff --git a/assets/js/85e21fe3.11295a91.js b/assets/js/85e21fe3.11295a91.js new file mode 100644 index 0000000..c8dc312 --- /dev/null +++ b/assets/js/85e21fe3.11295a91.js @@ -0,0 +1 @@ +"use strict";(self.webpackChunk=self.webpackChunk||[]).push([[1698],{3520:e=>{e.exports=JSON.parse('{"label":"career","permalink":"/tags/career","allTagsPath":"/tags","count":1}')}}]); \ No newline at end of file diff --git a/assets/js/878d2fef.dd2cb0c2.js b/assets/js/878d2fef.dd2cb0c2.js new file mode 100644 index 0000000..a9cf917 --- /dev/null +++ b/assets/js/878d2fef.dd2cb0c2.js @@ -0,0 +1 @@ +"use strict";(self.webpackChunk=self.webpackChunk||[]).push([[1682],{1384:e=>{e.exports=JSON.parse('{"permalink":"/tags/fundamentals","page":1,"postsPerPage":10,"totalPages":1,"totalCount":1,"blogDescription":"Blog","blogTitle":"Blog"}')}}]); \ No newline at end of file diff --git a/assets/js/972379d3.30ec5470.js b/assets/js/972379d3.30ec5470.js new file mode 100644 index 0000000..987cbed --- /dev/null +++ b/assets/js/972379d3.30ec5470.js @@ -0,0 +1 @@ +"use strict";(self.webpackChunk=self.webpackChunk||[]).push([[7909],{7094:a=>{a.exports=JSON.parse('{"label":"fundamentals","permalink":"/tags/fundamentals","allTagsPath":"/tags","count":1}')}}]); \ No newline at end of file diff --git a/assets/js/99797eb7.b7929051.js b/assets/js/99797eb7.a1cbd8c7.js similarity index 98% rename from assets/js/99797eb7.b7929051.js rename to assets/js/99797eb7.a1cbd8c7.js index 59085c2..149484c 100644 --- a/assets/js/99797eb7.b7929051.js +++ b/assets/js/99797eb7.a1cbd8c7.js @@ -1 +1 @@ -"use strict";(self.webpackChunk=self.webpackChunk||[]).push([[2372],{2822:(e,n,a)=>{a.r(n),a.d(n,{assets:()=>l,contentTitle:()=>i,default:()=>c,frontMatter:()=>r,metadata:()=>s,toc:()=>p});var o=a(7462),t=(a(7294),a(3905));a(1839);const r={slug:"percona-mongo-replicaset-minio",title:"How to configure Percona MongoDB Replicaset, Percona Backup Manager, Backup Agent using Docker and perform Replicaset backup, restore using S3 compatible object storage \u2013 MINIO",authors:{name:"Vishal Gandhi",url:"https://github.com/ivishalgandhi",image_url:"https://github.com/ivishalgandhi.png"},tags:["mongodb","containers","docker","s3","minio","pbm"]},i=void 
0,s={permalink:"/percona-mongo-replicaset-minio",source:"@site/blog/2022-05-29-percona-mongo-replicaset-minio.md",title:"How to configure Percona MongoDB Replicaset, Percona Backup Manager, Backup Agent using Docker and perform Replicaset backup, restore using S3 compatible object storage \u2013 MINIO",description:"In this blog post, i will walk you through the steps required to containerize Percona Server for MongoDB, Percona Backup Manager, and Agent from source and configure cloud-native S3(Simple Storage Service) compatible distributed object storage MINIO to backup and restore Percona MongoDB snapshot backups.",date:"2022-05-29T00:00:00.000Z",formattedDate:"May 29, 2022",tags:[{label:"mongodb",permalink:"/tags/mongodb"},{label:"containers",permalink:"/tags/containers"},{label:"docker",permalink:"/tags/docker"},{label:"s3",permalink:"/tags/s-3"},{label:"minio",permalink:"/tags/minio"},{label:"pbm",permalink:"/tags/pbm"}],readingTime:4.205,hasTruncateMarker:!0,authors:[{name:"Vishal Gandhi",url:"https://github.com/ivishalgandhi",image_url:"https://github.com/ivishalgandhi.png",imageURL:"https://github.com/ivishalgandhi.png"}],frontMatter:{slug:"percona-mongo-replicaset-minio",title:"How to configure Percona MongoDB Replicaset, Percona Backup Manager, Backup Agent using Docker and perform Replicaset backup, restore using S3 compatible object storage \u2013 MINIO",authors:{name:"Vishal Gandhi",url:"https://github.com/ivishalgandhi",image_url:"https://github.com/ivishalgandhi.png",imageURL:"https://github.com/ivishalgandhi.png"},tags:["mongodb","containers","docker","s3","minio","pbm"]},prevItem:{title:"Guide to Software Bill of Materials(SBoM) and Docker SBOM CLI",permalink:"/docker-sbom"},nextItem:{title:"Local Home Lab DNS Setup with DNSMasq and NGINX",permalink:"/local-home-lab"}},l={authorsImageUrls:[void 0]},p=[{value:"Prerequisites",id:"prerequisites",level:2},{value:"Steps",id:"steps",level:2}],m={toc:p};function c(e){let{components:n,...r}=e;return(0,t.kt)("wrapper",(0,o.Z)({},m,r,{components:n,mdxType:"MDXLayout"}),(0,t.kt)("p",null,"In this blog post, i will walk you through the steps required to containerize ",(0,t.kt)("a",{parentName:"p",href:"https://github.com/percona/percona-server-mongodb"},"Percona Server for MongoDB"),", ",(0,t.kt)("a",{parentName:"p",href:"https://github.com/percona/percona-backup-mongodb"},"Percona Backup Manager"),", and Agent from source and configure cloud-native S3(",(0,t.kt)("strong",{parentName:"p"},"S"),"imple ",(0,t.kt)("strong",{parentName:"p"},"S"),"torage ",(0,t.kt)("strong",{parentName:"p"},"S"),"ervice) compatible distributed object storage ",(0,t.kt)("a",{parentName:"p",href:"https://min.io/"},"MINIO")," to backup and restore Percona MongoDB snapshot backups."),(0,t.kt)("p",null,(0,t.kt)("img",{src:a(8415).Z,width:"834",height:"433"})),(0,t.kt)("h2",{id:"prerequisites"},"Prerequisites"),(0,t.kt)("p",null,"Ensure the below binaries are installed before starting the setup and configuration"),(0,t.kt)("p",null,(0,t.kt)("a",{parentName:"p",href:"https://www.docker.com/get-started/"},"Docker")," or ",(0,t.kt)("a",{parentName:"p",href:"https://podman.io/"},"Podman")," to containerize Percona MongoDB replicaset and PBM Agent\n",(0,t.kt)("a",{parentName:"p",href:"https://docs.docker.com/compose/install/"},"Docker Compose"),"\n",(0,t.kt)("a",{parentName:"p",href:"https://go.dev/learn/"},"Golang")," compiler \u2013 Build Percona Backup Manager binaries\n",(0,t.kt)("a",{parentName:"p",href:"https://www.portainer.io/"},"Portainer")," (Optional) 
\u2013 Intuitive UI for container configuration and monitoring\nLet us perform the below steps to set up PSMDB Replicaset; PBM Agent; Minio, S3 compatible bucket, and PBM configuration to perform backups and restores from the bucket."),(0,t.kt)("h2",{id:"steps"},"Steps"),(0,t.kt)("ul",null,(0,t.kt)("li",{parentName:"ul"},"Create the Docker environment file with Docker Image, tag, port, and replicaset information. Save the file as .env in the working directory")),(0,t.kt)("pre",null,(0,t.kt)("code",{parentName:"pre",className:"language-shell"},"MONGODB_IMAGE=percona/percona-server-mongodb\nMONGODB_VERSION=5.0\nMONGO1_PORT=0.0.0.0:15000\nMONGO2_PORT=0.0.0.0:15001\nMONGO3_PORT=0.0.0.0:15002\nMONGODB_PORT=27017\nMONGODB_DOCKER_NETWORK=mongo_net\nRS_NAME=rs1\n\n")),(0,t.kt)("ul",null,(0,t.kt)("li",{parentName:"ul"},"Create keyFile , Dockerfile and download percona-backup-manager source code in the working directory")),(0,t.kt)("pre",null,(0,t.kt)("code",{parentName:"pre",className:"language-shell"},"$ git clone https://github.com/percona/percona-backup-mongodb.git\n\nARG MONGODB_VERSION\nARG MONGODB_IMAGE\nFROM ${MONGODB_IMAGE}:${MONGODB_VERSION}\nUSER root\nCOPY keyFile /opt/keyFile\nRUN chown mongodb /opt/keyFile && chmod 400 /opt/keyFile && mkdir -p /home/mongodb/ && chown mongodb /home/mongodb\nUSER mongodb\n")),(0,t.kt)("ul",null,(0,t.kt)("li",{parentName:"ul"},"Create Docker Compose file")),(0,t.kt)("pre",null,(0,t.kt)("code",{parentName:"pre",className:"language-YAML"},'version: "3.8"\nservices:\n rs101:\n build:\n dockerfile: Dockerfile\n context: /home/vishal/dev/psmdb\n args:\n - MONGODB_VERSION=${MONGODB_VERSION}\n - MONGODB_IMAGE=${MONGODB_IMAGE}\n hostname: rs101\n labels:\n - "com.percona.pbm.app=mongod"\n environment:\n - REPLSET_NAME=rs1\n - MONGO_USER=dba\n - BACKUP_USER=bcp\n - MONGO_PASS=test1234\n ports:\n - "${MONGO1_PORT}:${MONGODB_PORT}"\n # command: mongod --replSet rs1 --port ${MONGO1_PORT}:27017 --storageEngine wiredTiger --keyFile /opt/keyFile --wiredTigerCacheSizeGB 1\n command: ["--replSet", "${RS_NAME}", "--bind_ip_all", "--storageEngine", "wiredTiger" , "--keyFile", "/opt/keyFile"]\n volumes:\n - data-rs101:/data/db\n - ./scripts/start.sh:/opt/start.sh\n rs102:\n build:\n dockerfile: Dockerfile\n context: /home/vishal/dev/psmdb\n args:\n - MONGODB_VERSION=${MONGODB_VERSION}\n - MONGODB_IMAGE=${MONGODB_IMAGE}\n hostname: rs102\n labels:\n - "com.percona.pbm.app=mongod"\n # command: mongod --replSet rs1 --port 27017 --storageEngine wiredTiger --keyFile /opt/keyFile --wiredTigerCacheSizeGB 1\n ports:\n - "${MONGO2_PORT}:${MONGODB_PORT}"\n command: ["--replSet", "${RS_NAME}", "--bind_ip_all", "--storageEngine", "wiredTiger" , "--keyFile", "/opt/keyFile"]\n volumes:\n - data-rs102:/data/db\n rs103:\n build:\n dockerfile: Dockerfile\n context: /home/vishal/dev/psmdb\n args:\n - MONGODB_VERSION=${MONGODB_VERSION}\n - MONGODB_IMAGE=${MONGODB_IMAGE}\n hostname: rs103\n labels:\n - "com.percona.pbm.app=mongod"\n # command: mongod --replSet rs1 --port 27017 --storageEngine wiredTiger --keyFile /opt/keyFile --wiredTigerCacheSizeGB 1\n ports:\n - "${MONGO3_PORT}:${MONGODB_PORT}"\n command: ["--replSet", "${RS_NAME}", "--bind_ip_all", "--storageEngine", "wiredTiger" , "--keyFile", "/opt/keyFile"]\n volumes:\n - data-rs103:/data/db\n agent-rs101:\n container_name: "pbmagent_rs101"\n user: "1001"\n labels:\n - "com.percona.pbm.app=agent"\n - "com.percona.pbm.agent.rs=rs1"\n environment:\n - "PBM_MONGODB_URI=mongodb://${BACKUP_USER:-bcp}:${MONGO_PASS:-test1234}@rs101:27017"\n 
build:\n labels:\n - "com.percona.pbm.app=agent"\n dockerfile: /home/vishal/open-source/percona-backup-mongodb/docker/Dockerfile\n context: /home/vishal/open-source/percona-backup-mongodb/\n args:\n - MONGODB_VERSION=${MONGODB_VERSION:-5.0}\n volumes:\n - ./conf:/etc/pbm\n - ./backups:/opt/backups\n - data-rs101:/data/db\n command: pbm-agent\n cap_add:\n - NET_ADMIN\n agent-rs102:\n container_name: "pbmagent_rs102"\n user: "1001"\n labels:\n - "com.percona.pbm.app=agent"\n - "com.percona.pbm.agent.rs=rs1"\n environment:\n - "PBM_MONGODB_URI=mongodb://${BACKUP_USER:-bcp}:${MONGO_PASS:-test1234}@rs102:27017"\n build:\n labels:\n - "com.percona.pbm.app=agent"\n dockerfile: /home/vishal/open-source/percona-backup-mongodb/docker/Dockerfile\n context: /home/vishal/open-source/percona-backup-mongodb/\n args:\n - MONGODB_VERSION=${MONGODB_VERSION:-5.0}\n volumes:\n - ./conf:/etc/pbm\n - ./backups:/opt/backups\n - data-rs102:/data/db\n command: pbm-agent\n cap_add:\n - NET_ADMIN\n agent-rs103:\n container_name: "pbmagent_rs103"\n user: "1001"\n labels:\n - "com.percona.pbm.app=agent"\n - "com.percona.pbm.agent.rs=rs1"\n environment:\n - "PBM_MONGODB_URI=mongodb://${BACKUP_USER:-bcp}:${MONGO_PASS:-test1234}@rs103:27017"\n build:\n labels:\n - "com.percona.pbm.app=agent"\n dockerfile: /home/vishal/open-source/percona-backup-mongodb/docker/Dockerfile\n context: /home/vishal/open-source/percona-backup-mongodb/\n args:\n - MONGODB_VERSION=${MONGODB_VERSION:-5.0}\n volumes:\n - ./conf:/etc/pbm\n - ./backups:/opt/backups\n - data-rs103:/data/db\n command: pbm-agent\n cap_add:\n - NET_ADMIN\nvolumes:\n backups: null\n data-rs101: null\n data-rs102: null\n data-rs103: null\n\n')),(0,t.kt)("ul",null,(0,t.kt)("li",{parentName:"ul"},"Run Docker compose\nThe below command will build and start the docker container for Percona Server MongoDB Primary Secondary Secondary replicaset and Percona Backup Manager Agent for each replicaset")),(0,t.kt)("pre",null,(0,t.kt)("code",{parentName:"pre"},"$ psmdb docker compose -f docker-compose-rs.yaml up -d\n[+] Running 8/8\n\u283f Container psmdb-rs102-1 Running 0.0s\n\u283f Container psmdb-rs103-1 Running 0.0s\n\u283f Container pbmagent_rs103 Running 0.0s\n\u283f Container pbmagent_rs102 Running 0.0s\n\u283f Container psmdb-rs101-1 Running 0.0s\n\u283f Container pbmagent_rs101 Running 0.0s\n\n")),(0,t.kt)("ul",null,(0,t.kt)("li",{parentName:"ul"},"Connect to MongoDB replicaset and ensure replication and containers are working")),(0,t.kt)("pre",null,(0,t.kt)("code",{parentName:"pre",className:"language-shell"},'$ mongo "mongodb://dba:test1234@192.168.50.113:15000,192.168.50.113:15001,192.168.50.113:15002/admin?replicaSet=rs1"\n')),(0,t.kt)("ul",null,(0,t.kt)("li",{parentName:"ul"},"Setup Minio and Minio CLI")),(0,t.kt)("pre",null,(0,t.kt)("code",{parentName:"pre",className:"language-shell"},"\n$ cd ~/downloads && wget https://dl.min.io/server/minio/release/linux-amd64/minio\n \n$ wget https://dl.min.io/client/mc/release/linux-amd64/mc\nchmod +x mc\n./mc --help\n \n$ downloads ./minio server /home/vishal/data --address=0.0.0.0:7000\n \n\nAPI: http://0.0.0.0:7000 \nRootUser: minioadmin \nRootPass: minioadmin \nFinished loading IAM sub-system (took 0.0s of 0.0s to load data).\n \nConsole: http://192.168.50.113:43859 http://192.168.160.1:43859 http://172.18.0.1:43859 http://172.19.0.1:43859 http://172.24.0.1:43859 http://172.26.0.1:43859 http://172.17.0.1:43859 http://127.0.0.1:43859 \nRootUser: minioadmin \nRootPass: minioadmin \n \nCommand-line: 
https://docs.min.io/docs/minio-client-quickstart-guide\n $ mc alias set myminio http://0.0.0.0:7000 minioadmin minioadmin\n \nDocumentation: https://docs.min.io\n\n\n")),(0,t.kt)("ul",null,(0,t.kt)("li",{parentName:"ul"},"Setup Minio server alias and List buckets")),(0,t.kt)("pre",null,(0,t.kt)("code",{parentName:"pre",className:"language-shell"},"$ mc alias set minio-deb http://192.168.50.113:7000 minioadmin minioadmin\n$ mc ls minio-deb\n[2022-05-29 14:59:32 IST] 0B nocodb/\n[2022-05-29 00:19:41 IST] 0B typesense/\n\n")),(0,t.kt)("ul",null,(0,t.kt)("li",{parentName:"ul"},"Create a new bucket and name it ",(0,t.kt)("inlineCode",{parentName:"li"},"pbm"))),(0,t.kt)("pre",null,(0,t.kt)("code",{parentName:"pre",className:"language-shell"},"$ mc alias set minio-deb http://192.168.50.113:7000 minioadmin minioadmin\n$ mc ls minio-deb\n [2022-05-29 14:59:32 IST] 0B nocodb/\n [2022-05-29 00:19:41 IST] 0B typesense/\n")),(0,t.kt)("ul",null,(0,t.kt)("li",{parentName:"ul"},"Setup PBM or compile PBM from the source repository")),(0,t.kt)("pre",null,(0,t.kt)("code",{parentName:"pre",className:"language-shell"},"$ sudo apt-get install -y libkrb5-dev\n$ cd percona-backup-mongodb\n$ make build\n$ make install\n")),(0,t.kt)("ul",null,(0,t.kt)("li",{parentName:"ul"},"create pbm_config.YAML to be used for configuring PBM for using MINIO")),(0,t.kt)("pre",null,(0,t.kt)("code",{parentName:"pre",className:"language-YAML"},'\nstorage:\n type: s3\n s3:\n endpointUrl: http://192.168.50.113:7000\n bucket: pbm\n credentials:\n access-key-id: "minioadmin"\n secret-access-key: "minioadmin"\n\n\n')),(0,t.kt)("ul",null,(0,t.kt)("li",{parentName:"ul"},"Configure PBM")),(0,t.kt)("pre",null,(0,t.kt)("code",{parentName:"pre",className:"language-shell"},'$ ./pbm config --file /home/vishal/dev/psmdb/pbm_config.yaml --mongodb-uri="mongodb://bcp:test1234@192.168.50.113:15000/?replSetName=rs1"\n\n')),(0,t.kt)("ul",null,(0,t.kt)("li",{parentName:"ul"},"Validate agent container logs and run the pbm list command. 
If MINIO is configured successfully, agent container logs shouldn\u2019t log any errors.")),(0,t.kt)("pre",null,(0,t.kt)("code",{parentName:"pre",className:"language-shell"},'2022-05-29T01:31:14.000+0000 D [resync] got backups list: 02022-05-29T01:31:14.000+0000 D [resync] got physical restores list: 0\n\n$ bin git:(main) ./pbm list --mongodb-uri="mongodb://bcp:test1234@192.168.50.113:15000/?replSetName=rs1"\nBackup snapshots:\n2022-05-29T01:29:12Z [complete: 2022-05-29T01:29:16Z]\n2022-05-29T01:38:38Z [complete: 2022-05-29T01:38:42Z]\n2022-05-29T04:04:44Z [complete: 2022-05-29T04:04:48Z]\n')),(0,t.kt)("ul",null,(0,t.kt)("li",{parentName:"ul"},"To run PBM backup and restore execute the below commands")),(0,t.kt)("pre",null,(0,t.kt)("code",{parentName:"pre",className:"language-bash"},'$ ./pbm backup --mongodb-uri="mongodb://bcp:test1234@192.168.50.113:15000/?replSetName=rs1" \n$ ./pbm restore 2022-05-29T04:04:44Z --mongodb-uri="mongodb:/\n')))}c.isMDXComponent=!0},8415:(e,n,a)=>{a.d(n,{Z:()=>o});const o=a.p+"assets/images/minio-be0df17e540f83ca7fda66435ae2b18c.webp"}}]); \ No newline at end of file +"use strict";(self.webpackChunk=self.webpackChunk||[]).push([[2372],{2822:(e,n,a)=>{a.r(n),a.d(n,{assets:()=>l,contentTitle:()=>i,default:()=>c,frontMatter:()=>r,metadata:()=>s,toc:()=>p});var o=a(7462),t=(a(7294),a(3905));a(1839);const r={slug:"percona-mongo-replicaset-minio",title:"How to configure Percona MongoDB Replicaset, Percona Backup Manager, Backup Agent using Docker and perform Replicaset backup, restore using S3 compatible object storage \u2013 MINIO",authors:{name:"Vishal Gandhi",url:"https://github.com/ivishalgandhi",image_url:"https://github.com/ivishalgandhi.png"},tags:["mongodb","containers","docker","s3","minio","pbm"]},i=void 0,s={permalink:"/percona-mongo-replicaset-minio",source:"@site/blog/2022-05-29-percona-mongo-replicaset-minio.md",title:"How to configure Percona MongoDB Replicaset, Percona Backup Manager, Backup Agent using Docker and perform Replicaset backup, restore using S3 compatible object storage \u2013 MINIO",description:"In this blog post, i will walk you through the steps required to containerize Percona Server for MongoDB, Percona Backup Manager, and Agent from source and configure cloud-native S3(Simple Storage Service) compatible distributed object storage MINIO to backup and restore Percona MongoDB snapshot backups.",date:"2022-05-29T00:00:00.000Z",formattedDate:"May 29, 2022",tags:[{label:"mongodb",permalink:"/tags/mongodb"},{label:"containers",permalink:"/tags/containers"},{label:"docker",permalink:"/tags/docker"},{label:"s3",permalink:"/tags/s-3"},{label:"minio",permalink:"/tags/minio"},{label:"pbm",permalink:"/tags/pbm"}],readingTime:4.205,hasTruncateMarker:!0,authors:[{name:"Vishal Gandhi",url:"https://github.com/ivishalgandhi",image_url:"https://github.com/ivishalgandhi.png",imageURL:"https://github.com/ivishalgandhi.png"}],frontMatter:{slug:"percona-mongo-replicaset-minio",title:"How to configure Percona MongoDB Replicaset, Percona Backup Manager, Backup Agent using Docker and perform Replicaset backup, restore using S3 compatible object storage \u2013 MINIO",authors:{name:"Vishal Gandhi",url:"https://github.com/ivishalgandhi",image_url:"https://github.com/ivishalgandhi.png",imageURL:"https://github.com/ivishalgandhi.png"},tags:["mongodb","containers","docker","s3","minio","pbm"]},prevItem:{title:"Guide to Software Bill of Materials(SBoM) and Docker SBOM CLI",permalink:"/docker-sbom"},nextItem:{title:"Local Home Lab DNS Setup with DNSMasq and 
NGINX",permalink:"/local-home-lab"}},l={authorsImageUrls:[void 0]},p=[{value:"Prerequisites",id:"prerequisites",level:2},{value:"Steps",id:"steps",level:2}],m={toc:p};function c(e){let{components:n,...r}=e;return(0,t.kt)("wrapper",(0,o.Z)({},m,r,{components:n,mdxType:"MDXLayout"}),(0,t.kt)("p",null,"In this blog post, i will walk you through the steps required to containerize ",(0,t.kt)("a",{parentName:"p",href:"https://github.com/percona/percona-server-mongodb"},"Percona Server for MongoDB"),", ",(0,t.kt)("a",{parentName:"p",href:"https://github.com/percona/percona-backup-mongodb"},"Percona Backup Manager"),", and Agent from source and configure cloud-native S3(",(0,t.kt)("strong",{parentName:"p"},"S"),"imple ",(0,t.kt)("strong",{parentName:"p"},"S"),"torage ",(0,t.kt)("strong",{parentName:"p"},"S"),"ervice) compatible distributed object storage ",(0,t.kt)("a",{parentName:"p",href:"https://min.io/"},"MINIO")," to backup and restore Percona MongoDB snapshot backups."),(0,t.kt)("p",null,(0,t.kt)("img",{src:a(7329).Z,width:"834",height:"433"})),(0,t.kt)("h2",{id:"prerequisites"},"Prerequisites"),(0,t.kt)("p",null,"Ensure the below binaries are installed before starting the setup and configuration"),(0,t.kt)("p",null,(0,t.kt)("a",{parentName:"p",href:"https://www.docker.com/get-started/"},"Docker")," or ",(0,t.kt)("a",{parentName:"p",href:"https://podman.io/"},"Podman")," to containerize Percona MongoDB replicaset and PBM Agent\n",(0,t.kt)("a",{parentName:"p",href:"https://docs.docker.com/compose/install/"},"Docker Compose"),"\n",(0,t.kt)("a",{parentName:"p",href:"https://go.dev/learn/"},"Golang")," compiler \u2013 Build Percona Backup Manager binaries\n",(0,t.kt)("a",{parentName:"p",href:"https://www.portainer.io/"},"Portainer")," (Optional) \u2013 Intuitive UI for container configuration and monitoring\nLet us perform the below steps to set up PSMDB Replicaset; PBM Agent; Minio, S3 compatible bucket, and PBM configuration to perform backups and restores from the bucket."),(0,t.kt)("h2",{id:"steps"},"Steps"),(0,t.kt)("ul",null,(0,t.kt)("li",{parentName:"ul"},"Create the Docker environment file with Docker Image, tag, port, and replicaset information. 
Save the file as .env in the working directory")),(0,t.kt)("pre",null,(0,t.kt)("code",{parentName:"pre",className:"language-shell"},"MONGODB_IMAGE=percona/percona-server-mongodb\nMONGODB_VERSION=5.0\nMONGO1_PORT=0.0.0.0:15000\nMONGO2_PORT=0.0.0.0:15001\nMONGO3_PORT=0.0.0.0:15002\nMONGODB_PORT=27017\nMONGODB_DOCKER_NETWORK=mongo_net\nRS_NAME=rs1\n\n")),(0,t.kt)("ul",null,(0,t.kt)("li",{parentName:"ul"},"Create keyFile , Dockerfile and download percona-backup-manager source code in the working directory")),(0,t.kt)("pre",null,(0,t.kt)("code",{parentName:"pre",className:"language-shell"},"$ git clone https://github.com/percona/percona-backup-mongodb.git\n\nARG MONGODB_VERSION\nARG MONGODB_IMAGE\nFROM ${MONGODB_IMAGE}:${MONGODB_VERSION}\nUSER root\nCOPY keyFile /opt/keyFile\nRUN chown mongodb /opt/keyFile && chmod 400 /opt/keyFile && mkdir -p /home/mongodb/ && chown mongodb /home/mongodb\nUSER mongodb\n")),(0,t.kt)("ul",null,(0,t.kt)("li",{parentName:"ul"},"Create Docker Compose file")),(0,t.kt)("pre",null,(0,t.kt)("code",{parentName:"pre",className:"language-YAML"},'version: "3.8"\nservices:\n rs101:\n build:\n dockerfile: Dockerfile\n context: /home/vishal/dev/psmdb\n args:\n - MONGODB_VERSION=${MONGODB_VERSION}\n - MONGODB_IMAGE=${MONGODB_IMAGE}\n hostname: rs101\n labels:\n - "com.percona.pbm.app=mongod"\n environment:\n - REPLSET_NAME=rs1\n - MONGO_USER=dba\n - BACKUP_USER=bcp\n - MONGO_PASS=test1234\n ports:\n - "${MONGO1_PORT}:${MONGODB_PORT}"\n # command: mongod --replSet rs1 --port ${MONGO1_PORT}:27017 --storageEngine wiredTiger --keyFile /opt/keyFile --wiredTigerCacheSizeGB 1\n command: ["--replSet", "${RS_NAME}", "--bind_ip_all", "--storageEngine", "wiredTiger" , "--keyFile", "/opt/keyFile"]\n volumes:\n - data-rs101:/data/db\n - ./scripts/start.sh:/opt/start.sh\n rs102:\n build:\n dockerfile: Dockerfile\n context: /home/vishal/dev/psmdb\n args:\n - MONGODB_VERSION=${MONGODB_VERSION}\n - MONGODB_IMAGE=${MONGODB_IMAGE}\n hostname: rs102\n labels:\n - "com.percona.pbm.app=mongod"\n # command: mongod --replSet rs1 --port 27017 --storageEngine wiredTiger --keyFile /opt/keyFile --wiredTigerCacheSizeGB 1\n ports:\n - "${MONGO2_PORT}:${MONGODB_PORT}"\n command: ["--replSet", "${RS_NAME}", "--bind_ip_all", "--storageEngine", "wiredTiger" , "--keyFile", "/opt/keyFile"]\n volumes:\n - data-rs102:/data/db\n rs103:\n build:\n dockerfile: Dockerfile\n context: /home/vishal/dev/psmdb\n args:\n - MONGODB_VERSION=${MONGODB_VERSION}\n - MONGODB_IMAGE=${MONGODB_IMAGE}\n hostname: rs103\n labels:\n - "com.percona.pbm.app=mongod"\n # command: mongod --replSet rs1 --port 27017 --storageEngine wiredTiger --keyFile /opt/keyFile --wiredTigerCacheSizeGB 1\n ports:\n - "${MONGO3_PORT}:${MONGODB_PORT}"\n command: ["--replSet", "${RS_NAME}", "--bind_ip_all", "--storageEngine", "wiredTiger" , "--keyFile", "/opt/keyFile"]\n volumes:\n - data-rs103:/data/db\n agent-rs101:\n container_name: "pbmagent_rs101"\n user: "1001"\n labels:\n - "com.percona.pbm.app=agent"\n - "com.percona.pbm.agent.rs=rs1"\n environment:\n - "PBM_MONGODB_URI=mongodb://${BACKUP_USER:-bcp}:${MONGO_PASS:-test1234}@rs101:27017"\n build:\n labels:\n - "com.percona.pbm.app=agent"\n dockerfile: /home/vishal/open-source/percona-backup-mongodb/docker/Dockerfile\n context: /home/vishal/open-source/percona-backup-mongodb/\n args:\n - MONGODB_VERSION=${MONGODB_VERSION:-5.0}\n volumes:\n - ./conf:/etc/pbm\n - ./backups:/opt/backups\n - data-rs101:/data/db\n command: pbm-agent\n cap_add:\n - NET_ADMIN\n agent-rs102:\n container_name: "pbmagent_rs102"\n 
user: "1001"\n labels:\n - "com.percona.pbm.app=agent"\n - "com.percona.pbm.agent.rs=rs1"\n environment:\n - "PBM_MONGODB_URI=mongodb://${BACKUP_USER:-bcp}:${MONGO_PASS:-test1234}@rs102:27017"\n build:\n labels:\n - "com.percona.pbm.app=agent"\n dockerfile: /home/vishal/open-source/percona-backup-mongodb/docker/Dockerfile\n context: /home/vishal/open-source/percona-backup-mongodb/\n args:\n - MONGODB_VERSION=${MONGODB_VERSION:-5.0}\n volumes:\n - ./conf:/etc/pbm\n - ./backups:/opt/backups\n - data-rs102:/data/db\n command: pbm-agent\n cap_add:\n - NET_ADMIN\n agent-rs103:\n container_name: "pbmagent_rs103"\n user: "1001"\n labels:\n - "com.percona.pbm.app=agent"\n - "com.percona.pbm.agent.rs=rs1"\n environment:\n - "PBM_MONGODB_URI=mongodb://${BACKUP_USER:-bcp}:${MONGO_PASS:-test1234}@rs103:27017"\n build:\n labels:\n - "com.percona.pbm.app=agent"\n dockerfile: /home/vishal/open-source/percona-backup-mongodb/docker/Dockerfile\n context: /home/vishal/open-source/percona-backup-mongodb/\n args:\n - MONGODB_VERSION=${MONGODB_VERSION:-5.0}\n volumes:\n - ./conf:/etc/pbm\n - ./backups:/opt/backups\n - data-rs103:/data/db\n command: pbm-agent\n cap_add:\n - NET_ADMIN\nvolumes:\n backups: null\n data-rs101: null\n data-rs102: null\n data-rs103: null\n\n')),(0,t.kt)("ul",null,(0,t.kt)("li",{parentName:"ul"},"Run Docker compose\nThe below command will build and start the docker container for Percona Server MongoDB Primary Secondary Secondary replicaset and Percona Backup Manager Agent for each replicaset")),(0,t.kt)("pre",null,(0,t.kt)("code",{parentName:"pre"},"$ psmdb docker compose -f docker-compose-rs.yaml up -d\n[+] Running 8/8\n\u283f Container psmdb-rs102-1 Running 0.0s\n\u283f Container psmdb-rs103-1 Running 0.0s\n\u283f Container pbmagent_rs103 Running 0.0s\n\u283f Container pbmagent_rs102 Running 0.0s\n\u283f Container psmdb-rs101-1 Running 0.0s\n\u283f Container pbmagent_rs101 Running 0.0s\n\n")),(0,t.kt)("ul",null,(0,t.kt)("li",{parentName:"ul"},"Connect to MongoDB replicaset and ensure replication and containers are working")),(0,t.kt)("pre",null,(0,t.kt)("code",{parentName:"pre",className:"language-shell"},'$ mongo "mongodb://dba:test1234@192.168.50.113:15000,192.168.50.113:15001,192.168.50.113:15002/admin?replicaSet=rs1"\n')),(0,t.kt)("ul",null,(0,t.kt)("li",{parentName:"ul"},"Setup Minio and Minio CLI")),(0,t.kt)("pre",null,(0,t.kt)("code",{parentName:"pre",className:"language-shell"},"\n$ cd ~/downloads && wget https://dl.min.io/server/minio/release/linux-amd64/minio\n \n$ wget https://dl.min.io/client/mc/release/linux-amd64/mc\nchmod +x mc\n./mc --help\n \n$ downloads ./minio server /home/vishal/data --address=0.0.0.0:7000\n \n\nAPI: http://0.0.0.0:7000 \nRootUser: minioadmin \nRootPass: minioadmin \nFinished loading IAM sub-system (took 0.0s of 0.0s to load data).\n \nConsole: http://192.168.50.113:43859 http://192.168.160.1:43859 http://172.18.0.1:43859 http://172.19.0.1:43859 http://172.24.0.1:43859 http://172.26.0.1:43859 http://172.17.0.1:43859 http://127.0.0.1:43859 \nRootUser: minioadmin \nRootPass: minioadmin \n \nCommand-line: https://docs.min.io/docs/minio-client-quickstart-guide\n $ mc alias set myminio http://0.0.0.0:7000 minioadmin minioadmin\n \nDocumentation: https://docs.min.io\n\n\n")),(0,t.kt)("ul",null,(0,t.kt)("li",{parentName:"ul"},"Setup Minio server alias and List buckets")),(0,t.kt)("pre",null,(0,t.kt)("code",{parentName:"pre",className:"language-shell"},"$ mc alias set minio-deb http://192.168.50.113:7000 minioadmin minioadmin\n$ mc ls 
minio-deb\n[2022-05-29 14:59:32 IST] 0B nocodb/\n[2022-05-29 00:19:41 IST] 0B typesense/\n\n")),(0,t.kt)("ul",null,(0,t.kt)("li",{parentName:"ul"},"Create a new bucket and name it ",(0,t.kt)("inlineCode",{parentName:"li"},"pbm"))),(0,t.kt)("pre",null,(0,t.kt)("code",{parentName:"pre",className:"language-shell"},"$ mc alias set minio-deb http://192.168.50.113:7000 minioadmin minioadmin\n$ mc ls minio-deb\n [2022-05-29 14:59:32 IST] 0B nocodb/\n [2022-05-29 00:19:41 IST] 0B typesense/\n")),(0,t.kt)("ul",null,(0,t.kt)("li",{parentName:"ul"},"Setup PBM or compile PBM from the source repository")),(0,t.kt)("pre",null,(0,t.kt)("code",{parentName:"pre",className:"language-shell"},"$ sudo apt-get install -y libkrb5-dev\n$ cd percona-backup-mongodb\n$ make build\n$ make install\n")),(0,t.kt)("ul",null,(0,t.kt)("li",{parentName:"ul"},"create pbm_config.YAML to be used for configuring PBM for using MINIO")),(0,t.kt)("pre",null,(0,t.kt)("code",{parentName:"pre",className:"language-YAML"},'\nstorage:\n type: s3\n s3:\n endpointUrl: http://192.168.50.113:7000\n bucket: pbm\n credentials:\n access-key-id: "minioadmin"\n secret-access-key: "minioadmin"\n\n\n')),(0,t.kt)("ul",null,(0,t.kt)("li",{parentName:"ul"},"Configure PBM")),(0,t.kt)("pre",null,(0,t.kt)("code",{parentName:"pre",className:"language-shell"},'$ ./pbm config --file /home/vishal/dev/psmdb/pbm_config.yaml --mongodb-uri="mongodb://bcp:test1234@192.168.50.113:15000/?replSetName=rs1"\n\n')),(0,t.kt)("ul",null,(0,t.kt)("li",{parentName:"ul"},"Validate agent container logs and run the pbm list command. If MINIO is configured successfully, agent container logs shouldn\u2019t log any errors.")),(0,t.kt)("pre",null,(0,t.kt)("code",{parentName:"pre",className:"language-shell"},'2022-05-29T01:31:14.000+0000 D [resync] got backups list: 02022-05-29T01:31:14.000+0000 D [resync] got physical restores list: 0\n\n$ bin git:(main) ./pbm list --mongodb-uri="mongodb://bcp:test1234@192.168.50.113:15000/?replSetName=rs1"\nBackup snapshots:\n2022-05-29T01:29:12Z [complete: 2022-05-29T01:29:16Z]\n2022-05-29T01:38:38Z [complete: 2022-05-29T01:38:42Z]\n2022-05-29T04:04:44Z [complete: 2022-05-29T04:04:48Z]\n')),(0,t.kt)("ul",null,(0,t.kt)("li",{parentName:"ul"},"To run PBM backup and restore execute the below commands")),(0,t.kt)("pre",null,(0,t.kt)("code",{parentName:"pre",className:"language-bash"},'$ ./pbm backup --mongodb-uri="mongodb://bcp:test1234@192.168.50.113:15000/?replSetName=rs1" \n$ ./pbm restore 2022-05-29T04:04:44Z --mongodb-uri="mongodb:/\n')))}c.isMDXComponent=!0},7329:(e,n,a)=>{a.d(n,{Z:()=>o});const o=a.p+"assets/images/minio-be0df17e540f83ca7fda66435ae2b18c.webp"}}]); \ No newline at end of file diff --git a/assets/js/a5557bb9.062fe8da.js b/assets/js/a5557bb9.57b26a07.js similarity index 51% rename from assets/js/a5557bb9.062fe8da.js rename to assets/js/a5557bb9.57b26a07.js index 33ac448..e834c9a 100644 --- a/assets/js/a5557bb9.062fe8da.js +++ b/assets/js/a5557bb9.57b26a07.js @@ -1 +1 @@ -"use strict";(self.webpackChunk=self.webpackChunk||[]).push([[5991],{3885:e=>{e.exports=JSON.parse('{"permalink":"/","page":1,"postsPerPage":10,"totalPages":1,"totalCount":10,"blogDescription":"Blog","blogTitle":"Blog"}')}}]); \ No newline at end of file +"use strict";(self.webpackChunk=self.webpackChunk||[]).push([[5991],{3885:e=>{e.exports=JSON.parse('{"permalink":"/","page":1,"postsPerPage":10,"totalPages":2,"totalCount":11,"nextPage":"/page/2","blogDescription":"Blog","blogTitle":"Blog"}')}}]); \ No newline at end of file diff --git 
a/assets/js/a7ed2b5f.70e1532c.js b/assets/js/a7ed2b5f.e254de28.js similarity index 65% rename from assets/js/a7ed2b5f.70e1532c.js rename to assets/js/a7ed2b5f.e254de28.js index 870474a..f7a099e 100644 --- a/assets/js/a7ed2b5f.70e1532c.js +++ b/assets/js/a7ed2b5f.e254de28.js @@ -1 +1 @@ -"use strict";(self.webpackChunk=self.webpackChunk||[]).push([[87],{3473:(e,t,o)=>{o.r(t),o.d(t,{assets:()=>u,contentTitle:()=>n,default:()=>p,frontMatter:()=>i,metadata:()=>s,toc:()=>c});var a=o(7462),r=(o(7294),o(3905));o(1839);const i={slug:"fav-open-source-repo",title:"My favorite Open Source Projects in 2022",authors:{name:"Vishal Gandhi",url:"https://github.com/ivishalgandhi",image_url:"https://github.com/ivishalgandhi.png"},tags:["open-source",2022,"favourite-repo"]},n=void 0,s={permalink:"/fav-open-source-repo",source:"@site/blog/2022-12-30-fav-open-source-repo.md",title:"My favorite Open Source Projects in 2022",description:"Open Source is a great way to learn and contribute to the community. With a nearly endless array of open source projects available to consume and contribute to these days, Need some inspiration? Whether you\u2019re new to the world of open source, are gearing up for Open Source in the new year, or just want to see what other folks are excited about, check out a few of my favorite open source projects i discovered during 2022.",date:"2022-12-30T00:00:00.000Z",formattedDate:"December 30, 2022",tags:[{label:"open-source",permalink:"/tags/open-source"},{label:"2022",permalink:"/tags/2022"},{label:"favourite-repo",permalink:"/tags/favourite-repo"}],readingTime:6.49,hasTruncateMarker:!0,authors:[{name:"Vishal Gandhi",url:"https://github.com/ivishalgandhi",image_url:"https://github.com/ivishalgandhi.png",imageURL:"https://github.com/ivishalgandhi.png"}],frontMatter:{slug:"fav-open-source-repo",title:"My favorite Open Source Projects in 2022",authors:{name:"Vishal Gandhi",url:"https://github.com/ivishalgandhi",image_url:"https://github.com/ivishalgandhi.png",imageURL:"https://github.com/ivishalgandhi.png"},tags:["open-source","2022","favourite-repo"]},nextItem:{title:"Documentation as a code",permalink:"/doc-as-code"}},u={authorsImageUrls:[void 0]},c=[],h={toc:c};function p(e){let{components:t,...o}=e;return(0,r.kt)("wrapper",(0,a.Z)({},h,o,{components:t,mdxType:"MDXLayout"}),(0,r.kt)("p",null,"Open Source is a great way to learn and contribute to the community. With a nearly endless array of open source projects available to consume and contribute to these days, Need some inspiration? Whether you\u2019re new to the world of open source, are gearing up for Open Source in the new year, or just want to see what other folks are excited about, check out a few of my favorite open source projects i discovered during 2022."))}p.isMDXComponent=!0}}]); \ No newline at end of file +"use strict";(self.webpackChunk=self.webpackChunk||[]).push([[87],{3473:(e,t,o)=>{o.r(t),o.d(t,{assets:()=>u,contentTitle:()=>n,default:()=>l,frontMatter:()=>i,metadata:()=>s,toc:()=>c});var a=o(7462),r=(o(7294),o(3905));o(1839);const i={slug:"fav-open-source-repo",title:"My favorite Open Source Projects in 2022",authors:{name:"Vishal Gandhi",url:"https://github.com/ivishalgandhi",image_url:"https://github.com/ivishalgandhi.png"},tags:["open-source",2022,"favourite-repo"]},n=void 0,s={permalink:"/fav-open-source-repo",source:"@site/blog/2022-12-30-fav-open-source-repo.md",title:"My favorite Open Source Projects in 2022",description:"Open Source is a great way to learn and contribute to the community. 
With a nearly endless array of open source projects available to consume and contribute to these days, Need some inspiration? Whether you\u2019re new to the world of open source, are gearing up for Open Source in the new year, or just want to see what other folks are excited about, check out a few of my favorite open source projects i discovered during 2022.",date:"2022-12-30T00:00:00.000Z",formattedDate:"December 30, 2022",tags:[{label:"open-source",permalink:"/tags/open-source"},{label:"2022",permalink:"/tags/2022"},{label:"favourite-repo",permalink:"/tags/favourite-repo"}],readingTime:6.49,hasTruncateMarker:!0,authors:[{name:"Vishal Gandhi",url:"https://github.com/ivishalgandhi",image_url:"https://github.com/ivishalgandhi.png",imageURL:"https://github.com/ivishalgandhi.png"}],frontMatter:{slug:"fav-open-source-repo",title:"My favorite Open Source Projects in 2022",authors:{name:"Vishal Gandhi",url:"https://github.com/ivishalgandhi",image_url:"https://github.com/ivishalgandhi.png",imageURL:"https://github.com/ivishalgandhi.png"},tags:["open-source","2022","favourite-repo"]},prevItem:{title:"Essential Skills and Tools for Aspiring Software Developers and Database Engineers",permalink:"/essential-skills"},nextItem:{title:"Documentation as a code",permalink:"/doc-as-code"}},u={authorsImageUrls:[void 0]},c=[],p={toc:c};function l(e){let{components:t,...o}=e;return(0,r.kt)("wrapper",(0,a.Z)({},p,o,{components:t,mdxType:"MDXLayout"}),(0,r.kt)("p",null,"Open Source is a great way to learn and contribute to the community. With a nearly endless array of open source projects available to consume and contribute to these days, Need some inspiration? Whether you\u2019re new to the world of open source, are gearing up for Open Source in the new year, or just want to see what other folks are excited about, check out a few of my favorite open source projects i discovered during 2022."))}l.isMDXComponent=!0}}]); \ No newline at end of file diff --git a/assets/js/ab779234.060c03ba.js b/assets/js/ab779234.060c03ba.js deleted file mode 100644 index 8ea3602..0000000 --- a/assets/js/ab779234.060c03ba.js +++ /dev/null @@ -1 +0,0 @@ -"use strict";(self.webpackChunk=self.webpackChunk||[]).push([[9358],{3589:(t,a,e)=>{e.r(a),e.d(a,{assets:()=>s,contentTitle:()=>i,default:()=>d,frontMatter:()=>o,metadata:()=>l,toc:()=>m});var n=e(7462),r=(e(7294),e(3905));e(1839);const o={slug:"fav-open-source-repo",title:"My favorite Open Source Projects in 2022",authors:{name:"Vishal Gandhi",url:"https://github.com/ivishalgandhi",image_url:"https://github.com/ivishalgandhi.png"},tags:["open-source",2022,"favourite-repo"]},i=void 0,l={permalink:"/fav-open-source-repo",source:"@site/blog/2022-12-30-fav-open-source-repo.md",title:"My favorite Open Source Projects in 2022",description:"Open Source is a great way to learn and contribute to the community. With a nearly endless array of open source projects available to consume and contribute to these days, Need some inspiration? 
Whether you\u2019re new to the world of open source, are gearing up for Open Source in the new year, or just want to see what other folks are excited about, check out a few of my favorite open source projects i discovered during 2022.",date:"2022-12-30T00:00:00.000Z",formattedDate:"December 30, 2022",tags:[{label:"open-source",permalink:"/tags/open-source"},{label:"2022",permalink:"/tags/2022"},{label:"favourite-repo",permalink:"/tags/favourite-repo"}],readingTime:6.49,hasTruncateMarker:!0,authors:[{name:"Vishal Gandhi",url:"https://github.com/ivishalgandhi",image_url:"https://github.com/ivishalgandhi.png",imageURL:"https://github.com/ivishalgandhi.png"}],frontMatter:{slug:"fav-open-source-repo",title:"My favorite Open Source Projects in 2022",authors:{name:"Vishal Gandhi",url:"https://github.com/ivishalgandhi",image_url:"https://github.com/ivishalgandhi.png",imageURL:"https://github.com/ivishalgandhi.png"},tags:["open-source","2022","favourite-repo"]},nextItem:{title:"Documentation as a code",permalink:"/doc-as-code"}},s={authorsImageUrls:[void 0]},m=[],p={toc:m};function d(t){let{components:a,...e}=t;return(0,r.kt)("wrapper",(0,n.Z)({},p,e,{components:a,mdxType:"MDXLayout"}),(0,r.kt)("p",null,"Open Source is a great way to learn and contribute to the community. With a nearly endless array of open source projects available to consume and contribute to these days, Need some inspiration? Whether you\u2019re new to the world of open source, are gearing up for Open Source in the new year, or just want to see what other folks are excited about, check out a few of my favorite open source projects i discovered during 2022.\xa0 "),(0,r.kt)("table",null,(0,r.kt)("thead",{parentName:"table"},(0,r.kt)("tr",{parentName:"thead"},(0,r.kt)("th",{parentName:"tr",align:null},"Category"),(0,r.kt)("th",{parentName:"tr",align:null},"Repository"),(0,r.kt)("th",{parentName:"tr",align:null},"Site URL"),(0,r.kt)("th",{parentName:"tr",align:null},"Description"))),(0,r.kt)("tbody",{parentName:"table"},(0,r.kt)("tr",{parentName:"tbody"},(0,r.kt)("td",{parentName:"tr",align:null},(0,r.kt)("strong",{parentName:"td"},"Browser")),(0,r.kt)("td",{parentName:"tr",align:null},(0,r.kt)("a",{parentName:"td",href:"https://gitlab.com/librewolf-community"},(0,r.kt)("strong",{parentName:"a"},"LibreWolf"))),(0,r.kt)("td",{parentName:"tr",align:null},(0,r.kt)("a",{parentName:"td",href:"https://librewolf.net/"},"https://librewolf.net/")),(0,r.kt)("td",{parentName:"tr",align:null},"LibreWolf is a privacy-oriented web browser based on Firefox")),(0,r.kt)("tr",{parentName:"tbody"},(0,r.kt)("td",{parentName:"tr",align:null},(0,r.kt)("strong",{parentName:"td"},"Browser")),(0,r.kt)("td",{parentName:"tr",align:null},(0,r.kt)("a",{parentName:"td",href:"https://github.com/brave/brave-browser"},(0,r.kt)("strong",{parentName:"a"},"Brave"))),(0,r.kt)("td",{parentName:"tr",align:null},(0,r.kt)("a",{parentName:"td",href:"https://brave.com/"},"https://brave.com/")),(0,r.kt)("td",{parentName:"tr",align:null},"Brave is a privacy-oriented web browser based on Chromium")),(0,r.kt)("tr",{parentName:"tbody"},(0,r.kt)("td",{parentName:"tr",align:null},(0,r.kt)("strong",{parentName:"td"},"Task Runner")),(0,r.kt)("td",{parentName:"tr",align:null},(0,r.kt)("a",{parentName:"td",href:"https://github.com/go-task/task"},(0,r.kt)("strong",{parentName:"a"},"go-task"))),(0,r.kt)("td",{parentName:"tr",align:null},(0,r.kt)("a",{parentName:"td",href:"https://taskfile.dev/"},"https://taskfile.dev/")),(0,r.kt)("td",{parentName:"tr",align:null},"Task is a task runner / 
simpler Make alternative written in Go")),(0,r.kt)("tr",{parentName:"tbody"},(0,r.kt)("td",{parentName:"tr",align:null},(0,r.kt)("strong",{parentName:"td"},"Data Analysis")),(0,r.kt)("td",{parentName:"tr",align:null},(0,r.kt)("a",{parentName:"td",href:"https://github.com/pandas-dev/pandas"},(0,r.kt)("strong",{parentName:"a"},"Pandas"))),(0,r.kt)("td",{parentName:"tr",align:null},(0,r.kt)("a",{parentName:"td",href:"https://pandas.pydata.org/"},"https://pandas.pydata.org/")),(0,r.kt)("td",{parentName:"tr",align:null},"Pandas is a data analysis library for Python")),(0,r.kt)("tr",{parentName:"tbody"},(0,r.kt)("td",{parentName:"tr",align:null},(0,r.kt)("strong",{parentName:"td"},"Object Storage")),(0,r.kt)("td",{parentName:"tr",align:null},(0,r.kt)("a",{parentName:"td",href:"https://github.com/minio/minio"},(0,r.kt)("strong",{parentName:"a"},"Minio"))),(0,r.kt)("td",{parentName:"tr",align:null},(0,r.kt)("a",{parentName:"td",href:"https://min.io/"},"https://min.io/")),(0,r.kt)("td",{parentName:"tr",align:null},"Minio is an object storage server that is compatible with Amazon S3")),(0,r.kt)("tr",{parentName:"tbody"},(0,r.kt)("td",{parentName:"tr",align:null},(0,r.kt)("strong",{parentName:"td"},"Terminal")),(0,r.kt)("td",{parentName:"tr",align:null},(0,r.kt)("a",{parentName:"td",href:"https://github.com/Eugeny/tabby"},(0,r.kt)("strong",{parentName:"a"},"Tabby"))),(0,r.kt)("td",{parentName:"tr",align:null},(0,r.kt)("a",{parentName:"td",href:"https://tabby.sh/"},"https://tabby.sh/")),(0,r.kt)("td",{parentName:"tr",align:null},"A Terminal for the modern age")),(0,r.kt)("tr",{parentName:"tbody"},(0,r.kt)("td",{parentName:"tr",align:null},(0,r.kt)("strong",{parentName:"td"},"Terminal")),(0,r.kt)("td",{parentName:"tr",align:null},(0,r.kt)("a",{parentName:"td",href:"https://github.com/tmux/tmux"},(0,r.kt)("strong",{parentName:"a"},"Tmux"))),(0,r.kt)("td",{parentName:"tr",align:null},(0,r.kt)("a",{parentName:"td",href:"https://github.com/tmux/tmux"},"https://github.com/tmux/tmux")),(0,r.kt)("td",{parentName:"tr",align:null},"tmux is a terminal multiplexer: it enables a number of terminals to be created, accessed, and controlled from a single screen. tmux may be detached from a screen and continue running in the background, then later reattached.")),(0,r.kt)("tr",{parentName:"tbody"},(0,r.kt)("td",{parentName:"tr",align:null},(0,r.kt)("strong",{parentName:"td"},"Terminal")),(0,r.kt)("td",{parentName:"tr",align:null},(0,r.kt)("a",{parentName:"td",href:"https://github.com/tmuxinator/tmuxinator"},(0,r.kt)("strong",{parentName:"a"},"Tmuxinator"))),(0,r.kt)("td",{parentName:"tr",align:null},(0,r.kt)("a",{parentName:"td",href:"https://github.com/tmuxinator/tmuxinator"},"https://github.com/tmuxinator/tmuxinator")),(0,r.kt)("td",{parentName:"tr",align:null},"Tmuxinator is a tool for managing complex tmux sessions easily.")),(0,r.kt)("tr",{parentName:"tbody"},(0,r.kt)("td",{parentName:"tr",align:null},(0,r.kt)("strong",{parentName:"td"},"Code Editor")),(0,r.kt)("td",{parentName:"tr",align:null},(0,r.kt)("a",{parentName:"td",href:"http://https://github.com/microsoft/vscode"},(0,r.kt)("strong",{parentName:"a"},"Visual Studio Code"))),(0,r.kt)("td",{parentName:"tr",align:null},(0,r.kt)("a",{parentName:"td",href:"https://code.visualstudio.com/"},"https://code.visualstudio.com/")),(0,r.kt)("td",{parentName:"tr",align:null},"Visual Studio Code is a code editor redefined and optimized for building and debugging modern web and cloud applications. 
Built on top of ",(0,r.kt)("a",{parentName:"td",href:"https://github.com/electron/electron"},"Electron"))),(0,r.kt)("tr",{parentName:"tbody"},(0,r.kt)("td",{parentName:"tr",align:null},(0,r.kt)("strong",{parentName:"td"},"Code Editor")),(0,r.kt)("td",{parentName:"tr",align:null},(0,r.kt)("a",{parentName:"td",href:"https://github.com/neovim/neovim"},(0,r.kt)("strong",{parentName:"a"},"neovim"))),(0,r.kt)("td",{parentName:"tr",align:null},(0,r.kt)("a",{parentName:"td",href:"https://neovim.io/"},"https://neovim.io/")),(0,r.kt)("td",{parentName:"tr",align:null},"Neovim is a text editor based on Vim. Last few months, i have been using ",(0,r.kt)("inlineCode",{parentName:"td"},"neovim")," more often compared to ",(0,r.kt)("inlineCode",{parentName:"td"},"Visual Studio Code"))),(0,r.kt)("tr",{parentName:"tbody"},(0,r.kt)("td",{parentName:"tr",align:null},(0,r.kt)("strong",{parentName:"td"},"Productivity - Note Taking")),(0,r.kt)("td",{parentName:"tr",align:null},(0,r.kt)("a",{parentName:"td",href:"https://github.com/dendronhq/dendron"},(0,r.kt)("strong",{parentName:"a"},"Dendron"))),(0,r.kt)("td",{parentName:"tr",align:null},(0,r.kt)("a",{parentName:"td",href:"https://dendron.so/"},"https://dendron.so/")),(0,r.kt)("td",{parentName:"tr",align:null},"Dendron is my goto note taking tool. Its available as a plugin for VSCode and allows hirearchy and graph based note taking. Awesome solution to mantain a ",(0,r.kt)("inlineCode",{parentName:"td"},"second-brain"))),(0,r.kt)("tr",{parentName:"tbody"},(0,r.kt)("td",{parentName:"tr",align:null},(0,r.kt)("strong",{parentName:"td"},"Productivity - Bookmark Manager")),(0,r.kt)("td",{parentName:"tr",align:null},(0,r.kt)("a",{parentName:"td",href:"https://github.com/jarun/buku"},(0,r.kt)("strong",{parentName:"a"},"Buku"))),(0,r.kt)("td",{parentName:"tr",align:null},(0,r.kt)("a",{parentName:"td",href:"https://github.com/jarun/buku#quickstart"},"https://github.com/jarun/buku#quickstart")),(0,r.kt)("td",{parentName:"tr",align:null},"Buku is a command-line bookmark manager.")),(0,r.kt)("tr",{parentName:"tbody"},(0,r.kt)("td",{parentName:"tr",align:null},(0,r.kt)("strong",{parentName:"td"},"Private Network VPN")),(0,r.kt)("td",{parentName:"tr",align:null},(0,r.kt)("a",{parentName:"td",href:"https://github.com/tailscale/tailscale"},(0,r.kt)("strong",{parentName:"a"},"Tailscale"))),(0,r.kt)("td",{parentName:"tr",align:null},(0,r.kt)("a",{parentName:"td",href:"https://tailscale.com/"},"https://tailscale.com/")),(0,r.kt)("td",{parentName:"tr",align:null},"Tailscale is a private network VPN. 
Helps me create a private network for all my home lab machines and devices")),(0,r.kt)("tr",{parentName:"tbody"},(0,r.kt)("td",{parentName:"tr",align:null},(0,r.kt)("strong",{parentName:"td"},"Tunnelling")),(0,r.kt)("td",{parentName:"tr",align:null},(0,r.kt)("a",{parentName:"td",href:"https://github.com/inconshreveable/ngrok"},(0,r.kt)("strong",{parentName:"a"},"ngrok"))),(0,r.kt)("td",{parentName:"tr",align:null},(0,r.kt)("a",{parentName:"td",href:"https://ngrok.com/"},"https://ngrok.com/")),(0,r.kt)("td",{parentName:"tr",align:null},"ngrok exposes local servers behind NATs and firewalls to the public internet over secure tunnels.")),(0,r.kt)("tr",{parentName:"tbody"},(0,r.kt)("td",{parentName:"tr",align:null},(0,r.kt)("strong",{parentName:"td"},"Tunnelling")),(0,r.kt)("td",{parentName:"tr",align:null},(0,r.kt)("a",{parentName:"td",href:"https://github.com/localtunnel/localtunnel"},(0,r.kt)("strong",{parentName:"a"},"localtunnel"))),(0,r.kt)("td",{parentName:"tr",align:null},(0,r.kt)("a",{parentName:"td",href:"https://localtunnel.github.io/www/"},"https://localtunnel.github.io/www/")),(0,r.kt)("td",{parentName:"tr",align:null},"localtunnel exposes your localhost to the world for easy testing and sharing! No need to mess with DNS or deploy just to have others test out your changes.")),(0,r.kt)("tr",{parentName:"tbody"},(0,r.kt)("td",{parentName:"tr",align:null},(0,r.kt)("strong",{parentName:"td"},"Containers")),(0,r.kt)("td",{parentName:"tr",align:null},(0,r.kt)("a",{parentName:"td",href:"https://github.com/containers/podman"},(0,r.kt)("strong",{parentName:"a"},"Podman"))),(0,r.kt)("td",{parentName:"tr",align:null},(0,r.kt)("a",{parentName:"td",href:"https://podman.io/"},"https://podman.io/")),(0,r.kt)("td",{parentName:"tr",align:null},"Podman is a daemonless, open source, Linux native tool designed to make it easy to find, run, build, share and deploy applications using Open Containers Initiative (OCI) Containers and Container Images.")),(0,r.kt)("tr",{parentName:"tbody"},(0,r.kt)("td",{parentName:"tr",align:null},(0,r.kt)("strong",{parentName:"td"},"Container Scheduling and Management")),(0,r.kt)("td",{parentName:"tr",align:null},(0,r.kt)("a",{parentName:"td",href:"https://github.com/kubernetes/kubernetes"},(0,r.kt)("strong",{parentName:"a"},"Kubernetes"))),(0,r.kt)("td",{parentName:"tr",align:null},(0,r.kt)("a",{parentName:"td",href:"https://kubernetes.io/"},"https://kubernetes.io/")),(0,r.kt)("td",{parentName:"tr",align:null},"Kubernetes is an open-source system for automating deployment, scaling, and management of containerized applications.")),(0,r.kt)("tr",{parentName:"tbody"},(0,r.kt)("td",{parentName:"tr",align:null},(0,r.kt)("strong",{parentName:"td"},"Static Site Generator")),(0,r.kt)("td",{parentName:"tr",align:null},(0,r.kt)("a",{parentName:"td",href:"https://github.com/facebook/docusaurus"},(0,r.kt)("strong",{parentName:"a"},"Docusaurus"))),(0,r.kt)("td",{parentName:"tr",align:null},(0,r.kt)("a",{parentName:"td",href:"https://docusaurus.io/"},"https://docusaurus.io/")),(0,r.kt)("td",{parentName:"tr",align:null},"Docusaurus is a static site generator written in JavaScript. 
Build optimized websites quickly, focus on your content.")),(0,r.kt)("tr",{parentName:"tbody"},(0,r.kt)("td",{parentName:"tr",align:null},(0,r.kt)("strong",{parentName:"td"},"Static Site Generator")),(0,r.kt)("td",{parentName:"tr",align:null},(0,r.kt)("a",{parentName:"td",href:"https://github.com/gohugoio/hugo"},(0,r.kt)("strong",{parentName:"a"},"Hugo"))),(0,r.kt)("td",{parentName:"tr",align:null},(0,r.kt)("a",{parentName:"td",href:"https://gohugo.io/"},"https://gohugo.io/")),(0,r.kt)("td",{parentName:"tr",align:null},"Hugo is a static site generator written in ",(0,r.kt)("inlineCode",{parentName:"td"},"Go"),".")),(0,r.kt)("tr",{parentName:"tbody"},(0,r.kt)("td",{parentName:"tr",align:null},(0,r.kt)("strong",{parentName:"td"},"Diagram as a Code")),(0,r.kt)("td",{parentName:"tr",align:null},(0,r.kt)("a",{parentName:"td",href:"https://github.com/mermaid-js/mermaid"},(0,r.kt)("strong",{parentName:"a"},"Mermaid"))),(0,r.kt)("td",{parentName:"tr",align:null},(0,r.kt)("a",{parentName:"td",href:"https://mermaid.js.org/"},"https://mermaid.js.org/")),(0,r.kt)("td",{parentName:"tr",align:null},"Generation of diagrams like flowcharts or sequence diagrams from text in a similar manner as markdown")),(0,r.kt)("tr",{parentName:"tbody"},(0,r.kt)("td",{parentName:"tr",align:null},(0,r.kt)("strong",{parentName:"td"},"Diagram as a Code")),(0,r.kt)("td",{parentName:"tr",align:null},(0,r.kt)("a",{parentName:"td",href:"https://github.com/plantuml/plantuml"},(0,r.kt)("strong",{parentName:"a"},"PlantUML"))),(0,r.kt)("td",{parentName:"tr",align:null},(0,r.kt)("a",{parentName:"td",href:"https://plantuml.com/"},"https://plantuml.com/")),(0,r.kt)("td",{parentName:"tr",align:null},"Generate diagrams from textual description")),(0,r.kt)("tr",{parentName:"tbody"},(0,r.kt)("td",{parentName:"tr",align:null},(0,r.kt)("strong",{parentName:"td"},"Diagram as a Code")),(0,r.kt)("td",{parentName:"tr",align:null},(0,r.kt)("a",{parentName:"td",href:"https://github.com/jgraph/drawio"},(0,r.kt)("strong",{parentName:"a"},"Draw.io"))),(0,r.kt)("td",{parentName:"tr",align:null},(0,r.kt)("a",{parentName:"td",href:"https://app.diagrams.net/"},"https://app.diagrams.net/")),(0,r.kt)("td",{parentName:"tr",align:null},"draw.io, this project, is a configurable diagramming/whiteboarding visualization application. draw.io is jointly owned and developed by JGraph Ltd and draw.io AG.")),(0,r.kt)("tr",{parentName:"tbody"},(0,r.kt)("td",{parentName:"tr",align:null},(0,r.kt)("strong",{parentName:"td"},"Diagram as a Code")),(0,r.kt)("td",{parentName:"tr",align:null},(0,r.kt)("a",{parentName:"td",href:"https://github.com/excalidraw/excalidraw"},(0,r.kt)("strong",{parentName:"a"},"Excalidraw"))),(0,r.kt)("td",{parentName:"tr",align:null},(0,r.kt)("a",{parentName:"td",href:"https://excalidraw.com/"},"https://excalidraw.com/")),(0,r.kt)("td",{parentName:"tr",align:null},"Excalidraw is a free software that offers a whiteboard tool that lets you easily sketch diagrams with a hand-drawn feel. 
Another features are the collaborative mode, and the ability to export the diagrams to PNG or SVG formats, and to save them locally in a JSON format")),(0,r.kt)("tr",{parentName:"tbody"},(0,r.kt)("td",{parentName:"tr",align:null},(0,r.kt)("strong",{parentName:"td"},"Diagram as a Code")),(0,r.kt)("td",{parentName:"tr",align:null},(0,r.kt)("a",{parentName:"td",href:"https://github.com/mingrammer/diagrams"},(0,r.kt)("strong",{parentName:"a"},"mingrammer"))),(0,r.kt)("td",{parentName:"tr",align:null},(0,r.kt)("a",{parentName:"td",href:"https://diagrams.mingrammer.com/"},"https://diagrams.mingrammer.com/")),(0,r.kt)("td",{parentName:"tr",align:null},"Diagrams as code for prototyping cloud system architecture.")),(0,r.kt)("tr",{parentName:"tbody"},(0,r.kt)("td",{parentName:"tr",align:null},(0,r.kt)("strong",{parentName:"td"},"Web Framework")),(0,r.kt)("td",{parentName:"tr",align:null},(0,r.kt)("a",{parentName:"td",href:"https://github.com/gin-gonic/gin"},(0,r.kt)("strong",{parentName:"a"},"Gin"))),(0,r.kt)("td",{parentName:"tr",align:null},(0,r.kt)("a",{parentName:"td",href:"https://gin-gonic.com/"},"https://gin-gonic.com/")),(0,r.kt)("td",{parentName:"tr",align:null},"Gin is a HTTP web framework written in Go (Golang). It features a Martini-like API, but with performance up to 40 times faster than Martini. If you need smashing performance, get yourself some Gin.")),(0,r.kt)("tr",{parentName:"tbody"},(0,r.kt)("td",{parentName:"tr",align:null},(0,r.kt)("strong",{parentName:"td"},"Web Framework")),(0,r.kt)("td",{parentName:"tr",align:null},(0,r.kt)("a",{parentName:"td",href:"https://github.com/tiangolo/fastapi"},(0,r.kt)("strong",{parentName:"a"},"FAST API"))),(0,r.kt)("td",{parentName:"tr",align:null},(0,r.kt)("a",{parentName:"td",href:"https://fastapi.tiangolo.com/"},"https://fastapi.tiangolo.com/")),(0,r.kt)("td",{parentName:"tr",align:null},"FastAPI is a Web framework for developing RESTful APIs in Python. FastAPI is based on Pydantic and type hints to validate, serialize, and deserialize data, and automatically auto-generate OpenAPI documents. It fully supports asynchronous programming and can run with Gunicorn and ASGI servers for production such as Uvicorn and Hypercorn. To improve developer-friendliness, editor support was considered since the earliest days of the project.")),(0,r.kt)("tr",{parentName:"tbody"},(0,r.kt)("td",{parentName:"tr",align:null},(0,r.kt)("strong",{parentName:"td"},"Web Framework")),(0,r.kt)("td",{parentName:"tr",align:null},(0,r.kt)("a",{parentName:"td",href:"https://github.com/withastro/astro"},(0,r.kt)("strong",{parentName:"a"},"Astro"))),(0,r.kt)("td",{parentName:"tr",align:null},(0,r.kt)("a",{parentName:"td",href:"https://astro.build/"},"https://astro.build/")),(0,r.kt)("td",{parentName:"tr",align:null},"Astro works with your favorite content sources. Pull content from the filesystem or fetch it remotely from your favorite CMS, database, or API. 
Astro supports both static output\xa0(SSG) and live server output\xa0(SSR) that can render your content on-demand.")),(0,r.kt)("tr",{parentName:"tbody"},(0,r.kt)("td",{parentName:"tr",align:null},(0,r.kt)("strong",{parentName:"td"},"Search Engine")),(0,r.kt)("td",{parentName:"tr",align:null},(0,r.kt)("a",{parentName:"td",href:"https://github.com/typesense/typesense"},(0,r.kt)("strong",{parentName:"a"},"Typesense"))),(0,r.kt)("td",{parentName:"tr",align:null},(0,r.kt)("a",{parentName:"td",href:"https://typesense.org/"},"https://typesense.org/")),(0,r.kt)("td",{parentName:"tr",align:null},"Typesense is a modern, privacy-friendly, open source search engine built from the ground up using cutting-edge search algorithms, that take advantage of the latest advances in hardware capabilities.")),(0,r.kt)("tr",{parentName:"tbody"},(0,r.kt)("td",{parentName:"tr",align:null},(0,r.kt)("strong",{parentName:"td"},"Nocode Platform")),(0,r.kt)("td",{parentName:"tr",align:null},(0,r.kt)("a",{parentName:"td",href:"https://github.com/nocodb/nocodb"},(0,r.kt)("strong",{parentName:"a"},"NOCODB"))),(0,r.kt)("td",{parentName:"tr",align:null},(0,r.kt)("a",{parentName:"td",href:"https://nocodb.com/"},"https://nocodb.com/")),(0,r.kt)("td",{parentName:"tr",align:null},"NocoDB is an open-source low-code platform for building and managing internal tools and turning your SQL Databases into a smart spreadsheet. It is a self-hosted alternative to Airtable, Notion, and Airtable.")),(0,r.kt)("tr",{parentName:"tbody"},(0,r.kt)("td",{parentName:"tr",align:null},(0,r.kt)("strong",{parentName:"td"},"Distributed Database")),(0,r.kt)("td",{parentName:"tr",align:null},(0,r.kt)("a",{parentName:"td",href:"https://github.com/rqlite/rqlite"},(0,r.kt)("strong",{parentName:"a"},"rqlite"))),(0,r.kt)("td",{parentName:"tr",align:null},(0,r.kt)("a",{parentName:"td",href:"https://rqlite.com/"},"https://rqlite.com/")),(0,r.kt)("td",{parentName:"tr",align:null},"rqlite is an easy-to-use, lightweight, distributed relational database, which uses SQLite as its storage engine. rqlite is simple to deploy, operating it is very straightforward, and its clustering capabilities provide you with fault-tolerance and high-availability.")),(0,r.kt)("tr",{parentName:"tbody"},(0,r.kt)("td",{parentName:"tr",align:null},(0,r.kt)("strong",{parentName:"td"},"Multi-modal Database")),(0,r.kt)("td",{parentName:"tr",align:null},(0,r.kt)("a",{parentName:"td",href:"https://github.com/surrealdb/surrealdb"},(0,r.kt)("strong",{parentName:"a"},"SurrealDB"))),(0,r.kt)("td",{parentName:"tr",align:null},(0,r.kt)("a",{parentName:"td",href:"https://surrealdb.com/"},"https://surrealdb.com/")),(0,r.kt)("td",{parentName:"tr",align:null},"SurrealDB combines the database layer, the querying layer, and the API and authentication layer into one platform. Advanced table-based and row-based customisable access permissions allow for granular data access patterns for different types of users. 
There's no need for custom backend code and security rules with complicated database development.")),(0,r.kt)("tr",{parentName:"tbody"},(0,r.kt)("td",{parentName:"tr",align:null},(0,r.kt)("strong",{parentName:"td"},"Multi-modal Database")),(0,r.kt)("td",{parentName:"tr",align:null},(0,r.kt)("a",{parentName:"td",href:"https://github.com/arangodb/arangodb"},(0,r.kt)("strong",{parentName:"a"},"ArangoDB"))),(0,r.kt)("td",{parentName:"tr",align:null},(0,r.kt)("a",{parentName:"td",href:"https://www.arangodb.com/"},"https://www.arangodb.com/")),(0,r.kt)("td",{parentName:"tr",align:null},"ArangoDB is a free and open-source native graph database system developed by ArangoDB Inc. ArangoDB is a multi-model database system since it supports three data models with one database core and a unified query language AQL. AQL is mainly a declarative language and allows the combination of different data access patterns in a single query.")),(0,r.kt)("tr",{parentName:"tbody"},(0,r.kt)("td",{parentName:"tr",align:null},(0,r.kt)("strong",{parentName:"td"},"Git for Data")),(0,r.kt)("td",{parentName:"tr",align:null},(0,r.kt)("a",{parentName:"td",href:"https://github.com/dolthub/dolt"},(0,r.kt)("strong",{parentName:"a"},"Dolt"))),(0,r.kt)("td",{parentName:"tr",align:null},(0,r.kt)("a",{parentName:"td",href:"https://dolthub.com/"},"https://dolthub.com/")),(0,r.kt)("td",{parentName:"tr",align:null},"Dolt is a SQL database that you can fork, clone, branch, merge, push and pull just like a Git repository. Connect to Dolt just like any MySQL database to run queries or update the data using SQL commands. Use the command line interface to import CSV files, commit your changes, push them to a remote, or merge your teammate's changes.")),(0,r.kt)("tr",{parentName:"tbody"},(0,r.kt)("td",{parentName:"tr",align:null},(0,r.kt)("strong",{parentName:"td"},"Personal Finance")),(0,r.kt)("td",{parentName:"tr",align:null},(0,r.kt)("a",{parentName:"td",href:"https://github.com/firefly-iii/firefly-iii"},(0,r.kt)("strong",{parentName:"a"},"Firefly III"))),(0,r.kt)("td",{parentName:"tr",align:null},(0,r.kt)("a",{parentName:"td",href:"https://firefly-iii.org/"},"https://firefly-iii.org/")),(0,r.kt)("td",{parentName:"tr",align:null},'"Firefly III" is a (self-hosted) manager for your personal finances. It can help you keep track of your expenses and income, so you can spend less and save more. Firefly III supports the use of budgets, categories and tags. Using a bunch of external tools, you can import data. It also has many neat financial reports available.')),(0,r.kt)("tr",{parentName:"tbody"},(0,r.kt)("td",{parentName:"tr",align:null},(0,r.kt)("strong",{parentName:"td"},"Monitoring and TSDB")),(0,r.kt)("td",{parentName:"tr",align:null},(0,r.kt)("a",{parentName:"td",href:"https://github.com/prometheus/prometheus"},"Prometheus")),(0,r.kt)("td",{parentName:"tr",align:null},(0,r.kt)("a",{parentName:"td",href:"https://prometheus.io/"},"https://prometheus.io/")),(0,r.kt)("td",{parentName:"tr",align:null},"Prometheus is a systems and service monitoring system. 
It collects metrics from configured targets at given intervals, evaluates rule expressions, displays the results, and can trigger alerts if some condition is observed to be true.")))))}d.isMDXComponent=!0}}]); \ No newline at end of file diff --git a/assets/js/ab779234.9aed2f3b.js b/assets/js/ab779234.9aed2f3b.js new file mode 100644 index 0000000..2a355f8 --- /dev/null +++ b/assets/js/ab779234.9aed2f3b.js @@ -0,0 +1 @@ +"use strict";(self.webpackChunk=self.webpackChunk||[]).push([[9358],{3589:(t,a,e)=>{e.r(a),e.d(a,{assets:()=>s,contentTitle:()=>i,default:()=>d,frontMatter:()=>o,metadata:()=>l,toc:()=>m});var n=e(7462),r=(e(7294),e(3905));e(1839);const o={slug:"fav-open-source-repo",title:"My favorite Open Source Projects in 2022",authors:{name:"Vishal Gandhi",url:"https://github.com/ivishalgandhi",image_url:"https://github.com/ivishalgandhi.png"},tags:["open-source",2022,"favourite-repo"]},i=void 0,l={permalink:"/fav-open-source-repo",source:"@site/blog/2022-12-30-fav-open-source-repo.md",title:"My favorite Open Source Projects in 2022",description:"Open Source is a great way to learn and contribute to the community. With a nearly endless array of open source projects available to consume and contribute to these days, Need some inspiration? Whether you\u2019re new to the world of open source, are gearing up for Open Source in the new year, or just want to see what other folks are excited about, check out a few of my favorite open source projects i discovered during 2022.",date:"2022-12-30T00:00:00.000Z",formattedDate:"December 30, 2022",tags:[{label:"open-source",permalink:"/tags/open-source"},{label:"2022",permalink:"/tags/2022"},{label:"favourite-repo",permalink:"/tags/favourite-repo"}],readingTime:6.49,hasTruncateMarker:!0,authors:[{name:"Vishal Gandhi",url:"https://github.com/ivishalgandhi",image_url:"https://github.com/ivishalgandhi.png",imageURL:"https://github.com/ivishalgandhi.png"}],frontMatter:{slug:"fav-open-source-repo",title:"My favorite Open Source Projects in 2022",authors:{name:"Vishal Gandhi",url:"https://github.com/ivishalgandhi",image_url:"https://github.com/ivishalgandhi.png",imageURL:"https://github.com/ivishalgandhi.png"},tags:["open-source","2022","favourite-repo"]},prevItem:{title:"Essential Skills and Tools for Aspiring Software Developers and Database Engineers",permalink:"/essential-skills"},nextItem:{title:"Documentation as a code",permalink:"/doc-as-code"}},s={authorsImageUrls:[void 0]},m=[],p={toc:m};function d(t){let{components:a,...e}=t;return(0,r.kt)("wrapper",(0,n.Z)({},p,e,{components:a,mdxType:"MDXLayout"}),(0,r.kt)("p",null,"Open Source is a great way to learn and contribute to the community. With a nearly endless array of open source projects available to consume and contribute to these days, Need some inspiration? 
Whether you\u2019re new to the world of open source, are gearing up for Open Source in the new year, or just want to see what other folks are excited about, check out a few of my favorite open source projects i discovered during 2022.\xa0 "),(0,r.kt)("table",null,(0,r.kt)("thead",{parentName:"table"},(0,r.kt)("tr",{parentName:"thead"},(0,r.kt)("th",{parentName:"tr",align:null},"Category"),(0,r.kt)("th",{parentName:"tr",align:null},"Repository"),(0,r.kt)("th",{parentName:"tr",align:null},"Site URL"),(0,r.kt)("th",{parentName:"tr",align:null},"Description"))),(0,r.kt)("tbody",{parentName:"table"},(0,r.kt)("tr",{parentName:"tbody"},(0,r.kt)("td",{parentName:"tr",align:null},(0,r.kt)("strong",{parentName:"td"},"Browser")),(0,r.kt)("td",{parentName:"tr",align:null},(0,r.kt)("a",{parentName:"td",href:"https://gitlab.com/librewolf-community"},(0,r.kt)("strong",{parentName:"a"},"LibreWolf"))),(0,r.kt)("td",{parentName:"tr",align:null},(0,r.kt)("a",{parentName:"td",href:"https://librewolf.net/"},"https://librewolf.net/")),(0,r.kt)("td",{parentName:"tr",align:null},"LibreWolf is a privacy-oriented web browser based on Firefox")),(0,r.kt)("tr",{parentName:"tbody"},(0,r.kt)("td",{parentName:"tr",align:null},(0,r.kt)("strong",{parentName:"td"},"Browser")),(0,r.kt)("td",{parentName:"tr",align:null},(0,r.kt)("a",{parentName:"td",href:"https://github.com/brave/brave-browser"},(0,r.kt)("strong",{parentName:"a"},"Brave"))),(0,r.kt)("td",{parentName:"tr",align:null},(0,r.kt)("a",{parentName:"td",href:"https://brave.com/"},"https://brave.com/")),(0,r.kt)("td",{parentName:"tr",align:null},"Brave is a privacy-oriented web browser based on Chromium")),(0,r.kt)("tr",{parentName:"tbody"},(0,r.kt)("td",{parentName:"tr",align:null},(0,r.kt)("strong",{parentName:"td"},"Task Runner")),(0,r.kt)("td",{parentName:"tr",align:null},(0,r.kt)("a",{parentName:"td",href:"https://github.com/go-task/task"},(0,r.kt)("strong",{parentName:"a"},"go-task"))),(0,r.kt)("td",{parentName:"tr",align:null},(0,r.kt)("a",{parentName:"td",href:"https://taskfile.dev/"},"https://taskfile.dev/")),(0,r.kt)("td",{parentName:"tr",align:null},"Task is a task runner / simpler Make alternative written in Go")),(0,r.kt)("tr",{parentName:"tbody"},(0,r.kt)("td",{parentName:"tr",align:null},(0,r.kt)("strong",{parentName:"td"},"Data Analysis")),(0,r.kt)("td",{parentName:"tr",align:null},(0,r.kt)("a",{parentName:"td",href:"https://github.com/pandas-dev/pandas"},(0,r.kt)("strong",{parentName:"a"},"Pandas"))),(0,r.kt)("td",{parentName:"tr",align:null},(0,r.kt)("a",{parentName:"td",href:"https://pandas.pydata.org/"},"https://pandas.pydata.org/")),(0,r.kt)("td",{parentName:"tr",align:null},"Pandas is a data analysis library for Python")),(0,r.kt)("tr",{parentName:"tbody"},(0,r.kt)("td",{parentName:"tr",align:null},(0,r.kt)("strong",{parentName:"td"},"Object Storage")),(0,r.kt)("td",{parentName:"tr",align:null},(0,r.kt)("a",{parentName:"td",href:"https://github.com/minio/minio"},(0,r.kt)("strong",{parentName:"a"},"Minio"))),(0,r.kt)("td",{parentName:"tr",align:null},(0,r.kt)("a",{parentName:"td",href:"https://min.io/"},"https://min.io/")),(0,r.kt)("td",{parentName:"tr",align:null},"Minio is an object storage server that is compatible with Amazon 
S3")),(0,r.kt)("tr",{parentName:"tbody"},(0,r.kt)("td",{parentName:"tr",align:null},(0,r.kt)("strong",{parentName:"td"},"Terminal")),(0,r.kt)("td",{parentName:"tr",align:null},(0,r.kt)("a",{parentName:"td",href:"https://github.com/Eugeny/tabby"},(0,r.kt)("strong",{parentName:"a"},"Tabby"))),(0,r.kt)("td",{parentName:"tr",align:null},(0,r.kt)("a",{parentName:"td",href:"https://tabby.sh/"},"https://tabby.sh/")),(0,r.kt)("td",{parentName:"tr",align:null},"A Terminal for the modern age")),(0,r.kt)("tr",{parentName:"tbody"},(0,r.kt)("td",{parentName:"tr",align:null},(0,r.kt)("strong",{parentName:"td"},"Terminal")),(0,r.kt)("td",{parentName:"tr",align:null},(0,r.kt)("a",{parentName:"td",href:"https://github.com/tmux/tmux"},(0,r.kt)("strong",{parentName:"a"},"Tmux"))),(0,r.kt)("td",{parentName:"tr",align:null},(0,r.kt)("a",{parentName:"td",href:"https://github.com/tmux/tmux"},"https://github.com/tmux/tmux")),(0,r.kt)("td",{parentName:"tr",align:null},"tmux is a terminal multiplexer: it enables a number of terminals to be created, accessed, and controlled from a single screen. tmux may be detached from a screen and continue running in the background, then later reattached.")),(0,r.kt)("tr",{parentName:"tbody"},(0,r.kt)("td",{parentName:"tr",align:null},(0,r.kt)("strong",{parentName:"td"},"Terminal")),(0,r.kt)("td",{parentName:"tr",align:null},(0,r.kt)("a",{parentName:"td",href:"https://github.com/tmuxinator/tmuxinator"},(0,r.kt)("strong",{parentName:"a"},"Tmuxinator"))),(0,r.kt)("td",{parentName:"tr",align:null},(0,r.kt)("a",{parentName:"td",href:"https://github.com/tmuxinator/tmuxinator"},"https://github.com/tmuxinator/tmuxinator")),(0,r.kt)("td",{parentName:"tr",align:null},"Tmuxinator is a tool for managing complex tmux sessions easily.")),(0,r.kt)("tr",{parentName:"tbody"},(0,r.kt)("td",{parentName:"tr",align:null},(0,r.kt)("strong",{parentName:"td"},"Code Editor")),(0,r.kt)("td",{parentName:"tr",align:null},(0,r.kt)("a",{parentName:"td",href:"http://https://github.com/microsoft/vscode"},(0,r.kt)("strong",{parentName:"a"},"Visual Studio Code"))),(0,r.kt)("td",{parentName:"tr",align:null},(0,r.kt)("a",{parentName:"td",href:"https://code.visualstudio.com/"},"https://code.visualstudio.com/")),(0,r.kt)("td",{parentName:"tr",align:null},"Visual Studio Code is a code editor redefined and optimized for building and debugging modern web and cloud applications. Built on top of ",(0,r.kt)("a",{parentName:"td",href:"https://github.com/electron/electron"},"Electron"))),(0,r.kt)("tr",{parentName:"tbody"},(0,r.kt)("td",{parentName:"tr",align:null},(0,r.kt)("strong",{parentName:"td"},"Code Editor")),(0,r.kt)("td",{parentName:"tr",align:null},(0,r.kt)("a",{parentName:"td",href:"https://github.com/neovim/neovim"},(0,r.kt)("strong",{parentName:"a"},"neovim"))),(0,r.kt)("td",{parentName:"tr",align:null},(0,r.kt)("a",{parentName:"td",href:"https://neovim.io/"},"https://neovim.io/")),(0,r.kt)("td",{parentName:"tr",align:null},"Neovim is a text editor based on Vim. 
Last few months, i have been using ",(0,r.kt)("inlineCode",{parentName:"td"},"neovim")," more often compared to ",(0,r.kt)("inlineCode",{parentName:"td"},"Visual Studio Code"))),(0,r.kt)("tr",{parentName:"tbody"},(0,r.kt)("td",{parentName:"tr",align:null},(0,r.kt)("strong",{parentName:"td"},"Productivity - Note Taking")),(0,r.kt)("td",{parentName:"tr",align:null},(0,r.kt)("a",{parentName:"td",href:"https://github.com/dendronhq/dendron"},(0,r.kt)("strong",{parentName:"a"},"Dendron"))),(0,r.kt)("td",{parentName:"tr",align:null},(0,r.kt)("a",{parentName:"td",href:"https://dendron.so/"},"https://dendron.so/")),(0,r.kt)("td",{parentName:"tr",align:null},"Dendron is my goto note taking tool. Its available as a plugin for VSCode and allows hirearchy and graph based note taking. Awesome solution to mantain a ",(0,r.kt)("inlineCode",{parentName:"td"},"second-brain"))),(0,r.kt)("tr",{parentName:"tbody"},(0,r.kt)("td",{parentName:"tr",align:null},(0,r.kt)("strong",{parentName:"td"},"Productivity - Bookmark Manager")),(0,r.kt)("td",{parentName:"tr",align:null},(0,r.kt)("a",{parentName:"td",href:"https://github.com/jarun/buku"},(0,r.kt)("strong",{parentName:"a"},"Buku"))),(0,r.kt)("td",{parentName:"tr",align:null},(0,r.kt)("a",{parentName:"td",href:"https://github.com/jarun/buku#quickstart"},"https://github.com/jarun/buku#quickstart")),(0,r.kt)("td",{parentName:"tr",align:null},"Buku is a command-line bookmark manager.")),(0,r.kt)("tr",{parentName:"tbody"},(0,r.kt)("td",{parentName:"tr",align:null},(0,r.kt)("strong",{parentName:"td"},"Private Network VPN")),(0,r.kt)("td",{parentName:"tr",align:null},(0,r.kt)("a",{parentName:"td",href:"https://github.com/tailscale/tailscale"},(0,r.kt)("strong",{parentName:"a"},"Tailscale"))),(0,r.kt)("td",{parentName:"tr",align:null},(0,r.kt)("a",{parentName:"td",href:"https://tailscale.com/"},"https://tailscale.com/")),(0,r.kt)("td",{parentName:"tr",align:null},"Tailscale is a private network VPN. Helps me create a private network for all my home lab machines and devices")),(0,r.kt)("tr",{parentName:"tbody"},(0,r.kt)("td",{parentName:"tr",align:null},(0,r.kt)("strong",{parentName:"td"},"Tunnelling")),(0,r.kt)("td",{parentName:"tr",align:null},(0,r.kt)("a",{parentName:"td",href:"https://github.com/inconshreveable/ngrok"},(0,r.kt)("strong",{parentName:"a"},"ngrok"))),(0,r.kt)("td",{parentName:"tr",align:null},(0,r.kt)("a",{parentName:"td",href:"https://ngrok.com/"},"https://ngrok.com/")),(0,r.kt)("td",{parentName:"tr",align:null},"ngrok exposes local servers behind NATs and firewalls to the public internet over secure tunnels.")),(0,r.kt)("tr",{parentName:"tbody"},(0,r.kt)("td",{parentName:"tr",align:null},(0,r.kt)("strong",{parentName:"td"},"Tunnelling")),(0,r.kt)("td",{parentName:"tr",align:null},(0,r.kt)("a",{parentName:"td",href:"https://github.com/localtunnel/localtunnel"},(0,r.kt)("strong",{parentName:"a"},"localtunnel"))),(0,r.kt)("td",{parentName:"tr",align:null},(0,r.kt)("a",{parentName:"td",href:"https://localtunnel.github.io/www/"},"https://localtunnel.github.io/www/")),(0,r.kt)("td",{parentName:"tr",align:null},"localtunnel exposes your localhost to the world for easy testing and sharing! 
No need to mess with DNS or deploy just to have others test out your changes.")),(0,r.kt)("tr",{parentName:"tbody"},(0,r.kt)("td",{parentName:"tr",align:null},(0,r.kt)("strong",{parentName:"td"},"Containers")),(0,r.kt)("td",{parentName:"tr",align:null},(0,r.kt)("a",{parentName:"td",href:"https://github.com/containers/podman"},(0,r.kt)("strong",{parentName:"a"},"Podman"))),(0,r.kt)("td",{parentName:"tr",align:null},(0,r.kt)("a",{parentName:"td",href:"https://podman.io/"},"https://podman.io/")),(0,r.kt)("td",{parentName:"tr",align:null},"Podman is a daemonless, open source, Linux native tool designed to make it easy to find, run, build, share and deploy applications using Open Containers Initiative (OCI) Containers and Container Images.")),(0,r.kt)("tr",{parentName:"tbody"},(0,r.kt)("td",{parentName:"tr",align:null},(0,r.kt)("strong",{parentName:"td"},"Container Scheduling and Management")),(0,r.kt)("td",{parentName:"tr",align:null},(0,r.kt)("a",{parentName:"td",href:"https://github.com/kubernetes/kubernetes"},(0,r.kt)("strong",{parentName:"a"},"Kubernetes"))),(0,r.kt)("td",{parentName:"tr",align:null},(0,r.kt)("a",{parentName:"td",href:"https://kubernetes.io/"},"https://kubernetes.io/")),(0,r.kt)("td",{parentName:"tr",align:null},"Kubernetes is an open-source system for automating deployment, scaling, and management of containerized applications.")),(0,r.kt)("tr",{parentName:"tbody"},(0,r.kt)("td",{parentName:"tr",align:null},(0,r.kt)("strong",{parentName:"td"},"Static Site Generator")),(0,r.kt)("td",{parentName:"tr",align:null},(0,r.kt)("a",{parentName:"td",href:"https://github.com/facebook/docusaurus"},(0,r.kt)("strong",{parentName:"a"},"Docusaurus"))),(0,r.kt)("td",{parentName:"tr",align:null},(0,r.kt)("a",{parentName:"td",href:"https://docusaurus.io/"},"https://docusaurus.io/")),(0,r.kt)("td",{parentName:"tr",align:null},"Docusaurus is a static site generator written in JavaScript. 
Build optimized websites quickly, focus on your content.")),(0,r.kt)("tr",{parentName:"tbody"},(0,r.kt)("td",{parentName:"tr",align:null},(0,r.kt)("strong",{parentName:"td"},"Static Site Generator")),(0,r.kt)("td",{parentName:"tr",align:null},(0,r.kt)("a",{parentName:"td",href:"https://github.com/gohugoio/hugo"},(0,r.kt)("strong",{parentName:"a"},"Hugo"))),(0,r.kt)("td",{parentName:"tr",align:null},(0,r.kt)("a",{parentName:"td",href:"https://gohugo.io/"},"https://gohugo.io/")),(0,r.kt)("td",{parentName:"tr",align:null},"Hugo is a static site generator written in ",(0,r.kt)("inlineCode",{parentName:"td"},"Go"),".")),(0,r.kt)("tr",{parentName:"tbody"},(0,r.kt)("td",{parentName:"tr",align:null},(0,r.kt)("strong",{parentName:"td"},"Diagram as a Code")),(0,r.kt)("td",{parentName:"tr",align:null},(0,r.kt)("a",{parentName:"td",href:"https://github.com/mermaid-js/mermaid"},(0,r.kt)("strong",{parentName:"a"},"Mermaid"))),(0,r.kt)("td",{parentName:"tr",align:null},(0,r.kt)("a",{parentName:"td",href:"https://mermaid.js.org/"},"https://mermaid.js.org/")),(0,r.kt)("td",{parentName:"tr",align:null},"Generation of diagrams like flowcharts or sequence diagrams from text in a similar manner as markdown")),(0,r.kt)("tr",{parentName:"tbody"},(0,r.kt)("td",{parentName:"tr",align:null},(0,r.kt)("strong",{parentName:"td"},"Diagram as a Code")),(0,r.kt)("td",{parentName:"tr",align:null},(0,r.kt)("a",{parentName:"td",href:"https://github.com/plantuml/plantuml"},(0,r.kt)("strong",{parentName:"a"},"PlantUML"))),(0,r.kt)("td",{parentName:"tr",align:null},(0,r.kt)("a",{parentName:"td",href:"https://plantuml.com/"},"https://plantuml.com/")),(0,r.kt)("td",{parentName:"tr",align:null},"Generate diagrams from textual description")),(0,r.kt)("tr",{parentName:"tbody"},(0,r.kt)("td",{parentName:"tr",align:null},(0,r.kt)("strong",{parentName:"td"},"Diagram as a Code")),(0,r.kt)("td",{parentName:"tr",align:null},(0,r.kt)("a",{parentName:"td",href:"https://github.com/jgraph/drawio"},(0,r.kt)("strong",{parentName:"a"},"Draw.io"))),(0,r.kt)("td",{parentName:"tr",align:null},(0,r.kt)("a",{parentName:"td",href:"https://app.diagrams.net/"},"https://app.diagrams.net/")),(0,r.kt)("td",{parentName:"tr",align:null},"draw.io, this project, is a configurable diagramming/whiteboarding visualization application. draw.io is jointly owned and developed by JGraph Ltd and draw.io AG.")),(0,r.kt)("tr",{parentName:"tbody"},(0,r.kt)("td",{parentName:"tr",align:null},(0,r.kt)("strong",{parentName:"td"},"Diagram as a Code")),(0,r.kt)("td",{parentName:"tr",align:null},(0,r.kt)("a",{parentName:"td",href:"https://github.com/excalidraw/excalidraw"},(0,r.kt)("strong",{parentName:"a"},"Excalidraw"))),(0,r.kt)("td",{parentName:"tr",align:null},(0,r.kt)("a",{parentName:"td",href:"https://excalidraw.com/"},"https://excalidraw.com/")),(0,r.kt)("td",{parentName:"tr",align:null},"Excalidraw is a free software that offers a whiteboard tool that lets you easily sketch diagrams with a hand-drawn feel. 
Another features are the collaborative mode, and the ability to export the diagrams to PNG or SVG formats, and to save them locally in a JSON format")),(0,r.kt)("tr",{parentName:"tbody"},(0,r.kt)("td",{parentName:"tr",align:null},(0,r.kt)("strong",{parentName:"td"},"Diagram as a Code")),(0,r.kt)("td",{parentName:"tr",align:null},(0,r.kt)("a",{parentName:"td",href:"https://github.com/mingrammer/diagrams"},(0,r.kt)("strong",{parentName:"a"},"mingrammer"))),(0,r.kt)("td",{parentName:"tr",align:null},(0,r.kt)("a",{parentName:"td",href:"https://diagrams.mingrammer.com/"},"https://diagrams.mingrammer.com/")),(0,r.kt)("td",{parentName:"tr",align:null},"Diagrams as code for prototyping cloud system architecture.")),(0,r.kt)("tr",{parentName:"tbody"},(0,r.kt)("td",{parentName:"tr",align:null},(0,r.kt)("strong",{parentName:"td"},"Web Framework")),(0,r.kt)("td",{parentName:"tr",align:null},(0,r.kt)("a",{parentName:"td",href:"https://github.com/gin-gonic/gin"},(0,r.kt)("strong",{parentName:"a"},"Gin"))),(0,r.kt)("td",{parentName:"tr",align:null},(0,r.kt)("a",{parentName:"td",href:"https://gin-gonic.com/"},"https://gin-gonic.com/")),(0,r.kt)("td",{parentName:"tr",align:null},"Gin is a HTTP web framework written in Go (Golang). It features a Martini-like API, but with performance up to 40 times faster than Martini. If you need smashing performance, get yourself some Gin.")),(0,r.kt)("tr",{parentName:"tbody"},(0,r.kt)("td",{parentName:"tr",align:null},(0,r.kt)("strong",{parentName:"td"},"Web Framework")),(0,r.kt)("td",{parentName:"tr",align:null},(0,r.kt)("a",{parentName:"td",href:"https://github.com/tiangolo/fastapi"},(0,r.kt)("strong",{parentName:"a"},"FAST API"))),(0,r.kt)("td",{parentName:"tr",align:null},(0,r.kt)("a",{parentName:"td",href:"https://fastapi.tiangolo.com/"},"https://fastapi.tiangolo.com/")),(0,r.kt)("td",{parentName:"tr",align:null},"FastAPI is a Web framework for developing RESTful APIs in Python. FastAPI is based on Pydantic and type hints to validate, serialize, and deserialize data, and automatically auto-generate OpenAPI documents. It fully supports asynchronous programming and can run with Gunicorn and ASGI servers for production such as Uvicorn and Hypercorn. To improve developer-friendliness, editor support was considered since the earliest days of the project.")),(0,r.kt)("tr",{parentName:"tbody"},(0,r.kt)("td",{parentName:"tr",align:null},(0,r.kt)("strong",{parentName:"td"},"Web Framework")),(0,r.kt)("td",{parentName:"tr",align:null},(0,r.kt)("a",{parentName:"td",href:"https://github.com/withastro/astro"},(0,r.kt)("strong",{parentName:"a"},"Astro"))),(0,r.kt)("td",{parentName:"tr",align:null},(0,r.kt)("a",{parentName:"td",href:"https://astro.build/"},"https://astro.build/")),(0,r.kt)("td",{parentName:"tr",align:null},"Astro works with your favorite content sources. Pull content from the filesystem or fetch it remotely from your favorite CMS, database, or API. 
Astro supports both static output\xa0(SSG) and live server output\xa0(SSR) that can render your content on-demand.")),(0,r.kt)("tr",{parentName:"tbody"},(0,r.kt)("td",{parentName:"tr",align:null},(0,r.kt)("strong",{parentName:"td"},"Search Engine")),(0,r.kt)("td",{parentName:"tr",align:null},(0,r.kt)("a",{parentName:"td",href:"https://github.com/typesense/typesense"},(0,r.kt)("strong",{parentName:"a"},"Typesense"))),(0,r.kt)("td",{parentName:"tr",align:null},(0,r.kt)("a",{parentName:"td",href:"https://typesense.org/"},"https://typesense.org/")),(0,r.kt)("td",{parentName:"tr",align:null},"Typesense is a modern, privacy-friendly, open source search engine built from the ground up using cutting-edge search algorithms, that take advantage of the latest advances in hardware capabilities.")),(0,r.kt)("tr",{parentName:"tbody"},(0,r.kt)("td",{parentName:"tr",align:null},(0,r.kt)("strong",{parentName:"td"},"Nocode Platform")),(0,r.kt)("td",{parentName:"tr",align:null},(0,r.kt)("a",{parentName:"td",href:"https://github.com/nocodb/nocodb"},(0,r.kt)("strong",{parentName:"a"},"NOCODB"))),(0,r.kt)("td",{parentName:"tr",align:null},(0,r.kt)("a",{parentName:"td",href:"https://nocodb.com/"},"https://nocodb.com/")),(0,r.kt)("td",{parentName:"tr",align:null},"NocoDB is an open-source low-code platform for building and managing internal tools and turning your SQL Databases into a smart spreadsheet. It is a self-hosted alternative to Airtable, Notion, and Airtable.")),(0,r.kt)("tr",{parentName:"tbody"},(0,r.kt)("td",{parentName:"tr",align:null},(0,r.kt)("strong",{parentName:"td"},"Distributed Database")),(0,r.kt)("td",{parentName:"tr",align:null},(0,r.kt)("a",{parentName:"td",href:"https://github.com/rqlite/rqlite"},(0,r.kt)("strong",{parentName:"a"},"rqlite"))),(0,r.kt)("td",{parentName:"tr",align:null},(0,r.kt)("a",{parentName:"td",href:"https://rqlite.com/"},"https://rqlite.com/")),(0,r.kt)("td",{parentName:"tr",align:null},"rqlite is an easy-to-use, lightweight, distributed relational database, which uses SQLite as its storage engine. rqlite is simple to deploy, operating it is very straightforward, and its clustering capabilities provide you with fault-tolerance and high-availability.")),(0,r.kt)("tr",{parentName:"tbody"},(0,r.kt)("td",{parentName:"tr",align:null},(0,r.kt)("strong",{parentName:"td"},"Multi-modal Database")),(0,r.kt)("td",{parentName:"tr",align:null},(0,r.kt)("a",{parentName:"td",href:"https://github.com/surrealdb/surrealdb"},(0,r.kt)("strong",{parentName:"a"},"SurrealDB"))),(0,r.kt)("td",{parentName:"tr",align:null},(0,r.kt)("a",{parentName:"td",href:"https://surrealdb.com/"},"https://surrealdb.com/")),(0,r.kt)("td",{parentName:"tr",align:null},"SurrealDB combines the database layer, the querying layer, and the API and authentication layer into one platform. Advanced table-based and row-based customisable access permissions allow for granular data access patterns for different types of users. 
There's no need for custom backend code and security rules with complicated database development.")),(0,r.kt)("tr",{parentName:"tbody"},(0,r.kt)("td",{parentName:"tr",align:null},(0,r.kt)("strong",{parentName:"td"},"Multi-modal Database")),(0,r.kt)("td",{parentName:"tr",align:null},(0,r.kt)("a",{parentName:"td",href:"https://github.com/arangodb/arangodb"},(0,r.kt)("strong",{parentName:"a"},"ArangoDB"))),(0,r.kt)("td",{parentName:"tr",align:null},(0,r.kt)("a",{parentName:"td",href:"https://www.arangodb.com/"},"https://www.arangodb.com/")),(0,r.kt)("td",{parentName:"tr",align:null},"ArangoDB is a free and open-source native graph database system developed by ArangoDB Inc. ArangoDB is a multi-model database system since it supports three data models with one database core and a unified query language AQL. AQL is mainly a declarative language and allows the combination of different data access patterns in a single query.")),(0,r.kt)("tr",{parentName:"tbody"},(0,r.kt)("td",{parentName:"tr",align:null},(0,r.kt)("strong",{parentName:"td"},"Git for Data")),(0,r.kt)("td",{parentName:"tr",align:null},(0,r.kt)("a",{parentName:"td",href:"https://github.com/dolthub/dolt"},(0,r.kt)("strong",{parentName:"a"},"Dolt"))),(0,r.kt)("td",{parentName:"tr",align:null},(0,r.kt)("a",{parentName:"td",href:"https://dolthub.com/"},"https://dolthub.com/")),(0,r.kt)("td",{parentName:"tr",align:null},"Dolt is a SQL database that you can fork, clone, branch, merge, push and pull just like a Git repository. Connect to Dolt just like any MySQL database to run queries or update the data using SQL commands. Use the command line interface to import CSV files, commit your changes, push them to a remote, or merge your teammate's changes.")),(0,r.kt)("tr",{parentName:"tbody"},(0,r.kt)("td",{parentName:"tr",align:null},(0,r.kt)("strong",{parentName:"td"},"Personal Finance")),(0,r.kt)("td",{parentName:"tr",align:null},(0,r.kt)("a",{parentName:"td",href:"https://github.com/firefly-iii/firefly-iii"},(0,r.kt)("strong",{parentName:"a"},"Firefly III"))),(0,r.kt)("td",{parentName:"tr",align:null},(0,r.kt)("a",{parentName:"td",href:"https://firefly-iii.org/"},"https://firefly-iii.org/")),(0,r.kt)("td",{parentName:"tr",align:null},'"Firefly III" is a (self-hosted) manager for your personal finances. It can help you keep track of your expenses and income, so you can spend less and save more. Firefly III supports the use of budgets, categories and tags. Using a bunch of external tools, you can import data. It also has many neat financial reports available.')),(0,r.kt)("tr",{parentName:"tbody"},(0,r.kt)("td",{parentName:"tr",align:null},(0,r.kt)("strong",{parentName:"td"},"Monitoring and TSDB")),(0,r.kt)("td",{parentName:"tr",align:null},(0,r.kt)("a",{parentName:"td",href:"https://github.com/prometheus/prometheus"},"Prometheus")),(0,r.kt)("td",{parentName:"tr",align:null},(0,r.kt)("a",{parentName:"td",href:"https://prometheus.io/"},"https://prometheus.io/")),(0,r.kt)("td",{parentName:"tr",align:null},"Prometheus is a systems and service monitoring system. 
It collects metrics from configured targets at given intervals, evaluates rule expressions, displays the results, and can trigger alerts if some condition is observed to be true.")))))}d.isMDXComponent=!0}}]); \ No newline at end of file diff --git a/assets/js/b6d33a4b.6d4943d3.js b/assets/js/b6d33a4b.6d4943d3.js new file mode 100644 index 0000000..40759ff --- /dev/null +++ b/assets/js/b6d33a4b.6d4943d3.js @@ -0,0 +1 @@ +"use strict";(self.webpackChunk=self.webpackChunk||[]).push([[3640],{1652:(e,a,t)=>{t.r(a),t.d(a,{assets:()=>r,contentTitle:()=>l,default:()=>d,frontMatter:()=>n,metadata:()=>o,toc:()=>h});var i=t(7462),s=(t(7294),t(3905));t(1839);const n={slug:"essential-skills",title:"Essential Skills and Tools for Aspiring Software Developers and Database Engineers",authors:{name:"Vishal Gandhi",url:"https://github.com/ivishalgandhi",image_url:"https://github.com/ivishalgandhi.png"},tags:["career","fundamentals","essential-skills"]},l=void 0,o={permalink:"/essential-skills",source:"@site/blog/2024-01-21-essentials-software-developer.md",title:"Essential Skills and Tools for Aspiring Software Developers and Database Engineers",description:"Introduction",date:"2024-01-21T00:00:00.000Z",formattedDate:"January 21, 2024",tags:[{label:"career",permalink:"/tags/career"},{label:"fundamentals",permalink:"/tags/fundamentals"},{label:"essential-skills",permalink:"/tags/essential-skills"}],readingTime:14.235,hasTruncateMarker:!0,authors:[{name:"Vishal Gandhi",url:"https://github.com/ivishalgandhi",image_url:"https://github.com/ivishalgandhi.png",imageURL:"https://github.com/ivishalgandhi.png"}],frontMatter:{slug:"essential-skills",title:"Essential Skills and Tools for Aspiring Software Developers and Database Engineers",authors:{name:"Vishal Gandhi",url:"https://github.com/ivishalgandhi",image_url:"https://github.com/ivishalgandhi.png",imageURL:"https://github.com/ivishalgandhi.png"},tags:["career","fundamentals","essential-skills"]},nextItem:{title:"My favorite Open Source Projects in 2022",permalink:"/fav-open-source-repo"}},r={authorsImageUrls:[void 0]},h=[{value:"Introduction",id:"introduction",level:2}],g={toc:h};function d(e){let{components:a,...t}=e;return(0,s.kt)("wrapper",(0,i.Z)({},g,t,{components:a,mdxType:"MDXLayout"}),(0,s.kt)("h2",{id:"introduction"},"Introduction"),(0,s.kt)("p",null,"Starting a career as a software developer in the fast-paced and ever-evolving IT industry can be both exciting and challenging. To thrive in this field, it is essential to equip yourself with the right skills, tools, and technologies. 
In this article, we will highlight key areas of focus that will help you establish a solid foundation as a software developer."))}d.isMDXComponent=!0}}]); \ No newline at end of file diff --git a/assets/js/c2d7c295.5eff4a39.js b/assets/js/c2d7c295.5eff4a39.js new file mode 100644 index 0000000..f8f7d4f --- /dev/null +++ b/assets/js/c2d7c295.5eff4a39.js @@ -0,0 +1 @@ +"use strict";(self.webpackChunk=self.webpackChunk||[]).push([[9102],{489:(e,t,a)=>{a.r(t),a.d(t,{assets:()=>l,contentTitle:()=>r,default:()=>d,frontMatter:()=>o,metadata:()=>s,toc:()=>u});var n=a(7462),i=(a(7294),a(3905));a(1839);const o={slug:"essential-skills",title:"Essential Skills and Tools for Aspiring Software Developers and Database Engineers",authors:{name:"Vishal Gandhi",url:"https://github.com/ivishalgandhi",image_url:"https://github.com/ivishalgandhi.png"},tags:["career","fundamentals","essential-skills"]},r=void 0,s={permalink:"/essential-skills",source:"@site/blog/2024-01-21-essentials-software-developer.md",title:"Essential Skills and Tools for Aspiring Software Developers and Database Engineers",description:"Introduction",date:"2024-01-21T00:00:00.000Z",formattedDate:"January 21, 2024",tags:[{label:"career",permalink:"/tags/career"},{label:"fundamentals",permalink:"/tags/fundamentals"},{label:"essential-skills",permalink:"/tags/essential-skills"}],readingTime:14.235,hasTruncateMarker:!0,authors:[{name:"Vishal Gandhi",url:"https://github.com/ivishalgandhi",image_url:"https://github.com/ivishalgandhi.png",imageURL:"https://github.com/ivishalgandhi.png"}],frontMatter:{slug:"essential-skills",title:"Essential Skills and Tools for Aspiring Software Developers and Database Engineers",authors:{name:"Vishal Gandhi",url:"https://github.com/ivishalgandhi",image_url:"https://github.com/ivishalgandhi.png",imageURL:"https://github.com/ivishalgandhi.png"},tags:["career","fundamentals","essential-skills"]},nextItem:{title:"My favorite Open Source Projects in 2022",permalink:"/fav-open-source-repo"}},l={authorsImageUrls:[void 0]},u=[{value:"Introduction",id:"introduction",level:2},{value:"Data Structures and Algorithms",id:"data-structures-and-algorithms",level:2},{value:"Programming Languages",id:"programming-languages",level:2},{value:"Design Patterns",id:"design-patterns",level:2},{value:"Cloud Native Development",id:"cloud-native-development",level:2},{value:"Databases",id:"databases",level:2},{value:"Distributed Systems",id:"distributed-systems",level:2},{value:"CI/CD Pipelines",id:"cicd-pipelines",level:2},{value:"Testing frameworks",id:"testing-frameworks",level:2},{value:"Documentation Skills",id:"documentation-skills",level:2},{value:"REST API Knowledge",id:"rest-api-knowledge",level:2},{value:"Linux knowledge",id:"linux-knowledge",level:2},{value:"Contribute to Open Source Solutions",id:"contribute-to-open-source-solutions",level:2},{value:"Learn Standard Way of Writing Solutions",id:"learn-standard-way-of-writing-solutions",level:2},{value:"Conclusion",id:"conclusion",level:2}],p={toc:u};function d(e){let{components:t,...a}=e;return(0,i.kt)("wrapper",(0,n.Z)({},p,a,{components:t,mdxType:"MDXLayout"}),(0,i.kt)("h2",{id:"introduction"},"Introduction"),(0,i.kt)("p",null,"Starting a career as a software developer in the fast-paced and ever-evolving IT industry can be both exciting and challenging. To thrive in this field, it is essential to equip yourself with the right skills, tools, and technologies. 
In this article, we will highlight key areas of focus that will help you establish a solid foundation as a software developer."),(0,i.kt)("h2",{id:"data-structures-and-algorithms"},"Data Structures and Algorithms"),(0,i.kt)("p",null,"Data structures and algorithms are fundamental concepts in computer science. They are used to organize and manipulate data efficiently. As a software developer, you will need to understand how data structures and algorithms work to solve problems and build applications. Learn about common data structures such as arrays, linked lists, stacks, queues, trees, and graphs. Familiarize yourself with common algorithms such as sorting, searching, and graph traversal. This knowledge will help you write efficient code and optimize your applications."),(0,i.kt)("p",null,"Resources to learn Data Structures and Algorithms:"),(0,i.kt)("ul",null,(0,i.kt)("li",{parentName:"ul"},(0,i.kt)("a",{parentName:"li",href:"https://www.coursera.org/specializations/data-structures-algorithms"},"Data Structures and Algorithms Specialization")," by UC San Diego"),(0,i.kt)("li",{parentName:"ul"},(0,i.kt)("a",{parentName:"li",href:"https://www.amazon.com/Algorithmic-Thinking-Problem-Based-Daniel-Zingaro/dp/1718500807/ref=sr_1_1?keywords=Algorithmic+thinking&qid=1705894084&sr=8-1"},"Algorithmic Thinking: A Problem-Based Introduction")," by Daniel Zingaro"),(0,i.kt)("li",{parentName:"ul"},(0,i.kt)("a",{parentName:"li",href:"https://www.amazon.com/Introduction-Algorithms-3rd-MIT-Press/dp/0262033844"},"Introduction to Algorithms")," by Thomas H. Cormen"),(0,i.kt)("li",{parentName:"ul"},(0,i.kt)("a",{parentName:"li",href:"https://www.coursera.org/learn/algorithms-part1"},"Algorithms, Part I")," by Princeton University on Coursera"),(0,i.kt)("li",{parentName:"ul"},(0,i.kt)("a",{parentName:"li",href:"https://www.coursera.org/learn/algorithms-part2"},"Algorithms, Part II")," by Princeton University on Coursera"),(0,i.kt)("li",{parentName:"ul"},(0,i.kt)("a",{parentName:"li",href:"https://www.amazon.com/Structures-Algorithms-Python-Michael-Goodrich/dp/1118290275"},"Data Structures and Algorithms in Python")," by Michael T. Goodrich, Roberto Tamassia, and Michael H. Goldwasser"),(0,i.kt)("li",{parentName:"ul"},(0,i.kt)("a",{parentName:"li",href:"https://www.geeksforgeeks.org/data-structures/"},"GeeksforGeeks")," - A computer science portal with resources on various topics including data structures and algorithms"),(0,i.kt)("li",{parentName:"ul"},(0,i.kt)("a",{parentName:"li",href:"https://leetcode.com/"},"LeetCode")," - A platform for preparing technical coding interviews with a focus on algorithms and data structures problems")),(0,i.kt)("h2",{id:"programming-languages"},"Programming Languages"),(0,i.kt)("p",null,"Programming languages are the building blocks of software development. As a software developer, you will need to learn multiple programming languages to build applications and solve problems. While there are many programming languages to choose from, it is essential to focus on the ones that are in demand and align with your career goals. Learn about popular programming languages such as Go, Python, Rust, and Javascript. Familiarize yourself with their syntax, features, and use cases. 
This knowledge will enable you to write clean, maintainable, and extensible code."),(0,i.kt)("p",null,(0,i.kt)("a",{parentName:"p",href:"https://survey.stackoverflow.co/2023/#section-admired-and-desired-programming-scripting-and-markup-languages"},"Stack Overflow Most Admired and Desired Programming Languages 2023")),(0,i.kt)("p",null,"Resources to learn Go:"),(0,i.kt)("ul",null,(0,i.kt)("li",{parentName:"ul"},(0,i.kt)("a",{parentName:"li",href:"https://www.amazon.com/Go-Programming-Language-Addison-Wesley-Professional/dp/0134190440"},"The Go Programming Language")," by Alan A. A. Donovan and Brian W. Kernighan"),(0,i.kt)("li",{parentName:"ul"},(0,i.kt)("a",{parentName:"li",href:"https://gobyexample.com/"},"Go by Example")),(0,i.kt)("li",{parentName:"ul"},(0,i.kt)("a",{parentName:"li",href:"https://tour.golang.org/welcome/1"},"A Tour of Go"))),(0,i.kt)("p",null,"Resources to learn Rust:"),(0,i.kt)("ul",null,(0,i.kt)("li",{parentName:"ul"},(0,i.kt)("a",{parentName:"li",href:"https://www.amazon.com/Rust-Programming-Language-Steve-Klabnik/dp/1593278284"},"The Rust Programming Language")," by Steve Klabnik and Carol Nichols"),(0,i.kt)("li",{parentName:"ul"},(0,i.kt)("a",{parentName:"li",href:"https://doc.rust-lang.org/rust-by-example/"},"Rust by Example")),(0,i.kt)("li",{parentName:"ul"},(0,i.kt)("a",{parentName:"li",href:"https://github.com/rust-lang/rustlings"},"Rustlings"))),(0,i.kt)("p",null,"Resources to learn Python:"),(0,i.kt)("ul",null,(0,i.kt)("li",{parentName:"ul"},(0,i.kt)("a",{parentName:"li",href:"https://www.amazon.com/Python-Crash-Course-2nd-Edition/dp/1593279280"},"Python Crash Course")," by Eric Matthes"),(0,i.kt)("li",{parentName:"ul"},(0,i.kt)("a",{parentName:"li",href:"https://automatetheboringstuff.com/"},"Automate the Boring Stuff with Python")),(0,i.kt)("li",{parentName:"ul"},(0,i.kt)("a",{parentName:"li",href:"https://learnpythonthehardway.org/"},"Learn Python the Hard Way"))),(0,i.kt)("p",null,"Roadmaps to learn Go, Rust, and Python:"),(0,i.kt)("ul",null,(0,i.kt)("li",{parentName:"ul"},(0,i.kt)("a",{parentName:"li",href:"https://roadmap.sh/golang"},"Go Developer Roadmap")),(0,i.kt)("li",{parentName:"ul"},(0,i.kt)("a",{parentName:"li",href:"https://roadmap.sh/rust"},"Rust Developer Roadmap")),(0,i.kt)("li",{parentName:"ul"},(0,i.kt)("a",{parentName:"li",href:"https://roadmap.sh/python"},"Python Developer Roadmap"))),(0,i.kt)("p",null,"You can find an extensive range of developer roadmaps at ",(0,i.kt)("a",{parentName:"p",href:"http://roadmap.sh"},"Roadmap.sh"),"."),(0,i.kt)("h2",{id:"design-patterns"},"Design Patterns"),(0,i.kt)("p",null,"Design patterns are reusable solutions to common problems in software development. They are used to solve problems and improve the quality of code. As a software developer, you will need to understand how design patterns work to build robust and scalable applications. Learn about common design patterns such as creational, structural, and behavioral patterns. Familiarize yourself with the SOLID principles of object-oriented design. 
This knowledge will enable you to write clean, maintainable, and extensible code."),(0,i.kt)("p",null,"Resources to learn Design Patterns:"),(0,i.kt)("ul",null,(0,i.kt)("li",{parentName:"ul"},(0,i.kt)("a",{parentName:"li",href:"https://www.amazon.com/Design-Patterns-Elements-Reusable-Object-Oriented/dp/0201633612"},"Design Patterns: Elements of Reusable Object-Oriented Software")," by Erich Gamma, Richard Helm, Ralph Johnson, and John Vlissides"),(0,i.kt)("li",{parentName:"ul"},(0,i.kt)("a",{parentName:"li",href:"https://www.amazon.com/Head-First-Design-Patterns-Brain-Friendly/dp/0596007124"},"Head First Design Patterns")," by Eric Freeman and Elisabeth Robson"),(0,i.kt)("li",{parentName:"ul"},(0,i.kt)("a",{parentName:"li",href:"https://refactoring.guru/design-patterns"},"Refactoring.Guru")," - A website that explains 22 design patterns and 8 principles of object-oriented design in a simple and intuitive way"),(0,i.kt)("li",{parentName:"ul"},(0,i.kt)("a",{parentName:"li",href:"https://sourcemaking.com/design_patterns"},"SourceMaking")," - A resource for learning about design patterns, anti-patterns, and refactoring")),(0,i.kt)("h2",{id:"cloud-native-development"},"Cloud Native Development"),(0,i.kt)("p",null,"Cloud native development is a software development approach that leverages cloud computing to build and deploy applications. It enables developers to focus on writing code instead of managing infrastructure. Learn about cloud computing concepts such as virtualization, containers, and serverless computing. Familiarize yourself with cloud platforms such as Amazon Web Services (AWS), Microsoft Azure, and Google Cloud Platform (GCP). This knowledge will enable you to build scalable and resilient applications."),(0,i.kt)("p",null,"Resources to learn Cloud Native Development:"),(0,i.kt)("ul",null,(0,i.kt)("li",{parentName:"ul"},(0,i.kt)("a",{parentName:"li",href:"https://www.amazon.com/Cloud-Native-Containers-Functions-Next-Generation/dp/1492040762"},"Cloud Native: Using Containers, Functions, and Data to Build Next-Generation Applications")," by Boris Scholl, Trent Swanson, and Peter Jausovec"),(0,i.kt)("li",{parentName:"ul"},(0,i.kt)("a",{parentName:"li",href:"https://www.amazon.com/Kubernetes-Running-Dive-Future-Infrastructure/dp/1492046531"},"Kubernetes: Up and Running: Dive into the Future of Infrastructure")," by Brendan Burns, Joe Beda, and Kelsey Hightower"),(0,i.kt)("li",{parentName:"ul"},(0,i.kt)("a",{parentName:"li",href:"https://www.amazon.com/Cloud-Native-DevOps-Kubernetes-Applications/dp/1492040762"},"Cloud Native DevOps with Kubernetes: Building, Deploying, and Scaling Modern Applications in the Cloud")," by John Arundel and Justin Domingus"),(0,i.kt)("li",{parentName:"ul"},(0,i.kt)("a",{parentName:"li",href:"https://learnk8s.io/"},"The Cloud Native Learning Resources")," by Learnk8s"),(0,i.kt)("li",{parentName:"ul"},(0,i.kt)("a",{parentName:"li",href:"https://www.cncf.io/"},"Cloud Native Computing Foundation")," - The foundation hosts critical components of the global technology infrastructure, including Kubernetes, Prometheus, and Envoy")),(0,i.kt)("h2",{id:"databases"},"Databases"),(0,i.kt)("p",null,"Databases are used to store and retrieve data. As a software developer, you will need to understand how databases work to build applications that interact with them. Learn about relational databases such as MySQL and PostgreSQL, NoSQL databases such as MongoDB and Redis, and cloud databases such as Amazon DynamoDB and Google Cloud Firestore. 
Familiarize yourself with database design, data modeling, and query languages such as SQL. This knowledge will enable you to build robust and scalable applications."),(0,i.kt)("p",null,(0,i.kt)("a",{parentName:"p",href:"https://survey.stackoverflow.co/2023/#section-admired-and-desired-databases"},"StackOverflow Most Admired and Desired Databases 2023")),(0,i.kt)("p",null,"Resources to learn Databases:"),(0,i.kt)("ul",null,(0,i.kt)("li",{parentName:"ul"},(0,i.kt)("a",{parentName:"li",href:"https://www.amazon.com/Database-Internals-Deep-Distributed-Systems/dp/1492040347"},"Database Internals: A Deep Dive into How Distributed Data Systems Work")," by Alex Petrov"),(0,i.kt)("li",{parentName:"ul"},(0,i.kt)("a",{parentName:"li",href:"https://www.amazon.com/Designing-Data-Intensive-Applications-Reliable-Maintainable/dp/1449373321"},"Designing Data-Intensive Applications: The Big Ideas Behind Reliable, Scalable, and Maintainable Systems")," by Martin Kleppmann"),(0,i.kt)("li",{parentName:"ul"},(0,i.kt)("a",{parentName:"li",href:"https://www.amazon.com/Database-Design-Mere-Mortals-Hands/dp/0321884493"},"Database Design for Mere Mortals: A Hands-On Guide to Relational Database Design")," by Michael J. Hernandez"),(0,i.kt)("li",{parentName:"ul"},(0,i.kt)("a",{parentName:"li",href:"https://sqlzoo.net/"},"SQLZOO")," - A website that provides interactive SQL tutorials and exercises"),(0,i.kt)("li",{parentName:"ul"},(0,i.kt)("a",{parentName:"li",href:"https://sqlbolt.com/"},"SQLBolt")," - A website that provides interactive SQL tutorials and exercises")),(0,i.kt)("p",null,"Don't underestimate the power of SQLite. It's a lightweight, file-based database that's great for small applications and prototyping. Also, consider exploring open-source databases like ",(0,i.kt)("a",{parentName:"p",href:"https://github.com/rqlite/rqlite"},"rqlite"),", which is a lightweight, distributed relational database built on SQLite."),(0,i.kt)("h2",{id:"distributed-systems"},"Distributed Systems"),(0,i.kt)("p",null,"Distributed systems are groups of networked computers that interact with each other to achieve a common goal. Key concepts in distributed systems include consensus algorithms like ",(0,i.kt)("a",{parentName:"p",href:"https://raft.github.io/"},"RAFT"),", and principles like the CAP Theorem."),(0,i.kt)("p",null,"Resources to learn Distributed Systems:"),(0,i.kt)("ul",null,(0,i.kt)("li",{parentName:"ul"},(0,i.kt)("a",{parentName:"li",href:"https://www.amazon.com/Designing-Data-Intensive-Applications-Reliable-Maintainable/dp/1449373321"},"Designing Data-Intensive Applications: The Big Ideas Behind Reliable, Scalable, and Maintainable Systems")," by Martin Kleppmann"),(0,i.kt)("li",{parentName:"ul"},(0,i.kt)("a",{parentName:"li",href:"http://book.mixu.net/distsys/single-page.html"},"Distributed Systems for Fun and Profit")," by Mikito Takada"),(0,i.kt)("li",{parentName:"ul"},(0,i.kt)("a",{parentName:"li",href:"https://raft.github.io/"},"RAFT"))),(0,i.kt)("p",null,"Understanding the CAP Theorem is crucial when working with distributed databases. 
It states that it's impossible for a distributed data store to simultaneously provide more than two out of the following three guarantees: Consistency, Availability, and Partition tolerance."),(0,i.kt)("ul",null,(0,i.kt)("li",{parentName:"ul"},(0,i.kt)("a",{parentName:"li",href:"https://www.ibm.com/cloud/learn/cap-theorem"},"CAP Theorem: Explained")," by IBM Cloud Education")),(0,i.kt)("h2",{id:"cicd-pipelines"},"CI/CD Pipelines"),(0,i.kt)("p",null,"Continuous Integration (CI) and Continuous Delivery (CD) are software development practices that enable developers to build, test, and deploy code frequently and reliably. Learn about CI/CD pipelines and how they work. Familiarize yourself with popular CI/CD tools such as Jenkins, CircleCI, and Travis CI. This knowledge will enable you to automate the software development lifecycle and deliver high-quality software faster."),(0,i.kt)("p",null,"Resources to learn CI/CD:"),(0,i.kt)("ul",null,(0,i.kt)("li",{parentName:"ul"},(0,i.kt)("a",{parentName:"li",href:"https://www.amazon.com/Continuous-Delivery-Deployment-Automation-Addison-Wesley/dp/0321601912"},"Continuous Delivery: Reliable Software Releases through Build, Test, and Deployment Automation")," by Jez Humble and David Farley"),(0,i.kt)("li",{parentName:"ul"},(0,i.kt)("a",{parentName:"li",href:"https://docs.travis-ci.com/"},"Travis CI Documentation")," - Comprehensive guide on how to use Travis CI"),(0,i.kt)("li",{parentName:"ul"},(0,i.kt)("a",{parentName:"li",href:"https://docs.github.com/en/actions"},"GitHub Actions Documentation")," - Learn how to automate, customize, and execute your software development workflows right in your repository with GitHub Actions")),(0,i.kt)("p",null,"Resources to learn Git:"),(0,i.kt)("ul",null,(0,i.kt)("li",{parentName:"ul"},(0,i.kt)("a",{parentName:"li",href:"https://git-scm.com/book/en/v2"},"Pro Git")," by Scott Chacon and Ben Straub"),(0,i.kt)("li",{parentName:"ul"},(0,i.kt)("a",{parentName:"li",href:"https://lab.github.com/"},"GitHub Learning Lab")," - Interactive courses on Git and GitHub"),(0,i.kt)("li",{parentName:"ul"},(0,i.kt)("a",{parentName:"li",href:"https://learngitbranching.js.org/"},"Learn Git Branching")," - An interactive Git visualization tool to educate and challenge"),(0,i.kt)("li",{parentName:"ul"},(0,i.kt)("a",{parentName:"li",href:"http://gitimmersion.com/"},"Git Immersion")," - A guided tour that walks through the fundamentals of Git")),(0,i.kt)("h2",{id:"testing-frameworks"},"Testing frameworks"),(0,i.kt)("p",null,"Testing frameworks are used to automate the testing process. They enable developers to write tests that can be executed automatically. Learn about popular testing frameworks such as JUnit, TestNG, and Selenium. Familiarize yourself with unit testing, integration testing, and end-to-end testing. 
This knowledge will enable you to write robust and reliable code."),(0,i.kt)("p",null,"Resources to learn Testing Frameworks:"),(0,i.kt)("ul",null,(0,i.kt)("li",{parentName:"ul"},(0,i.kt)("a",{parentName:"li",href:"https://junit.org/junit5/docs/current/user-guide/"},"JUnit 5 User Guide")," - The official guide for JUnit 5, a modern testing framework for Java"),(0,i.kt)("li",{parentName:"ul"},(0,i.kt)("a",{parentName:"li",href:"https://docs.pytest.org/en/latest/"},"PyTest Documentation")," - Comprehensive guide on how to use PyTest, a popular testing framework for Python"),(0,i.kt)("li",{parentName:"ul"},(0,i.kt)("a",{parentName:"li",href:"https://jestjs.io/docs/getting-started"},"Jest Documentation")," - Learn how to use Jest, a delightful JavaScript Testing Framework with a focus on simplicity"),(0,i.kt)("li",{parentName:"ul"},(0,i.kt)("a",{parentName:"li",href:"https://docs.cypress.io/guides/overview/why-cypress"},"Cypress Documentation")," - Learn how to use Cypress, a next generation front end testing tool built for the modern web"),(0,i.kt)("li",{parentName:"ul"},(0,i.kt)("a",{parentName:"li",href:"https://mochajs.org/"},"Mocha Documentation")," - Mocha is a feature-rich JavaScript test framework running on Node.js and in the browser, making asynchronous testing simple and fun")),(0,i.kt)("h2",{id:"documentation-skills"},"Documentation Skills"),(0,i.kt)("ul",null,(0,i.kt)("li",{parentName:"ul"},"Clear and concise documentation is crucial in software development. This includes not only code comments and README files, but also blog posts and articles that share your knowledge with others. ")),(0,i.kt)("p",null,"Here are some key skills and tools to master:"),(0,i.kt)("ul",null,(0,i.kt)("li",{parentName:"ul"},(0,i.kt)("p",{parentName:"li"},(0,i.kt)("a",{parentName:"p",href:"https://www.markdownguide.org/"},"Markdown")," - A lightweight markup language that you can use to write easy-to-read and easy-to-write plain text format, which then converts to structurally valid HTML. It's widely used for README files, documentation, and writing articles on platforms like GitHub and Jekyll blogs.")),(0,i.kt)("li",{parentName:"ul"},(0,i.kt)("p",{parentName:"li"},(0,i.kt)("a",{parentName:"p",href:"https://mermaid.js.org/"},"Mermaid.js")," - A JavaScript library that allows you to create diagrams and flowcharts using text. It integrates well with Markdown, making it great for blog posts that need to explain complex ideas visually.")),(0,i.kt)("li",{parentName:"ul"},(0,i.kt)("p",{parentName:"li"},(0,i.kt)("a",{parentName:"p",href:"https://microsoft.github.io/language-server-protocol/"},"Language Server Protocol")," - A protocol developed by Microsoft that allows code editing tools to provide features like auto-complete, go to definition, find all references and alike without each tool needing to understand each programming language.")),(0,i.kt)("li",{parentName:"ul"},(0,i.kt)("p",{parentName:"li"},(0,i.kt)("a",{parentName:"p",href:"https://diagrams.mingrammer.com/"},"Diagram as a Code")," - A Python library that allows you to create diagrams using code, which is great for prototyping a new system architecture design or explaining system design in your blog posts."))),(0,i.kt)("p",null,"Remember, good documentation skills involve clear communication, good organization, and the ability to explain complex ideas in a simple, understandable way. Practice writing blog posts and articles to improve these skills. You can also contribute to open-source projects by writing documentation for them. 
Maintain documentation in tools like ",(0,i.kt)("a",{parentName:"p",href:"http://dendron.so"},"Dendron"),", ",(0,i.kt)("a",{parentName:"p",href:"http://logseq.com"},"Logseq")," which is a powerful note-taking system that allows you to organize and navigate your knowledge effectively."),(0,i.kt)("p",null,"Once a big knowledge base is built, you can use it to capture coding patterns, solutions to common problems, and snippets of code. Combine it with Markdown files and notebook tools like Jupyter Notebook (NB) to create well-structured and executable documentation. This approach will streamline your workflow and serve as a valuable resource for future reference which can be used with Large Language Models to gain insights."),(0,i.kt)("p",null,"Consider all documentation as a code. Refer to my blog post ",(0,i.kt)("a",{parentName:"p",href:"https://vishalgandhi.in/doc-as-code"},"Documentation as Code")," for more information."),(0,i.kt)("h2",{id:"rest-api-knowledge"},"REST API Knowledge"),(0,i.kt)("p",null,"REST (Representational State Transfer) is an architectural style commonly used for web services. Understanding how REST APIs work and being able to design, build, and consume them is a fundamental skill for software developers. Learn about HTTP methods (GET, POST, PUT, DELETE), URL structure, request/response formats (JSON, XML), authentication, and common best practices for building RESTful APIs. This knowledge will empower you to interact with various web services and build robust and scalable applications."),(0,i.kt)("p",null,"In today's software development landscape, REST (Representational State Transfer) APIs have become a fundamental building block. They allow different software systems to communicate and exchange data over the internet. Most interfaces in software organizations are now through REST APIs, and it's nearly impossible to start any product without an \"API First\" approach."),(0,i.kt)("p",null,"Understanding how REST APIs work and being able to design, build, and consume them is a crucial skill for software developers. Learn about HTTP methods (GET, POST, PUT, DELETE), URL structure, request/response formats (JSON, XML), and authentication."),(0,i.kt)("p",null,"Here are some resources to get you started:"),(0,i.kt)("ul",null,(0,i.kt)("li",{parentName:"ul"},(0,i.kt)("a",{parentName:"li",href:"https://restfulapi.net/"},"RESTful API Design")," - A comprehensive resource for understanding and designing RESTful APIs."),(0,i.kt)("li",{parentName:"ul"},(0,i.kt)("a",{parentName:"li",href:"https://www.youtube.com/watch?v=pKd0Rpw7O48"},"Building a RESTful API with Node.js")," - A YouTube tutorial by Academind."),(0,i.kt)("li",{parentName:"ul"},(0,i.kt)("a",{parentName:"li",href:"https://vishalgandhi.in/rest-api-design-rules"},"My Blog Post on REST API")," - A deep dive into REST API from my personal experience and understanding.")),(0,i.kt)("p",null,"In addition to understanding the principles of REST APIs, it's also important to familiarize yourself with some of the popular frameworks that can help you build REST APIs more efficiently:"),(0,i.kt)("ul",null,(0,i.kt)("li",{parentName:"ul"},(0,i.kt)("a",{parentName:"li",href:"https://fastapi.tiangolo.com/"},"FastAPI")," - A modern, fast (high-performance), web framework for building APIs with Python 3.6+ based on standard Python type hints."),(0,i.kt)("li",{parentName:"ul"},(0,i.kt)("a",{parentName:"li",href:"https://gin-gonic.com/"},"Gin Web Framework")," - Gin is a web framework written in Go. 
It features a martini-like API with performance that is up to 40 times faster thanks to httprouter.")),(0,i.kt)("p",null,"These frameworks provide a set of tools and libraries that simplify the process of building robust and scalable APIs. They handle a lot of the boilerplate code and allow you to focus on the business logic of your application."),(0,i.kt)("p",null,"Remember, a well-designed API can be a powerful tool for an organization, enabling it to expose its services to a variety of different clients and create new digital products and services."),(0,i.kt)("h2",{id:"linux-knowledge"},"Linux knowledge"),(0,i.kt)("p",null,"Familiarizing yourself with Linux is valuable because many development environments, servers, and cloud services are based on Linux. Gain proficiency in the command line interface (CLI), file navigation, package management, and shell scripting. Understanding Linux will enhance your ability to work with open-source solutions, deploy applications, and troubleshoot issues efficiently."),(0,i.kt)("p",null,"To gain a deeper understanding of Linux, consider the following resources:"),(0,i.kt)("ul",null,(0,i.kt)("li",{parentName:"ul"},(0,i.kt)("p",{parentName:"li"},(0,i.kt)("a",{parentName:"p",href:"https://linuxjourney.com/"},"Linux Journey")," - A free, self-guided tour to help you learn Linux.")),(0,i.kt)("li",{parentName:"ul"},(0,i.kt)("p",{parentName:"li"},(0,i.kt)("a",{parentName:"p",href:"https://www.amazon.com/Linux-Command-Line-Complete-Introduction/dp/1593273894"},"The Linux Command Line: A Complete Introduction")," by William E. Shotts Jr. - This book is a comprehensive guide to using the command line to perform various tasks in Linux.")),(0,i.kt)("li",{parentName:"ul"},(0,i.kt)("p",{parentName:"li"},(0,i.kt)("a",{parentName:"p",href:"https://www.amazon.com/How-Linux-Works-3rd-Superuser/dp/1718500408"},"How Linux Works, 3rd Edition: What Every Superuser Should Know")," by Brian Ward - This book offers a comprehensive, updated guide to understanding how Linux operates.")),(0,i.kt)("li",{parentName:"ul"},(0,i.kt)("p",{parentName:"li"},"Shell scripting is a powerful tool that can automate tasks and manage system configurations. Here are some of the best books to learn shell scripting:")),(0,i.kt)("li",{parentName:"ul"},(0,i.kt)("p",{parentName:"li"},(0,i.kt)("a",{parentName:"p",href:"https://www.amazon.com/Learning-bash-Shell-Programming-Third/dp/0596009658"},"Learning the bash Shell: Unix Shell Programming")," by Cameron Newham - This book is a complete guide to bash, the default shell for Linux.")),(0,i.kt)("li",{parentName:"ul"},(0,i.kt)("p",{parentName:"li"},(0,i.kt)("a",{parentName:"p",href:"https://www.amazon.com/Shell-Scripting-Expert-Recipes-Linux/dp/1118024486"},"Shell Scripting: Expert Recipes for Linux, Bash, and More")," by Steve Parker - A compendium of shell scripting recipes that can immediately be used, adjusted, and applied."))),(0,i.kt)("h2",{id:"contribute-to-open-source-solutions"},"Contribute to Open Source Solutions"),(0,i.kt)("p",null,"Embrace the open-source community as a software developer. Contributing to open-source projects not only allows you to collaborate with experienced developers but also helps you refine your coding skills. By studying open-source projects, you can learn about software architecture, coding standards, best practices, and gain exposure to different programming languages and frameworks. 
Explore popular open-source repositories such as GitHub and start contributing to projects aligned with your interests."),(0,i.kt)("p",null,"Here are some resources to get you started:"),(0,i.kt)("ul",null,(0,i.kt)("li",{parentName:"ul"},(0,i.kt)("a",{parentName:"li",href:"https://www.firsttimersonly.com/"},"First Timers Only")," - A site dedicated to helping newcomers get started with contributing to open-source projects."),(0,i.kt)("li",{parentName:"ul"},(0,i.kt)("a",{parentName:"li",href:"https://github.com/topics/good-first-issue"},'GitHub "Good First Issue" Label'),' - GitHub\'s "Good First Issue" label is a great way to find projects that are beginner-friendly. These issues are specifically marked by project maintainers as good entry points for new contributors.'),(0,i.kt)("li",{parentName:"ul"},(0,i.kt)("a",{parentName:"li",href:"https://opensource.guide/"},"Open Source Guide")," - This guide provides resources for contributing to open source, from finding a project to making a contribution."),(0,i.kt)("li",{parentName:"ul"},(0,i.kt)("a",{parentName:"li",href:"https://up-for-grabs.net/#/"},"Up For Grabs")," - This site aggregates (groups together) projects that have tasks specifically for new contributors.")),(0,i.kt)("p",null,"Remember, contributing to open source is not just about coding. You can contribute in many ways, including documentation, design, teaching, and more."),(0,i.kt)("h2",{id:"learn-standard-way-of-writing-solutions"},"Learn Standard Way of Writing Solutions"),(0,i.kt)("p",null,"Developers often encounter challenges in writing clean, maintainable, and well-documented code. Embrace standard practices and methodologies to overcome these challenges."),(0,i.kt)("p",null,"For example, when building command-line interfaces (CLI), consider using libraries like docopt to create command-line interfaces with ease. "),(0,i.kt)("p",null,"Resources for CLI:"),(0,i.kt)("ul",null,(0,i.kt)("li",{parentName:"ul"},(0,i.kt)("a",{parentName:"li",href:"http://docopt.org/"},"docopt")," - A command-line interface description language that will make you smile."),(0,i.kt)("li",{parentName:"ul"},(0,i.kt)("a",{parentName:"li",href:"https://clig.dev/"},"Command Line Interface Guidelines")," - A guide to help you write command-line programs that respect the conventions of the Unix philosophy.")),(0,i.kt)("p",null,"When developing REST APIs, adhere to established standards such as the Richardson Maturity Model or OpenAPI specifications to ensure consistency and interoperability. 
"),(0,i.kt)("p",null,"Resources for REST APIs:"),(0,i.kt)("ul",null,(0,i.kt)("li",{parentName:"ul"},(0,i.kt)("a",{parentName:"li",href:"https://martinfowler.com/articles/richardsonMaturityModel.html"},"Richardson Maturity Model")," - A model (developed by Leonard Richardson) that breaks down the principal elements of a REST approach into three steps."),(0,i.kt)("li",{parentName:"ul"},(0,i.kt)("a",{parentName:"li",href:"https://swagger.io/specification/"},"OpenAPI Specification")," - A standard, language-agnostic interface to RESTful APIs which allows both humans and computers to discover and understand the capabilities of the service without access to source code, documentation, or through network traffic inspection.")),(0,i.kt)("p",null,"These practices will make your code more robust, readable, and easier to maintain."),(0,i.kt)("p",null,"Resources for Clean Code:"),(0,i.kt)("ul",null,(0,i.kt)("li",{parentName:"ul"},(0,i.kt)("a",{parentName:"li",href:"https://www.amazon.com/Clean-Code-Handbook-Software-Craftsmanship/dp/0132350882"},"Clean Code: A Handbook of Agile Software Craftsmanship")," by Robert C. Martin - A book that describes the principles, patterns, and practices of writing clean code."),(0,i.kt)("li",{parentName:"ul"},(0,i.kt)("a",{parentName:"li",href:"https://www.amazon.com/Refactoring-Improving-Design-Existing-Code/dp/0201485672"},"Refactoring: Improving the Design of Existing Code")," by Martin Fowler - A book about how to clean up code to minimize the chance of introducing bugs.")),(0,i.kt)("h2",{id:"conclusion"},"Conclusion"),(0,i.kt)("p",null,"Embarking on a career as a software developer requires continuous learning and adaptation. By focusing on the key skills, tools, and technologies mentioned in this article, you will be able to establish a solid foundation and thrive in this field. Remember, it's not about how much you know but how well you can apply what you know. 
So, keep learning and practicing!"))}d.isMDXComponent=!0}}]); \ No newline at end of file diff --git a/assets/js/c573638f.3f7958f2.js b/assets/js/c573638f.3f7958f2.js deleted file mode 100644 index 60e1af4..0000000 --- a/assets/js/c573638f.3f7958f2.js +++ /dev/null @@ -1 +0,0 @@ -"use strict";(self.webpackChunk=self.webpackChunk||[]).push([[964],{8866:e=>{e.exports=JSON.parse('[{"label":"open-source","permalink":"/tags/open-source","count":1},{"label":"2022","permalink":"/tags/2022","count":1},{"label":"favourite-repo","permalink":"/tags/favourite-repo","count":1},{"label":"docops","permalink":"/tags/docops","count":1},{"label":"doc-as-code","permalink":"/tags/doc-as-code","count":1},{"label":"rest","permalink":"/tags/rest","count":1},{"label":"restapi","permalink":"/tags/restapi","count":1},{"label":"rest-api-design-rules","permalink":"/tags/rest-api-design-rules","count":1},{"label":"mongodb","permalink":"/tags/mongodb","count":5},{"label":"replicaset","permalink":"/tags/replicaset","count":2},{"label":"write-concern","permalink":"/tags/write-concern","count":1},{"label":"docker-desktop","permalink":"/tags/docker-desktop","count":1},{"label":"SBOM","permalink":"/tags/sbom","count":1},{"label":"docker","permalink":"/tags/docker","count":5},{"label":"containers","permalink":"/tags/containers","count":2},{"label":"s3","permalink":"/tags/s-3","count":1},{"label":"minio","permalink":"/tags/minio","count":1},{"label":"pbm","permalink":"/tags/pbm","count":1},{"label":"reverse-proxy","permalink":"/tags/reverse-proxy","count":1},{"label":"NGINX","permalink":"/tags/nginx","count":1},{"label":"DNSMASQ","permalink":"/tags/dnsmasq","count":1},{"label":"Lab","permalink":"/tags/lab","count":1},{"label":"sharding","permalink":"/tags/sharding","count":1},{"label":"persistent-volume","permalink":"/tags/persistent-volume","count":1},{"label":"mongo-replicaset","permalink":"/tags/mongo-replicaset","count":1}]')}}]); \ No newline at end of file diff --git a/assets/js/c573638f.9cdf5a27.js b/assets/js/c573638f.9cdf5a27.js new file mode 100644 index 0000000..8c8fba2 --- /dev/null +++ b/assets/js/c573638f.9cdf5a27.js @@ -0,0 +1 @@ +"use 
strict";(self.webpackChunk=self.webpackChunk||[]).push([[964],{8866:e=>{e.exports=JSON.parse('[{"label":"career","permalink":"/tags/career","count":1},{"label":"fundamentals","permalink":"/tags/fundamentals","count":1},{"label":"essential-skills","permalink":"/tags/essential-skills","count":1},{"label":"open-source","permalink":"/tags/open-source","count":1},{"label":"2022","permalink":"/tags/2022","count":1},{"label":"favourite-repo","permalink":"/tags/favourite-repo","count":1},{"label":"docops","permalink":"/tags/docops","count":1},{"label":"doc-as-code","permalink":"/tags/doc-as-code","count":1},{"label":"rest","permalink":"/tags/rest","count":1},{"label":"restapi","permalink":"/tags/restapi","count":1},{"label":"rest-api-design-rules","permalink":"/tags/rest-api-design-rules","count":1},{"label":"mongodb","permalink":"/tags/mongodb","count":5},{"label":"replicaset","permalink":"/tags/replicaset","count":2},{"label":"write-concern","permalink":"/tags/write-concern","count":1},{"label":"docker-desktop","permalink":"/tags/docker-desktop","count":1},{"label":"SBOM","permalink":"/tags/sbom","count":1},{"label":"docker","permalink":"/tags/docker","count":5},{"label":"containers","permalink":"/tags/containers","count":2},{"label":"s3","permalink":"/tags/s-3","count":1},{"label":"minio","permalink":"/tags/minio","count":1},{"label":"pbm","permalink":"/tags/pbm","count":1},{"label":"reverse-proxy","permalink":"/tags/reverse-proxy","count":1},{"label":"NGINX","permalink":"/tags/nginx","count":1},{"label":"DNSMASQ","permalink":"/tags/dnsmasq","count":1},{"label":"Lab","permalink":"/tags/lab","count":1},{"label":"sharding","permalink":"/tags/sharding","count":1},{"label":"persistent-volume","permalink":"/tags/persistent-volume","count":1},{"label":"mongo-replicaset","permalink":"/tags/mongo-replicaset","count":1}]')}}]); \ No newline at end of file diff --git a/assets/js/d0fec31a.7010eaa3.js b/assets/js/d0fec31a.7010eaa3.js deleted file mode 100644 index b310bd4..0000000 --- a/assets/js/d0fec31a.7010eaa3.js +++ /dev/null @@ -1 +0,0 @@ -"use strict";(self.webpackChunk=self.webpackChunk||[]).push([[6363],{2776:s=>{s.exports=JSON.parse('{"name":"@easyops-cn/docusaurus-search-local","id":"default"}')}}]); \ No newline at end of file diff --git a/assets/js/d887860d.425c7ed8.js b/assets/js/d887860d.425c7ed8.js new file mode 100644 index 0000000..54d3433 --- /dev/null +++ b/assets/js/d887860d.425c7ed8.js @@ -0,0 +1 @@ +"use strict";(self.webpackChunk=self.webpackChunk||[]).push([[5547],{5745:e=>{e.exports=JSON.parse('{"name":"docusaurus-plugin-content-pages","id":"default"}')}}]); \ No newline at end of file diff --git a/assets/js/f110011f.3f65858b.js b/assets/js/f110011f.3f65858b.js deleted file mode 100644 index 9367bda..0000000 --- a/assets/js/f110011f.3f65858b.js +++ /dev/null @@ -1 +0,0 @@ -"use strict";(self.webpackChunk=self.webpackChunk||[]).push([[6782],{2661:(e,a,t)=>{t.r(a),t.d(a,{assets:()=>s,contentTitle:()=>r,default:()=>d,frontMatter:()=>i,metadata:()=>l,toc:()=>p});var n=t(7462),o=(t(7294),t(3905));t(1839);const i={slug:"docker-sbom",title:"Guide to Software Bill of Materials(SBoM) and Docker SBOM CLI",authors:{name:"Vishal Gandhi",url:"https://github.com/ivishalgandhi",image_url:"https://github.com/ivishalgandhi.png"},tags:["docker-desktop","SBOM","docker"]},r=void 0,l={permalink:"/docker-sbom",source:"@site/blog/2022-07-09-docker-sbom.md",title:"Guide to Software Bill of Materials(SBoM) and Docker SBOM CLI",description:"An Software Bill of Materials (SBoM) is a formal record containing 
the details and supply chain relationships of various components used in building the software. These components, including libraries and modules, can be proprietary or open source,free or paid and the data can be widely available or access-restricted.",date:"2022-07-09T00:00:00.000Z",formattedDate:"July 9, 2022",tags:[{label:"docker-desktop",permalink:"/tags/docker-desktop"},{label:"SBOM",permalink:"/tags/sbom"},{label:"docker",permalink:"/tags/docker"}],readingTime:2.895,hasTruncateMarker:!0,authors:[{name:"Vishal Gandhi",url:"https://github.com/ivishalgandhi",image_url:"https://github.com/ivishalgandhi.png",imageURL:"https://github.com/ivishalgandhi.png"}],frontMatter:{slug:"docker-sbom",title:"Guide to Software Bill of Materials(SBoM) and Docker SBOM CLI",authors:{name:"Vishal Gandhi",url:"https://github.com/ivishalgandhi",image_url:"https://github.com/ivishalgandhi.png",imageURL:"https://github.com/ivishalgandhi.png"},tags:["docker-desktop","SBOM","docker"]},prevItem:{title:"Understanding MongoDB Replicasets and Write Concern - Part 1",permalink:"/mongodb-replicaset-write-concern-read-pref"},nextItem:{title:"How to configure Percona MongoDB Replicaset, Percona Backup Manager, Backup Agent using Docker and perform Replicaset backup, restore using S3 compatible object storage \u2013 MINIO",permalink:"/percona-mongo-replicaset-minio"}},s={authorsImageUrls:[void 0]},p=[{value:"When should SBoM be used \u2013 Use cases ?",id:"when-should-sbom-be-used--use-cases-",level:2},{value:"Why SBOM ?",id:"why-sbom-",level:2},{value:"SBOM Formats",id:"sbom-formats",level:2},{value:"Docker Desktop \u2013 SBOM CLI",id:"docker-desktop--sbom-cli",level:2},{value:"Usage",id:"usage",level:2}],c={toc:p};function d(e){let{components:a,...i}=e;return(0,o.kt)("wrapper",(0,n.Z)({},c,i,{components:a,mdxType:"MDXLayout"}),(0,o.kt)("p",null,"An ",(0,o.kt)("strong",{parentName:"p"},"Software Bill of Materials (SBoM)")," is a formal record containing the details and supply chain relationships of various components used in building the software. These components, including libraries and modules, can be proprietary or open source,free or paid and the data can be widely available or access-restricted."),(0,o.kt)("p",null,(0,o.kt)("img",{src:t(84).Z,width:"1024",height:"652"})),(0,o.kt)("p",null,"SBoM is analogous to a list of ingredients on food packaging. In May 2021, the US President released the ",(0,o.kt)("a",{parentName:"p",href:"https://www.whitehouse.gov/briefing-room/presidential-actions/2021/05/12/executive-order-on-improving-the-nations-cybersecurity/"},"Executive Order")," on improving the Nation\u2019s Cybersecurity. The Software Bill of Materials (SBoM) directly impacts all developers. 
The SBoM requires third-party software companies to provide customers with the code equivalent of a \u201cnutrition chart.\u201d"),(0,o.kt)("h2",{id:"when-should-sbom-be-used--use-cases-"},"When should SBoM be used \u2013 Use cases ?"),(0,o.kt)("ul",null,(0,o.kt)("li",{parentName:"ul"},(0,o.kt)("p",{parentName:"li"},(0,o.kt)("strong",{parentName:"p"},"Developing products")),(0,o.kt)("ul",{parentName:"li"},(0,o.kt)("li",{parentName:"ul"},"Scan vulnerabilities in the components"),(0,o.kt)("li",{parentName:"ul"},"Keep codebase to bare minimum, reduce the number of dependencies and size"),(0,o.kt)("li",{parentName:"ul"},"Generate SBoM for end users"))),(0,o.kt)("li",{parentName:"ul"},(0,o.kt)("p",{parentName:"li"},(0,o.kt)("strong",{parentName:"p"},"IT Operations")),(0,o.kt)("ul",{parentName:"li"},(0,o.kt)("li",{parentName:"ul"},"Understand operational risk"),(0,o.kt)("li",{parentName:"ul"},"Understand potential exploitations"),(0,o.kt)("li",{parentName:"ul"},"Real time asset inventory"),(0,o.kt)("li",{parentName:"ul"},"Software Selection"),(0,o.kt)("li",{parentName:"ul"},"Identify known vulnerabilities and compliance"))),(0,o.kt)("li",{parentName:"ul"},(0,o.kt)("p",{parentName:"li"},(0,o.kt)("strong",{parentName:"p"},"EOL")),(0,o.kt)("ul",{parentName:"li"},(0,o.kt)("li",{parentName:"ul"},"Complete visibility to components before evaluation or deploying in production"),(0,o.kt)("li",{parentName:"ul"},"Understand the software architecture and the dependencies of the software")))),(0,o.kt)("h2",{id:"why-sbom-"},"Why SBOM ?"),(0,o.kt)("ul",null,(0,o.kt)("li",{parentName:"ul"},(0,o.kt)("p",{parentName:"li"},"Requirement from regulatory bodies to track the components used in the software")),(0,o.kt)("li",{parentName:"ul"},(0,o.kt)("p",{parentName:"li"},"Transparency of components getting shipped")),(0,o.kt)("li",{parentName:"ul"},(0,o.kt)("p",{parentName:"li"},"Container ecosystem has exploded and the need to track the components getting shipped is a must")),(0,o.kt)("li",{parentName:"ul"},(0,o.kt)("p",{parentName:"li"},"Software Vulnerabilities are bugs")),(0,o.kt)("li",{parentName:"ul"},(0,o.kt)("p",{parentName:"li"},"Detecting and remediating Vulnerabilities"))),(0,o.kt)("h2",{id:"sbom-formats"},"SBOM Formats"),(0,o.kt)("ul",null,(0,o.kt)("li",{parentName:"ul"},(0,o.kt)("p",{parentName:"li"},(0,o.kt)("strong",{parentName:"p"},"SPDX (Software Package Data Exchange )")),(0,o.kt)("ul",{parentName:"li"},(0,o.kt)("li",{parentName:"ul"},"Open standard for communicating software bill of material information, including components, licenses, copyrights and security references. Reduces redundant work by providing a common format for organizations and communities to share and use"))),(0,o.kt)("li",{parentName:"ul"},(0,o.kt)("p",{parentName:"li"},(0,o.kt)("strong",{parentName:"p"},"CycloneDX")),(0,o.kt)("ul",{parentName:"li"},(0,o.kt)("li",{parentName:"ul"},"Open Web Application Security Project(OWASP) CycloneDX is a lightweight Software Bill of Materials (SBOM) standard designed for use in application security contexts and supply chain component analysis."))),(0,o.kt)("li",{parentName:"ul"},(0,o.kt)("p",{parentName:"li"},(0,o.kt)("strong",{parentName:"p"},"SWID (Software Identification Tags)")),(0,o.kt)("ul",{parentName:"li"},(0,o.kt)("li",{parentName:"ul"},"SWID is used primarily to identify installed software and is the preferred format of the NVD. SWID tags are used in the National Vulnerability Database to describe vulnerable components. 
The CycloneDX specification compliments this work as CycloneDX documents can incorporate SWID tags and other high-level SWID metadata and optionally include entire SWID documents. Use of SWID tag ID\u2019s are useful in determining if a specific component has known vulnerabilities.")))),(0,o.kt)("h2",{id:"docker-desktop--sbom-cli"},"Docker Desktop \u2013 SBOM CLI"),(0,o.kt)("p",null,"In Docker Desktop 4.7.0 Docker introduced and included a new experimental docker sbom CLI that is used for displaying SBoM for any container image. docker sbom scans the layer of container images using the Syft Project"),(0,o.kt)("h2",{id:"usage"},"Usage"),(0,o.kt)("ul",null,(0,o.kt)("li",{parentName:"ul"},"Display SBOM in CyloneDX format")),(0,o.kt)("pre",null,(0,o.kt)("code",{parentName:"pre",className:"language-shell"},' \n$ docker sbom mongo:latest --format cyclonedx-json | more\n \n{\n "type": "library",\n "publisher": "MongoDB Packaging \\u003cpackaging@mongodb.com\\u003e",\n "name": "mongodb-org-server",\n "version": "5.0.9",\n "cpe": "cpe:2.3:a:mongodb-org-server:mongodb-org-server:5.0.9:*:*:*:*:*:*:*",\n "purl": "pkg:deb/ubuntu/mongodb-org-server@5.0.9?arch=arm64\\u0026upstream=mongodb-org\\u0026distro=ubuntu-20.04",\n "properties": [\n {\n "name": "syft:package:foundBy",\n "value": "dpkgdb-cataloger"\n },\n {\n "name": "syft:package:metadataType",\n "value": "DpkgMetadata"\n }\n')),(0,o.kt)("ul",null,(0,o.kt)("li",{parentName:"ul"},"Display SBOM summary of packages. e.g. using the below command we can check for the log4j vulnerabilities")),(0,o.kt)("pre",null,(0,o.kt)("code",{parentName:"pre",className:"language-shell"},"$ docker sbom neo4j | grep log4j\n \nlog4j-api 2.17.1 java-archive\nlog4j-core 2.17.1 java-archive\n \n$ docker sbom neo4j:4.4.1 | grep log4j\n \nlog4j-api 2.15.0 java-archive\nlog4j-core 2.15.0 java-archive\n \n$ docker sbom elasticsearch:7.16.3 | grep log4j\n \nelasticsearch-log4j 7.16.3 java-archive\nlog4j-1.2-api 2.17.1 java-archive\nlog4j-api 2.17.1 java-archive\nlog4j-core 2.17.1 java-archive\nlog4j-slf4j-impl 2.17.1 java-archive\n")),(0,o.kt)("p",null,"There are many benefits to generate SBOM for compliance and vulnerability analysis. 
Further SBOM can be used for input to open source vulnerability databases like ",(0,o.kt)("a",{parentName:"p",href:"https://github.com/snyk/cli"},"Snyk")," and open source vulnerability scanning tools like ",(0,o.kt)("a",{parentName:"p",href:"https://github.com/anchore/grype"},"Grype")))}d.isMDXComponent=!0},84:(e,a,t)=>{t.d(a,{Z:()=>n});const n=t.p+"assets/images/sbom-e9e0845791a6a0077ec17fe1845f4c36.webp"}}]); \ No newline at end of file diff --git a/assets/js/f110011f.463486e9.js b/assets/js/f110011f.463486e9.js new file mode 100644 index 0000000..cb8f2a5 --- /dev/null +++ b/assets/js/f110011f.463486e9.js @@ -0,0 +1 @@ +"use strict";(self.webpackChunk=self.webpackChunk||[]).push([[6782],{2661:(e,a,t)=>{t.r(a),t.d(a,{assets:()=>s,contentTitle:()=>r,default:()=>d,frontMatter:()=>i,metadata:()=>l,toc:()=>p});var n=t(7462),o=(t(7294),t(3905));t(1839);const i={slug:"docker-sbom",title:"Guide to Software Bill of Materials(SBoM) and Docker SBOM CLI",authors:{name:"Vishal Gandhi",url:"https://github.com/ivishalgandhi",image_url:"https://github.com/ivishalgandhi.png"},tags:["docker-desktop","SBOM","docker"]},r=void 0,l={permalink:"/docker-sbom",source:"@site/blog/2022-07-09-docker-sbom.md",title:"Guide to Software Bill of Materials(SBoM) and Docker SBOM CLI",description:"An Software Bill of Materials (SBoM) is a formal record containing the details and supply chain relationships of various components used in building the software. These components, including libraries and modules, can be proprietary or open source,free or paid and the data can be widely available or access-restricted.",date:"2022-07-09T00:00:00.000Z",formattedDate:"July 9, 2022",tags:[{label:"docker-desktop",permalink:"/tags/docker-desktop"},{label:"SBOM",permalink:"/tags/sbom"},{label:"docker",permalink:"/tags/docker"}],readingTime:2.895,hasTruncateMarker:!0,authors:[{name:"Vishal Gandhi",url:"https://github.com/ivishalgandhi",image_url:"https://github.com/ivishalgandhi.png",imageURL:"https://github.com/ivishalgandhi.png"}],frontMatter:{slug:"docker-sbom",title:"Guide to Software Bill of Materials(SBoM) and Docker SBOM CLI",authors:{name:"Vishal Gandhi",url:"https://github.com/ivishalgandhi",image_url:"https://github.com/ivishalgandhi.png",imageURL:"https://github.com/ivishalgandhi.png"},tags:["docker-desktop","SBOM","docker"]},prevItem:{title:"Understanding MongoDB Replicasets and Write Concern - Part 1",permalink:"/mongodb-replicaset-write-concern-read-pref"},nextItem:{title:"How to configure Percona MongoDB Replicaset, Percona Backup Manager, Backup Agent using Docker and perform Replicaset backup, restore using S3 compatible object storage \u2013 MINIO",permalink:"/percona-mongo-replicaset-minio"}},s={authorsImageUrls:[void 0]},p=[{value:"When should SBoM be used \u2013 Use cases ?",id:"when-should-sbom-be-used--use-cases-",level:2},{value:"Why SBOM ?",id:"why-sbom-",level:2},{value:"SBOM Formats",id:"sbom-formats",level:2},{value:"Docker Desktop \u2013 SBOM CLI",id:"docker-desktop--sbom-cli",level:2},{value:"Usage",id:"usage",level:2}],c={toc:p};function d(e){let{components:a,...i}=e;return(0,o.kt)("wrapper",(0,n.Z)({},c,i,{components:a,mdxType:"MDXLayout"}),(0,o.kt)("p",null,"An ",(0,o.kt)("strong",{parentName:"p"},"Software Bill of Materials (SBoM)")," is a formal record containing the details and supply chain relationships of various components used in building the software. 
These components, including libraries and modules, can be proprietary or open source,free or paid and the data can be widely available or access-restricted."),(0,o.kt)("p",null,(0,o.kt)("img",{src:t(3464).Z,width:"1024",height:"652"})),(0,o.kt)("p",null,"SBoM is analogous to a list of ingredients on food packaging. In May 2021, the US President released the ",(0,o.kt)("a",{parentName:"p",href:"https://www.whitehouse.gov/briefing-room/presidential-actions/2021/05/12/executive-order-on-improving-the-nations-cybersecurity/"},"Executive Order")," on improving the Nation\u2019s Cybersecurity. The Software Bill of Materials (SBoM) directly impacts all developers. The SBoM requires third-party software companies to provide customers with the code equivalent of a \u201cnutrition chart.\u201d"),(0,o.kt)("h2",{id:"when-should-sbom-be-used--use-cases-"},"When should SBoM be used \u2013 Use cases ?"),(0,o.kt)("ul",null,(0,o.kt)("li",{parentName:"ul"},(0,o.kt)("p",{parentName:"li"},(0,o.kt)("strong",{parentName:"p"},"Developing products")),(0,o.kt)("ul",{parentName:"li"},(0,o.kt)("li",{parentName:"ul"},"Scan vulnerabilities in the components"),(0,o.kt)("li",{parentName:"ul"},"Keep codebase to bare minimum, reduce the number of dependencies and size"),(0,o.kt)("li",{parentName:"ul"},"Generate SBoM for end users"))),(0,o.kt)("li",{parentName:"ul"},(0,o.kt)("p",{parentName:"li"},(0,o.kt)("strong",{parentName:"p"},"IT Operations")),(0,o.kt)("ul",{parentName:"li"},(0,o.kt)("li",{parentName:"ul"},"Understand operational risk"),(0,o.kt)("li",{parentName:"ul"},"Understand potential exploitations"),(0,o.kt)("li",{parentName:"ul"},"Real time asset inventory"),(0,o.kt)("li",{parentName:"ul"},"Software Selection"),(0,o.kt)("li",{parentName:"ul"},"Identify known vulnerabilities and compliance"))),(0,o.kt)("li",{parentName:"ul"},(0,o.kt)("p",{parentName:"li"},(0,o.kt)("strong",{parentName:"p"},"EOL")),(0,o.kt)("ul",{parentName:"li"},(0,o.kt)("li",{parentName:"ul"},"Complete visibility to components before evaluation or deploying in production"),(0,o.kt)("li",{parentName:"ul"},"Understand the software architecture and the dependencies of the software")))),(0,o.kt)("h2",{id:"why-sbom-"},"Why SBOM ?"),(0,o.kt)("ul",null,(0,o.kt)("li",{parentName:"ul"},(0,o.kt)("p",{parentName:"li"},"Requirement from regulatory bodies to track the components used in the software")),(0,o.kt)("li",{parentName:"ul"},(0,o.kt)("p",{parentName:"li"},"Transparency of components getting shipped")),(0,o.kt)("li",{parentName:"ul"},(0,o.kt)("p",{parentName:"li"},"Container ecosystem has exploded and the need to track the components getting shipped is a must")),(0,o.kt)("li",{parentName:"ul"},(0,o.kt)("p",{parentName:"li"},"Software Vulnerabilities are bugs")),(0,o.kt)("li",{parentName:"ul"},(0,o.kt)("p",{parentName:"li"},"Detecting and remediating Vulnerabilities"))),(0,o.kt)("h2",{id:"sbom-formats"},"SBOM Formats"),(0,o.kt)("ul",null,(0,o.kt)("li",{parentName:"ul"},(0,o.kt)("p",{parentName:"li"},(0,o.kt)("strong",{parentName:"p"},"SPDX (Software Package Data Exchange )")),(0,o.kt)("ul",{parentName:"li"},(0,o.kt)("li",{parentName:"ul"},"Open standard for communicating software bill of material information, including components, licenses, copyrights and security references. 
Reduces redundant work by providing a common format for organizations and communities to share and use"))),(0,o.kt)("li",{parentName:"ul"},(0,o.kt)("p",{parentName:"li"},(0,o.kt)("strong",{parentName:"p"},"CycloneDX")),(0,o.kt)("ul",{parentName:"li"},(0,o.kt)("li",{parentName:"ul"},"Open Web Application Security Project(OWASP) CycloneDX is a lightweight Software Bill of Materials (SBOM) standard designed for use in application security contexts and supply chain component analysis."))),(0,o.kt)("li",{parentName:"ul"},(0,o.kt)("p",{parentName:"li"},(0,o.kt)("strong",{parentName:"p"},"SWID (Software Identification Tags)")),(0,o.kt)("ul",{parentName:"li"},(0,o.kt)("li",{parentName:"ul"},"SWID is used primarily to identify installed software and is the preferred format of the NVD. SWID tags are used in the National Vulnerability Database to describe vulnerable components. The CycloneDX specification compliments this work as CycloneDX documents can incorporate SWID tags and other high-level SWID metadata and optionally include entire SWID documents. Use of SWID tag ID\u2019s are useful in determining if a specific component has known vulnerabilities.")))),(0,o.kt)("h2",{id:"docker-desktop--sbom-cli"},"Docker Desktop \u2013 SBOM CLI"),(0,o.kt)("p",null,"In Docker Desktop 4.7.0 Docker introduced and included a new experimental docker sbom CLI that is used for displaying SBoM for any container image. docker sbom scans the layer of container images using the Syft Project"),(0,o.kt)("h2",{id:"usage"},"Usage"),(0,o.kt)("ul",null,(0,o.kt)("li",{parentName:"ul"},"Display SBOM in CyloneDX format")),(0,o.kt)("pre",null,(0,o.kt)("code",{parentName:"pre",className:"language-shell"},' \n$ docker sbom mongo:latest --format cyclonedx-json | more\n \n{\n "type": "library",\n "publisher": "MongoDB Packaging \\u003cpackaging@mongodb.com\\u003e",\n "name": "mongodb-org-server",\n "version": "5.0.9",\n "cpe": "cpe:2.3:a:mongodb-org-server:mongodb-org-server:5.0.9:*:*:*:*:*:*:*",\n "purl": "pkg:deb/ubuntu/mongodb-org-server@5.0.9?arch=arm64\\u0026upstream=mongodb-org\\u0026distro=ubuntu-20.04",\n "properties": [\n {\n "name": "syft:package:foundBy",\n "value": "dpkgdb-cataloger"\n },\n {\n "name": "syft:package:metadataType",\n "value": "DpkgMetadata"\n }\n')),(0,o.kt)("ul",null,(0,o.kt)("li",{parentName:"ul"},"Display SBOM summary of packages. e.g. using the below command we can check for the log4j vulnerabilities")),(0,o.kt)("pre",null,(0,o.kt)("code",{parentName:"pre",className:"language-shell"},"$ docker sbom neo4j | grep log4j\n \nlog4j-api 2.17.1 java-archive\nlog4j-core 2.17.1 java-archive\n \n$ docker sbom neo4j:4.4.1 | grep log4j\n \nlog4j-api 2.15.0 java-archive\nlog4j-core 2.15.0 java-archive\n \n$ docker sbom elasticsearch:7.16.3 | grep log4j\n \nelasticsearch-log4j 7.16.3 java-archive\nlog4j-1.2-api 2.17.1 java-archive\nlog4j-api 2.17.1 java-archive\nlog4j-core 2.17.1 java-archive\nlog4j-slf4j-impl 2.17.1 java-archive\n")),(0,o.kt)("p",null,"There are many benefits to generate SBOM for compliance and vulnerability analysis. 
Further SBOM can be used for input to open source vulnerability databases like ",(0,o.kt)("a",{parentName:"p",href:"https://github.com/snyk/cli"},"Snyk")," and open source vulnerability scanning tools like ",(0,o.kt)("a",{parentName:"p",href:"https://github.com/anchore/grype"},"Grype")))}d.isMDXComponent=!0},3464:(e,a,t)=>{t.d(a,{Z:()=>n});const n=t.p+"assets/images/sbom-e9e0845791a6a0077ec17fe1845f4c36.webp"}}]); \ No newline at end of file diff --git a/assets/js/main.265b48e2.js b/assets/js/main.265b48e2.js new file mode 100644 index 0000000..46efeb8 --- /dev/null +++ b/assets/js/main.265b48e2.js @@ -0,0 +1,2 @@ +/*! For license information please see main.265b48e2.js.LICENSE.txt */ +(self.webpackChunk=self.webpackChunk||[]).push([[179],{723:(e,t,n)=>{"use strict";n.d(t,{Z:()=>p});var r=n(7294),a=n(7462),o=n(8356),i=n.n(o),l=n(6887);const s={"01a85c17":[()=>Promise.all([n.e(532),n.e(4013)]).then(n.bind(n,1223)),"@theme/BlogTagsListPage",1223],"0a5b64c3":[()=>n.e(1358).then(n.t.bind(n,926,19)),"~blog/default/tags-pbm-7b8-list.json",926],"0c071de2":[()=>n.e(321).then(n.t.bind(n,3125,19)),"~blog/default/page-2-b45.json",3125],"0fdf99ec":[()=>n.e(5100).then(n.t.bind(n,2776,19)),"/Users/vishal/Sync/dev/blog-source/.docusaurus/@easyops-cn/docusaurus-search-local/default/plugin-route-context-module-100.json",2776],"109030a3":[()=>n.e(5673).then(n.t.bind(n,7161,19)),"~blog/default/tags-sbom-305-list.json",7161],"12a3d327":[()=>Promise.all([n.e(3932),n.e(935)]).then(n.bind(n,3277)),"@site/blog/2022-07-09-docker-sbom.md?truncated=true",3277],"13816aa6":[()=>n.e(6410).then(n.t.bind(n,4469,19)),"/Users/vishal/Sync/dev/blog-source/.docusaurus/docusaurus-plugin-content-blog/default/plugin-route-context-module-100.json",4469],"14cf12ef":[()=>n.e(768).then(n.t.bind(n,2648,19)),"~blog/default/tags-write-concern-516-list.json",2648],"17b60851":[()=>n.e(4349).then(n.t.bind(n,2798,19)),"~blog/default/tags-open-source-f3d-list.json",2798],"184bd313":[()=>n.e(8691).then(n.t.bind(n,4084,19)),"~blog/default/tags-2022-564.json",4084],"1a4e3797":[()=>Promise.all([n.e(532),n.e(7920)]).then(n.bind(n,1473)),"@theme/SearchPage",1473],"1cdce760":[()=>n.e(5535).then(n.t.bind(n,6377,19)),"~blog/default/tags-restapi-46d-list.json",6377],"1f391b9e":[()=>Promise.all([n.e(532),n.e(1020),n.e(3085)]).then(n.bind(n,4247)),"@theme/MDXPage",4247],"25e6df96":[()=>n.e(2107).then(n.t.bind(n,1115,19)),"~blog/default/tags-essential-skills-259-list.json",1115],"2721e3cf":[()=>Promise.all([n.e(3932),n.e(4625)]).then(n.bind(n,6198)),"@site/blog/2022-08-21-mongodb-replicaset-write-concern-read-pref.md?truncated=true",6198],"28e7c830":[()=>Promise.all([n.e(3932),n.e(8019)]).then(n.bind(n,7020)),"@site/blog/2021-09-18-mongodb-rs-docker-persistent-volume.md",7020],"29f94bc6":[()=>Promise.all([n.e(3932),n.e(2171)]).then(n.bind(n,9281)),"@site/blog/2021-10-02-sharding-mongo-docker.md?truncated=true",9281],"2a5075fc":[()=>n.e(9114).then(n.t.bind(n,5737,19)),"~blog/default/tags-minio-749-list.json",5737],"2bba67bb":[()=>n.e(1811).then(n.t.bind(n,9462,19)),"~blog/default/tags-replicaset-a88.json",9462],"2e801cce":[()=>n.e(9450).then(n.t.bind(n,6029,19)),"~blog/default/archive-3ef.json",6029],"2eb0aaa5":[()=>n.e(3494).then(n.t.bind(n,1915,19)),"~blog/default/tags-docops-ca7-list.json",1915],"30e9b9ae":[()=>Promise.all([n.e(3932),n.e(6982)]).then(n.bind(n,5388)),"@site/blog/2022-04-10-local-home-lab.md",5388],"33c72c09":[()=>n.e(6752).then(n.t.bind(n,4895,19)),"~blog/default/tags-mongo-replicaset-8fb-list.json",4895],"393be207":[()=>n.e(7414).then(
n.bind(n,9286)),"@site/src/pages/markdown-page.md",9286],"3d714d7c":[()=>n.e(4132).then(n.t.bind(n,6544,19)),"~blog/default/tags-persistent-volume-559-list.json",6544],"3f6967b9":[()=>n.e(6984).then(n.t.bind(n,1066,19)),"~blog/default/tags-rest-5d8-list.json",1066],"41439a7e":[()=>n.e(7574).then(n.t.bind(n,3299,19)),"~blog/default/tags-write-concern-516.json",3299],"4195a5da":[()=>n.e(1834).then(n.t.bind(n,2718,19)),"~blog/default/tags-favourite-repo-121.json",2718],"44304b85":[()=>n.e(5491).then(n.t.bind(n,1079,19)),"~blog/default/tags-sharding-bfb.json",1079],"457717b0":[()=>n.e(4470).then(n.t.bind(n,9165,19)),"~blog/default/tags-rest-api-design-rules-7ef-list.json",9165],"4cf2419f":[()=>Promise.all([n.e(3932),n.e(9578)]).then(n.bind(n,7237)),"@site/blog/2021-09-12-create-mongodb-docker.md?truncated=true",7237],"4f978119":[()=>n.e(7221).then(n.t.bind(n,7666,19)),"~blog/default/tags-career-375-list.json",7666],"57880cba":[()=>n.e(6958).then(n.t.bind(n,5919,19)),"~blog/default/tags-doc-as-code-270-list.json",5919],"5cb770ff":[()=>n.e(3336).then(n.t.bind(n,2985,19)),"~blog/default/tags-sharding-bfb-list.json",2985],"5e9f5e1a":[()=>Promise.resolve().then(n.bind(n,6809)),"@generated/docusaurus.config",6809],"67fc1b12":[()=>n.e(9848).then(n.t.bind(n,7829,19)),"~blog/default/tags-sbom-305.json",7829],"6875c492":[()=>Promise.all([n.e(532),n.e(1020),n.e(5044),n.e(8610)]).then(n.bind(n,1714)),"@theme/BlogTagsPostsPage",1714],"6a0664f4":[()=>n.e(70).then(n.t.bind(n,3574,19)),"~blog/default/tags-pbm-7b8.json",3574],"6baaf896":[()=>n.e(6004).then(n.t.bind(n,4454,19)),"~blog/default/tags-persistent-volume-559.json",4454],"6d59fe52":[()=>Promise.all([n.e(3932),n.e(4008)]).then(n.bind(n,5638)),"@site/blog/2022-10-22-doc-as-code.md",5638],"74511e25":[()=>n.e(3958).then(n.t.bind(n,5531,19)),"~blog/default/tags-mongodb-b78.json",5531],"76afaad9":[()=>n.e(1526).then(n.t.bind(n,3938,19)),"~blog/default/tags-dnsmasq-d98-list.json",3938],"7792a21f":[()=>Promise.all([n.e(532),n.e(6716)]).then(n.bind(n,5794)),"@site/src/pages/about.js",5794],"7a2b416f":[()=>n.e(4747).then(n.t.bind(n,7991,19)),"~blog/default/tags-s-3-8e0.json",7991],"7de0f443":[()=>n.e(2376).then(n.t.bind(n,7697,19)),"~blog/default/tags-docker-desktop-2f1-list.json",7697],"7ed45d0c":[()=>n.e(821).then(n.t.bind(n,6627,19)),"~blog/default/tags-nginx-57f-list.json",6627],"80fa35bf":[()=>n.e(9909).then(n.t.bind(n,114,19)),"~blog/default/tags-essential-skills-259.json",114],"814f3328":[()=>n.e(2535).then(n.t.bind(n,5641,19)),"~blog/default/blog-post-list-prop-default.json",5641],"84a500a5":[()=>n.e(364).then(n.t.bind(n,8577,19)),"~blog/default/tags-reverse-proxy-1d6-list.json",8577],"85e21fe3":[()=>n.e(1698).then(n.t.bind(n,3520,19)),"~blog/default/tags-career-375.json",3520],"85f79cb6":[()=>n.e(2548).then(n.t.bind(n,9861,19)),"~blog/default/tags-doc-as-code-270.json",9861],"878d2fef":[()=>n.e(1682).then(n.t.bind(n,1384,19)),"~blog/default/tags-fundamentals-59b-list.json",1384],"89f8c0c7":[()=>n.e(6398).then(n.t.bind(n,4246,19)),"~blog/default/tags-mongodb-b78-list.json",4246],"8bc2febf":[()=>n.e(6078).then(n.t.bind(n,1259,19)),"~blog/default/tags-dnsmasq-d98.json",1259],"8ce664e8":[()=>n.e(698).then(n.t.bind(n,2409,19)),"~blog/default/tags-docker-28b.json",2409],"8d014d99":[()=>n.e(5759).then(n.t.bind(n,9534,19)),"~blog/default/tags-docker-desktop-2f1.json",9534],"8d613c03":[()=>n.e(4183).then(n.t.bind(n,3697,19)),"~blog/default/tags-rest-api-design-rules-7ef.json",3697],"908133f5":[()=>n.e(4328).then(n.t.bind(n,6724,19)),"~blog/default/tags-2022-564-
list.json",6724],"972379d3":[()=>n.e(7909).then(n.t.bind(n,7094,19)),"~blog/default/tags-fundamentals-59b.json",7094],"98eda86f":[()=>Promise.all([n.e(3932),n.e(8830)]).then(n.bind(n,9793)),"@site/blog/2022-05-29-percona-mongo-replicaset-minio.md?truncated=true",9793],"99797eb7":[()=>Promise.all([n.e(3932),n.e(2372)]).then(n.bind(n,2822)),"@site/blog/2022-05-29-percona-mongo-replicaset-minio.md",2822],"9aaa2984":[()=>n.e(2997).then(n.t.bind(n,8618,19)),"~blog/default/tags-minio-749.json",8618],"9e4087bc":[()=>n.e(3608).then(n.bind(n,3169)),"@theme/BlogArchivePage",3169],a377ac53:[()=>n.e(9400).then(n.t.bind(n,8667,19)),"~blog/default/tags-lab-274.json",8667],a4391afa:[()=>n.e(1360).then(n.t.bind(n,9320,19)),"~blog/default/tags-containers-c29.json",9320],a5557bb9:[()=>n.e(5991).then(n.t.bind(n,3885,19)),"~blog/default/index.json",3885],a6aa9e1f:[()=>Promise.all([n.e(532),n.e(1020),n.e(5044),n.e(3089)]).then(n.bind(n,46)),"@theme/BlogListPage",46],a7687096:[()=>Promise.all([n.e(3932),n.e(643)]).then(n.bind(n,3644)),"@site/blog/2022-08-21-mongodb-replicaset-write-concern-read-pref.md",3644],a7ed2b5f:[()=>Promise.all([n.e(3932),n.e(87)]).then(n.bind(n,3473)),"@site/blog/2022-12-30-fav-open-source-repo.md?truncated=true",3473],ab779234:[()=>Promise.all([n.e(3932),n.e(9358)]).then(n.bind(n,3589)),"@site/blog/2022-12-30-fav-open-source-repo.md",3589],b2301113:[()=>n.e(7538).then(n.t.bind(n,6146,19)),"~blog/default/tags-docker-28b-list.json",6146],b3a75538:[()=>n.e(3131).then(n.t.bind(n,5923,19)),"~blog/default/tags-restapi-46d.json",5923],b6d33a4b:[()=>Promise.all([n.e(3932),n.e(3640)]).then(n.bind(n,1652)),"@site/blog/2024-01-21-essentials-software-developer.md?truncated=true",1652],bae643d2:[()=>n.e(1231).then(n.t.bind(n,2927,19)),"~blog/default/tags-mongo-replicaset-8fb.json",2927],bfe19917:[()=>Promise.all([n.e(3932),n.e(5126)]).then(n.bind(n,1864)),"@site/blog/2022-10-22-doc-as-code.md?truncated=true",1864],c2d7c295:[()=>Promise.all([n.e(3932),n.e(9102)]).then(n.bind(n,489)),"@site/blog/2024-01-21-essentials-software-developer.md",489],c573638f:[()=>n.e(964).then(n.t.bind(n,8866,19)),"~blog/default/tags-tags-c2b.json",8866],cbc87ea9:[()=>n.e(9589).then(n.t.bind(n,7622,19)),"~blog/default/tags-reverse-proxy-1d6.json",7622],cc51ae9d:[()=>n.e(6792).then(n.t.bind(n,4554,19)),"~blog/default/tags-replicaset-a88-list.json",4554],ccc49370:[()=>Promise.all([n.e(532),n.e(1020),n.e(5044),n.e(6103)]).then(n.bind(n,5203)),"@theme/BlogPostPage",5203],cd7df11b:[()=>n.e(5616).then(n.t.bind(n,8327,19)),"~blog/default/tags-favourite-repo-121-list.json",8327],d0caa075:[()=>Promise.all([n.e(3932),n.e(6503)]).then(n.bind(n,7240)),"@site/blog/2021-10-02-sharding-mongo-docker.md",7240],d2d99c14:[()=>n.e(2553).then(n.t.bind(n,2281,19)),"~blog/default/tags-containers-c29-list.json",2281],d500b5dd:[()=>Promise.all([n.e(3932),n.e(7058)]).then(n.bind(n,4641)),"@site/blog/2022-04-10-local-home-lab.md?truncated=true",4641],d628ca79:[()=>n.e(5590).then(n.t.bind(n,1850,19)),"~blog/default/tags-docops-ca7.json",1850],d7aad063:[()=>Promise.all([n.e(3932),n.e(6822)]).then(n.bind(n,7108)),"@site/blog/2021-09-12-create-mongodb-docker.md",7108],d80d3b6b:[()=>Promise.all([n.e(3932),n.e(4165)]).then(n.bind(n,1933)),"@site/blog/2022-10-02-rest-api-design-rules.md?truncated=true",1933],d887860d:[()=>n.e(5547).then(n.t.bind(n,5745,19)),"/Users/vishal/Sync/dev/blog-source/.docusaurus/docusaurus-plugin-content-pages/default/plugin-route-context-module-100.json",5745],de805e08:[()=>n.e(206).then(n.t.bind(n,8867,19)),"~blog/default/tags-s
-3-8e0-list.json",8867],e21f437b:[()=>n.e(1023).then(n.t.bind(n,1996,19)),"~blog/default/tags-rest-5d8.json",1996],ea5ae9f9:[()=>n.e(1830).then(n.t.bind(n,4443,19)),"~blog/default/tags-nginx-57f.json",4443],ebaee0aa:[()=>n.e(5397).then(n.t.bind(n,2230,19)),"~blog/default/tags-open-source-f3d.json",2230],ef69168f:[()=>Promise.all([n.e(3932),n.e(3667)]).then(n.bind(n,9517)),"@site/blog/2021-09-18-mongodb-rs-docker-persistent-volume.md?truncated=true",9517],f110011f:[()=>Promise.all([n.e(3932),n.e(6782)]).then(n.bind(n,2661)),"@site/blog/2022-07-09-docker-sbom.md",2661],fc1830e0:[()=>n.e(5015).then(n.t.bind(n,4435,19)),"~blog/default/tags-lab-274-list.json",4435],ff9e4026:[()=>Promise.all([n.e(3932),n.e(5522)]).then(n.bind(n,167)),"@site/blog/2022-10-02-rest-api-design-rules.md",167]};function u(e){let{error:t,retry:n,pastDelay:a}=e;return t?r.createElement("div",{style:{textAlign:"center",color:"#fff",backgroundColor:"#fa383e",borderColor:"#fa383e",borderStyle:"solid",borderRadius:"0.25rem",borderWidth:"1px",boxSizing:"border-box",display:"block",padding:"1rem",flex:"0 0 50%",marginLeft:"25%",marginRight:"25%",marginTop:"5rem",maxWidth:"50%",width:"100%"}},r.createElement("p",null,String(t)),r.createElement("div",null,r.createElement("button",{type:"button",onClick:n},"Retry"))):a?r.createElement("div",{style:{display:"flex",justifyContent:"center",alignItems:"center",height:"100vh"}},r.createElement("svg",{id:"loader",style:{width:128,height:110,position:"absolute",top:"calc(100vh - 64%)"},viewBox:"0 0 45 45",xmlns:"http://www.w3.org/2000/svg",stroke:"#61dafb"},r.createElement("g",{fill:"none",fillRule:"evenodd",transform:"translate(1 1)",strokeWidth:"2"},r.createElement("circle",{cx:"22",cy:"22",r:"6",strokeOpacity:"0"},r.createElement("animate",{attributeName:"r",begin:"1.5s",dur:"3s",values:"6;22",calcMode:"linear",repeatCount:"indefinite"}),r.createElement("animate",{attributeName:"stroke-opacity",begin:"1.5s",dur:"3s",values:"1;0",calcMode:"linear",repeatCount:"indefinite"}),r.createElement("animate",{attributeName:"stroke-width",begin:"1.5s",dur:"3s",values:"2;0",calcMode:"linear",repeatCount:"indefinite"})),r.createElement("circle",{cx:"22",cy:"22",r:"6",strokeOpacity:"0"},r.createElement("animate",{attributeName:"r",begin:"3s",dur:"3s",values:"6;22",calcMode:"linear",repeatCount:"indefinite"}),r.createElement("animate",{attributeName:"stroke-opacity",begin:"3s",dur:"3s",values:"1;0",calcMode:"linear",repeatCount:"indefinite"}),r.createElement("animate",{attributeName:"stroke-width",begin:"3s",dur:"3s",values:"2;0",calcMode:"linear",repeatCount:"indefinite"})),r.createElement("circle",{cx:"22",cy:"22",r:"8"},r.createElement("animate",{attributeName:"r",begin:"0s",dur:"1.5s",values:"6;1;2;3;4;5;6",calcMode:"linear",repeatCount:"indefinite"}))))):null}var c=n(9670),d=n(226);function f(e,t){if("*"===e)return i()({loading:u,loader:()=>n.e(4972).then(n.bind(n,4972)),modules:["@theme/NotFound"],webpack:()=>[4972],render(e,t){const n=e.default;return r.createElement(d.z,{value:{plugin:{name:"native",id:"default"}}},r.createElement(n,t))}});const o=l[`${e}-${t}`],f={},p=[],h=[],m=(0,c.Z)(o);return Object.entries(m).forEach((e=>{let[t,n]=e;const r=s[n];r&&(f[t]=r[0],p.push(r[1]),h.push(r[2]))})),i().Map({loading:u,loader:f,modules:p,webpack:()=>h,render(t,n){const i=JSON.parse(JSON.stringify(o));Object.entries(t).forEach((t=>{let[n,r]=t;const a=r.default;if(!a)throw new Error(`The page component at ${e} doesn't have a default export. This makes it impossible to render anything. 
Consider default-exporting a React component.`);"object"!=typeof a&&"function"!=typeof a||Object.keys(r).filter((e=>"default"!==e)).forEach((e=>{a[e]=r[e]}));let o=i;const l=n.split(".");l.slice(0,-1).forEach((e=>{o=o[e]})),o[l[l.length-1]]=a}));const l=i.__comp;delete i.__comp;const s=i.__context;return delete i.__context,r.createElement(d.z,{value:s},r.createElement(l,(0,a.Z)({},i,n)))}})}const p=[{path:"/about",component:f("/about","9e5"),exact:!0},{path:"/archive",component:f("/archive","c56"),exact:!0},{path:"/create-mongodb-docker",component:f("/create-mongodb-docker","2eb"),exact:!0},{path:"/doc-as-code",component:f("/doc-as-code","d6b"),exact:!0},{path:"/docker-sbom",component:f("/docker-sbom","37b"),exact:!0},{path:"/essential-skills",component:f("/essential-skills","a55"),exact:!0},{path:"/fav-open-source-repo",component:f("/fav-open-source-repo","939"),exact:!0},{path:"/local-home-lab",component:f("/local-home-lab","289"),exact:!0},{path:"/markdown-page",component:f("/markdown-page","b8a"),exact:!0},{path:"/mongodb-replicaset-write-concern-read-pref",component:f("/mongodb-replicaset-write-concern-read-pref","bff"),exact:!0},{path:"/mongodb-rs-docker-persistent-volume",component:f("/mongodb-rs-docker-persistent-volume","a78"),exact:!0},{path:"/page/2",component:f("/page/2","59a"),exact:!0},{path:"/percona-mongo-replicaset-minio",component:f("/percona-mongo-replicaset-minio","d3e"),exact:!0},{path:"/rest-api-design-rules",component:f("/rest-api-design-rules","617"),exact:!0},{path:"/search",component:f("/search","735"),exact:!0},{path:"/sharding-mongo-docker",component:f("/sharding-mongo-docker","ffa"),exact:!0},{path:"/tags",component:f("/tags","8bc"),exact:!0},{path:"/tags/2022",component:f("/tags/2022","f80"),exact:!0},{path:"/tags/career",component:f("/tags/career","9d7"),exact:!0},{path:"/tags/containers",component:f("/tags/containers","e29"),exact:!0},{path:"/tags/dnsmasq",component:f("/tags/dnsmasq","da0"),exact:!0},{path:"/tags/doc-as-code",component:f("/tags/doc-as-code","437"),exact:!0},{path:"/tags/docker",component:f("/tags/docker","02f"),exact:!0},{path:"/tags/docker-desktop",component:f("/tags/docker-desktop","ac7"),exact:!0},{path:"/tags/docops",component:f("/tags/docops","233"),exact:!0},{path:"/tags/essential-skills",component:f("/tags/essential-skills","b11"),exact:!0},{path:"/tags/favourite-repo",component:f("/tags/favourite-repo","f9b"),exact:!0},{path:"/tags/fundamentals",component:f("/tags/fundamentals","04b"),exact:!0},{path:"/tags/lab",component:f("/tags/lab","f1a"),exact:!0},{path:"/tags/minio",component:f("/tags/minio","20f"),exact:!0},{path:"/tags/mongo-replicaset",component:f("/tags/mongo-replicaset","ad7"),exact:!0},{path:"/tags/mongodb",component:f("/tags/mongodb","135"),exact:!0},{path:"/tags/nginx",component:f("/tags/nginx","a49"),exact:!0},{path:"/tags/open-source",component:f("/tags/open-source","44e"),exact:!0},{path:"/tags/pbm",component:f("/tags/pbm","515"),exact:!0},{path:"/tags/persistent-volume",component:f("/tags/persistent-volume","f2e"),exact:!0},{path:"/tags/replicaset",component:f("/tags/replicaset","71a"),exact:!0},{path:"/tags/rest",component:f("/tags/rest","6cc"),exact:!0},{path:"/tags/rest-api-design-rules",component:f("/tags/rest-api-design-rules","1de"),exact:!0},{path:"/tags/restapi",component:f("/tags/restapi","342"),exact:!0},{path:"/tags/reverse-proxy",component:f("/tags/reverse-proxy","045"),exact:!0},{path:"/tags/s-3",component:f("/tags/s-3","36d"),exact:!0},{path:"/tags/sbom",component:f("/tags/sbom","827"),exact:!0},{path:"
/tags/sharding",component:f("/tags/sharding","4b2"),exact:!0},{path:"/tags/write-concern",component:f("/tags/write-concern","7c2"),exact:!0},{path:"/",component:f("/","8d1"),exact:!0},{path:"*",component:f("*")}]},8934:(e,t,n)=>{"use strict";n.d(t,{_:()=>a,t:()=>o});var r=n(7294);const a=r.createContext(!1);function o(e){let{children:t}=e;const[n,o]=(0,r.useState)(!1);return(0,r.useEffect)((()=>{o(!0)}),[]),r.createElement(a.Provider,{value:n},t)}},9908:(e,t,n)=>{"use strict";var r=n(7294),a=n(3935),o=n(3727),i=n(405),l=n(412);const s=[n(4367),n(2497),n(3310),n(8320),n(2295)];var u=n(723),c=n(6550),d=n(7462);function f(e,t,n){return void 0===n&&(n=[]),e.some((function(e){var r=e.path?(0,c.LX)(t,e):n.length?n[n.length-1].match:c.F0.computeRootMatch(t);return r&&(n.push({route:e,match:r}),e.routes&&f(e.routes,t,n)),r})),n}function p(e){let{children:t}=e;return r.createElement(r.Fragment,null,t)}var h=n(5742),m=n(2263),g=n(4996),b=n(6668),v=n(1944),y=n(4711),w=n(9727);const k="default";var E=n(197);function S(){const{i18n:{defaultLocale:e,localeConfigs:t}}=(0,m.Z)(),n=(0,y.l)();return r.createElement(h.Z,null,Object.entries(t).map((e=>{let[t,{htmlLang:a}]=e;return r.createElement("link",{key:t,rel:"alternate",href:n.createUrl({locale:t,fullyQualified:!0}),hrefLang:a})})),r.createElement("link",{rel:"alternate",href:n.createUrl({locale:e,fullyQualified:!0}),hrefLang:"x-default"}))}function x(e){let{permalink:t}=e;const{siteConfig:{url:n}}=(0,m.Z)(),a=function(){const{siteConfig:{url:e}}=(0,m.Z)(),{pathname:t}=(0,c.TH)();return e+(0,g.Z)(t)}(),o=t?`${n}${t}`:a;return r.createElement(h.Z,null,r.createElement("meta",{property:"og:url",content:o}),r.createElement("link",{rel:"canonical",href:o}))}function _(){const{i18n:{currentLocale:e}}=(0,m.Z)(),{metadata:t,image:n}=(0,b.L)();return r.createElement(r.Fragment,null,r.createElement(h.Z,null,r.createElement("meta",{name:"twitter:card",content:"summary_large_image"}),r.createElement("body",{className:w.h})),n&&r.createElement(v.d,{image:n}),r.createElement(x,null),r.createElement(S,null),r.createElement(E.Z,{tag:k,locale:e}),r.createElement(h.Z,null,t.map(((e,t)=>r.createElement("meta",(0,d.Z)({key:t},e))))))}const C=new Map;function T(e){if(C.has(e.pathname))return{...e,pathname:C.get(e.pathname)};if(f(u.Z,e.pathname).some((e=>{let{route:t}=e;return!0===t.exact})))return C.set(e.pathname,e.pathname),e;const t=e.pathname.trim().replace(/(?:\/index)?\.html$/,"")||"/";return C.set(e.pathname,t),{...e,pathname:t}}var L=n(8934),P=n(8940);function O(e){for(var t=arguments.length,n=new Array(t>1?t-1:0),r=1;r(t.default?.[e]??t[e])?.(...n)));return()=>a.forEach((e=>e?.()))}const N=function(e){let{children:t,location:n,previousLocation:a}=e;return(0,r.useLayoutEffect)((()=>{a!==n&&(a&&function(e){const{hash:t}=e;if(t){const e=decodeURIComponent(t.substring(1));document.getElementById(e)?.scrollIntoView()}else window.scrollTo(0,0)}(n),O("onRouteDidUpdate",{previousLocation:a,location:n}))}),[a,n]),t};function A(e){const t=Array.from(new Set([e,decodeURI(e)])).map((e=>f(u.Z,e))).flat();return Promise.all(t.map((e=>e.route.component.preload?.())))}class I extends r.Component{constructor(e){super(e),this.previousLocation=void 0,this.routeUpdateCleanupCb=void 0,this.previousLocation=null,this.routeUpdateCleanupCb=l.Z.canUseDOM?O("onRouteUpdate",{previousLocation:null,location:this.props.location}):()=>{},this.state={nextRouteHasLoaded:!0}}shouldComponentUpdate(e,t){if(e.location===this.props.location)return t.nextRouteHasLoaded;const n=e.location;return 
this.previousLocation=this.props.location,this.setState({nextRouteHasLoaded:!1}),this.routeUpdateCleanupCb=O("onRouteUpdate",{previousLocation:this.previousLocation,location:n}),A(n.pathname).then((()=>{this.routeUpdateCleanupCb(),this.setState({nextRouteHasLoaded:!0})})).catch((e=>{console.warn(e),window.location.reload()})),!1}render(){const{children:e,location:t}=this.props;return r.createElement(N,{previousLocation:this.previousLocation,location:t},r.createElement(c.AW,{location:t,render:()=>e}))}}const R=I,D="docusaurus-base-url-issue-banner-container",M="docusaurus-base-url-issue-banner-suggestion-container",F="__DOCUSAURUS_INSERT_BASEURL_BANNER";function B(e){return`\nwindow['${F}'] = true;\n\ndocument.addEventListener('DOMContentLoaded', maybeInsertBanner);\n\nfunction maybeInsertBanner() {\n var shouldInsert = window['${F}'];\n shouldInsert && insertBanner();\n}\n\nfunction insertBanner() {\n var bannerContainer = document.getElementById('${D}');\n if (!bannerContainer) {\n return;\n }\n var bannerHtml = ${JSON.stringify(function(e){return`\n
\nYour Docusaurus site did not load properly.\n
\nA very common reason is a wrong site baseUrl configuration.\n
\nCurrent configured baseUrl = ${e} ${"/"===e?" (default value)":""}\n
\nWe suggest trying baseUrl =\n
\n`}(e)).replace(/{window[F]=!1}),[]),r.createElement(r.Fragment,null,!l.Z.canUseDOM&&r.createElement(h.Z,null,r.createElement("script",null,B(e))),r.createElement("div",{id:D}))}function $(){const{siteConfig:{baseUrl:e,baseUrlIssueBanner:t}}=(0,m.Z)(),{pathname:n}=(0,c.TH)();return t&&n===e?r.createElement(j,null):null}function z(){const{siteConfig:{favicon:e,title:t,noIndex:n},i18n:{currentLocale:a,localeConfigs:o}}=(0,m.Z)(),i=(0,g.Z)(e),{htmlLang:l,direction:s}=o[a];return r.createElement(h.Z,null,r.createElement("html",{lang:l,dir:s}),r.createElement("title",null,t),r.createElement("meta",{property:"og:title",content:t}),r.createElement("meta",{name:"viewport",content:"width=device-width, initial-scale=1.0"}),n&&r.createElement("meta",{name:"robots",content:"noindex, nofollow"}),e&&r.createElement("link",{rel:"icon",href:i}))}var U=n(4763);function H(){const e=function(e,t,n){return void 0===t&&(t={}),void 0===n&&(n={}),e?r.createElement(c.rs,n,e.map((function(e,n){return r.createElement(c.AW,{key:e.key||n,path:e.path,exact:e.exact,strict:e.strict,render:function(n){return e.render?e.render((0,d.Z)({},n,{},t,{route:e})):r.createElement(e.component,(0,d.Z)({},n,t,{route:e}))}})}))):null}(u.Z),t=(0,c.TH)();return r.createElement(U.Z,null,r.createElement(P.M,null,r.createElement(L.t,null,r.createElement(p,null,r.createElement(z,null),r.createElement(_,null),r.createElement($,null),r.createElement(R,{location:T(t)},e)))))}var Q=n(6887);const Z=function(e){try{return document.createElement("link").relList.supports(e)}catch{return!1}}("prefetch")?function(e){return new Promise(((t,n)=>{if("undefined"==typeof document)return void n();const r=document.createElement("link");r.setAttribute("rel","prefetch"),r.setAttribute("href",e),r.onload=()=>t(),r.onerror=()=>n();(document.getElementsByTagName("head")[0]??document.getElementsByName("script")[0]?.parentNode)?.appendChild(r)}))}:function(e){return new Promise(((t,n)=>{const r=new XMLHttpRequest;r.open("GET",e,!0),r.withCredentials=!0,r.onload=()=>{200===r.status?t():n()},r.send(null)}))};var V=n(9670);const W=new Set,G=new Set,q=()=>navigator.connection?.effectiveType.includes("2g")||navigator.connection?.saveData,Y={prefetch(e){if(!(e=>!q()&&!G.has(e)&&!W.has(e))(e))return!1;W.add(e);const t=f(u.Z,e).flatMap((e=>{return t=e.route.path,Object.entries(Q).filter((e=>{let[n]=e;return n.replace(/-[^-]+$/,"")===t})).flatMap((e=>{let[,t]=e;return Object.values((0,V.Z)(t))}));var t}));return Promise.all(t.map((e=>{const t=n.gca(e);return t&&!t.includes("undefined")?Z(t).catch((()=>{})):Promise.resolve()})))},preload:e=>!!(e=>!q()&&!G.has(e))(e)&&(G.add(e),A(e))},K=Object.freeze(Y);if(l.Z.canUseDOM){window.docusaurus=K;const e=a.hydrate;A(window.location.pathname).then((()=>{e(r.createElement(i.B6,null,r.createElement(o.VK,null,r.createElement(H,null))),document.getElementById("__docusaurus"))}))}},8940:(e,t,n)=>{"use strict";n.d(t,{_:()=>u,M:()=>c});var r=n(7294),a=n(6809);const o=JSON.parse('{"defaultLocale":"en","locales":["en"],"path":"i18n","currentLocale":"en","localeConfigs":{"en":{"label":"English","direction":"ltr","htmlLang":"en","calendar":"gregory","path":"en"}}}');var i=n(7529);const 
l=JSON.parse('{"docusaurusVersion":"2.2.0","siteVersion":"0.0.0","pluginVersions":{"docusaurus-plugin-content-blog":{"type":"package","name":"@docusaurus/plugin-content-blog","version":"2.2.0"},"docusaurus-plugin-content-pages":{"type":"package","name":"@docusaurus/plugin-content-pages","version":"2.2.0"},"docusaurus-plugin-google-analytics":{"type":"package","name":"@docusaurus/plugin-google-analytics","version":"2.2.0"},"docusaurus-plugin-sitemap":{"type":"package","name":"@docusaurus/plugin-sitemap","version":"2.2.0"},"docusaurus-theme-classic":{"type":"package","name":"@docusaurus/theme-classic","version":"2.2.0"},"@easyops-cn/docusaurus-search-local":{"type":"package","name":"@easyops-cn/docusaurus-search-local","version":"0.30.2"}}}'),s={siteConfig:a.default,siteMetadata:l,globalData:{},i18n:o,codeTranslations:i},u=r.createContext(s);function c(e){let{children:t}=e;return r.createElement(u.Provider,{value:s},t)}},4763:(e,t,n)=>{"use strict";n.d(t,{Z:()=>c});var r=n(7294),a=n(412),o=n(5742),i=n(6274);function l(e){let{error:t,tryAgain:n}=e;return r.createElement("div",{style:{display:"flex",flexDirection:"column",justifyContent:"center",alignItems:"center",height:"50vh",width:"100%",fontSize:"20px"}},r.createElement("h1",null,"This page crashed."),r.createElement("p",null,t.message),r.createElement("button",{type:"button",onClick:n},"Try again"))}function s(e){let{error:t,tryAgain:n}=e;return r.createElement(c,{fallback:()=>r.createElement(l,{error:t,tryAgain:n})},r.createElement(o.Z,null,r.createElement("title",null,"Page Error")),r.createElement(i.Z,null,r.createElement(l,{error:t,tryAgain:n})))}const u=e=>r.createElement(s,e);class c extends r.Component{constructor(e){super(e),this.state={error:null}}componentDidCatch(e){a.Z.canUseDOM&&this.setState({error:e})}render(){const{children:e}=this.props,{error:t}=this.state;if(t){const e={error:t,tryAgain:()=>this.setState({error:null})};return(this.props.fallback??u)(e)}return e??null}}},412:(e,t,n)=>{"use strict";n.d(t,{Z:()=>a});const r="undefined"!=typeof window&&"document"in window&&"createElement"in window.document,a={canUseDOM:r,canUseEventListeners:r&&("addEventListener"in window||"attachEvent"in window),canUseIntersectionObserver:r&&"IntersectionObserver"in window,canUseViewport:r&&"screen"in window}},5742:(e,t,n)=>{"use strict";n.d(t,{Z:()=>o});var r=n(7294),a=n(405);function o(e){return r.createElement(a.ql,e)}},9960:(e,t,n)=>{"use strict";n.d(t,{Z:()=>p});var r=n(7462),a=n(7294),o=n(3727),i=n(8780),l=n(2263),s=n(3919),u=n(412);const c=a.createContext({collectLink:()=>{}});var d=n(4996);function f(e,t){let{isNavLink:n,to:f,href:p,activeClassName:h,isActive:m,"data-noBrokenLinkCheck":g,autoAddBaseUrl:b=!0,...v}=e;const{siteConfig:{trailingSlash:y,baseUrl:w}}=(0,l.Z)(),{withBaseUrl:k}=(0,d.C)(),E=(0,a.useContext)(c),S=(0,a.useRef)(null);(0,a.useImperativeHandle)(t,(()=>S.current));const x=f||p;const _=(0,s.Z)(x),C=x?.replace("pathname://","");let T=void 0!==C?(L=C,b&&(e=>e.startsWith("/"))(L)?k(L):L):void 0;var L;T&&_&&(T=(0,i.applyTrailingSlash)(T,{trailingSlash:y,baseUrl:w}));const P=(0,a.useRef)(!1),O=n?o.OL:o.rU,N=u.Z.canUseIntersectionObserver,A=(0,a.useRef)(),I=()=>{P.current||null==T||(window.docusaurus.preload(T),P.current=!0)};(0,a.useEffect)((()=>(!N&&_&&null!=T&&window.docusaurus.prefetch(T),()=>{N&&A.current&&A.current.disconnect()})),[A,T,N,_]);const R=T?.startsWith("#")??!1,D=!T||!_||R;return D||g||E.collectLink(T),D?a.createElement("a",(0,r.Z)({ref:S,href:T},x&&!_&&{target:"_blank",rel:"noopener 
noreferrer"},v)):a.createElement(O,(0,r.Z)({},v,{onMouseEnter:I,onTouchStart:I,innerRef:e=>{S.current=e,N&&e&&_&&(A.current=new window.IntersectionObserver((t=>{t.forEach((t=>{e===t.target&&(t.isIntersecting||t.intersectionRatio>0)&&(A.current.unobserve(e),A.current.disconnect(),null!=T&&window.docusaurus.prefetch(T))}))})),A.current.observe(e))},to:T},n&&{isActive:m,activeClassName:h}))}const p=a.forwardRef(f)},5999:(e,t,n)=>{"use strict";n.d(t,{Z:()=>s,I:()=>l});var r=n(7294);function a(e,t){const n=e.split(/(\{\w+\})/).map(((e,n)=>{if(n%2==1){const n=t?.[e.slice(1,-1)];if(void 0!==n)return n}return e}));return n.some((e=>(0,r.isValidElement)(e)))?n.map(((e,t)=>(0,r.isValidElement)(e)?r.cloneElement(e,{key:t}):e)).filter((e=>""!==e)):n.join("")}var o=n(7529);function i(e){let{id:t,message:n}=e;if(void 0===t&&void 0===n)throw new Error("Docusaurus translation declarations must have at least a translation id or a default translation message");return o[t??n]??n??t}function l(e,t){let{message:n,id:r}=e;return a(i({message:n,id:r}),t)}function s(e){let{children:t,id:n,values:o}=e;if(t&&"string"!=typeof t)throw console.warn("Illegal children",t),new Error("The Docusaurus component only accept simple string values");const l=i({message:t,id:n});return r.createElement(r.Fragment,null,a(l,o))}},9935:(e,t,n)=>{"use strict";n.d(t,{m:()=>r});const r="default"},3919:(e,t,n)=>{"use strict";function r(e){return/^(?:\w*:|\/\/)/.test(e)}function a(e){return void 0!==e&&!r(e)}n.d(t,{Z:()=>a,b:()=>r})},4996:(e,t,n)=>{"use strict";n.d(t,{C:()=>o,Z:()=>i});var r=n(2263),a=n(3919);function o(){const{siteConfig:{baseUrl:e,url:t}}=(0,r.Z)();return{withBaseUrl:(n,r)=>function(e,t,n,r){let{forcePrependBaseUrl:o=!1,absolute:i=!1}=void 0===r?{}:r;if(!n||n.startsWith("#")||(0,a.b)(n))return n;if(o)return t+n.replace(/^\//,"");if(n===t.replace(/\/$/,""))return t;const l=n.startsWith(t)?n:t+n.replace(/^\//,"");return i?e+l:l}(t,e,n,r)}}function i(e,t){void 0===t&&(t={});const{withBaseUrl:n}=o();return n(e,t)}},2263:(e,t,n)=>{"use strict";n.d(t,{Z:()=>o});var r=n(7294),a=n(8940);function o(){return(0,r.useContext)(a._)}},2389:(e,t,n)=>{"use strict";n.d(t,{Z:()=>o});var r=n(7294),a=n(8934);function o(){return(0,r.useContext)(a._)}},9670:(e,t,n)=>{"use strict";n.d(t,{Z:()=>r});function r(e){const t={};return function e(n,r){Object.entries(n).forEach((n=>{let[a,o]=n;const i=r?`${r}.${a}`:a;var l;"object"==typeof(l=o)&&l&&Object.keys(l).length>0?e(o,i):t[i]=o}))}(e),t}},226:(e,t,n)=>{"use strict";n.d(t,{_:()=>a,z:()=>o});var r=n(7294);const a=r.createContext(null);function o(e){let{children:t,value:n}=e;const o=r.useContext(a),i=(0,r.useMemo)((()=>function(e){let{parent:t,value:n}=e;if(!t){if(!n)throw new Error("Unexpected: no Docusaurus route context found");if(!("plugin"in n))throw new Error("Unexpected: Docusaurus topmost route context has no `plugin` attribute");return n}const r={...t.data,...n?.data};return{plugin:t.plugin,data:r}}({parent:o,value:n})),[o,n]);return r.createElement(a.Provider,{value:i},t)}},143:(e,t,n)=>{"use strict";n.d(t,{Iw:()=>m,gA:()=>f,_r:()=>c,zh:()=>d,yW:()=>h,gB:()=>p});var r=n(6550),a=n(2263),o=n(9935);function i(e,t){void 0===t&&(t={});const n=function(){const{globalData:e}=(0,a.Z)();return e}()[e];if(!n&&t.failfast)throw new Error(`Docusaurus plugin global data not found for "${e}" plugin.`);return n}const l=e=>e.versions.find((e=>e.isLast));function s(e,t){const n=function(e,t){const 
n=l(e);return[...e.versions.filter((e=>e!==n)),n].find((e=>!!(0,r.LX)(t,{path:e.path,exact:!1,strict:!1})))}(e,t),a=n?.docs.find((e=>!!(0,r.LX)(t,{path:e.path,exact:!0,strict:!1})));return{activeVersion:n,activeDoc:a,alternateDocVersions:a?function(t){const n={};return e.versions.forEach((e=>{e.docs.forEach((r=>{r.id===t&&(n[e.name]=r)}))})),n}(a.id):{}}}const u={},c=()=>i("docusaurus-plugin-content-docs")??u,d=e=>function(e,t,n){void 0===t&&(t=o.m),void 0===n&&(n={});const r=i(e)?.[t];if(!r&&n.failfast)throw new Error(`Docusaurus plugin global data not found for "${e}" plugin with id "${t}".`);return r}("docusaurus-plugin-content-docs",e,{failfast:!0});function f(e){void 0===e&&(e={});const t=c(),{pathname:n}=(0,r.TH)();return function(e,t,n){void 0===n&&(n={});const a=Object.entries(e).sort(((e,t)=>t[1].path.localeCompare(e[1].path))).find((e=>{let[,n]=e;return!!(0,r.LX)(t,{path:n.path,exact:!1,strict:!1})})),o=a?{pluginId:a[0],pluginData:a[1]}:void 0;if(!o&&n.failfast)throw new Error(`Can't find active docs plugin for "${t}" pathname, while it was expected to be found. Maybe you tried to use a docs feature that can only be used on a docs-related page? Existing docs plugin paths are: ${Object.values(e).map((e=>e.path)).join(", ")}`);return o}(t,n,e)}function p(e){return d(e).versions}function h(e){const t=d(e);return l(t)}function m(e){const t=d(e),{pathname:n}=(0,r.TH)();return s(t,n)}},4367:(e,t,n)=>{"use strict";n.r(t),n.d(t,{default:()=>r});const r={onRouteDidUpdate(e){let{location:t,previousLocation:n}=e;!n||t.pathname===n.pathname&&t.search===n.search&&t.hash===n.hash||(window.ga("set","page",t.pathname+t.search+t.hash),window.ga("send","pageview"))}}},8320:(e,t,n)=>{"use strict";n.r(t),n.d(t,{default:()=>o});var r=n(4865),a=n.n(r);a().configure({showSpinner:!1});const o={onRouteUpdate(e){let{location:t,previousLocation:n}=e;if(n&&t.pathname!==n.pathname){const e=window.setTimeout((()=>{a().start()}),200);return()=>window.clearTimeout(e)}},onRouteDidUpdate(){a().done()}}},3310:(e,t,n)=>{"use strict";n.r(t);var r=n(7410),a=n(6809);!function(e){const{themeConfig:{prism:t}}=a.default,{additionalLanguages:r}=t;globalThis.Prism=e,r.forEach((e=>{n(3476)(`./prism-${e}`)})),delete globalThis.Prism}(r.Z)},6274:(e,t,n)=>{"use strict";n.d(t,{Z:()=>cn});var r=n(7294),a=n(6010),o=n(4763),i=n(1944),l=n(7462),s=n(6550),u=n(5999),c=n(902);function d(e){const t=(0,s.TH)(),n=(0,c.D9)(t),a=(0,c.zX)(e);(0,r.useEffect)((()=>{n&&t!==n&&a({location:t,previousLocation:n})}),[a,t,n])}const f="docusaurus_skipToContent_fallback";function p(e){e.setAttribute("tabindex","-1"),e.focus(),e.removeAttribute("tabindex")}function h(){const e=(0,r.useRef)(null),{action:t}=(0,s.k6)(),n=(0,r.useCallback)((e=>{e.preventDefault();const t=document.querySelector("main:first-of-type")??document.getElementById(f);t&&p(t)}),[]);return d((n=>{let{location:r}=n;e.current&&!r.hash&&"PUSH"===t&&p(e.current)})),{containerRef:e,onClick:n}}const m=(0,u.I)({id:"theme.common.skipToMainContent",description:"The skip to content label used for accessibility, allowing to rapidly navigate to main content with keyboard tab/enter navigation",message:"Skip to main content"});function g(e){const t=e.children??m,{containerRef:n,onClick:a}=h();return r.createElement("div",{ref:n,role:"region","aria-label":m},r.createElement("a",(0,l.Z)({},e,{href:`#${f}`,onClick:a}),t))}var b=n(5281),v=n(9727);const y="skipToContent_fXgn";function w(){return r.createElement(g,{className:y})}var k=n(6668),E=n(2389),S=n(12);const 
x=(0,S.W)("docusaurus.announcement.dismiss"),_=(0,S.W)("docusaurus.announcement.id"),C=()=>"true"===x.get(),T=e=>x.set(String(e)),L=r.createContext(null);function P(e){let{width:t=21,height:n=21,color:a="currentColor",strokeWidth:o=1.2,className:i,...s}=e;return r.createElement("svg",(0,l.Z)({viewBox:"0 0 15 15",width:t,height:n},s),r.createElement("g",{stroke:a,strokeWidth:o},r.createElement("path",{d:"M.75.75l13.5 13.5M14.25.75L.75 14.25"})))}const O="closeButton_CVFx";function N(e){return r.createElement("button",(0,l.Z)({type:"button","aria-label":(0,u.I)({id:"theme.AnnouncementBar.closeButtonAriaLabel",message:"Close",description:"The ARIA label for close button of announcement bar"})},e,{className:(0,a.Z)("clean-btn close",O,e.className)}),r.createElement(P,{width:14,height:14,strokeWidth:3.1}))}const A="content_knG7";function I(e){const{announcementBar:t}=(0,k.L)(),{content:n}=t;return r.createElement("div",(0,l.Z)({},e,{className:(0,a.Z)(A,e.className),dangerouslySetInnerHTML:{__html:n}}))}const R="announcementBar_mb4j",D="announcementBarPlaceholder_vyr4",M="announcementBarClose_gvF7",F="announcementBarContent_xLdY";function B(){const{announcementBar:e}=(0,k.L)(),{isActive:t,close:n}=function(){const e=(0,r.useContext)(L);if(!e)throw new c.i6("AnnouncementBarProvider");return e}();if(!t)return null;const{backgroundColor:a,textColor:o,isCloseable:i}=e;return r.createElement("div",{className:R,style:{backgroundColor:a,color:o},role:"banner"},i&&r.createElement("div",{className:D}),r.createElement(I,{className:F}),i&&r.createElement(N,{onClick:n,className:M}))}var j=n(3102),$=n(7524);function z(e){!function(e){const t=(0,s.k6)(),n=(0,c.zX)(e);(0,r.useEffect)((()=>t.block(((e,t)=>n(e,t)))),[t,n])}(((t,n)=>{if("POP"===n)return e(t,n)}))}const U=r.createContext(void 0);function H(){const e=function(){const e=(0,j.HY)(),{items:t}=(0,k.L)().navbar;return 0===t.length&&!e.component}(),t=(0,$.i)(),n=!e&&"mobile"===t,[a,o]=(0,r.useState)(!1);z((()=>{if(a)return o(!1),!1}));const i=(0,r.useCallback)((()=>{o((e=>!e))}),[]);return(0,r.useEffect)((()=>{"desktop"===t&&o(!1)}),[t]),(0,r.useMemo)((()=>({disabled:e,shouldRender:n,toggle:i,shown:a})),[e,n,i,a])}function Q(e){let{children:t}=e;const n=H();return r.createElement(U.Provider,{value:n},t)}function Z(){const e=r.useContext(U);if(void 0===e)throw new c.i6("NavbarMobileSidebarProvider");return e}var V=n(412);const W=r.createContext(void 0);function G(){const e=(0,r.useContext)(W);if(null==e)throw new c.i6("ScrollControllerProvider");return e}const q=()=>V.Z.canUseDOM?{scrollX:window.pageXOffset,scrollY:window.pageYOffset}:null;function Y(e){const[t,n]=(0,r.useState)(e),a=(0,r.useRef)(!1),o=(0,r.useRef)(0),i=(0,r.useCallback)((e=>{null!==e&&(o.current=e.getBoundingClientRect().height)}),[]);return function(e,t){void 0===t&&(t=[]);const{scrollEventsEnabledRef:n}=G(),a=(0,r.useRef)(q()),o=(0,c.zX)(e);(0,r.useEffect)((()=>{const e=()=>{if(!n.current)return;const e=q();o(e,a.current),a.current=e},t={passive:!0};return e(),window.addEventListener("scroll",e,t),()=>window.removeEventListener("scroll",e,t)}),[o,n,...t])}(((t,r)=>{let{scrollY:i}=t;if(!e)return;if(i=l?n(!1):i+u{if(!e)return;const r=t.location.hash;if(r?document.getElementById(r.substring(1)):void 0)return a.current=!0,void n(!1);n(!0)})),{navbarRef:i,isNavbarVisible:t}}const K=r.createContext(null);function X(e){let{children:t}=e;const n=function(){const 
e=Z(),t=(0,j.HY)(),[n,a]=(0,r.useState)(!1),o=null!==t.component,i=(0,c.D9)(o);return(0,r.useEffect)((()=>{o&&!i&&a(!0)}),[o,i]),(0,r.useEffect)((()=>{o?e.shown||a(!0):a(!1)}),[e.shown,o]),(0,r.useMemo)((()=>[n,a]),[n])}();return r.createElement(K.Provider,{value:n},t)}function J(e){if(e.component){const t=e.component;return r.createElement(t,e.props)}}function ee(){const e=(0,r.useContext)(K);if(!e)throw new c.i6("NavbarSecondaryMenuDisplayProvider");const[t,n]=e,a=(0,r.useCallback)((()=>n(!1)),[n]),o=(0,j.HY)();return(0,r.useMemo)((()=>({shown:t,hide:a,content:J(o)})),[a,o,t])}function te(e){let{header:t,primaryMenu:n,secondaryMenu:o}=e;const{shown:i}=ee();return r.createElement("div",{className:"navbar-sidebar"},t,r.createElement("div",{className:(0,a.Z)("navbar-sidebar__items",{"navbar-sidebar__items--show-secondary":i})},r.createElement("div",{className:"navbar-sidebar__item menu"},n),r.createElement("div",{className:"navbar-sidebar__item menu"},o)))}var ne=n(2949);function re(e){return r.createElement("svg",(0,l.Z)({viewBox:"0 0 24 24",width:24,height:24},e),r.createElement("path",{fill:"currentColor",d:"M12,9c1.65,0,3,1.35,3,3s-1.35,3-3,3s-3-1.35-3-3S10.35,9,12,9 M12,7c-2.76,0-5,2.24-5,5s2.24,5,5,5s5-2.24,5-5 S14.76,7,12,7L12,7z M2,13l2,0c0.55,0,1-0.45,1-1s-0.45-1-1-1l-2,0c-0.55,0-1,0.45-1,1S1.45,13,2,13z M20,13l2,0c0.55,0,1-0.45,1-1 s-0.45-1-1-1l-2,0c-0.55,0-1,0.45-1,1S19.45,13,20,13z M11,2v2c0,0.55,0.45,1,1,1s1-0.45,1-1V2c0-0.55-0.45-1-1-1S11,1.45,11,2z M11,20v2c0,0.55,0.45,1,1,1s1-0.45,1-1v-2c0-0.55-0.45-1-1-1C11.45,19,11,19.45,11,20z M5.99,4.58c-0.39-0.39-1.03-0.39-1.41,0 c-0.39,0.39-0.39,1.03,0,1.41l1.06,1.06c0.39,0.39,1.03,0.39,1.41,0s0.39-1.03,0-1.41L5.99,4.58z M18.36,16.95 c-0.39-0.39-1.03-0.39-1.41,0c-0.39,0.39-0.39,1.03,0,1.41l1.06,1.06c0.39,0.39,1.03,0.39,1.41,0c0.39-0.39,0.39-1.03,0-1.41 L18.36,16.95z M19.42,5.99c0.39-0.39,0.39-1.03,0-1.41c-0.39-0.39-1.03-0.39-1.41,0l-1.06,1.06c-0.39,0.39-0.39,1.03,0,1.41 s1.03,0.39,1.41,0L19.42,5.99z M7.05,18.36c0.39-0.39,0.39-1.03,0-1.41c-0.39-0.39-1.03-0.39-1.41,0l-1.06,1.06 c-0.39,0.39-0.39,1.03,0,1.41s1.03,0.39,1.41,0L7.05,18.36z"}))}function ae(e){return r.createElement("svg",(0,l.Z)({viewBox:"0 0 24 24",width:24,height:24},e),r.createElement("path",{fill:"currentColor",d:"M9.37,5.51C9.19,6.15,9.1,6.82,9.1,7.5c0,4.08,3.32,7.4,7.4,7.4c0.68,0,1.35-0.09,1.99-0.27C17.45,17.19,14.93,19,12,19 c-3.86,0-7-3.14-7-7C5,9.07,6.81,6.55,9.37,5.51z M12,3c-4.97,0-9,4.03-9,9s4.03,9,9,9s9-4.03,9-9c0-0.46-0.04-0.92-0.1-1.36 c-0.98,1.37-2.58,2.26-4.4,2.26c-2.98,0-5.4-2.42-5.4-5.4c0-1.81,0.89-3.42,2.26-4.4C12.92,3.04,12.46,3,12,3L12,3z"}))}const oe={toggle:"toggle_vylO",toggleButton:"toggleButton_gllP",darkToggleIcon:"darkToggleIcon_wfgR",lightToggleIcon:"lightToggleIcon_pyhR",toggleButtonDisabled:"toggleButtonDisabled_aARS"};function ie(e){let{className:t,value:n,onChange:o}=e;const i=(0,E.Z)(),l=(0,u.I)({message:"Switch between dark and light mode (currently {mode})",id:"theme.colorToggle.ariaLabel",description:"The ARIA label for the navbar color mode toggle"},{mode:"dark"===n?(0,u.I)({message:"dark mode",id:"theme.colorToggle.ariaLabel.mode.dark",description:"The name for the dark color mode"}):(0,u.I)({message:"light mode",id:"theme.colorToggle.ariaLabel.mode.light",description:"The name for the light color mode"})});return 
r.createElement("div",{className:(0,a.Z)(oe.toggle,t)},r.createElement("button",{className:(0,a.Z)("clean-btn",oe.toggleButton,!i&&oe.toggleButtonDisabled),type:"button",onClick:()=>o("dark"===n?"light":"dark"),disabled:!i,title:l,"aria-label":l,"aria-live":"polite"},r.createElement(re,{className:(0,a.Z)(oe.toggleIcon,oe.lightToggleIcon)}),r.createElement(ae,{className:(0,a.Z)(oe.toggleIcon,oe.darkToggleIcon)})))}const le=r.memo(ie);function se(e){let{className:t}=e;const n=(0,k.L)().colorMode.disableSwitch,{colorMode:a,setColorMode:o}=(0,ne.I)();return n?null:r.createElement(le,{className:t,value:a,onChange:o})}var ue=n(9960),ce=n(4996),de=n(2263);const fe={themedImage:"themedImage_ToTc","themedImage--light":"themedImage--light_HNdA","themedImage--dark":"themedImage--dark_i4oU"};function pe(e){const t=(0,E.Z)(),{colorMode:n}=(0,ne.I)(),{sources:o,className:i,alt:s,...u}=e,c=t?"dark"===n?["dark"]:["light"]:["light","dark"];return r.createElement(r.Fragment,null,c.map((e=>r.createElement("img",(0,l.Z)({key:e,src:o[e],alt:s,className:(0,a.Z)(fe.themedImage,fe[`themedImage--${e}`],i)},u)))))}function he(e){let{logo:t,alt:n,imageClassName:a}=e;const o={light:(0,ce.Z)(t.src),dark:(0,ce.Z)(t.srcDark||t.src)},i=r.createElement(pe,{className:t.className,sources:o,height:t.height,width:t.width,alt:n,style:t.style});return a?r.createElement("div",{className:a},i):i}function me(e){const{siteConfig:{title:t}}=(0,de.Z)(),{navbar:{title:n,logo:a}}=(0,k.L)(),{imageClassName:o,titleClassName:i,...s}=e,u=(0,ce.Z)(a?.href||"/"),c=n?"":t,d=a?.alt??c;return r.createElement(ue.Z,(0,l.Z)({to:u},s,a?.target&&{target:a.target}),a&&r.createElement(he,{logo:a,alt:d,imageClassName:o}),null!=n&&r.createElement("b",{className:i},n))}function ge(){return r.createElement(me,{className:"navbar__brand",imageClassName:"navbar__logo",titleClassName:"navbar__title text--truncate"})}function be(){const e=Z();return r.createElement("button",{type:"button","aria-label":(0,u.I)({id:"theme.docs.sidebar.closeSidebarButtonAriaLabel",message:"Close navigation bar",description:"The ARIA label for close button of mobile sidebar"}),className:"clean-btn navbar-sidebar__close",onClick:()=>e.toggle()},r.createElement(P,{color:"var(--ifm-color-emphasis-600)"}))}function ve(){return r.createElement("div",{className:"navbar-sidebar__brand"},r.createElement(ge,null),r.createElement(se,{className:"margin-right--md"}),r.createElement(be,null))}var ye=n(3919);function we(e,t){return void 0!==e&&void 0!==t&&new RegExp(e,"gi").test(t)}const ke="iconExternalLink_nPIU";function Ee(e){let{width:t=13.5,height:n=13.5}=e;return r.createElement("svg",{width:t,height:n,"aria-hidden":"true",viewBox:"0 0 24 24",className:ke},r.createElement("path",{fill:"currentColor",d:"M21 13v10h-21v-19h12v2h-10v15h17v-8h2zm3-12h-10.988l4.035 4-6.977 7.07 2.828 2.828 6.977-7.07 4.125 4.172v-11z"}))}function Se(e){let{activeBasePath:t,activeBaseRegex:n,to:a,href:o,label:i,html:s,isDropdownLink:u,prependBaseUrlToHref:c,...d}=e;const f=(0,ce.Z)(a),p=(0,ce.Z)(t),h=(0,ce.Z)(o,{forcePrependBaseUrl:!0}),m=i&&o&&!(0,ye.Z)(o),g=s?{dangerouslySetInnerHTML:{__html:s}}:{children:r.createElement(r.Fragment,null,i,m&&r.createElement(Ee,u&&{width:12,height:12}))};return o?r.createElement(ue.Z,(0,l.Z)({href:c?h:o},d,g)):r.createElement(ue.Z,(0,l.Z)({to:f,isNavLink:!0},(t||n)&&{isActive:(e,t)=>n?we(n,t.pathname):t.pathname.startsWith(p)},d,g))}function xe(e){let{className:t,isDropdownItem:n=!1,...o}=e;const i=r.createElement(Se,(0,l.Z)({className:(0,a.Z)(n?"dropdown__link":"navbar__item 
navbar__link",t),isDropdownLink:n},o));return n?r.createElement("li",null,i):i}function _e(e){let{className:t,isDropdownItem:n,...o}=e;return r.createElement("li",{className:"menu__list-item"},r.createElement(Se,(0,l.Z)({className:(0,a.Z)("menu__link",t)},o)))}function Ce(e){let{mobile:t=!1,position:n,...a}=e;const o=t?_e:xe;return r.createElement(o,(0,l.Z)({},a,{activeClassName:a.activeClassName??(t?"menu__link--active":"navbar__link--active")}))}var Te=n(394);n(723);function Le(e,t){return e.some((e=>function(e,t){return!!function(e,t){const n=e=>(!e||e.endsWith("/")?e:`${e}/`)?.toLowerCase();return n(e)===n(t)}(e.to,t)||!!we(e.activeBaseRegex,t)||!(!e.activeBasePath||!t.startsWith(e.activeBasePath))}(e,t)))}function Pe(e){let{items:t,position:n,className:o,onClick:i,...s}=e;const u=(0,r.useRef)(null),[c,d]=(0,r.useState)(!1);return(0,r.useEffect)((()=>{const e=e=>{u.current&&!u.current.contains(e.target)&&d(!1)};return document.addEventListener("mousedown",e),document.addEventListener("touchstart",e),()=>{document.removeEventListener("mousedown",e),document.removeEventListener("touchstart",e)}}),[u]),r.createElement("div",{ref:u,className:(0,a.Z)("navbar__item","dropdown","dropdown--hoverable",{"dropdown--right":"right"===n,"dropdown--show":c})},r.createElement(Se,(0,l.Z)({"aria-haspopup":"true","aria-expanded":c,role:"button",href:s.to?void 0:"#",className:(0,a.Z)("navbar__link",o)},s,{onClick:s.to?void 0:e=>e.preventDefault(),onKeyDown:e=>{"Enter"===e.key&&(e.preventDefault(),d(!c))}}),s.children??s.label),r.createElement("ul",{className:"dropdown__menu"},t.map(((e,n)=>r.createElement(Ct,(0,l.Z)({isDropdownItem:!0,onKeyDown:e=>{if(n===t.length-1&&"Tab"===e.key){e.preventDefault(),d(!1);const t=u.current.nextElementSibling;if(t){(t instanceof HTMLAnchorElement?t:t.querySelector("a")).focus()}}},activeClassName:"dropdown__link--active"},e,{key:n}))))))}function Oe(e){let{items:t,className:n,position:o,onClick:i,...u}=e;const c=function(){const{siteConfig:{baseUrl:e}}=(0,de.Z)(),{pathname:t}=(0,s.TH)();return t.replace(e,"/")}(),d=Le(t,c),{collapsed:f,toggleCollapsed:p,setCollapsed:h}=(0,Te.u)({initialState:()=>!d});return(0,r.useEffect)((()=>{d&&h(!d)}),[c,d,h]),r.createElement("li",{className:(0,a.Z)("menu__list-item",{"menu__list-item--collapsed":f})},r.createElement(Se,(0,l.Z)({role:"button",className:(0,a.Z)("menu__link menu__link--sublist menu__link--sublist-caret",n)},u,{onClick:e=>{e.preventDefault(),p()}}),u.children??u.label),r.createElement(Te.z,{lazy:!0,as:"ul",className:"menu__list",collapsed:f},t.map(((e,t)=>r.createElement(Ct,(0,l.Z)({mobile:!0,isDropdownItem:!0,onClick:i,activeClassName:"menu__link--active"},e,{key:t}))))))}function Ne(e){let{mobile:t=!1,...n}=e;const a=t?Oe:Pe;return r.createElement(a,n)}var Ae=n(4711);function Ie(e){let{width:t=20,height:n=20,...a}=e;return r.createElement("svg",(0,l.Z)({viewBox:"0 0 24 24",width:t,height:n,"aria-hidden":!0},a),r.createElement("path",{fill:"currentColor",d:"M12.87 15.07l-2.54-2.51.03-.03c1.74-1.94 2.98-4.17 3.71-6.53H17V4h-7V2H8v2H1v1.99h11.17C11.5 7.92 10.44 9.75 9 11.35 8.07 10.32 7.3 9.19 6.69 8h-2c.73 1.63 1.73 3.17 2.98 4.56l-5.09 5.02L4 19l5-5 3.11 3.11.76-2.04zM18.5 10h-2L12 22h2l1.12-3h4.75L21 22h2l-4.5-12zm-2.62 7l1.62-4.33L19.12 17h-3.24z"}))}const Re="iconLanguage_nlXk";var De=n(1029),Me=n(373),Fe=n(143),Be=n(22),je=n(8202),$e=n(3926),ze=n(1073),Ue=n(2539),He=n(726);const 
Qe="searchBar_RVTs",Ze="dropdownMenu_qbY6",Ve="suggestion_fB_2",We="cursor_eG29",Ge="hitTree_kk6K",qe="hitIcon_a7Zy",Ye="hitPath_ieM4",Ke="noResultsIcon_EBY5",Xe="hitFooter_E9YW",Je="hitWrapper_sAK8",et="hitTitle_vyVt",tt="hitAction_NqkB",nt="noResults_l6Q3",rt="searchBarContainer_NW3z",at="searchBarLoadingRing_YnHq",ot="searchClearButton_qk4g",it="searchIndexLoading_EJ1f",lt="searchHintContainer_Pkmr",st="searchHint_iIMx",ut="focused_OWtg",ct="input_FOTf",dt="hint_URu1",ft="suggestions_X8XU",pt="dataset_QiCy",ht="empty_eITn";function mt(e){let{document:t,type:n,page:r,metadata:a,tokens:o,isInterOfTree:i,isLastOfTree:l}=e;const s=0===n,u=1===n,c=[];i?c.push(''):l&&c.push('');const d=c.map((e=>`${e}`)),f=`${s?'':u?'':''}`,p=[`${(0,He.o)(t.t,(0,ze.m)(a,"t"),o)}`];if(!i&&!l&&De.H6){const e=r?(r.b??[]).concat(r.t).concat(t.s&&t.s!==r.t?t.s:[]):t.b;p.push(`${(0,$e.e)(e??[])}`)}else s||p.push(`${(0,Ue.C)(r.t||(t.u.startsWith("/docs/api-reference/")?"API Reference":""),o)}`);const h=``;return[...d,f,``,...p,"",h].join("")}function gt(){return`${(0,u.I)({id:"theme.SearchBar.noResultsText",message:"No results"})}`}var bt=n(311);async function vt(){const e=await Promise.all([n.e(8443),n.e(5525)]).then(n.t.bind(n,8443,23)),t=e.default;return t.noConflict?t.noConflict():e.noConflict&&e.noConflict(),t}const yt="_highlight";const wt=function(e){let{handleSearchBarToggle:t}=e;const{siteConfig:{baseUrl:n}}=(0,de.Z)(),o=(0,Fe.gA)();let i=n;try{const{preferredVersion:e}=(0,Me.J)(o?.pluginId??De.gQ);e&&!e.isLast&&(i=e.path+"/")}catch(N){if(De.l9&&!(N instanceof c.i6))throw N}const l=(0,s.k6)(),d=(0,s.TH)(),f=(0,r.useRef)(null),p=(0,r.useRef)("empty"),h=(0,r.useRef)(!1),[m,g]=(0,r.useState)(!1),[b,v]=(0,r.useState)(!1),[y,w]=(0,r.useState)(""),k=(0,r.useRef)(null),E=(0,r.useCallback)((async()=>{if("empty"!==p.current)return;p.current="loading",g(!0);const[{wrappedIndexes:e,zhDictionary:t},r]=await Promise.all([(0,Be.w)(i),vt()]);if(k.current=r(f.current,{hint:!1,autoselect:!0,openOnFocus:!0,cssClasses:{root:Qe,noPrefix:!0,dropdownMenu:Ze,input:ct,hint:dt,suggestions:ft,suggestion:Ve,cursor:We,dataset:pt,empty:ht}},[{source:(0,je.v)(e,t,De.qo),templates:{suggestion:mt,empty:gt,footer:e=>{let{query:t,isEmpty:r}=e;if(r)return;const a=document.createElement("a"),o=`${n}search?q=${encodeURIComponent(t)}`;a.href=o,a.textContent=(0,u.I)({id:"theme.SearchBar.seeAll",message:"See all results"}),a.addEventListener("click",(e=>{e.ctrlKey||e.metaKey||(e.preventDefault(),k.current.autocomplete.close(),l.push(o))}));const i=document.createElement("div");return i.className=Xe,i.appendChild(a),i}}}]).on("autocomplete:selected",(function(e,t){let{document:{u:n,h:r},tokens:a}=t;f.current?.blur();let o=n;if(De.vc&&a.length>0){const e=new URLSearchParams;for(const t of a)e.append(yt,t);o+=`?${e.toString()}`}r&&(o+=r),l.push(o)})).on("autocomplete:closed",(()=>{f.current?.blur()})),p.current="done",g(!1),h.current){const e=f.current;e.value&&k.current.autocomplete.open(),e.focus()}}),[n,i,l]);(0,r.useEffect)((()=>{if(!De.vc)return;const e=V.Z.canUseDOM?new URLSearchParams(d.search).getAll(yt):[];setTimeout((()=>{const t=document.querySelector("article");if(!t)return;const n=new De.vc(t);n.unmark(),0!==e.length&&n.mark(e),w(e.join(" ")),k.current?.autocomplete.setVal(e.join(" 
"))}))}),[d.search,d.pathname]);const[S,x]=(0,r.useState)(!1),_=(0,r.useCallback)((()=>{h.current=!0,E(),x(!0),t?.(!0)}),[t,E]),C=(0,r.useCallback)((()=>{x(!1),t?.(!1)}),[t]),T=(0,r.useCallback)((()=>{E()}),[E]),L=(0,r.useCallback)((e=>{w(e.target.value),e.target.value&&v(!0)}),[]),P=!!V.Z.canUseDOM&&/mac/i.test(navigator.userAgentData?.platform??navigator.platform);(0,r.useEffect)((()=>{if(!De.AY)return;const e=e=>{(P?e.metaKey:e.ctrlKey)&&"KeyK"===e.code&&(e.preventDefault(),f.current?.focus(),_())};return document.addEventListener("keydown",e),()=>{document.removeEventListener("keydown",e)}}),[P,_]);const O=(0,r.useCallback)((()=>{const e=new URLSearchParams(d.search);e.delete(yt);const t=e.toString(),n=d.pathname+(""!=t?`?${t}`:"")+d.hash;n!=d.pathname+d.search+d.hash&&l.push(n),w(""),k.current?.autocomplete.setVal("")}),[d.pathname,d.search,d.hash,l]);return r.createElement("div",{className:(0,a.Z)("navbar__search",rt,{[it]:m&&b,[ut]:S})},r.createElement("input",{placeholder:(0,u.I)({id:"theme.SearchBar.label",message:"Search",description:"The ARIA label and placeholder for search button"}),"aria-label":"Search",className:"navbar__search-input",onMouseEnter:T,onFocus:_,onBlur:C,onChange:L,ref:f,value:y}),r.createElement(bt.Z,{className:at}),De.AY&&De.t_&&(""!==y?r.createElement("button",{className:ot,onClick:O},"\u2715"):r.createElement("div",{className:lt},r.createElement("kbd",{className:st},P?"\u2318":"ctrl"),r.createElement("kbd",{className:st},"K"))))},kt="searchBox_ZlJk";function Et(e){let{children:t,className:n}=e;return r.createElement("div",{className:(0,a.Z)(n,kt)},t)}var St=n(2802);const xt=e=>e.docs.find((t=>t.id===e.mainDocId));const _t={default:Ce,localeDropdown:function(e){let{mobile:t,dropdownItemsBefore:n,dropdownItemsAfter:a,...o}=e;const{i18n:{currentLocale:i,locales:c,localeConfigs:d}}=(0,de.Z)(),f=(0,Ae.l)(),{search:p,hash:h}=(0,s.TH)(),m=[...n,...c.map((e=>{const n=`${`pathname://${f.createUrl({locale:e,fullyQualified:!1})}`}${p}${h}`;return{label:d[e].label,lang:d[e].htmlLang,to:n,target:"_self",autoAddBaseUrl:!1,className:e===i?t?"menu__link--active":"dropdown__link--active":""}})),...a],g=t?(0,u.I)({message:"Languages",id:"theme.navbar.mobileLanguageDropdown.label",description:"The label for the mobile language switcher dropdown"}):d[i].label;return r.createElement(Ne,(0,l.Z)({},o,{mobile:t,label:r.createElement(r.Fragment,null,r.createElement(Ie,{className:Re}),g),items:m}))},search:function(e){let{mobile:t,className:n}=e;return t?null:r.createElement(Et,{className:n},r.createElement(wt,null))},dropdown:Ne,html:function(e){let{value:t,className:n,mobile:o=!1,isDropdownItem:i=!1}=e;const l=i?"li":"div";return r.createElement(l,{className:(0,a.Z)({navbar__item:!o&&!i,"menu__list-item":o},n),dangerouslySetInnerHTML:{__html:t}})},doc:function(e){let{docId:t,label:n,docsPluginId:a,...o}=e;const{activeDoc:i}=(0,Fe.Iw)(a),s=(0,St.vY)(t,a);return null===s?null:r.createElement(Ce,(0,l.Z)({exact:!0},o,{isActive:()=>i?.path===s.path||!!i?.sidebar&&i.sidebar===s.sidebar,label:n??s.id,to:s.path}))},docSidebar:function(e){let{sidebarId:t,label:n,docsPluginId:a,...o}=e;const{activeDoc:i}=(0,Fe.Iw)(a),s=(0,St.oz)(t,a).link;if(!s)throw new Error(`DocSidebarNavbarItem: Sidebar with ID "${t}" doesn't have anything to be linked to.`);return r.createElement(Ce,(0,l.Z)({exact:!0},o,{isActive:()=>i?.sidebar===t,label:n??s.label,to:s.path}))},docsVersion:function(e){let{label:t,to:n,docsPluginId:a,...o}=e;const 
i=(0,St.lO)(a)[0],s=t??i.label,u=n??(e=>e.docs.find((t=>t.id===e.mainDocId)))(i).path;return r.createElement(Ce,(0,l.Z)({},o,{label:s,to:u}))},docsVersionDropdown:function(e){let{mobile:t,docsPluginId:n,dropdownActiveClassDisabled:a,dropdownItemsBefore:o,dropdownItemsAfter:i,...c}=e;const{search:d,hash:f}=(0,s.TH)(),p=(0,Fe.Iw)(n),h=(0,Fe.gB)(n),{savePreferredVersionName:m}=(0,Me.J)(n),g=[...o,...h.map((e=>{const t=p.alternateDocVersions[e.name]??xt(e);return{label:e.label,to:`${t.path}${d}${f}`,isActive:()=>e===p.activeVersion,onClick:()=>m(e.name)}})),...i],b=(0,St.lO)(n)[0],v=t&&g.length>1?(0,u.I)({id:"theme.navbar.mobileVersionsDropdown.label",message:"Versions",description:"The label for the navbar versions dropdown on mobile view"}):b.label,y=t&&g.length>1?void 0:xt(b).path;return g.length<=1?r.createElement(Ce,(0,l.Z)({},c,{mobile:t,label:v,to:y,isActive:a?()=>!1:void 0})):r.createElement(Ne,(0,l.Z)({},c,{mobile:t,label:v,to:y,items:g,isActive:a?()=>!1:void 0}))}};function Ct(e){let{type:t,...n}=e;const a=function(e,t){return e&&"default"!==e?e:"items"in t?"dropdown":"default"}(t,n),o=_t[a];if(!o)throw new Error(`No NavbarItem component found for type "${t}".`);return r.createElement(o,n)}function Tt(){const e=Z(),t=(0,k.L)().navbar.items;return r.createElement("ul",{className:"menu__list"},t.map(((t,n)=>r.createElement(Ct,(0,l.Z)({mobile:!0},t,{onClick:()=>e.toggle(),key:n})))))}function Lt(e){return r.createElement("button",(0,l.Z)({},e,{type:"button",className:"clean-btn navbar-sidebar__back"}),r.createElement(u.Z,{id:"theme.navbar.mobileSidebarSecondaryMenu.backButtonLabel",description:"The label of the back button to return to main menu, inside the mobile navbar sidebar secondary menu (notably used to display the docs sidebar)"},"\u2190 Back to main menu"))}function Pt(){const e=0===(0,k.L)().navbar.items.length,t=ee();return r.createElement(r.Fragment,null,!e&&r.createElement(Lt,{onClick:()=>t.hide()}),t.content)}function Ot(){const e=Z();var t;return void 0===(t=e.shown)&&(t=!0),(0,r.useEffect)((()=>(document.body.style.overflow=t?"hidden":"visible",()=>{document.body.style.overflow="visible"})),[t]),e.shouldRender?r.createElement(te,{header:r.createElement(ve,null),primaryMenu:r.createElement(Tt,null),secondaryMenu:r.createElement(Pt,null)}):null}const Nt="navbarHideable_m1mJ",At="navbarHidden_jGov";function It(e){return r.createElement("div",(0,l.Z)({role:"presentation"},e,{className:(0,a.Z)("navbar-sidebar__backdrop",e.className)}))}function Rt(e){let{children:t}=e;const{navbar:{hideOnScroll:n,style:o}}=(0,k.L)(),i=Z(),{navbarRef:l,isNavbarVisible:s}=Y(n);return r.createElement("nav",{ref:l,className:(0,a.Z)("navbar","navbar--fixed-top",n&&[Nt,!s&&At],{"navbar--dark":"dark"===o,"navbar--primary":"primary"===o,"navbar-sidebar--show":i.shown})},t,r.createElement(It,{onClick:i.toggle}),r.createElement(Ot,null))}function Dt(e){let{width:t=30,height:n=30,className:a,...o}=e;return r.createElement("svg",(0,l.Z)({className:a,width:t,height:n,viewBox:"0 0 30 30","aria-hidden":"true"},o),r.createElement("path",{stroke:"currentColor",strokeLinecap:"round",strokeMiterlimit:"10",strokeWidth:"2",d:"M4 7h22M4 15h22M4 23h22"}))}function Mt(){const{toggle:e,shown:t}=Z();return r.createElement("button",{onClick:e,"aria-label":(0,u.I)({id:"theme.docs.sidebar.toggleSidebarButtonAriaLabel",message:"Toggle navigation bar",description:"The ARIA label for hamburger menu button of mobile navigation"}),"aria-expanded":t,className:"navbar__toggle 
clean-btn",type:"button"},r.createElement(Dt,null))}const Ft="colorModeToggle_DEke";function Bt(e){let{items:t}=e;return r.createElement(r.Fragment,null,t.map(((e,t)=>r.createElement(Ct,(0,l.Z)({},e,{key:t})))))}function jt(e){let{left:t,right:n}=e;return r.createElement("div",{className:"navbar__inner"},r.createElement("div",{className:"navbar__items"},t),r.createElement("div",{className:"navbar__items navbar__items--right"},n))}function $t(){const e=Z(),t=(0,k.L)().navbar.items,[n,a]=function(e){function t(e){return"left"===(e.position??"right")}return[e.filter(t),e.filter((e=>!t(e)))]}(t),o=t.find((e=>"search"===e.type));return r.createElement(jt,{left:r.createElement(r.Fragment,null,!e.disabled&&r.createElement(Mt,null),r.createElement(ge,null),r.createElement(Bt,{items:n})),right:r.createElement(r.Fragment,null,r.createElement(Bt,{items:a}),r.createElement(se,{className:Ft}),!o&&r.createElement(Et,null,r.createElement(wt,null)))})}function zt(){return r.createElement(Rt,null,r.createElement($t,null))}function Ut(e){let{item:t}=e;const{to:n,href:a,label:o,prependBaseUrlToHref:i,...s}=t,u=(0,ce.Z)(n),c=(0,ce.Z)(a,{forcePrependBaseUrl:!0});return r.createElement(ue.Z,(0,l.Z)({className:"footer__link-item"},a?{href:i?c:a}:{to:u},s),o,a&&!(0,ye.Z)(a)&&r.createElement(Ee,null))}function Ht(e){let{item:t}=e;return t.html?r.createElement("li",{className:"footer__item",dangerouslySetInnerHTML:{__html:t.html}}):r.createElement("li",{key:t.href??t.to,className:"footer__item"},r.createElement(Ut,{item:t}))}function Qt(e){let{column:t}=e;return r.createElement("div",{className:"col footer__col"},r.createElement("div",{className:"footer__title"},t.title),r.createElement("ul",{className:"footer__items clean-list"},t.items.map(((e,t)=>r.createElement(Ht,{key:t,item:e})))))}function Zt(e){let{columns:t}=e;return r.createElement("div",{className:"row footer__links"},t.map(((e,t)=>r.createElement(Qt,{key:t,column:e}))))}function Vt(){return r.createElement("span",{className:"footer__link-separator"},"\xb7")}function Wt(e){let{item:t}=e;return t.html?r.createElement("span",{className:"footer__link-item",dangerouslySetInnerHTML:{__html:t.html}}):r.createElement(Ut,{item:t})}function Gt(e){let{links:t}=e;return r.createElement("div",{className:"footer__links text--center"},r.createElement("div",{className:"footer__links"},t.map(((e,n)=>r.createElement(r.Fragment,{key:n},r.createElement(Wt,{item:e}),t.length!==n+1&&r.createElement(Vt,null))))))}function qt(e){let{links:t}=e;return function(e){return"title"in e[0]}(t)?r.createElement(Zt,{columns:t}):r.createElement(Gt,{links:t})}const Yt="footerLogoLink_BH7S";function Kt(e){let{logo:t}=e;const{withBaseUrl:n}=(0,ce.C)(),o={light:n(t.src),dark:n(t.srcDark??t.src)};return r.createElement(pe,{className:(0,a.Z)("footer__logo",t.className),alt:t.alt,sources:o,width:t.width,height:t.height,style:t.style})}function Xt(e){let{logo:t}=e;return t.href?r.createElement(ue.Z,{href:t.href,className:Yt,target:t.target},r.createElement(Kt,{logo:t})):r.createElement(Kt,{logo:t})}function Jt(e){let{copyright:t}=e;return r.createElement("div",{className:"footer__copyright",dangerouslySetInnerHTML:{__html:t}})}function en(e){let{style:t,links:n,logo:o,copyright:i}=e;return r.createElement("footer",{className:(0,a.Z)("footer",{"footer--dark":"dark"===t})},r.createElement("div",{className:"container container-fluid"},n,(o||i)&&r.createElement("div",{className:"footer__bottom text--center"},o&&r.createElement("div",{className:"margin-bottom--sm"},o),i)))}function 
tn(){const{footer:e}=(0,k.L)();if(!e)return null;const{copyright:t,links:n,logo:a,style:o}=e;return r.createElement(en,{style:o,links:n&&n.length>0&&r.createElement(qt,{links:n}),logo:a&&r.createElement(Xt,{logo:a}),copyright:t&&r.createElement(Jt,{copyright:t})})}const nn=r.memo(tn),rn="docusaurus.tab.",an=r.createContext(void 0);const on=(0,c.Qc)([ne.S,function(e){let{children:t}=e;const n=function(){const{announcementBar:e}=(0,k.L)(),t=(0,E.Z)(),[n,a]=(0,r.useState)((()=>!!t&&C()));(0,r.useEffect)((()=>{a(C())}),[]);const o=(0,r.useCallback)((()=>{T(!0),a(!0)}),[]);return(0,r.useEffect)((()=>{if(!e)return;const{id:t}=e;let n=_.get();"annoucement-bar"===n&&(n="announcement-bar");const r=t!==n;_.set(t),r&&T(!1),!r&&C()||a(!1)}),[e]),(0,r.useMemo)((()=>({isActive:!!e&&!n,close:o})),[e,n,o])}();return r.createElement(L.Provider,{value:n},t)},function(e){let{children:t}=e;const n=function(){const[e,t]=(0,r.useState)({}),n=(0,r.useCallback)(((e,t)=>{(0,S.W)(`${rn}${e}`).set(t)}),[]);(0,r.useEffect)((()=>{try{const e={};(0,S._)().forEach((t=>{if(t.startsWith(rn)){const n=t.substring(rn.length);e[n]=(0,S.W)(t).get()}})),t(e)}catch(e){console.error(e)}}),[]);const a=(0,r.useCallback)(((e,r)=>{t((t=>({...t,[e]:r}))),n(e,r)}),[n]);return(0,r.useMemo)((()=>({tabGroupChoices:e,setTabGroupChoices:a})),[e,a])}();return r.createElement(an.Provider,{value:n},t)},function(e){let{children:t}=e;const n=function(){const e=(0,r.useRef)(!0);return(0,r.useMemo)((()=>({scrollEventsEnabledRef:e,enableScrollEvents:()=>{e.current=!0},disableScrollEvents:()=>{e.current=!1}})),[])}();return r.createElement(W.Provider,{value:n},t)},Me.L5,i.VC,function(e){let{children:t}=e;return r.createElement(j.n2,null,r.createElement(Q,null,r.createElement(X,null,t)))}]);function ln(e){let{children:t}=e;return r.createElement(on,null,t)}function sn(e){let{error:t,tryAgain:n}=e;return r.createElement("main",{className:"container margin-vert--xl"},r.createElement("div",{className:"row"},r.createElement("div",{className:"col col--6 col--offset-3"},r.createElement("h1",{className:"hero__title"},r.createElement(u.Z,{id:"theme.ErrorPageContent.title",description:"The title of the fallback page when the page crashed"},"This page crashed.")),r.createElement("p",null,t.message),r.createElement("div",null,r.createElement("button",{type:"button",onClick:n},r.createElement(u.Z,{id:"theme.ErrorPageContent.tryAgain",description:"The label of the button to try again when the page crashed"},"Try again"))))))}const un="mainWrapper_z2l0";function cn(e){const{children:t,noFooter:n,wrapperClassName:l,title:s,description:u}=e;return(0,v.t)(),r.createElement(ln,null,r.createElement(i.d,{title:s,description:u}),r.createElement(w,null),r.createElement(B,null),r.createElement(zt,null),r.createElement("div",{id:f,className:(0,a.Z)(b.k.wrapper.main,un,l)},r.createElement(o.Z,{fallback:e=>r.createElement(sn,e)},t)),!n&&r.createElement(nn,null))}},197:(e,t,n)=>{"use strict";n.d(t,{Z:()=>o});var r=n(7294),a=n(5742);function o(e){let{locale:t,version:n,tag:o}=e;const i=t;return r.createElement(a.Z,null,t&&r.createElement("meta",{name:"docusaurus_locale",content:t}),n&&r.createElement("meta",{name:"docusaurus_version",content:n}),o&&r.createElement("meta",{name:"docusaurus_tag",content:o}),i&&r.createElement("meta",{name:"docsearch:language",content:i}),n&&r.createElement("meta",{name:"docsearch:version",content:n}),o&&r.createElement("meta",{name:"docsearch:docusaurus_tag",content:o}))}},394:(e,t,n)=>{"use strict";n.d(t,{u:()=>i,z:()=>h});var 
r=n(7462),a=n(7294),o=n(412);function i(e){let{initialState:t}=e;const[n,r]=(0,a.useState)(t??!1),o=(0,a.useCallback)((()=>{r((e=>!e))}),[]);return{collapsed:n,setCollapsed:r,toggleCollapsed:o}}const l={display:"none",overflow:"hidden",height:"0px"},s={display:"block",overflow:"visible",height:"auto"};function u(e,t){const n=t?l:s;e.style.display=n.display,e.style.overflow=n.overflow,e.style.height=n.height}function c(e){let{collapsibleRef:t,collapsed:n,animation:r}=e;const o=(0,a.useRef)(!1);(0,a.useEffect)((()=>{const e=t.current;function a(){const t=e.scrollHeight,n=r?.duration??function(e){const t=e/36;return Math.round(10*(4+15*t**.25+t/5))}(t);return{transition:`height ${n}ms ${r?.easing??"ease-in-out"}`,height:`${t}px`}}function i(){const t=a();e.style.transition=t.transition,e.style.height=t.height}if(!o.current)return u(e,n),void(o.current=!0);return e.style.willChange="height",function(){const t=requestAnimationFrame((()=>{n?(i(),requestAnimationFrame((()=>{e.style.height=l.height,e.style.overflow=l.overflow}))):(e.style.display="block",requestAnimationFrame((()=>{i()})))}));return()=>cancelAnimationFrame(t)}()}),[t,n,r])}function d(e){if(!o.Z.canUseDOM)return e?l:s}function f(e){let{as:t="div",collapsed:n,children:r,animation:o,onCollapseTransitionEnd:i,className:l,disableSSRStyle:s}=e;const f=(0,a.useRef)(null);return c({collapsibleRef:f,collapsed:n,animation:o}),a.createElement(t,{ref:f,style:s?void 0:d(n),onTransitionEnd:e=>{"height"===e.propertyName&&(u(f.current,n),i?.(n))},className:l},r)}function p(e){let{collapsed:t,...n}=e;const[o,i]=(0,a.useState)(!t),[l,s]=(0,a.useState)(t);return(0,a.useLayoutEffect)((()=>{t||i(!0)}),[t]),(0,a.useLayoutEffect)((()=>{o&&s(t)}),[o,t]),o?a.createElement(f,(0,r.Z)({},n,{collapsed:l})):null}function h(e){let{lazy:t,...n}=e;const r=t?p:f;return a.createElement(r,n)}},2949:(e,t,n)=>{"use strict";n.d(t,{I:()=>g,S:()=>m});var r=n(7294),a=n(412),o=n(902),i=n(12),l=n(6668);const s=r.createContext(void 0),u="theme",c=(0,i.W)(u),d="light",f="dark",p=e=>e===f?f:d;function h(){const{colorMode:{defaultMode:e,disableSwitch:t,respectPrefersColorScheme:n}}=(0,l.L)(),[o,i]=(0,r.useState)((e=>a.Z.canUseDOM?p(document.documentElement.getAttribute("data-theme")):p(e))(e));(0,r.useEffect)((()=>{t&&c.del()}),[t]);const s=(0,r.useCallback)((function(t,r){void 0===r&&(r={});const{persist:a=!0}=r;t?(i(t),a&&(e=>{c.set(p(e))})(t)):(i(n?window.matchMedia("(prefers-color-scheme: dark)").matches?f:d:e),c.del())}),[n,e]);(0,r.useEffect)((()=>{document.documentElement.setAttribute("data-theme",p(o))}),[o]),(0,r.useEffect)((()=>{if(t)return;const e=e=>{if(e.key!==u)return;const t=c.get();null!==t&&s(p(t))};return window.addEventListener("storage",e),()=>window.removeEventListener("storage",e)}),[t,s]);const h=(0,r.useRef)(!1);return(0,r.useEffect)((()=>{if(t&&!n)return;const e=window.matchMedia("(prefers-color-scheme: dark)"),r=()=>{window.matchMedia("print").matches||h.current?h.current=window.matchMedia("print").matches:s(null)};return e.addListener(r),()=>e.removeListener(r)}),[s,t,n]),(0,r.useMemo)((()=>({colorMode:o,setColorMode:s,get isDarkTheme(){return o===f},setLightTheme(){s(d)},setDarkTheme(){s(f)}})),[o,s])}function m(e){let{children:t}=e;const n=h();return r.createElement(s.Provider,{value:n},t)}function g(){const e=(0,r.useContext)(s);if(null==e)throw new o.i6("ColorModeProvider","Please see https://docusaurus.io/docs/api/themes/configuration#use-color-mode.");return e}},373:(e,t,n)=>{"use strict";n.d(t,{J:()=>y,L5:()=>b});var 
r=n(7294),a=n(143),o=n(9935),i=n(6668),l=n(2802),s=n(902),u=n(12);const c=e=>`docs-preferred-version-${e}`,d=(e,t,n)=>{(0,u.W)(c(e),{persistence:t}).set(n)},f=(e,t)=>(0,u.W)(c(e),{persistence:t}).get(),p=(e,t)=>{(0,u.W)(c(e),{persistence:t}).del()};const h=r.createContext(null);function m(){const e=(0,a._r)(),t=(0,i.L)().docs.versionPersistence,n=(0,r.useMemo)((()=>Object.keys(e)),[e]),[o,l]=(0,r.useState)((()=>(e=>Object.fromEntries(e.map((e=>[e,{preferredVersionName:null}]))))(n)));(0,r.useEffect)((()=>{l(function(e){let{pluginIds:t,versionPersistence:n,allDocsData:r}=e;function a(e){const t=f(e,n);return r[e].versions.some((e=>e.name===t))?{preferredVersionName:t}:(p(e,n),{preferredVersionName:null})}return Object.fromEntries(t.map((e=>[e,a(e)])))}({allDocsData:e,versionPersistence:t,pluginIds:n}))}),[e,t,n]);return[o,(0,r.useMemo)((()=>({savePreferredVersion:function(e,n){d(e,t,n),l((t=>({...t,[e]:{preferredVersionName:n}})))}})),[t])]}function g(e){let{children:t}=e;const n=m();return r.createElement(h.Provider,{value:n},t)}function b(e){let{children:t}=e;return l.cE?r.createElement(g,null,t):r.createElement(r.Fragment,null,t)}function v(){const e=(0,r.useContext)(h);if(!e)throw new s.i6("DocsPreferredVersionContextProvider");return e}function y(e){void 0===e&&(e=o.m);const t=(0,a.zh)(e),[n,i]=v(),{preferredVersionName:l}=n[e];return{preferredVersion:t.versions.find((e=>e.name===l))??null,savePreferredVersionName:(0,r.useCallback)((t=>{i.savePreferredVersion(e,t)}),[i,e])}}},3102:(e,t,n)=>{"use strict";n.d(t,{HY:()=>l,Zo:()=>s,n2:()=>i});var r=n(7294),a=n(902);const o=r.createContext(null);function i(e){let{children:t}=e;const n=(0,r.useState)({component:null,props:null});return r.createElement(o.Provider,{value:n},t)}function l(){const e=(0,r.useContext)(o);if(!e)throw new a.i6("NavbarSecondaryMenuContentProvider");return e[0]}function s(e){let{component:t,props:n}=e;const i=(0,r.useContext)(o);if(!i)throw new a.i6("NavbarSecondaryMenuContentProvider");const[,l]=i,s=(0,a.Ql)(n);return(0,r.useEffect)((()=>{l({component:t,props:s})}),[l,t,s]),(0,r.useEffect)((()=>()=>l({component:null,props:null})),[l]),null}},9727:(e,t,n)=>{"use strict";n.d(t,{h:()=>a,t:()=>o});var r=n(7294);const a="navigation-with-keyboard";function o(){(0,r.useEffect)((()=>{function e(e){"keydown"===e.type&&"Tab"===e.key&&document.body.classList.add(a),"mousedown"===e.type&&document.body.classList.remove(a)}return document.addEventListener("keydown",e),document.addEventListener("mousedown",e),()=>{document.body.classList.remove(a),document.removeEventListener("keydown",e),document.removeEventListener("mousedown",e)}}),[])}},7524:(e,t,n)=>{"use strict";n.d(t,{i:()=>u});var r=n(7294),a=n(412);const o="desktop",i="mobile",l="ssr";function s(){return a.Z.canUseDOM?window.innerWidth>996?o:i:l}function u(){const[e,t]=(0,r.useState)((()=>s()));return(0,r.useEffect)((()=>{function e(){t(s())}return window.addEventListener("resize",e),()=>{window.removeEventListener("resize",e),clearTimeout(undefined)}}),[]),e}},5281:(e,t,n)=>{"use strict";n.d(t,{k:()=>r});const 
r={page:{blogListPage:"blog-list-page",blogPostPage:"blog-post-page",blogTagsListPage:"blog-tags-list-page",blogTagPostListPage:"blog-tags-post-list-page",docsDocPage:"docs-doc-page",docsTagsListPage:"docs-tags-list-page",docsTagDocListPage:"docs-tags-doc-list-page",mdxPage:"mdx-page"},wrapper:{main:"main-wrapper",blogPages:"blog-wrapper",docsPages:"docs-wrapper",mdxPages:"mdx-wrapper"},common:{editThisPage:"theme-edit-this-page",lastUpdated:"theme-last-updated",backToTopButton:"theme-back-to-top-button",codeBlock:"theme-code-block",admonition:"theme-admonition",admonitionType:e=>`theme-admonition-${e}`},layout:{},docs:{docVersionBanner:"theme-doc-version-banner",docVersionBadge:"theme-doc-version-badge",docBreadcrumbs:"theme-doc-breadcrumbs",docMarkdown:"theme-doc-markdown",docTocMobile:"theme-doc-toc-mobile",docTocDesktop:"theme-doc-toc-desktop",docFooter:"theme-doc-footer",docFooterTagsRow:"theme-doc-footer-tags-row",docFooterEditMetaRow:"theme-doc-footer-edit-meta-row",docSidebarContainer:"theme-doc-sidebar-container",docSidebarMenu:"theme-doc-sidebar-menu",docSidebarItemCategory:"theme-doc-sidebar-item-category",docSidebarItemLink:"theme-doc-sidebar-item-link",docSidebarItemCategoryLevel:e=>`theme-doc-sidebar-item-category-level-${e}`,docSidebarItemLinkLevel:e=>`theme-doc-sidebar-item-link-level-${e}`},blog:{}}},2802:(e,t,n)=>{"use strict";n.d(t,{cE:()=>l,lO:()=>s,vY:()=>c,oz:()=>u});var r=n(7294),a=n(143),o=n(373);function i(e){return Array.from(new Set(e))}const l=!!a._r;function s(e){const{activeVersion:t}=(0,a.Iw)(e),{preferredVersion:n}=(0,o.J)(e),l=(0,a.yW)(e);return(0,r.useMemo)((()=>i([t,n,l].filter(Boolean))),[t,n,l])}function u(e,t){const n=s(t);return(0,r.useMemo)((()=>{const t=n.flatMap((e=>e.sidebars?Object.entries(e.sidebars):[])),r=t.find((t=>t[0]===e));if(!r)throw new Error(`Can't find any sidebar with id "${e}" in version${n.length>1?"s":""} ${n.map((e=>e.name)).join(", ")}".\n Available sidebar ids are:\n - ${Object.keys(t).join("\n- ")}`);return r[1]}),[e,n])}function c(e,t){const n=s(t);return(0,r.useMemo)((()=>{const t=n.flatMap((e=>e.docs)),r=t.find((t=>t.id===e));if(!r){if(n.flatMap((e=>e.draftIds)).includes(e))return null;throw new Error(`DocNavbarItem: couldn't find any doc with id "${e}" in version${n.length>1?"s":""} ${n.map((e=>e.name)).join(", ")}".\nAvailable doc ids are:\n- ${i(t.map((e=>e.id))).join("\n- ")}`)}return r}),[e,n])}},1944:(e,t,n)=>{"use strict";n.d(t,{FG:()=>f,d:()=>c,VC:()=>p});var r=n(7294),a=n(6010),o=n(5742),i=n(226);function l(){const e=r.useContext(i._);if(!e)throw new Error("Unexpected: no Docusaurus route context found");return e}var s=n(4996),u=n(2263);function c(e){let{title:t,description:n,keywords:a,image:i,children:l}=e;const c=function(e){const{siteConfig:t}=(0,u.Z)(),{title:n,titleDelimiter:r}=t;return e?.trim().length?`${e.trim()} ${r} ${n}`:n}(t),{withBaseUrl:d}=(0,s.C)(),f=i?d(i,{absolute:!0}):void 0;return r.createElement(o.Z,null,t&&r.createElement("title",null,c),t&&r.createElement("meta",{property:"og:title",content:c}),n&&r.createElement("meta",{name:"description",content:n}),n&&r.createElement("meta",{property:"og:description",content:n}),a&&r.createElement("meta",{name:"keywords",content:Array.isArray(a)?a.join(","):a}),f&&r.createElement("meta",{property:"og:image",content:f}),f&&r.createElement("meta",{name:"twitter:image",content:f}),l)}const d=r.createContext(void 0);function f(e){let{className:t,children:n}=e;const i=r.useContext(d),l=(0,a.Z)(i,t);return 
r.createElement(d.Provider,{value:l},r.createElement(o.Z,null,r.createElement("html",{className:l})),n)}function p(e){let{children:t}=e;const n=l(),o=`plugin-${n.plugin.name.replace(/docusaurus-(?:plugin|theme)-(?:content-)?/gi,"")}`;const i=`plugin-id-${n.plugin.id}`;return r.createElement(f,{className:(0,a.Z)(o,i)},t)}},902:(e,t,n)=>{"use strict";n.d(t,{D9:()=>i,Qc:()=>u,Ql:()=>s,i6:()=>l,zX:()=>o});var r=n(7294);const a=n(412).Z.canUseDOM?r.useLayoutEffect:r.useEffect;function o(e){const t=(0,r.useRef)(e);return a((()=>{t.current=e}),[e]),(0,r.useCallback)((function(){return t.current(...arguments)}),[])}function i(e){const t=(0,r.useRef)();return a((()=>{t.current=e})),t.current}class l extends Error{constructor(e,t){super(),this.name="ReactContextError",this.message=`Hook ${this.stack?.split("\n")[1]?.match(/at (?:\w+\.)?(?\w+)/)?.groups.name??""} is called outside the <${e}>. ${t??""}`}}function s(e){const t=Object.entries(e);return t.sort(((e,t)=>e[0].localeCompare(t[0]))),(0,r.useMemo)((()=>e),t.flat())}function u(e){return t=>{let{children:n}=t;return r.createElement(r.Fragment,null,e.reduceRight(((e,t)=>r.createElement(t,null,e)),n))}}},12:(e,t,n)=>{"use strict";n.d(t,{W:()=>l,_:()=>s});const r="localStorage";function a(e){if(void 0===e&&(e=r),"undefined"==typeof window)throw new Error("Browser storage is not available on Node.js/Docusaurus SSR process.");if("none"===e)return null;try{return window[e]}catch(n){return t=n,o||(console.warn("Docusaurus browser storage is not available.\nPossible reasons: running Docusaurus in an iframe, in an incognito browser session, or using too strict browser privacy settings.",t),o=!0),null}var t}let o=!1;const i={get:()=>null,set:()=>{},del:()=>{}};function l(e,t){if("undefined"==typeof window)return function(e){function t(){throw new Error(`Illegal storage API usage for storage key "${e}".\nDocusaurus storage APIs are not supposed to be called on the server-rendering process.\nPlease only call storage APIs in effects and event handlers.`)}return{get:t,set:t,del:t}}(e);const n=a(t?.persistence);return null===n?i:{get:()=>{try{return n.getItem(e)}catch(t){return console.error(`Docusaurus storage error, can't get key=${e}`,t),null}},set:t=>{try{n.setItem(e,t)}catch(r){console.error(`Docusaurus storage error, can't set ${e}=${t}`,r)}},del:()=>{try{n.removeItem(e)}catch(t){console.error(`Docusaurus storage error, can't delete key=${e}`,t)}}}}function s(e){void 0===e&&(e=r);const t=a(e);if(!t)return[];const n=[];for(let r=0;r{"use strict";n.d(t,{l:()=>o});var r=n(2263),a=n(6550);function o(){const{siteConfig:{baseUrl:e,url:t},i18n:{defaultLocale:n,currentLocale:o}}=(0,r.Z)(),{pathname:i}=(0,a.TH)(),l=o===n?e:e.replace(`/${o}/`,"/"),s=i.replace(e,"");return{createUrl:function(e){let{locale:r,fullyQualified:a}=e;return`${a?t:""}${function(e){return e===n?`${l}`:`${l}${e}/`}(r)}${s}`}}}},6668:(e,t,n)=>{"use strict";n.d(t,{L:()=>a});var r=n(2263);function a(){return(0,r.Z)().siteConfig.themeConfig}},8802:(e,t)=>{"use strict";Object.defineProperty(t,"__esModule",{value:!0}),t.default=function(e,t){const{trailingSlash:n,baseUrl:r}=t;if(e.startsWith("#"))return e;if(void 0===n)return e;const[a]=e.split(/[#?]/),o="/"===a||a===r?a:(i=a,n?function(e){return e.endsWith("/")?e:`${e}/`}(i):function(e){return e.endsWith("/")?e.slice(0,-1):e}(i));var i;return e.replace(a,o)}},8780:function(e,t,n){"use strict";var r=this&&this.__importDefault||function(e){return 
e&&e.__esModule?e:{default:e}};Object.defineProperty(t,"__esModule",{value:!0}),t.applyTrailingSlash=t.blogPostContainerID=void 0,t.blogPostContainerID="post-content";var a=n(8802);Object.defineProperty(t,"applyTrailingSlash",{enumerable:!0,get:function(){return r(a).default}})},311:(e,t,n)=>{"use strict";n.d(t,{Z:()=>i});var r=n(7294),a=n(6010);const o="loadingRing_RJI3";function i(e){let{className:t}=e;return r.createElement("div",{className:(0,a.Z)(o,t)},r.createElement("div",null),r.createElement("div",null),r.createElement("div",null),r.createElement("div",null))}},22:(e,t,n)=>{"use strict";n.d(t,{w:()=>i});var r=n(1336),a=n.n(r),o=n(1029);async function i(e){{const t=await(await fetch(`${e}${o.J}`)).json(),n=t.map(((e,t)=>{let{documents:n,index:r}=e;return{type:t,documents:n,index:a().Index.load(r)}})),r=t.reduce(((e,t)=>{for(const n of t.index.invertedIndex)/\p{Unified_Ideograph}/u.test(n[0][0])&&e.add(n[0]);return e}),new Set);return{wrappedIndexes:n,zhDictionary:Array.from(r)}}}},8202:(e,t,n)=>{"use strict";n.d(t,{v:()=>s});var r=n(1336),a=n.n(r);var o=n(1029);function i(e){return l(e).concat(l(e.filter((e=>{const t=e[e.length-1];return!t.trailing&&t.maybeTyping})),!0))}function l(e,t){return e.map((e=>({tokens:e.map((e=>e.value)),term:e.map((e=>({value:e.value,presence:a().Query.presence.REQUIRED,wildcard:(t?e.trailing||e.maybeTyping:e.trailing)?a().Query.wildcard.TRAILING:a().Query.wildcard.NONE})))})))}function s(e,t,n){return function(r,l){const s=function(e,t){if(1===t.length&&["ja","jp","th"].includes(t[0]))return a()[t[0]].tokenizer(e).map((e=>e.toString()));let n=/[^-\s]+/g;return t.includes("zh")&&(n=/\w+|\p{Unified_Ideograph}+/gu),e.toLowerCase().match(n)||[]}(r,o.dK);if(0===s.length)return void l([]);const u=function(e,t){const n=function(e,t){const n=[];return function e(r,a){if(0===r.length)return void n.push(a);const o=r[0];if(/\p{Unified_Ideograph}/u.test(o)){const n=function(e,t){const n=[];return function e(r,a){let o=0,i=!1;for(const l of t)if(r.substr(0,l.length)===l){const t={missed:a.missed,term:a.term.concat({value:l})};r.length>l.length?e(r.substr(l.length),t):n.push(t),i=!0}else for(let t=l.length-1;t>o;t-=1){const s=l.substr(0,t);if(r.substr(0,t)===s){o=t;const l={missed:a.missed,term:a.term.concat({value:s,trailing:!0})};r.length>t?e(r.substr(t),l):n.push(l),i=!0;break}}i||(r.length>0?e(r.substr(1),{missed:a.missed+1,term:a.term}):a.term.length>0&&n.push(a))}(e,{missed:0,term:[]}),n.sort(((e,t)=>{const n=e.missed>0?1:0,r=t.missed>0?1:0;return n!==r?n-r:e.term.length-t.term.length})).map((e=>e.term))}(o,t);for(const t of n){const n=a.concat(...t);e(r.slice(1),n)}}else{const t=a.concat({value:o});e(r.slice(1),t)}}(e,[]),n}(e,t);if(0===n.length)return[{tokens:e,term:e.map((e=>({value:e,presence:a().Query.presence.REQUIRED,wildcard:a().Query.wildcard.LEADING|a().Query.wildcard.TRAILING})))}];for(const a of n)a[a.length-1].maybeTyping=!0;const r=[];for(const i of o.dK)if("en"===i)o._k||r.unshift(a().stopWordFilter);else{const e=a()[i];e.stopWordFilter&&r.unshift(e.stopWordFilter)}let l;if(r.length>0){const e=e=>r.reduce(((e,t)=>e.filter((e=>t(e.value)))),e);l=[];const t=[];for(const r of n){const n=e(r);l.push(n),n.length0&&t.push(n)}n.push(...t)}else l=n.slice();const s=[];for(const a of l)if(a.length>2)for(let e=a.length-1;e>=0;e-=1)s.push(a.slice(0,e).concat(a.slice(e+1)));return i(n).concat(i(s))}(s,t),c=[];e:for(const{term:t,tokens:a}of u)for(const{documents:r,index:o,type:i}of e)if(c.push(...o.query((e=>{for(const n of 
t)e.term(n.value,{wildcard:n.wildcard,presence:n.presence})})).slice(0,n).filter((e=>!c.some((t=>t.document.i.toString()===e.ref)))).slice(0,n-c.length).map((t=>{const n=r.find((e=>e.i.toString()===t.ref));return{document:n,type:i,page:0!==i&&e[0].documents.find((e=>e.i===n.p)),metadata:t.matchData.metadata,tokens:a,score:t.score}}))),c.length>=n)break e;!function(e){e.forEach(((e,t)=>{e.index=t})),e.sort(((t,n)=>{let r=t.type>0&&t.page?e.findIndex((e=>e.document===t.page)):t.index,a=n.type>0&&n.page?e.findIndex((e=>e.document===n.page)):n.index;return-1===r&&(r=t.index),-1===a&&(a=n.index),r===a?0===t.type?-1:0===n.type?1:t.index-n.index:r-a}))}(c),function(e){e.forEach(((t,n)=>{n>0&&t.page&&e.some((e=>e.document===t.page))&&(n{"use strict";function r(e){return e.join(" \u203a ")}n.d(t,{e:()=>r})},1690:(e,t,n)=>{"use strict";function r(e){return e.replace(/&/g,"&").replace(//g,">").replace(/"/g,""").replace(/'/g,"'")}n.d(t,{X:()=>r})},1073:(e,t,n)=>{"use strict";function r(e,t){const n=[];for(const r of Object.values(e))r[t]&&n.push(...r[t].position);return n.sort(((e,t)=>e[0]-t[0]||t[1]-e[1]))}n.d(t,{m:()=>r})},2539:(e,t,n)=>{"use strict";n.d(t,{C:()=>a});var r=n(1690);function a(e,t,n){const o=[];for(const i of t){const n=e.toLowerCase().indexOf(i);if(n>=0){n>0&&o.push(a(e.substr(0,n),t)),o.push(`${(0,r.X)(e.substr(n,i.length))}`);const l=n+i.length;l${(0,r.X)(e)}`:(0,r.X)(e):o.join("")}},726:(e,t,n)=>{"use strict";n.d(t,{o:()=>s});var r=n(1690),a=n(2539);const o=/\w+|\p{Unified_Ideograph}/u;function i(e){const t=[];let n=0,r=e;for(;r.length>0;){const a=r.match(o);if(!a){t.push(r);break}a.index>0&&t.push(r.substring(0,a.index)),t.push(a[0]),n+=a.index+a[0].length,r=e.substring(n)}return t}var l=n(1029);function s(e,t,n,o){void 0===o&&(o=l.Hk);const{chunkIndex:s,chunks:u}=function(e,t,n){const o=[];let l=0,s=0,u=-1;for(;ls){const t=i(e.substring(s,c)).map((e=>({html:(0,r.X)(e),textLength:e.length})));for(const e of t)o.push(e)}-1===u&&(u=o.length),s=c+d,o.push({html:(0,a.C)(e.substring(c,s),n,!0),textLength:d})}}if(s({html:(0,r.X)(e),textLength:e.length})));for(const e of t)o.push(e)}return{chunkIndex:u,chunks:o}}(e,t,n),c=u.slice(0,s),d=u[s],f=[d.html],p=u.slice(s+1);let h=d.textLength,m=0,g=0,b=!1,v=!1;for(;h0){const e=c.pop();h+e.textLength<=o?(f.unshift(e.html),m+=e.textLength,h+=e.textLength):(b=!0,c.length=0)}else{if(!(p.length>0))break;{const e=p.shift();h+e.textLength<=o?(f.push(e.html),g+=e.textLength,h+=e.textLength):(v=!0,p.length=0)}}return(b||c.length>0)&&f.unshift("\u2026"),(v||p.length>0)&&f.push("\u2026"),f.join("")}},1029:(e,t,n)=>{"use strict";n.d(t,{vc:()=>o,gQ:()=>f,H6:()=>u,l9:()=>p,dK:()=>r,_k:()=>a,AY:()=>c,t_:()=>d,J:()=>i,Hk:()=>s,qo:()=>l});n(1336);const r=["en"],a=!1,o=null,i="search-index.json?_=8c634268",l=8,s=50,u=!1,c=!0,d=!0,f=void 0,p=!1},6010:(e,t,n)=>{"use strict";function r(e){var t,n,a="";if("string"==typeof e||"number"==typeof e)a+=e;else if("object"==typeof e)if(Array.isArray(e))for(t=0;ta});const a=function(){for(var e,t,n=0,a="";n{"use strict";n.d(t,{lX:()=>w,q_:()=>C,ob:()=>p,PP:()=>L,Ep:()=>f});var r=n(7462);function a(e){return"/"===e.charAt(0)}function o(e,t){for(var n=t,r=n+1,a=e.length;r=0;f--){var p=i[f];"."===p?o(i,f):".."===p?(o(i,f),d++):d&&(o(i,f),d--)}if(!u)for(;d--;d)i.unshift("..");!u||""===i[0]||i[0]&&a(i[0])||i.unshift("");var h=i.join("/");return n&&"/"!==h.substr(-1)&&(h+="/"),h};var l=n(8776);function s(e){return"/"===e.charAt(0)?e:"/"+e}function u(e){return"/"===e.charAt(0)?e.substr(1):e}function c(e,t){return 
function(e,t){return 0===e.toLowerCase().indexOf(t.toLowerCase())&&-1!=="/?#".indexOf(e.charAt(t.length))}(e,t)?e.substr(t.length):e}function d(e){return"/"===e.charAt(e.length-1)?e.slice(0,-1):e}function f(e){var t=e.pathname,n=e.search,r=e.hash,a=t||"/";return n&&"?"!==n&&(a+="?"===n.charAt(0)?n:"?"+n),r&&"#"!==r&&(a+="#"===r.charAt(0)?r:"#"+r),a}function p(e,t,n,a){var o;"string"==typeof e?(o=function(e){var t=e||"/",n="",r="",a=t.indexOf("#");-1!==a&&(r=t.substr(a),t=t.substr(0,a));var o=t.indexOf("?");return-1!==o&&(n=t.substr(o),t=t.substr(0,o)),{pathname:t,search:"?"===n?"":n,hash:"#"===r?"":r}}(e),o.state=t):(void 0===(o=(0,r.Z)({},e)).pathname&&(o.pathname=""),o.search?"?"!==o.search.charAt(0)&&(o.search="?"+o.search):o.search="",o.hash?"#"!==o.hash.charAt(0)&&(o.hash="#"+o.hash):o.hash="",void 0!==t&&void 0===o.state&&(o.state=t));try{o.pathname=decodeURI(o.pathname)}catch(l){throw l instanceof URIError?new URIError('Pathname "'+o.pathname+'" could not be decoded. This is likely caused by an invalid percent-encoding.'):l}return n&&(o.key=n),a?o.pathname?"/"!==o.pathname.charAt(0)&&(o.pathname=i(o.pathname,a.pathname)):o.pathname=a.pathname:o.pathname||(o.pathname="/"),o}function h(){var e=null;var t=[];return{setPrompt:function(t){return e=t,function(){e===t&&(e=null)}},confirmTransitionTo:function(t,n,r,a){if(null!=e){var o="function"==typeof e?e(t,n):e;"string"==typeof o?"function"==typeof r?r(o,a):a(!0):a(!1!==o)}else a(!0)},appendListener:function(e){var n=!0;function r(){n&&e.apply(void 0,arguments)}return t.push(r),function(){n=!1,t=t.filter((function(e){return e!==r}))}},notifyListeners:function(){for(var e=arguments.length,n=new Array(e),r=0;rt?n.splice(t,n.length-t,a):n.push(a),d({action:r,location:a,index:t,entries:n})}}))},replace:function(e,t){var r="REPLACE",a=p(e,t,m(),w.location);c.confirmTransitionTo(a,r,n,(function(e){e&&(w.entries[w.index]=a,d({action:r,location:a}))}))},go:y,goBack:function(){y(-1)},goForward:function(){y(1)},canGo:function(e){var t=w.index+e;return t>=0&&t{"use strict";var r=n(9864),a={childContextTypes:!0,contextType:!0,contextTypes:!0,defaultProps:!0,displayName:!0,getDefaultProps:!0,getDerivedStateFromError:!0,getDerivedStateFromProps:!0,mixins:!0,propTypes:!0,type:!0},o={name:!0,length:!0,prototype:!0,caller:!0,callee:!0,arguments:!0,arity:!0},i={$$typeof:!0,compare:!0,defaultProps:!0,displayName:!0,propTypes:!0,type:!0},l={};function s(e){return r.isMemo(e)?i:l[e.$$typeof]||a}l[r.ForwardRef]={$$typeof:!0,render:!0,defaultProps:!0,displayName:!0,propTypes:!0},l[r.Memo]=i;var u=Object.defineProperty,c=Object.getOwnPropertyNames,d=Object.getOwnPropertySymbols,f=Object.getOwnPropertyDescriptor,p=Object.getPrototypeOf,h=Object.prototype;e.exports=function e(t,n,r){if("string"!=typeof n){if(h){var a=p(n);a&&a!==h&&e(t,a,r)}var i=c(n);d&&(i=i.concat(d(n)));for(var l=s(t),m=s(n),g=0;g{"use strict";e.exports=function(e,t,n,r,a,o,i,l){if(!e){var s;if(void 0===t)s=new Error("Minified exception occurred; use the non-minified dev environment for the full error message and additional helpful warnings.");else{var u=[n,r,a,o,i,l],c=0;(s=new Error(t.replace(/%s/g,(function(){return u[c++]})))).name="Invariant Violation"}throw s.framesToPop=1,s}}},5826:e=>{e.exports=Array.isArray||function(e){return"[object Array]"==Object.prototype.toString.call(e)}},1336:(e,t,n)=>{var r,a;!function(){var o,i,l,s,u,c,d,f,p,h,m,g,b,v,y,w,k,E,S,x,_,C,T,L,P,O,N,A,I,R,D=function(e){var t=new D.Builder;return 
t.pipeline.add(D.trimmer,D.stopWordFilter,D.stemmer),t.searchPipeline.add(D.stemmer),e.call(t,t),t.build()};D.version="2.3.9",D.utils={},D.utils.warn=(o=this,function(e){o.console&&console.warn&&console.warn(e)}),D.utils.asString=function(e){return null==e?"":e.toString()},D.utils.clone=function(e){if(null==e)return e;for(var t=Object.create(null),n=Object.keys(e),r=0;r0){var s=D.utils.clone(t)||{};s.position=[i,l],s.index=a.length,a.push(new D.Token(n.slice(i,o),s))}i=o+1}}return a},D.tokenizer.separator=/[\s\-]+/,D.Pipeline=function(){this._stack=[]},D.Pipeline.registeredFunctions=Object.create(null),D.Pipeline.registerFunction=function(e,t){t in this.registeredFunctions&&D.utils.warn("Overwriting existing registered function: "+t),e.label=t,D.Pipeline.registeredFunctions[e.label]=e},D.Pipeline.warnIfFunctionNotRegistered=function(e){e.label&&e.label in this.registeredFunctions||D.utils.warn("Function is not registered with pipeline. This may cause problems when serialising the index.\n",e)},D.Pipeline.load=function(e){var t=new D.Pipeline;return e.forEach((function(e){var n=D.Pipeline.registeredFunctions[e];if(!n)throw new Error("Cannot load unregistered function: "+e);t.add(n)})),t},D.Pipeline.prototype.add=function(){var e=Array.prototype.slice.call(arguments);e.forEach((function(e){D.Pipeline.warnIfFunctionNotRegistered(e),this._stack.push(e)}),this)},D.Pipeline.prototype.after=function(e,t){D.Pipeline.warnIfFunctionNotRegistered(t);var n=this._stack.indexOf(e);if(-1==n)throw new Error("Cannot find existingFn");n+=1,this._stack.splice(n,0,t)},D.Pipeline.prototype.before=function(e,t){D.Pipeline.warnIfFunctionNotRegistered(t);var n=this._stack.indexOf(e);if(-1==n)throw new Error("Cannot find existingFn");this._stack.splice(n,0,t)},D.Pipeline.prototype.remove=function(e){var t=this._stack.indexOf(e);-1!=t&&this._stack.splice(t,1)},D.Pipeline.prototype.run=function(e){for(var t=this._stack.length,n=0;n1&&(oe&&(n=a),o!=e);)r=n-t,a=t+Math.floor(r/2),o=this.elements[2*a];return o==e||o>e?2*a:ol?u+=2:i==l&&(t+=n[s+1]*r[u+1],s+=2,u+=2);return t},D.Vector.prototype.similarity=function(e){return this.dot(e)/this.magnitude()||0},D.Vector.prototype.toArray=function(){for(var e=new Array(this.elements.length/2),t=1,n=0;t0){var o,i=a.str.charAt(0);i in a.node.edges?o=a.node.edges[i]:(o=new D.TokenSet,a.node.edges[i]=o),1==a.str.length&&(o.final=!0),r.push({node:o,editsRemaining:a.editsRemaining,str:a.str.slice(1)})}if(0!=a.editsRemaining){if("*"in a.node.edges)var l=a.node.edges["*"];else{l=new D.TokenSet;a.node.edges["*"]=l}if(0==a.str.length&&(l.final=!0),r.push({node:l,editsRemaining:a.editsRemaining-1,str:a.str}),a.str.length>1&&r.push({node:a.node,editsRemaining:a.editsRemaining-1,str:a.str.slice(1)}),1==a.str.length&&(a.node.final=!0),a.str.length>=1){if("*"in a.node.edges)var s=a.node.edges["*"];else{s=new D.TokenSet;a.node.edges["*"]=s}1==a.str.length&&(s.final=!0),r.push({node:s,editsRemaining:a.editsRemaining-1,str:a.str.slice(1)})}if(a.str.length>1){var u,c=a.str.charAt(0),d=a.str.charAt(1);d in a.node.edges?u=a.node.edges[d]:(u=new D.TokenSet,a.node.edges[d]=u),1==a.str.length&&(u.final=!0),r.push({node:u,editsRemaining:a.editsRemaining-1,str:c+a.str.slice(2)})}}}return n},D.TokenSet.fromString=function(e){for(var t=new D.TokenSet,n=t,r=0,a=e.length;r=e;t--){var n=this.uncheckedNodes[t],r=n.child.toString();r in 
this.minimizedNodes?n.parent.edges[n.char]=this.minimizedNodes[r]:(n.child._str=r,this.minimizedNodes[r]=n.child),this.uncheckedNodes.pop()}},D.Index=function(e){this.invertedIndex=e.invertedIndex,this.fieldVectors=e.fieldVectors,this.tokenSet=e.tokenSet,this.fields=e.fields,this.pipeline=e.pipeline},D.Index.prototype.search=function(e){return this.query((function(t){new D.QueryParser(e,t).parse()}))},D.Index.prototype.query=function(e){for(var t=new D.Query(this.fields),n=Object.create(null),r=Object.create(null),a=Object.create(null),o=Object.create(null),i=Object.create(null),l=0;l1?1:e},D.Builder.prototype.k1=function(e){this._k1=e},D.Builder.prototype.add=function(e,t){var n=e[this._ref],r=Object.keys(this._fields);this._documents[n]=t||{},this.documentCount+=1;for(var a=0;a=this.length)return D.QueryLexer.EOS;var e=this.str.charAt(this.pos);return this.pos+=1,e},D.QueryLexer.prototype.width=function(){return this.pos-this.start},D.QueryLexer.prototype.ignore=function(){this.start==this.pos&&(this.pos+=1),this.start=this.pos},D.QueryLexer.prototype.backup=function(){this.pos-=1},D.QueryLexer.prototype.acceptDigitRun=function(){var e,t;do{t=(e=this.next()).charCodeAt(0)}while(t>47&&t<58);e!=D.QueryLexer.EOS&&this.backup()},D.QueryLexer.prototype.more=function(){return this.pos1&&(e.backup(),e.emit(D.QueryLexer.TERM)),e.ignore(),e.more())return D.QueryLexer.lexText},D.QueryLexer.lexEditDistance=function(e){return e.ignore(),e.acceptDigitRun(),e.emit(D.QueryLexer.EDIT_DISTANCE),D.QueryLexer.lexText},D.QueryLexer.lexBoost=function(e){return e.ignore(),e.acceptDigitRun(),e.emit(D.QueryLexer.BOOST),D.QueryLexer.lexText},D.QueryLexer.lexEOS=function(e){e.width()>0&&e.emit(D.QueryLexer.TERM)},D.QueryLexer.termSeparator=D.tokenizer.separator,D.QueryLexer.lexText=function(e){for(;;){var t=e.next();if(t==D.QueryLexer.EOS)return D.QueryLexer.lexEOS;if(92!=t.charCodeAt(0)){if(":"==t)return D.QueryLexer.lexField;if("~"==t)return e.backup(),e.width()>0&&e.emit(D.QueryLexer.TERM),D.QueryLexer.lexEditDistance;if("^"==t)return e.backup(),e.width()>0&&e.emit(D.QueryLexer.TERM),D.QueryLexer.lexBoost;if("+"==t&&1===e.width())return e.emit(D.QueryLexer.PRESENCE),D.QueryLexer.lexText;if("-"==t&&1===e.width())return e.emit(D.QueryLexer.PRESENCE),D.QueryLexer.lexText;if(t.match(D.QueryLexer.termSeparator))return D.QueryLexer.lexTerm}else e.escapeCharacter()}},D.QueryParser=function(e,t){this.lexer=new D.QueryLexer(e),this.query=t,this.currentClause={},this.lexemeIdx=0},D.QueryParser.prototype.parse=function(){this.lexer.run(),this.lexemes=this.lexer.lexemes;for(var e=D.QueryParser.parseClause;e;)e=e(this);return this.query},D.QueryParser.prototype.peekLexeme=function(){return this.lexemes[this.lexemeIdx]},D.QueryParser.prototype.consumeLexeme=function(){var e=this.peekLexeme();return this.lexemeIdx+=1,e},D.QueryParser.prototype.nextClause=function(){var e=this.currentClause;this.query.clause(e),this.currentClause={}},D.QueryParser.parseClause=function(e){var t=e.peekLexeme();if(null!=t)switch(t.type){case D.QueryLexer.PRESENCE:return D.QueryParser.parsePresence;case D.QueryLexer.FIELD:return D.QueryParser.parseField;case D.QueryLexer.TERM:return D.QueryParser.parseTerm;default:var n="expected either a field or a term, found "+t.type;throw t.str.length>=1&&(n+=" with value '"+t.str+"'"),new D.QueryParseError(n,t.start,t.end)}},D.QueryParser.parsePresence=function(e){var 
t=e.consumeLexeme();if(null!=t){switch(t.str){case"-":e.currentClause.presence=D.Query.presence.PROHIBITED;break;case"+":e.currentClause.presence=D.Query.presence.REQUIRED;break;default:var n="unrecognised presence operator'"+t.str+"'";throw new D.QueryParseError(n,t.start,t.end)}var r=e.peekLexeme();if(null==r){n="expecting term or field, found nothing";throw new D.QueryParseError(n,t.start,t.end)}switch(r.type){case D.QueryLexer.FIELD:return D.QueryParser.parseField;case D.QueryLexer.TERM:return D.QueryParser.parseTerm;default:n="expecting term or field, found '"+r.type+"'";throw new D.QueryParseError(n,r.start,r.end)}}},D.QueryParser.parseField=function(e){var t=e.consumeLexeme();if(null!=t){if(-1==e.query.allFields.indexOf(t.str)){var n=e.query.allFields.map((function(e){return"'"+e+"'"})).join(", "),r="unrecognised field '"+t.str+"', possible fields: "+n;throw new D.QueryParseError(r,t.start,t.end)}e.currentClause.fields=[t.str];var a=e.peekLexeme();if(null==a){r="expecting term, found nothing";throw new D.QueryParseError(r,t.start,t.end)}if(a.type===D.QueryLexer.TERM)return D.QueryParser.parseTerm;r="expecting term, found '"+a.type+"'";throw new D.QueryParseError(r,a.start,a.end)}},D.QueryParser.parseTerm=function(e){var t=e.consumeLexeme();if(null!=t){e.currentClause.term=t.str.toLowerCase(),-1!=t.str.indexOf("*")&&(e.currentClause.usePipeline=!1);var n=e.peekLexeme();if(null!=n)switch(n.type){case D.QueryLexer.TERM:return e.nextClause(),D.QueryParser.parseTerm;case D.QueryLexer.FIELD:return e.nextClause(),D.QueryParser.parseField;case D.QueryLexer.EDIT_DISTANCE:return D.QueryParser.parseEditDistance;case D.QueryLexer.BOOST:return D.QueryParser.parseBoost;case D.QueryLexer.PRESENCE:return e.nextClause(),D.QueryParser.parsePresence;default:var r="Unexpected lexeme type '"+n.type+"'";throw new D.QueryParseError(r,n.start,n.end)}else e.nextClause()}},D.QueryParser.parseEditDistance=function(e){var t=e.consumeLexeme();if(null!=t){var n=parseInt(t.str,10);if(isNaN(n)){var r="edit distance must be numeric";throw new D.QueryParseError(r,t.start,t.end)}e.currentClause.editDistance=n;var a=e.peekLexeme();if(null!=a)switch(a.type){case D.QueryLexer.TERM:return e.nextClause(),D.QueryParser.parseTerm;case D.QueryLexer.FIELD:return e.nextClause(),D.QueryParser.parseField;case D.QueryLexer.EDIT_DISTANCE:return D.QueryParser.parseEditDistance;case D.QueryLexer.BOOST:return D.QueryParser.parseBoost;case D.QueryLexer.PRESENCE:return e.nextClause(),D.QueryParser.parsePresence;default:r="Unexpected lexeme type '"+a.type+"'";throw new D.QueryParseError(r,a.start,a.end)}else e.nextClause()}},D.QueryParser.parseBoost=function(e){var t=e.consumeLexeme();if(null!=t){var n=parseInt(t.str,10);if(isNaN(n)){var r="boost must be numeric";throw new D.QueryParseError(r,t.start,t.end)}e.currentClause.boost=n;var a=e.peekLexeme();if(null!=a)switch(a.type){case D.QueryLexer.TERM:return e.nextClause(),D.QueryParser.parseTerm;case D.QueryLexer.FIELD:return e.nextClause(),D.QueryParser.parseField;case D.QueryLexer.EDIT_DISTANCE:return D.QueryParser.parseEditDistance;case D.QueryLexer.BOOST:return D.QueryParser.parseBoost;case D.QueryLexer.PRESENCE:return e.nextClause(),D.QueryParser.parsePresence;default:r="Unexpected lexeme type '"+a.type+"'";throw new D.QueryParseError(r,a.start,a.end)}else e.nextClause()}},void 0===(a="function"==typeof(r=function(){return D})?r.call(t,n,t,e):r)||(e.exports=a)}()},2497:(e,t,n)=>{"use strict";n.r(t)},2295:(e,t,n)=>{"use strict";n.r(t)},4865:function(e,t,n){var r,a;r=function(){var 
e,t,n={version:"0.2.0"},r=n.settings={minimum:.08,easing:"ease",positionUsing:"",speed:200,trickle:!0,trickleRate:.02,trickleSpeed:800,showSpinner:!0,barSelector:'[role="bar"]',spinnerSelector:'[role="spinner"]',parent:"body",template:'
'};function a(e,t,n){return en?n:e}function o(e){return 100*(-1+e)}function i(e,t,n){var a;return(a="translate3d"===r.positionUsing?{transform:"translate3d("+o(e)+"%,0,0)"}:"translate"===r.positionUsing?{transform:"translate("+o(e)+"%,0)"}:{"margin-left":o(e)+"%"}).transition="all "+t+"ms "+n,a}n.configure=function(e){var t,n;for(t in e)void 0!==(n=e[t])&&e.hasOwnProperty(t)&&(r[t]=n);return this},n.status=null,n.set=function(e){var t=n.isStarted();e=a(e,r.minimum,1),n.status=1===e?null:e;var o=n.render(!t),u=o.querySelector(r.barSelector),c=r.speed,d=r.easing;return o.offsetWidth,l((function(t){""===r.positionUsing&&(r.positionUsing=n.getPositioningCSS()),s(u,i(e,c,d)),1===e?(s(o,{transition:"none",opacity:1}),o.offsetWidth,setTimeout((function(){s(o,{transition:"all "+c+"ms linear",opacity:0}),setTimeout((function(){n.remove(),t()}),c)}),c)):setTimeout(t,c)})),this},n.isStarted=function(){return"number"==typeof n.status},n.start=function(){n.status||n.set(0);var e=function(){setTimeout((function(){n.status&&(n.trickle(),e())}),r.trickleSpeed)};return r.trickle&&e(),this},n.done=function(e){return e||n.status?n.inc(.3+.5*Math.random()).set(1):this},n.inc=function(e){var t=n.status;return t?("number"!=typeof e&&(e=(1-t)*a(Math.random()*t,.1,.95)),t=a(t+e,0,.994),n.set(t)):n.start()},n.trickle=function(){return n.inc(Math.random()*r.trickleRate)},e=0,t=0,n.promise=function(r){return r&&"resolved"!==r.state()?(0===t&&n.start(),e++,t++,r.always((function(){0==--t?(e=0,n.done()):n.set((e-t)/e)})),this):this},n.render=function(e){if(n.isRendered())return document.getElementById("nprogress");c(document.documentElement,"nprogress-busy");var t=document.createElement("div");t.id="nprogress",t.innerHTML=r.template;var a,i=t.querySelector(r.barSelector),l=e?"-100":o(n.status||0),u=document.querySelector(r.parent);return s(i,{transition:"all 0 linear",transform:"translate3d("+l+"%,0,0)"}),r.showSpinner||(a=t.querySelector(r.spinnerSelector))&&p(a),u!=document.body&&c(u,"nprogress-custom-parent"),u.appendChild(t),t},n.remove=function(){d(document.documentElement,"nprogress-busy"),d(document.querySelector(r.parent),"nprogress-custom-parent");var e=document.getElementById("nprogress");e&&p(e)},n.isRendered=function(){return!!document.getElementById("nprogress")},n.getPositioningCSS=function(){var e=document.body.style,t="WebkitTransform"in e?"Webkit":"MozTransform"in e?"Moz":"msTransform"in e?"ms":"OTransform"in e?"O":"";return t+"Perspective"in e?"translate3d":t+"Transform"in e?"translate":"margin"};var l=function(){var e=[];function t(){var n=e.shift();n&&n(t)}return function(n){e.push(n),1==e.length&&t()}}(),s=function(){var e=["Webkit","O","Moz","ms"],t={};function n(e){return e.replace(/^-ms-/,"ms-").replace(/-([\da-z])/gi,(function(e,t){return t.toUpperCase()}))}function r(t){var n=document.body.style;if(t in n)return t;for(var r,a=e.length,o=t.charAt(0).toUpperCase()+t.slice(1);a--;)if((r=e[a]+o)in n)return r;return t}function a(e){return e=n(e),t[e]||(t[e]=r(e))}function o(e,t,n){t=a(t),e.style[t]=n}return function(e,t){var n,r,a=arguments;if(2==a.length)for(n in t)void 0!==(r=t[n])&&t.hasOwnProperty(n)&&o(e,n,r);else o(e,a[1],a[2])}}();function u(e,t){return("string"==typeof e?e:f(e)).indexOf(" "+t+" ")>=0}function c(e,t){var n=f(e),r=n+t;u(n,t)||(e.className=r.substring(1))}function d(e,t){var n,r=f(e);u(e,t)&&(n=r.replace(" "+t+" "," "),e.className=n.substring(1,n.length-1))}function f(e){return(" "+(e.className||"")+" ").replace(/\s+/gi," ")}function 
p(e){e&&e.parentNode&&e.parentNode.removeChild(e)}return n},void 0===(a="function"==typeof r?r.call(t,n,t,e):r)||(e.exports=a)},7418:e=>{"use strict";var t=Object.getOwnPropertySymbols,n=Object.prototype.hasOwnProperty,r=Object.prototype.propertyIsEnumerable;function a(e){if(null==e)throw new TypeError("Object.assign cannot be called with null or undefined");return Object(e)}e.exports=function(){try{if(!Object.assign)return!1;var e=new String("abc");if(e[5]="de","5"===Object.getOwnPropertyNames(e)[0])return!1;for(var t={},n=0;n<10;n++)t["_"+String.fromCharCode(n)]=n;if("0123456789"!==Object.getOwnPropertyNames(t).map((function(e){return t[e]})).join(""))return!1;var r={};return"abcdefghijklmnopqrst".split("").forEach((function(e){r[e]=e})),"abcdefghijklmnopqrst"===Object.keys(Object.assign({},r)).join("")}catch(a){return!1}}()?Object.assign:function(e,o){for(var i,l,s=a(e),u=1;u{"use strict";n.d(t,{Z:()=>o});var r=function(){var e=/(?:^|\s)lang(?:uage)?-([\w-]+)(?=\s|$)/i,t=0,n={},r={util:{encode:function e(t){return t instanceof a?new a(t.type,e(t.content),t.alias):Array.isArray(t)?t.map(e):t.replace(/&/g,"&").replace(/=d.reach);S+=E.value.length,E=E.next){var x=E.value;if(t.length>e.length)return;if(!(x instanceof a)){var _,C=1;if(v){if(!(_=o(k,S,e,b))||_.index>=e.length)break;var T=_.index,L=_.index+_[0].length,P=S;for(P+=E.value.length;T>=P;)P+=(E=E.next).value.length;if(S=P-=E.value.length,E.value instanceof a)continue;for(var O=E;O!==t.tail&&(Pd.reach&&(d.reach=R);var D=E.prev;if(A&&(D=s(t,D,A),S+=A.length),u(t,D,C),E=s(t,D,new a(f,g?r.tokenize(N,g):N,y,N)),I&&s(t,E,I),C>1){var M={cause:f+","+h,reach:R};i(e,t,n,E.prev,S,M),d&&M.reach>d.reach&&(d.reach=M.reach)}}}}}}function l(){var e={value:null,prev:null,next:null},t={value:null,prev:e,next:null};e.next=t,this.head=e,this.tail=t,this.length=0}function s(e,t,n){var r=t.next,a={value:n,prev:t,next:r};return t.next=a,r.prev=a,e.length++,a}function u(e,t,n){for(var r=t.next,a=0;a"+o.content+""},r}(),a=r;r.default=r,a.languages.markup={comment:{pattern://,greedy:!0},prolog:{pattern:/<\?[\s\S]+?\?>/,greedy:!0},doctype:{pattern:/"'[\]]|"[^"]*"|'[^']*')+(?:\[(?:[^<"'\]]|"[^"]*"|'[^']*'|<(?!!--)|)*\]\s*)?>/i,greedy:!0,inside:{"internal-subset":{pattern:/(^[^\[]*\[)[\s\S]+(?=\]>$)/,lookbehind:!0,greedy:!0,inside:null},string:{pattern:/"[^"]*"|'[^']*'/,greedy:!0},punctuation:/^$|[[\]]/,"doctype-tag":/^DOCTYPE/i,name:/[^\s<>'"]+/}},cdata:{pattern://i,greedy:!0},tag:{pattern:/<\/?(?!\d)[^\s>\/=$<%]+(?:\s(?:\s*[^\s>\/=]+(?:\s*=\s*(?:"[^"]*"|'[^']*'|[^\s'">=]+(?=[\s>]))|(?=[\s/>])))+)?\s*\/?>/,greedy:!0,inside:{tag:{pattern:/^<\/?[^\s>\/]+/,inside:{punctuation:/^<\/?/,namespace:/^[^\s>\/:]+:/}},"special-attr":[],"attr-value":{pattern:/=\s*(?:"[^"]*"|'[^']*'|[^\s'">=]+)/,inside:{punctuation:[{pattern:/^=/,alias:"attr-equals"},/"|'/]}},punctuation:/\/?>/,"attr-name":{pattern:/[^\s>\/]+/,inside:{namespace:/^[^\s>\/:]+:/}}}},entity:[{pattern:/&[\da-z]{1,8};/i,alias:"named-entity"},/&#x?[\da-f]{1,8};/i]},a.languages.markup.tag.inside["attr-value"].inside.entity=a.languages.markup.entity,a.languages.markup.doctype.inside["internal-subset"].inside=a.languages.markup,a.hooks.add("wrap",(function(e){"entity"===e.type&&(e.attributes.title=e.content.replace(/&/,"&"))})),Object.defineProperty(a.languages.markup.tag,"addInlined",{value:function(e,t){var n={};n["language-"+t]={pattern:/(^$)/i,lookbehind:!0,inside:a.languages[t]},n.cdata=/^$/i;var r={"included-cdata":{pattern://i,inside:n}};r["language-"+t]={pattern:/[\s\S]+/,inside:a.languages[t]};var 
o={};o[e]={pattern:RegExp(/(<__[^>]*>)(?:))*\]\]>|(?!)/.source.replace(/__/g,(function(){return e})),"i"),lookbehind:!0,greedy:!0,inside:r},a.languages.insertBefore("markup","cdata",o)}}),Object.defineProperty(a.languages.markup.tag,"addAttribute",{value:function(e,t){a.languages.markup.tag.inside["special-attr"].push({pattern:RegExp(/(^|["'\s])/.source+"(?:"+e+")"+/\s*=\s*(?:"[^"]*"|'[^']*'|[^\s'">=]+(?=[\s>]))/.source,"i"),lookbehind:!0,inside:{"attr-name":/^[^\s=]+/,"attr-value":{pattern:/=[\s\S]+/,inside:{value:{pattern:/(^=\s*(["']|(?!["'])))\S[\s\S]*(?=\2$)/,lookbehind:!0,alias:[t,"language-"+t],inside:a.languages[t]},punctuation:[{pattern:/^=/,alias:"attr-equals"},/"|'/]}}}})}}),a.languages.html=a.languages.markup,a.languages.mathml=a.languages.markup,a.languages.svg=a.languages.markup,a.languages.xml=a.languages.extend("markup",{}),a.languages.ssml=a.languages.xml,a.languages.atom=a.languages.xml,a.languages.rss=a.languages.xml,function(e){var t="\\b(?:BASH|BASHOPTS|BASH_ALIASES|BASH_ARGC|BASH_ARGV|BASH_CMDS|BASH_COMPLETION_COMPAT_DIR|BASH_LINENO|BASH_REMATCH|BASH_SOURCE|BASH_VERSINFO|BASH_VERSION|COLORTERM|COLUMNS|COMP_WORDBREAKS|DBUS_SESSION_BUS_ADDRESS|DEFAULTS_PATH|DESKTOP_SESSION|DIRSTACK|DISPLAY|EUID|GDMSESSION|GDM_LANG|GNOME_KEYRING_CONTROL|GNOME_KEYRING_PID|GPG_AGENT_INFO|GROUPS|HISTCONTROL|HISTFILE|HISTFILESIZE|HISTSIZE|HOME|HOSTNAME|HOSTTYPE|IFS|INSTANCE|JOB|LANG|LANGUAGE|LC_ADDRESS|LC_ALL|LC_IDENTIFICATION|LC_MEASUREMENT|LC_MONETARY|LC_NAME|LC_NUMERIC|LC_PAPER|LC_TELEPHONE|LC_TIME|LESSCLOSE|LESSOPEN|LINES|LOGNAME|LS_COLORS|MACHTYPE|MAILCHECK|MANDATORY_PATH|NO_AT_BRIDGE|OLDPWD|OPTERR|OPTIND|ORBIT_SOCKETDIR|OSTYPE|PAPERSIZE|PATH|PIPESTATUS|PPID|PS1|PS2|PS3|PS4|PWD|RANDOM|REPLY|SECONDS|SELINUX_INIT|SESSION|SESSIONTYPE|SESSION_MANAGER|SHELL|SHELLOPTS|SHLVL|SSH_AUTH_SOCK|TERM|UID|UPSTART_EVENTS|UPSTART_INSTANCE|UPSTART_JOB|UPSTART_SESSION|USER|WINDOWID|XAUTHORITY|XDG_CONFIG_DIRS|XDG_CURRENT_DESKTOP|XDG_DATA_DIRS|XDG_GREETER_DATA_DIR|XDG_MENU_PREFIX|XDG_RUNTIME_DIR|XDG_SEAT|XDG_SEAT_PATH|XDG_SESSION_DESKTOP|XDG_SESSION_ID|XDG_SESSION_PATH|XDG_SESSION_TYPE|XDG_VTNR|XMODIFIERS)\\b",n={pattern:/(^(["']?)\w+\2)[ 
\t]+\S.*/,lookbehind:!0,alias:"punctuation",inside:null},r={bash:n,environment:{pattern:RegExp("\\$"+t),alias:"constant"},variable:[{pattern:/\$?\(\([\s\S]+?\)\)/,greedy:!0,inside:{variable:[{pattern:/(^\$\(\([\s\S]+)\)\)/,lookbehind:!0},/^\$\(\(/],number:/\b0x[\dA-Fa-f]+\b|(?:\b\d+(?:\.\d*)?|\B\.\d+)(?:[Ee]-?\d+)?/,operator:/--|\+\+|\*\*=?|<<=?|>>=?|&&|\|\||[=!+\-*/%<>^&|]=?|[?~:]/,punctuation:/\(\(?|\)\)?|,|;/}},{pattern:/\$\((?:\([^)]+\)|[^()])+\)|`[^`]+`/,greedy:!0,inside:{variable:/^\$\(|^`|\)$|`$/}},{pattern:/\$\{[^}]+\}/,greedy:!0,inside:{operator:/:[-=?+]?|[!\/]|##?|%%?|\^\^?|,,?/,punctuation:/[\[\]]/,environment:{pattern:RegExp("(\\{)"+t),lookbehind:!0,alias:"constant"}}},/\$(?:\w+|[#?*!@$])/],entity:/\\(?:[abceEfnrtv\\"]|O?[0-7]{1,3}|U[0-9a-fA-F]{8}|u[0-9a-fA-F]{4}|x[0-9a-fA-F]{1,2})/};e.languages.bash={shebang:{pattern:/^#!\s*\/.*/,alias:"important"},comment:{pattern:/(^|[^"{\\$])#.*/,lookbehind:!0},"function-name":[{pattern:/(\bfunction\s+)[\w-]+(?=(?:\s*\(?:\s*\))?\s*\{)/,lookbehind:!0,alias:"function"},{pattern:/\b[\w-]+(?=\s*\(\s*\)\s*\{)/,alias:"function"}],"for-or-select":{pattern:/(\b(?:for|select)\s+)\w+(?=\s+in\s)/,alias:"variable",lookbehind:!0},"assign-left":{pattern:/(^|[\s;|&]|[<>]\()\w+(?=\+?=)/,inside:{environment:{pattern:RegExp("(^|[\\s;|&]|[<>]\\()"+t),lookbehind:!0,alias:"constant"}},alias:"variable",lookbehind:!0},string:[{pattern:/((?:^|[^<])<<-?\s*)(\w+)\s[\s\S]*?(?:\r?\n|\r)\2/,lookbehind:!0,greedy:!0,inside:r},{pattern:/((?:^|[^<])<<-?\s*)(["'])(\w+)\2\s[\s\S]*?(?:\r?\n|\r)\3/,lookbehind:!0,greedy:!0,inside:{bash:n}},{pattern:/(^|[^\\](?:\\\\)*)"(?:\\[\s\S]|\$\([^)]+\)|\$(?!\()|`[^`]+`|[^"\\`$])*"/,lookbehind:!0,greedy:!0,inside:r},{pattern:/(^|[^$\\])'[^']*'/,lookbehind:!0,greedy:!0},{pattern:/\$'(?:[^'\\]|\\[\s\S])*'/,greedy:!0,inside:{entity:r.entity}}],environment:{pattern:RegExp("\\$?"+t),alias:"constant"},variable:r.variable,function:{pattern:/(^|[\s;|&]|[<>]\()(?:add|apropos|apt|apt-cache|apt-get|aptitude|aspell|automysqlbackup|awk|basename|bash|bc|bconsole|bg|bzip2|cal|cat|cfdisk|chgrp|chkconfig|chmod|chown|chroot|cksum|clear|cmp|column|comm|composer|cp|cron|crontab|csplit|curl|cut|date|dc|dd|ddrescue|debootstrap|df|diff|diff3|dig|dir|dircolors|dirname|dirs|dmesg|docker|docker-compose|du|egrep|eject|env|ethtool|expand|expect|expr|fdformat|fdisk|fg|fgrep|file|find|fmt|fold|format|free|fsck|ftp|fuser|gawk|git|gparted|grep|groupadd|groupdel|groupmod|groups|grub-mkconfig|gzip|halt|head|hg|history|host|hostname|htop|iconv|id|ifconfig|ifdown|ifup|import|install|ip|jobs|join|kill|killall|less|link|ln|locate|logname|logrotate|look|lpc|lpr|lprint|lprintd|lprintq|lprm|ls|lsof|lynx|make|man|mc|mdadm|mkconfig|mkdir|mke2fs|mkfifo|mkfs|mkisofs|mknod|mkswap|mmv|more|most|mount|mtools|mtr|mutt|mv|nano|nc|netstat|nice|nl|node|nohup|notify-send|npm|nslookup|op|open|parted|passwd|paste|pathchk|ping|pkill|pnpm|podman|podman-compose|popd|pr|printcap|printenv|ps|pushd|pv|quota|quotacheck|quotactl|ram|rar|rcp|reboot|remsync|rename|renice|rev|rm|rmdir|rpm|rsync|scp|screen|sdiff|sed|sendmail|seq|service|sftp|sh|shellcheck|shuf|shutdown|sleep|slocate|sort|split|ssh|stat|strace|su|sudo|sum|suspend|swapon|sync|tac|tail|tar|tee|time|timeout|top|touch|tr|traceroute|tsort|tty|umount|uname|unexpand|uniq|units|unrar|unshar|unzip|update-grub|uptime|useradd|userdel|usermod|users|uudecode|uuencode|v|vcpkg|vdir|vi|vim|virsh|vmstat|wait|watch|wc|wget|whereis|which|who|whoami|write|xargs|xdg-open|yarn|yes|zenity|zip|zsh|zypper)(?=$|[)\s;|&])/,lookbehind:!0},keyword:{pattern:/(^|[\s;|&]|
[<>]\()(?:case|do|done|elif|else|esac|fi|for|function|if|in|select|then|until|while)(?=$|[)\s;|&])/,lookbehind:!0},builtin:{pattern:/(^|[\s;|&]|[<>]\()(?:\.|:|alias|bind|break|builtin|caller|cd|command|continue|declare|echo|enable|eval|exec|exit|export|getopts|hash|help|let|local|logout|mapfile|printf|pwd|read|readarray|readonly|return|set|shift|shopt|source|test|times|trap|type|typeset|ulimit|umask|unalias|unset)(?=$|[)\s;|&])/,lookbehind:!0,alias:"class-name"},boolean:{pattern:/(^|[\s;|&]|[<>]\()(?:false|true)(?=$|[)\s;|&])/,lookbehind:!0},"file-descriptor":{pattern:/\B&\d\b/,alias:"important"},operator:{pattern:/\d?<>|>\||\+=|=[=~]?|!=?|<<[<-]?|[&\d]?>>|\d[<>]&?|[<>][&=]?|&[>&]?|\|[&|]?/,inside:{"file-descriptor":{pattern:/^\d/,alias:"important"}}},punctuation:/\$?\(\(?|\)\)?|\.\.|[{}[\];\\]/,number:{pattern:/(^|\s)(?:[1-9]\d*|0)(?:[.,]\d+)?\b/,lookbehind:!0}},n.inside=e.languages.bash;for(var a=["comment","function-name","for-or-select","assign-left","string","environment","function","keyword","builtin","boolean","file-descriptor","operator","punctuation","number"],o=r.variable[1].inside,i=0;i]=?|[!=]=?=?|--?|\+\+?|&&?|\|\|?|[?*/~^%]/,punctuation:/[{}[\];(),.:]/},a.languages.c=a.languages.extend("clike",{comment:{pattern:/\/\/(?:[^\r\n\\]|\\(?:\r\n?|\n|(?![\r\n])))*|\/\*[\s\S]*?(?:\*\/|$)/,greedy:!0},string:{pattern:/"(?:\\(?:\r\n|[\s\S])|[^"\\\r\n])*"/,greedy:!0},"class-name":{pattern:/(\b(?:enum|struct)\s+(?:__attribute__\s*\(\([\s\S]*?\)\)\s*)?)\w+|\b[a-z]\w*_t\b/,lookbehind:!0},keyword:/\b(?:_Alignas|_Alignof|_Atomic|_Bool|_Complex|_Generic|_Imaginary|_Noreturn|_Static_assert|_Thread_local|__attribute__|asm|auto|break|case|char|const|continue|default|do|double|else|enum|extern|float|for|goto|if|inline|int|long|register|return|short|signed|sizeof|static|struct|switch|typedef|typeof|union|unsigned|void|volatile|while)\b/,function:/\b[a-z_]\w*(?=\s*\()/i,number:/(?:\b0x(?:[\da-f]+(?:\.[\da-f]*)?|\.[\da-f]+)(?:p[+-]?\d+)?|(?:\b\d+(?:\.\d*)?|\B\.\d+)(?:e[+-]?\d+)?)[ful]{0,4}/i,operator:/>>=?|<<=?|->|([-+&|:])\1|[?:~]|[-+*/%&|^!=<>]=?/}),a.languages.insertBefore("c","string",{char:{pattern:/'(?:\\(?:\r\n|[\s\S])|[^'\\\r\n]){0,32}'/,greedy:!0}}),a.languages.insertBefore("c","string",{macro:{pattern:/(^[\t ]*)#\s*[a-z](?:[^\r\n\\/]|\/(?!\*)|\/\*(?:[^*]|\*(?!\/))*\*\/|\\(?:\r\n|[\s\S]))*/im,lookbehind:!0,greedy:!0,alias:"property",inside:{string:[{pattern:/^(#\s*include\s*)<[^>]+>/,lookbehind:!0},a.languages.c.string],char:a.languages.c.char,comment:a.languages.c.comment,"macro-name":[{pattern:/(^#\s*define\s+)\w+\b(?!\()/i,lookbehind:!0},{pattern:/(^#\s*define\s+)\w+\b(?=\()/i,lookbehind:!0,alias:"function"}],directive:{pattern:/^(#\s*)[a-z]+/,lookbehind:!0,alias:"keyword"},"directive-hash":/^#/,punctuation:/##|\\(?=[\r\n])/,expression:{pattern:/\S[\s\S]*/,inside:a.languages.c}}}}),a.languages.insertBefore("c","function",{constant:/\b(?:EOF|NULL|SEEK_CUR|SEEK_END|SEEK_SET|__DATE__|__FILE__|__LINE__|__TIMESTAMP__|__TIME__|__func__|stderr|stdin|stdout)\b/}),delete a.languages.c.boolean,function(e){var 
t=/\b(?:alignas|alignof|asm|auto|bool|break|case|catch|char|char16_t|char32_t|char8_t|class|co_await|co_return|co_yield|compl|concept|const|const_cast|consteval|constexpr|constinit|continue|decltype|default|delete|do|double|dynamic_cast|else|enum|explicit|export|extern|final|float|for|friend|goto|if|import|inline|int|int16_t|int32_t|int64_t|int8_t|long|module|mutable|namespace|new|noexcept|nullptr|operator|override|private|protected|public|register|reinterpret_cast|requires|return|short|signed|sizeof|static|static_assert|static_cast|struct|switch|template|this|thread_local|throw|try|typedef|typeid|typename|uint16_t|uint32_t|uint64_t|uint8_t|union|unsigned|using|virtual|void|volatile|wchar_t|while)\b/,n=/\b(?!)\w+(?:\s*\.\s*\w+)*\b/.source.replace(//g,(function(){return t.source}));e.languages.cpp=e.languages.extend("c",{"class-name":[{pattern:RegExp(/(\b(?:class|concept|enum|struct|typename)\s+)(?!)\w+/.source.replace(//g,(function(){return t.source}))),lookbehind:!0},/\b[A-Z]\w*(?=\s*::\s*\w+\s*\()/,/\b[A-Z_]\w*(?=\s*::\s*~\w+\s*\()/i,/\b\w+(?=\s*<(?:[^<>]|<(?:[^<>]|<[^<>]*>)*>)*>\s*::\s*\w+\s*\()/],keyword:t,number:{pattern:/(?:\b0b[01']+|\b0x(?:[\da-f']+(?:\.[\da-f']*)?|\.[\da-f']+)(?:p[+-]?[\d']+)?|(?:\b[\d']+(?:\.[\d']*)?|\B\.[\d']+)(?:e[+-]?[\d']+)?)[ful]{0,4}/i,greedy:!0},operator:/>>=?|<<=?|->|--|\+\+|&&|\|\||[?:~]|<=>|[-+*/%&|^!=<>]=?|\b(?:and|and_eq|bitand|bitor|not|not_eq|or|or_eq|xor|xor_eq)\b/,boolean:/\b(?:false|true)\b/}),e.languages.insertBefore("cpp","string",{module:{pattern:RegExp(/(\b(?:import|module)\s+)/.source+"(?:"+/"(?:\\(?:\r\n|[\s\S])|[^"\\\r\n])*"|<[^<>\r\n]*>/.source+"|"+/(?:\s*:\s*)?|:\s*/.source.replace(//g,(function(){return n}))+")"),lookbehind:!0,greedy:!0,inside:{string:/^[<"][\s\S]+/,operator:/:/,punctuation:/\./}},"raw-string":{pattern:/R"([^()\\ ]{0,16})\([\s\S]*?\)\1"/,alias:"string",greedy:!0}}),e.languages.insertBefore("cpp","keyword",{"generic-function":{pattern:/\b(?!operator\b)[a-z_]\w*\s*<(?:[^<>]|<[^<>]*>)*>(?=\s*\()/i,inside:{function:/^\w+/,generic:{pattern:/<[\s\S]+/,alias:"class-name",inside:e.languages.cpp}}}}),e.languages.insertBefore("cpp","operator",{"double-colon":{pattern:/::/,alias:"punctuation"}}),e.languages.insertBefore("cpp","class-name",{"base-clause":{pattern:/(\b(?:class|struct)\s+\w+\s*:\s*)[^;{}"'\s]+(?:\s+[^;{}"'\s]+)*(?=\s*[;{])/,lookbehind:!0,greedy:!0,inside:e.languages.extend("cpp",{})}}),e.languages.insertBefore("inside","double-colon",{"class-name":/\b[a-z_]\w*\b(?!\s*::)/i},e.languages.cpp["base-clause"])}(a),function(e){var 
t=/(?:"(?:\\(?:\r\n|[\s\S])|[^"\\\r\n])*"|'(?:\\(?:\r\n|[\s\S])|[^'\\\r\n])*')/;e.languages.css={comment:/\/\*[\s\S]*?\*\//,atrule:{pattern:/@[\w-](?:[^;{\s]|\s+(?![\s{]))*(?:;|(?=\s*\{))/,inside:{rule:/^@[\w-]+/,"selector-function-argument":{pattern:/(\bselector\s*\(\s*(?![\s)]))(?:[^()\s]|\s+(?![\s)])|\((?:[^()]|\([^()]*\))*\))+(?=\s*\))/,lookbehind:!0,alias:"selector"},keyword:{pattern:/(^|[^\w-])(?:and|not|only|or)(?![\w-])/,lookbehind:!0}}},url:{pattern:RegExp("\\burl\\((?:"+t.source+"|"+/(?:[^\\\r\n()"']|\\[\s\S])*/.source+")\\)","i"),greedy:!0,inside:{function:/^url/i,punctuation:/^\(|\)$/,string:{pattern:RegExp("^"+t.source+"$"),alias:"url"}}},selector:{pattern:RegExp("(^|[{}\\s])[^{}\\s](?:[^{};\"'\\s]|\\s+(?![\\s{])|"+t.source+")*(?=\\s*\\{)"),lookbehind:!0},string:{pattern:t,greedy:!0},property:{pattern:/(^|[^-\w\xA0-\uFFFF])(?!\s)[-_a-z\xA0-\uFFFF](?:(?!\s)[-\w\xA0-\uFFFF])*(?=\s*:)/i,lookbehind:!0},important:/!important\b/i,function:{pattern:/(^|[^-a-z0-9])[-a-z0-9]+(?=\()/i,lookbehind:!0},punctuation:/[(){};:,]/},e.languages.css.atrule.inside.rest=e.languages.css;var n=e.languages.markup;n&&(n.tag.addInlined("style","css"),n.tag.addAttribute("style","css"))}(a),function(e){var t,n=/("|')(?:\\(?:\r\n|[\s\S])|(?!\1)[^\\\r\n])*\1/;e.languages.css.selector={pattern:e.languages.css.selector.pattern,lookbehind:!0,inside:t={"pseudo-element":/:(?:after|before|first-letter|first-line|selection)|::[-\w]+/,"pseudo-class":/:[-\w]+/,class:/\.[-\w]+/,id:/#[-\w]+/,attribute:{pattern:RegExp("\\[(?:[^[\\]\"']|"+n.source+")*\\]"),greedy:!0,inside:{punctuation:/^\[|\]$/,"case-sensitivity":{pattern:/(\s)[si]$/i,lookbehind:!0,alias:"keyword"},namespace:{pattern:/^(\s*)(?:(?!\s)[-*\w\xA0-\uFFFF])*\|(?!=)/,lookbehind:!0,inside:{punctuation:/\|$/}},"attr-name":{pattern:/^(\s*)(?:(?!\s)[-\w\xA0-\uFFFF])+/,lookbehind:!0},"attr-value":[n,{pattern:/(=\s*)(?:(?!\s)[-\w\xA0-\uFFFF])+(?=\s*$)/,lookbehind:!0}],operator:/[|~*^$]?=/}},"n-th":[{pattern:/(\(\s*)[+-]?\d*[\dn](?:\s*[+-]\s*\d+)?(?=\s*\))/,lookbehind:!0,inside:{number:/[\dn]+/,operator:/[+-]/}},{pattern:/(\(\s*)(?:even|odd)(?=\s*\))/i,lookbehind:!0}],combinator:/>|\+|~|\|\|/,punctuation:/[(),]/}},e.languages.css.atrule.inside["selector-function-argument"].inside=t,e.languages.insertBefore("css","property",{variable:{pattern:/(^|[^-\w\xA0-\uFFFF])--(?!\s)[-_a-z\xA0-\uFFFF](?:(?!\s)[-\w\xA0-\uFFFF])*/i,lookbehind:!0}});var 
r={pattern:/(\b\d+)(?:%|[a-z]+(?![\w-]))/,lookbehind:!0},a={pattern:/(^|[^\w.-])-?(?:\d+(?:\.\d+)?|\.\d+)/,lookbehind:!0};e.languages.insertBefore("css","function",{operator:{pattern:/(\s)[+\-*\/](?=\s)/,lookbehind:!0},hexcode:{pattern:/\B#[\da-f]{3,8}\b/i,alias:"color"},color:[{pattern:/(^|[^\w-])(?:AliceBlue|AntiqueWhite|Aqua|Aquamarine|Azure|Beige|Bisque|Black|BlanchedAlmond|Blue|BlueViolet|Brown|BurlyWood|CadetBlue|Chartreuse|Chocolate|Coral|CornflowerBlue|Cornsilk|Crimson|Cyan|DarkBlue|DarkCyan|DarkGoldenRod|DarkGr[ae]y|DarkGreen|DarkKhaki|DarkMagenta|DarkOliveGreen|DarkOrange|DarkOrchid|DarkRed|DarkSalmon|DarkSeaGreen|DarkSlateBlue|DarkSlateGr[ae]y|DarkTurquoise|DarkViolet|DeepPink|DeepSkyBlue|DimGr[ae]y|DodgerBlue|FireBrick|FloralWhite|ForestGreen|Fuchsia|Gainsboro|GhostWhite|Gold|GoldenRod|Gr[ae]y|Green|GreenYellow|HoneyDew|HotPink|IndianRed|Indigo|Ivory|Khaki|Lavender|LavenderBlush|LawnGreen|LemonChiffon|LightBlue|LightCoral|LightCyan|LightGoldenRodYellow|LightGr[ae]y|LightGreen|LightPink|LightSalmon|LightSeaGreen|LightSkyBlue|LightSlateGr[ae]y|LightSteelBlue|LightYellow|Lime|LimeGreen|Linen|Magenta|Maroon|MediumAquaMarine|MediumBlue|MediumOrchid|MediumPurple|MediumSeaGreen|MediumSlateBlue|MediumSpringGreen|MediumTurquoise|MediumVioletRed|MidnightBlue|MintCream|MistyRose|Moccasin|NavajoWhite|Navy|OldLace|Olive|OliveDrab|Orange|OrangeRed|Orchid|PaleGoldenRod|PaleGreen|PaleTurquoise|PaleVioletRed|PapayaWhip|PeachPuff|Peru|Pink|Plum|PowderBlue|Purple|Red|RosyBrown|RoyalBlue|SaddleBrown|Salmon|SandyBrown|SeaGreen|SeaShell|Sienna|Silver|SkyBlue|SlateBlue|SlateGr[ae]y|Snow|SpringGreen|SteelBlue|Tan|Teal|Thistle|Tomato|Transparent|Turquoise|Violet|Wheat|White|WhiteSmoke|Yellow|YellowGreen)(?![\w-])/i,lookbehind:!0},{pattern:/\b(?:hsl|rgb)\(\s*\d{1,3}\s*,\s*\d{1,3}%?\s*,\s*\d{1,3}%?\s*\)\B|\b(?:hsl|rgb)a\(\s*\d{1,3}\s*,\s*\d{1,3}%?\s*,\s*\d{1,3}%?\s*,\s*(?:0|0?\.\d+|1)\s*\)\B/i,inside:{unit:r,number:a,function:/[\w-]+(?=\()/,punctuation:/[(),]/}}],entity:/\\[\da-f]{1,8}/i,unit:r,number:a})}(a),a.languages.javascript=a.languages.extend("clike",{"class-name":[a.languages.clike["class-name"],{pattern:/(^|[^$\w\xA0-\uFFFF])(?!\s)[_$A-Z\xA0-\uFFFF](?:(?!\s)[$\w\xA0-\uFFFF])*(?=\.(?:constructor|prototype))/,lookbehind:!0}],keyword:[{pattern:/((?:^|\})\s*)catch\b/,lookbehind:!0},{pattern:/(^|[^.]|\.\.\.\s*)\b(?:as|assert(?=\s*\{)|async(?=\s*(?:function\b|\(|[$\w\xA0-\uFFFF]|$))|await|break|case|class|const|continue|debugger|default|delete|do|else|enum|export|extends|finally(?=\s*(?:\{|$))|for|from(?=\s*(?:['"]|$))|function|(?:get|set)(?=\s*(?:[#\[$\w\xA0-\uFFFF]|$))|if|implements|import|in|instanceof|interface|let|new|null|of|package|private|protected|public|return|static|super|switch|this|throw|try|typeof|undefined|var|void|while|with|yield)\b/,lookbehind:!0}],function:/#?(?!\s)[_$a-zA-Z\xA0-\uFFFF](?:(?!\s)[$\w\xA0-\uFFFF])*(?=\s*(?:\.\s*(?:apply|bind|call)\s*)?\()/,number:{pattern:RegExp(/(^|[^\w$])/.source+"(?:"+/NaN|Infinity/.source+"|"+/0[bB][01]+(?:_[01]+)*n?/.source+"|"+/0[oO][0-7]+(?:_[0-7]+)*n?/.source+"|"+/0[xX][\dA-Fa-f]+(?:_[\dA-Fa-f]+)*n?/.source+"|"+/\d+(?:_\d+)*n/.source+"|"+/(?:\d+(?:_\d+)*(?:\.(?:\d+(?:_\d+)*)?)?|\.\d+(?:_\d+)*)(?:[Ee][+-]?\d+(?:_\d+)*)?/.source+")"+/(?![\w$])/.source),lookbehind:!0},operator:/--|\+\+|\*\*=?|=>|&&=?|\|\|=?|[!=]==|<<=?|>>>?=?|[-+*/%&|^!=<>]=?|\.{3}|\?\?=?|\?\.?|[~:]/}),a.languages.javascript["class-name"][0].pattern=/(\b(?:class|extends|implements|instanceof|interface|new)\s+)[\w.\\]+/,a.languages.insertBefore("javascript","keyword",{regex:{patte
rn:/((?:^|[^$\w\xA0-\uFFFF."'\])\s]|\b(?:return|yield))\s*)\/(?:\[(?:[^\]\\\r\n]|\\.)*\]|\\.|[^/\\\[\r\n])+\/[dgimyus]{0,7}(?=(?:\s|\/\*(?:[^*]|\*(?!\/))*\*\/)*(?:$|[\r\n,.;:})\]]|\/\/))/,lookbehind:!0,greedy:!0,inside:{"regex-source":{pattern:/^(\/)[\s\S]+(?=\/[a-z]*$)/,lookbehind:!0,alias:"language-regex",inside:a.languages.regex},"regex-delimiter":/^\/|\/$/,"regex-flags":/^[a-z]+$/}},"function-variable":{pattern:/#?(?!\s)[_$a-zA-Z\xA0-\uFFFF](?:(?!\s)[$\w\xA0-\uFFFF])*(?=\s*[=:]\s*(?:async\s*)?(?:\bfunction\b|(?:\((?:[^()]|\([^()]*\))*\)|(?!\s)[_$a-zA-Z\xA0-\uFFFF](?:(?!\s)[$\w\xA0-\uFFFF])*)\s*=>))/,alias:"function"},parameter:[{pattern:/(function(?:\s+(?!\s)[_$a-zA-Z\xA0-\uFFFF](?:(?!\s)[$\w\xA0-\uFFFF])*)?\s*\(\s*)(?!\s)(?:[^()\s]|\s+(?![\s)])|\([^()]*\))+(?=\s*\))/,lookbehind:!0,inside:a.languages.javascript},{pattern:/(^|[^$\w\xA0-\uFFFF])(?!\s)[_$a-z\xA0-\uFFFF](?:(?!\s)[$\w\xA0-\uFFFF])*(?=\s*=>)/i,lookbehind:!0,inside:a.languages.javascript},{pattern:/(\(\s*)(?!\s)(?:[^()\s]|\s+(?![\s)])|\([^()]*\))+(?=\s*\)\s*=>)/,lookbehind:!0,inside:a.languages.javascript},{pattern:/((?:\b|\s|^)(?!(?:as|async|await|break|case|catch|class|const|continue|debugger|default|delete|do|else|enum|export|extends|finally|for|from|function|get|if|implements|import|in|instanceof|interface|let|new|null|of|package|private|protected|public|return|set|static|super|switch|this|throw|try|typeof|undefined|var|void|while|with|yield)(?![$\w\xA0-\uFFFF]))(?:(?!\s)[_$a-zA-Z\xA0-\uFFFF](?:(?!\s)[$\w\xA0-\uFFFF])*\s*)\(\s*|\]\s*\(\s*)(?!\s)(?:[^()\s]|\s+(?![\s)])|\([^()]*\))+(?=\s*\)\s*\{)/,lookbehind:!0,inside:a.languages.javascript}],constant:/\b[A-Z](?:[A-Z_]|\dx?)*\b/}),a.languages.insertBefore("javascript","string",{hashbang:{pattern:/^#!.*/,greedy:!0,alias:"comment"},"template-string":{pattern:/`(?:\\[\s\S]|\$\{(?:[^{}]|\{(?:[^{}]|\{[^}]*\})*\})+\}|(?!\$\{)[^\\`])*`/,greedy:!0,inside:{"template-punctuation":{pattern:/^`|`$/,alias:"string"},interpolation:{pattern:/((?:^|[^\\])(?:\\{2})*)\$\{(?:[^{}]|\{(?:[^{}]|\{[^}]*\})*\})+\}/,lookbehind:!0,inside:{"interpolation-punctuation":{pattern:/^\$\{|\}$/,alias:"punctuation"},rest:a.languages.javascript}},string:/[\s\S]+/}},"string-property":{pattern:/((?:^|[,{])[ \t]*)(["'])(?:\\(?:\r\n|[\s\S])|(?!\2)[^\\\r\n])*\2(?=\s*:)/m,lookbehind:!0,greedy:!0,alias:"property"}}),a.languages.insertBefore("javascript","operator",{"literal-property":{pattern:/((?:^|[,{])[ \t]*)(?!\s)[_$a-zA-Z\xA0-\uFFFF](?:(?!\s)[$\w\xA0-\uFFFF])*(?=\s*:)/m,lookbehind:!0,alias:"property"}}),a.languages.markup&&(a.languages.markup.tag.addInlined("script","javascript"),a.languages.markup.tag.addAttribute(/on(?:abort|blur|change|click|composition(?:end|start|update)|dblclick|error|focus(?:in|out)?|key(?:down|up)|load|mouse(?:down|enter|leave|move|out|over|up)|reset|resize|scroll|select|slotchange|submit|unload|wheel)/.source,"javascript")),a.languages.js=a.languages.javascript,function(e){var 
t=/#(?!\{).+/,n={pattern:/#\{[^}]+\}/,alias:"variable"};e.languages.coffeescript=e.languages.extend("javascript",{comment:t,string:[{pattern:/'(?:\\[\s\S]|[^\\'])*'/,greedy:!0},{pattern:/"(?:\\[\s\S]|[^\\"])*"/,greedy:!0,inside:{interpolation:n}}],keyword:/\b(?:and|break|by|catch|class|continue|debugger|delete|do|each|else|extend|extends|false|finally|for|if|in|instanceof|is|isnt|let|loop|namespace|new|no|not|null|of|off|on|or|own|return|super|switch|then|this|throw|true|try|typeof|undefined|unless|until|when|while|window|with|yes|yield)\b/,"class-member":{pattern:/@(?!\d)\w+/,alias:"variable"}}),e.languages.insertBefore("coffeescript","comment",{"multiline-comment":{pattern:/###[\s\S]+?###/,alias:"comment"},"block-regex":{pattern:/\/{3}[\s\S]*?\/{3}/,alias:"regex",inside:{comment:t,interpolation:n}}}),e.languages.insertBefore("coffeescript","string",{"inline-javascript":{pattern:/`(?:\\[\s\S]|[^\\`])*`/,inside:{delimiter:{pattern:/^`|`$/,alias:"punctuation"},script:{pattern:/[\s\S]+/,alias:"language-javascript",inside:e.languages.javascript}}},"multiline-string":[{pattern:/'''[\s\S]*?'''/,greedy:!0,alias:"string"},{pattern:/"""[\s\S]*?"""/,greedy:!0,alias:"string",inside:{interpolation:n}}]}),e.languages.insertBefore("coffeescript","keyword",{property:/(?!\d)\w+(?=\s*:(?!:))/}),delete e.languages.coffeescript["template-string"],e.languages.coffee=e.languages.coffeescript}(a),function(e){var t=/[*&][^\s[\]{},]+/,n=/!(?:<[\w\-%#;/?:@&=+$,.!~*'()[\]]+>|(?:[a-zA-Z\d-]*!)?[\w\-%#;/?:@&=+$.~*'()]+)?/,r="(?:"+n.source+"(?:[ \t]+"+t.source+")?|"+t.source+"(?:[ \t]+"+n.source+")?)",a=/(?:[^\s\x00-\x08\x0e-\x1f!"#%&'*,\-:>?@[\]`{|}\x7f-\x84\x86-\x9f\ud800-\udfff\ufffe\uffff]|[?:-])(?:[ \t]*(?:(?![#:])|:))*/.source.replace(//g,(function(){return/[^\s\x00-\x08\x0e-\x1f,[\]{}\x7f-\x84\x86-\x9f\ud800-\udfff\ufffe\uffff]/.source})),o=/"(?:[^"\\\r\n]|\\.)*"|'(?:[^'\\\r\n]|\\.)*'/.source;function i(e,t){t=(t||"").replace(/m/g,"")+"m";var n=/([:\-,[{]\s*(?:\s<>[ \t]+)?)(?:<>)(?=[ \t]*(?:$|,|\]|\}|(?:[\r\n]\s*)?#))/.source.replace(/<>/g,(function(){return r})).replace(/<>/g,(function(){return e}));return RegExp(n,t)}e.languages.yaml={scalar:{pattern:RegExp(/([\-:]\s*(?:\s<>[ \t]+)?[|>])[ \t]*(?:((?:\r?\n|\r)[ \t]+)\S[^\r\n]*(?:\2[^\r\n]+)*)/.source.replace(/<>/g,(function(){return r}))),lookbehind:!0,alias:"string"},comment:/#.*/,key:{pattern:RegExp(/((?:^|[:\-,[{\r\n?])[ \t]*(?:<>[ \t]+)?)<>(?=\s*:\s)/.source.replace(/<>/g,(function(){return r})).replace(/<>/g,(function(){return"(?:"+a+"|"+o+")"}))),lookbehind:!0,greedy:!0,alias:"atrule"},directive:{pattern:/(^[ \t]*)%.+/m,lookbehind:!0,alias:"important"},datetime:{pattern:i(/\d{4}-\d\d?-\d\d?(?:[tT]|[ \t]+)\d\d?:\d{2}:\d{2}(?:\.\d*)?(?:[ \t]*(?:Z|[-+]\d\d?(?::\d{2})?))?|\d{4}-\d{2}-\d{2}|\d\d?:\d{2}(?::\d{2}(?:\.\d*)?)?/.source),lookbehind:!0,alias:"number"},boolean:{pattern:i(/false|true/.source,"i"),lookbehind:!0,alias:"important"},null:{pattern:i(/null|~/.source,"i"),lookbehind:!0,alias:"important"},string:{pattern:i(o),lookbehind:!0,greedy:!0},number:{pattern:i(/[+-]?(?:0x[\da-f]+|0o[0-7]+|(?:\d+(?:\.\d*)?|\.\d+)(?:e[+-]?\d+)?|\.inf|\.nan)/.source,"i"),lookbehind:!0},tag:n,important:t,punctuation:/---|[:[\]{}\-,|>?]|\.\.\./},e.languages.yml=e.languages.yaml}(a),function(e){var t=/(?:\\.|[^\\\n\r]|(?:\n|\r\n?)(?![\r\n]))/.source;function n(e){return e=e.replace(//g,(function(){return t})),RegExp(/((?:^|[^\\])(?:\\{2})*)/.source+"(?:"+e+")")}var 
r=/(?:\\.|``(?:[^`\r\n]|`(?!`))+``|`[^`\r\n]+`|[^\\|\r\n`])+/.source,a=/\|?__(?:\|__)+\|?(?:(?:\n|\r\n?)|(?![\s\S]))/.source.replace(/__/g,(function(){return r})),o=/\|?[ \t]*:?-{3,}:?[ \t]*(?:\|[ \t]*:?-{3,}:?[ \t]*)+\|?(?:\n|\r\n?)/.source;e.languages.markdown=e.languages.extend("markup",{}),e.languages.insertBefore("markdown","prolog",{"front-matter-block":{pattern:/(^(?:\s*[\r\n])?)---(?!.)[\s\S]*?[\r\n]---(?!.)/,lookbehind:!0,greedy:!0,inside:{punctuation:/^---|---$/,"front-matter":{pattern:/\S+(?:\s+\S+)*/,alias:["yaml","language-yaml"],inside:e.languages.yaml}}},blockquote:{pattern:/^>(?:[\t ]*>)*/m,alias:"punctuation"},table:{pattern:RegExp("^"+a+o+"(?:"+a+")*","m"),inside:{"table-data-rows":{pattern:RegExp("^("+a+o+")(?:"+a+")*$"),lookbehind:!0,inside:{"table-data":{pattern:RegExp(r),inside:e.languages.markdown},punctuation:/\|/}},"table-line":{pattern:RegExp("^("+a+")"+o+"$"),lookbehind:!0,inside:{punctuation:/\||:?-{3,}:?/}},"table-header-row":{pattern:RegExp("^"+a+"$"),inside:{"table-header":{pattern:RegExp(r),alias:"important",inside:e.languages.markdown},punctuation:/\|/}}}},code:[{pattern:/((?:^|\n)[ \t]*\n|(?:^|\r\n?)[ \t]*\r\n?)(?: {4}|\t).+(?:(?:\n|\r\n?)(?: {4}|\t).+)*/,lookbehind:!0,alias:"keyword"},{pattern:/^```[\s\S]*?^```$/m,greedy:!0,inside:{"code-block":{pattern:/^(```.*(?:\n|\r\n?))[\s\S]+?(?=(?:\n|\r\n?)^```$)/m,lookbehind:!0},"code-language":{pattern:/^(```).+/,lookbehind:!0},punctuation:/```/}}],title:[{pattern:/\S.*(?:\n|\r\n?)(?:==+|--+)(?=[ \t]*$)/m,alias:"important",inside:{punctuation:/==+$|--+$/}},{pattern:/(^\s*)#.+/m,lookbehind:!0,alias:"important",inside:{punctuation:/^#+|#+$/}}],hr:{pattern:/(^\s*)([*-])(?:[\t ]*\2){2,}(?=\s*$)/m,lookbehind:!0,alias:"punctuation"},list:{pattern:/(^\s*)(?:[*+-]|\d+\.)(?=[\t ].)/m,lookbehind:!0,alias:"punctuation"},"url-reference":{pattern:/!?\[[^\]]+\]:[\t ]+(?:\S+|<(?:\\.|[^>\\])+>)(?:[\t ]+(?:"(?:\\.|[^"\\])*"|'(?:\\.|[^'\\])*'|\((?:\\.|[^)\\])*\)))?/,inside:{variable:{pattern:/^(!?\[)[^\]]+/,lookbehind:!0},string:/(?:"(?:\\.|[^"\\])*"|'(?:\\.|[^'\\])*'|\((?:\\.|[^)\\])*\))$/,punctuation:/^[\[\]!:]|[<>]/},alias:"url"},bold:{pattern:n(/\b__(?:(?!_)|_(?:(?!_))+_)+__\b|\*\*(?:(?!\*)|\*(?:(?!\*))+\*)+\*\*/.source),lookbehind:!0,greedy:!0,inside:{content:{pattern:/(^..)[\s\S]+(?=..$)/,lookbehind:!0,inside:{}},punctuation:/\*\*|__/}},italic:{pattern:n(/\b_(?:(?!_)|__(?:(?!_))+__)+_\b|\*(?:(?!\*)|\*\*(?:(?!\*))+\*\*)+\*/.source),lookbehind:!0,greedy:!0,inside:{content:{pattern:/(^.)[\s\S]+(?=.$)/,lookbehind:!0,inside:{}},punctuation:/[*_]/}},strike:{pattern:n(/(~~?)(?:(?!~))+\2/.source),lookbehind:!0,greedy:!0,inside:{content:{pattern:/(^~~?)[\s\S]+(?=\1$)/,lookbehind:!0,inside:{}},punctuation:/~~?/}},"code-snippet":{pattern:/(^|[^\\`])(?:``[^`\r\n]+(?:`[^`\r\n]+)*``(?!`)|`[^`\r\n]+`(?!`))/,lookbehind:!0,greedy:!0,alias:["code","keyword"]},url:{pattern:n(/!?\[(?:(?!\]))+\](?:\([^\s)]+(?:[\t ]+"(?:\\.|[^"\\])*")?\)|[ \t]?\[(?:(?!\]))+\])/.source),lookbehind:!0,greedy:!0,inside:{operator:/^!/,content:{pattern:/(^\[)[^\]]+(?=\])/,lookbehind:!0,inside:{}},variable:{pattern:/(^\][ \t]?\[)[^\]]+(?=\]$)/,lookbehind:!0},url:{pattern:/(^\]\()[^\s)]+/,lookbehind:!0},string:{pattern:/(^[ 
\t]+)"(?:\\.|[^"\\])*"(?=\)$)/,lookbehind:!0}}}}),["url","bold","italic","strike"].forEach((function(t){["url","bold","italic","strike","code-snippet"].forEach((function(n){t!==n&&(e.languages.markdown[t].inside.content.inside[n]=e.languages.markdown[n])}))})),e.hooks.add("after-tokenize",(function(e){"markdown"!==e.language&&"md"!==e.language||function e(t){if(t&&"string"!=typeof t)for(var n=0,r=t.length;n",quot:'"'},s=String.fromCodePoint||String.fromCharCode;e.languages.md=e.languages.markdown}(a),a.languages.graphql={comment:/#.*/,description:{pattern:/(?:"""(?:[^"]|(?!""")")*"""|"(?:\\.|[^\\"\r\n])*")(?=\s*[a-z_])/i,greedy:!0,alias:"string",inside:{"language-markdown":{pattern:/(^"(?:"")?)(?!\1)[\s\S]+(?=\1$)/,lookbehind:!0,inside:a.languages.markdown}}},string:{pattern:/"""(?:[^"]|(?!""")")*"""|"(?:\\.|[^\\"\r\n])*"/,greedy:!0},number:/(?:\B-|\b)\d+(?:\.\d+)?(?:e[+-]?\d+)?\b/i,boolean:/\b(?:false|true)\b/,variable:/\$[a-z_]\w*/i,directive:{pattern:/@[a-z_]\w*/i,alias:"function"},"attr-name":{pattern:/\b[a-z_]\w*(?=\s*(?:\((?:[^()"]|"(?:\\.|[^\\"\r\n])*")*\))?:)/i,greedy:!0},"atom-input":{pattern:/\b[A-Z]\w*Input\b/,alias:"class-name"},scalar:/\b(?:Boolean|Float|ID|Int|String)\b/,constant:/\b[A-Z][A-Z_\d]*\b/,"class-name":{pattern:/(\b(?:enum|implements|interface|on|scalar|type|union)\s+|&\s*|:\s*|\[)[A-Z_]\w*/,lookbehind:!0},fragment:{pattern:/(\bfragment\s+|\.{3}\s*(?!on\b))[a-zA-Z_]\w*/,lookbehind:!0,alias:"function"},"definition-mutation":{pattern:/(\bmutation\s+)[a-zA-Z_]\w*/,lookbehind:!0,alias:"function"},"definition-query":{pattern:/(\bquery\s+)[a-zA-Z_]\w*/,lookbehind:!0,alias:"function"},keyword:/\b(?:directive|enum|extend|fragment|implements|input|interface|mutation|on|query|repeatable|scalar|schema|subscription|type|union)\b/,operator:/[!=|&]|\.{3}/,"property-query":/\w+(?=\s*\()/,object:/\w+(?=\s*\{)/,punctuation:/[!(){}\[\]:=,]/,property:/\w+/},a.hooks.add("after-tokenize",(function(e){if("graphql"===e.language)for(var t=e.tokens.filter((function(e){return"string"!=typeof e&&"comment"!==e.type&&"scalar"!==e.type})),n=0;n0)){var l=f(/^\{$/,/^\}$/);if(-1===l)continue;for(var s=n;s=0&&p(u,"variable-input")}}}}function c(e){return t[n+e]}function d(e,t){t=t||0;for(var n=0;n?|<|>)?|>[>=]?|\b(?:AND|BETWEEN|DIV|ILIKE|IN|IS|LIKE|NOT|OR|REGEXP|RLIKE|SOUNDS LIKE|XOR)\b/i,punctuation:/[;[\]()`,.]/},function(e){var t=e.languages.javascript["template-string"],n=t.pattern.source,r=t.inside.interpolation,a=r.inside["interpolation-punctuation"],o=r.pattern.source;function i(t,r){if(e.languages[t])return{pattern:RegExp("((?:"+r+")\\s*)"+n),lookbehind:!0,greedy:!0,inside:{"template-punctuation":{pattern:/^`|`$/,alias:"string"},"embedded-code":{pattern:/[\s\S]+/,alias:t}}}}function l(e,t){return"___"+t.toUpperCase()+"_"+e+"___"}function s(t,n,r){var a={code:t,grammar:n,language:r};return e.hooks.run("before-tokenize",a),a.tokens=e.tokenize(a.code,a.grammar),e.hooks.run("after-tokenize",a),a.tokens}function u(t){var n={};n["interpolation-punctuation"]=a;var o=e.tokenize(t,n);if(3===o.length){var i=[1,1];i.push.apply(i,s(o[1],e.languages.javascript,"javascript")),o.splice.apply(o,i)}return new e.Token("interpolation",o,r.alias,t)}function c(t,n,r){var a=e.tokenize(t,{interpolation:{pattern:RegExp(o),lookbehind:!0}}),i=0,c={},d=s(a.map((function(e){if("string"==typeof e)return e;for(var n,a=e.content;-1!==t.indexOf(n=l(i++,r)););return c[n]=a,n})).join(""),n,r),f=Object.keys(c);return i=0,function e(t){for(var n=0;n=f.length)return;var r=t[n];if("string"==typeof r||"string"==typeof 
r.content){var a=f[i],o="string"==typeof r?r:r.content,l=o.indexOf(a);if(-1!==l){++i;var s=o.substring(0,l),d=u(c[a]),p=o.substring(l+a.length),h=[];if(s&&h.push(s),h.push(d),p){var m=[p];e(m),h.push.apply(h,m)}"string"==typeof r?(t.splice.apply(t,[n,1].concat(h)),n+=h.length-1):r.content=h}}else{var g=r.content;Array.isArray(g)?e(g):e([g])}}}(d),new e.Token(r,d,"language-"+r,t)}e.languages.javascript["template-string"]=[i("css",/\b(?:styled(?:\([^)]*\))?(?:\s*\.\s*\w+(?:\([^)]*\))*)*|css(?:\s*\.\s*(?:global|resolve))?|createGlobalStyle|keyframes)/.source),i("html",/\bhtml|\.\s*(?:inner|outer)HTML\s*\+?=/.source),i("svg",/\bsvg/.source),i("markdown",/\b(?:markdown|md)/.source),i("graphql",/\b(?:gql|graphql(?:\s*\.\s*experimental)?)/.source),i("sql",/\bsql/.source),t].filter(Boolean);var d={javascript:!0,js:!0,typescript:!0,ts:!0,jsx:!0,tsx:!0};function f(e){return"string"==typeof e?e:Array.isArray(e)?e.map(f).join(""):f(e.content)}e.hooks.add("after-tokenize",(function(t){t.language in d&&function t(n){for(var r=0,a=n.length;r]|<(?:[^<>]|<[^<>]*>)*>)*>)?/,lookbehind:!0,greedy:!0,inside:null},builtin:/\b(?:Array|Function|Promise|any|boolean|console|never|number|string|symbol|unknown)\b/}),e.languages.typescript.keyword.push(/\b(?:abstract|declare|is|keyof|readonly|require)\b/,/\b(?:asserts|infer|interface|module|namespace|type)\b(?=\s*(?:[{_$a-zA-Z\xA0-\uFFFF]|$))/,/\btype\b(?=\s*(?:[\{*]|$))/),delete e.languages.typescript.parameter,delete e.languages.typescript["literal-property"];var t=e.languages.extend("typescript",{});delete t["class-name"],e.languages.typescript["class-name"].inside=t,e.languages.insertBefore("typescript","function",{decorator:{pattern:/@[$\w\xA0-\uFFFF]+/,inside:{at:{pattern:/^@/,alias:"operator"},function:/^[\s\S]+/}},"generic-function":{pattern:/#?(?!\s)[_$a-zA-Z\xA0-\uFFFF](?:(?!\s)[$\w\xA0-\uFFFF])*\s*<(?:[^<>]|<(?:[^<>]|<[^<>]*>)*>)*>(?=\s*\()/,greedy:!0,inside:{function:/^#?(?!\s)[_$a-zA-Z\xA0-\uFFFF](?:(?!\s)[$\w\xA0-\uFFFF])*/,generic:{pattern:/<[\s\S]+/,alias:"class-name",inside:t}}}}),e.languages.ts=e.languages.typescript}(a),function(e){function t(e,t){return 
RegExp(e.replace(//g,(function(){return/(?!\s)[_$a-zA-Z\xA0-\uFFFF](?:(?!\s)[$\w\xA0-\uFFFF])*/.source})),t)}e.languages.insertBefore("javascript","function-variable",{"method-variable":{pattern:RegExp("(\\.\\s*)"+e.languages.javascript["function-variable"].pattern.source),lookbehind:!0,alias:["function-variable","method","function","property-access"]}}),e.languages.insertBefore("javascript","function",{method:{pattern:RegExp("(\\.\\s*)"+e.languages.javascript.function.source),lookbehind:!0,alias:["function","property-access"]}}),e.languages.insertBefore("javascript","constant",{"known-class-name":[{pattern:/\b(?:(?:Float(?:32|64)|(?:Int|Uint)(?:8|16|32)|Uint8Clamped)?Array|ArrayBuffer|BigInt|Boolean|DataView|Date|Error|Function|Intl|JSON|(?:Weak)?(?:Map|Set)|Math|Number|Object|Promise|Proxy|Reflect|RegExp|String|Symbol|WebAssembly)\b/,alias:"class-name"},{pattern:/\b(?:[A-Z]\w*)Error\b/,alias:"class-name"}]}),e.languages.insertBefore("javascript","keyword",{imports:{pattern:t(/(\bimport\b\s*)(?:(?:\s*,\s*(?:\*\s*as\s+|\{[^{}]*\}))?|\*\s*as\s+|\{[^{}]*\})(?=\s*\bfrom\b)/.source),lookbehind:!0,inside:e.languages.javascript},exports:{pattern:t(/(\bexport\b\s*)(?:\*(?:\s*as\s+)?(?=\s*\bfrom\b)|\{[^{}]*\})/.source),lookbehind:!0,inside:e.languages.javascript}}),e.languages.javascript.keyword.unshift({pattern:/\b(?:as|default|export|from|import)\b/,alias:"module"},{pattern:/\b(?:await|break|catch|continue|do|else|finally|for|if|return|switch|throw|try|while|yield)\b/,alias:"control-flow"},{pattern:/\bnull\b/,alias:["null","nil"]},{pattern:/\bundefined\b/,alias:"nil"}),e.languages.insertBefore("javascript","operator",{spread:{pattern:/\.{3}/,alias:"operator"},arrow:{pattern:/=>/,alias:"operator"}}),e.languages.insertBefore("javascript","punctuation",{"property-access":{pattern:t(/(\.\s*)#?/.source),lookbehind:!0},"maybe-class-name":{pattern:/(^|[^$\w\xA0-\uFFFF])[A-Z][$\w\xA0-\uFFFF]+/,lookbehind:!0},dom:{pattern:/\b(?:document|(?:local|session)Storage|location|navigator|performance|window)\b/,alias:"variable"},console:{pattern:/\bconsole(?=\s*\.)/,alias:"class-name"}});for(var n=["function","function-variable","method","method-variable","property-access"],r=0;r*\.{3}(?:[^{}]|)*\})/.source;function o(e,t){return e=e.replace(//g,(function(){return n})).replace(//g,(function(){return r})).replace(//g,(function(){return a})),RegExp(e,t)}a=o(a).source,e.languages.jsx=e.languages.extend("markup",t),e.languages.jsx.tag.pattern=o(/<\/?(?:[\w.:-]+(?:+(?:[\w.:$-]+(?:=(?:"(?:\\[\s\S]|[^\\"])*"|'(?:\\[\s\S]|[^\\'])*'|[^\s{'"/>=]+|))?|))**\/?)?>/.source),e.languages.jsx.tag.inside.tag.pattern=/^<\/?[^\s>\/]*/,e.languages.jsx.tag.inside["attr-value"].pattern=/=(?!\{)(?:"(?:\\[\s\S]|[^\\"])*"|'(?:\\[\s\S]|[^\\'])*'|[^\s'">]+)/,e.languages.jsx.tag.inside.tag.inside["class-name"]=/^[A-Z]\w*(?:\.[A-Z]\w*)*$/,e.languages.jsx.tag.inside.comment=t.comment,e.languages.insertBefore("inside","attr-name",{spread:{pattern:o(//.source),inside:e.languages.jsx}},e.languages.jsx.tag),e.languages.insertBefore("inside","special-attr",{script:{pattern:o(/=/.source),alias:"language-javascript",inside:{"script-punctuation":{pattern:/^=(?=\{)/,alias:"punctuation"},rest:e.languages.jsx}}},e.languages.jsx.tag);var i=function(e){return e?"string"==typeof e?e:"string"==typeof e.content?e.content:e.content.map(i).join(""):""},l=function(t){for(var 
n=[],r=0;r0&&n[n.length-1].tagName===i(a.content[0].content[1])&&n.pop():"/>"===a.content[a.content.length-1].content||n.push({tagName:i(a.content[0].content[1]),openedBraces:0}):n.length>0&&"punctuation"===a.type&&"{"===a.content?n[n.length-1].openedBraces++:n.length>0&&n[n.length-1].openedBraces>0&&"punctuation"===a.type&&"}"===a.content?n[n.length-1].openedBraces--:o=!0),(o||"string"==typeof a)&&n.length>0&&0===n[n.length-1].openedBraces){var s=i(a);r0&&("string"==typeof t[r-1]||"plain-text"===t[r-1].type)&&(s=i(t[r-1])+s,t.splice(r-1,1),r--),t[r]=new e.Token("plain-text",s,null,s)}a.content&&"string"!=typeof a.content&&l(a.content)}};e.hooks.add("after-tokenize",(function(e){"jsx"!==e.language&&"tsx"!==e.language||l(e.tokens)}))}(a),function(e){e.languages.diff={coord:[/^(?:\*{3}|-{3}|\+{3}).*$/m,/^@@.*@@$/m,/^\d.*$/m]};var t={"deleted-sign":"-","deleted-arrow":"<","inserted-sign":"+","inserted-arrow":">",unchanged:" ",diff:"!"};Object.keys(t).forEach((function(n){var r=t[n],a=[];/^\w+$/.test(n)||a.push(/\w+/.exec(n)[0]),"diff"===n&&a.push("bold"),e.languages.diff[n]={pattern:RegExp("^(?:["+r+"].*(?:\r\n?|\n|(?![\\s\\S])))+","m"),alias:a,inside:{line:{pattern:/(.)(?=[\s\S]).*(?:\r\n?|\n)?/,lookbehind:!0},prefix:{pattern:/[\s\S]/,alias:/\w+/.exec(n)[0]}}}})),Object.defineProperty(e.languages.diff,"PREFIXES",{value:t})}(a),a.languages.git={comment:/^#.*/m,deleted:/^[-\u2013].*/m,inserted:/^\+.*/m,string:/("|')(?:\\.|(?!\1)[^\\\r\n])*\1/,command:{pattern:/^.*\$ git .*$/m,inside:{parameter:/\s--?\w+/}},coord:/^@@.*@@$/m,"commit-sha1":/^commit \w{40}$/m},a.languages.go=a.languages.extend("clike",{string:{pattern:/(^|[^\\])"(?:\\.|[^"\\\r\n])*"|`[^`]*`/,lookbehind:!0,greedy:!0},keyword:/\b(?:break|case|chan|const|continue|default|defer|else|fallthrough|for|func|go(?:to)?|if|import|interface|map|package|range|return|select|struct|switch|type|var)\b/,boolean:/\b(?:_|false|iota|nil|true)\b/,number:[/\b0(?:b[01_]+|o[0-7_]+)i?\b/i,/\b0x(?:[a-f\d_]+(?:\.[a-f\d_]*)?|\.[a-f\d_]+)(?:p[+-]?\d+(?:_\d+)*)?i?(?!\w)/i,/(?:\b\d[\d_]*(?:\.[\d_]*)?|\B\.\d[\d_]*)(?:e[+-]?[\d_]+)?i?(?!\w)/i],operator:/[*\/%^!=]=?|\+[=+]?|-[=-]?|\|[=|]?|&(?:=|&|\^=?)?|>(?:>=?|=)?|<(?:<=?|=|-)?|:=|\.\.\./,builtin:/\b(?:append|bool|byte|cap|close|complex|complex(?:64|128)|copy|delete|error|float(?:32|64)|u?int(?:8|16|32|64)?|imag|len|make|new|panic|print(?:ln)?|real|recover|rune|string|uintptr)\b/}),a.languages.insertBefore("go","string",{char:{pattern:/'(?:\\.|[^'\\\r\n]){0,10}'/,greedy:!0}}),delete a.languages.go["class-name"],function(e){function t(e,t){return"___"+e.toUpperCase()+t+"___"}Object.defineProperties(e.languages["markup-templating"]={},{buildPlaceholders:{value:function(n,r,a,o){if(n.language===r){var i=n.tokenStack=[];n.code=n.code.replace(a,(function(e){if("function"==typeof o&&!o(e))return e;for(var a,l=i.length;-1!==n.code.indexOf(a=t(r,l));)++l;return i[l]=e,a})),n.grammar=e.languages.markup}}},tokenizePlaceholders:{value:function(n,r){if(n.language===r&&n.tokenStack){n.grammar=e.languages[r];var a=0,o=Object.keys(n.tokenStack);!function i(l){for(var s=0;s=o.length);s++){var u=l[s];if("string"==typeof u||u.content&&"string"==typeof u.content){var c=o[a],d=n.tokenStack[c],f="string"==typeof u?u:u.content,p=t(r,c),h=f.indexOf(p);if(h>-1){++a;var m=f.substring(0,h),g=new e.Token(r,e.tokenize(d,n.grammar),"language-"+r,d),b=f.substring(h+p.length),v=[];m&&v.push.apply(v,i([m])),v.push(g),b&&v.push.apply(v,i([b])),"string"==typeof u?l.splice.apply(l,[s,1].concat(v)):u.content=v}}else u.content&&i(u.content)}return 
l}(n.tokens)}}}})}(a),function(e){e.languages.handlebars={comment:/\{\{![\s\S]*?\}\}/,delimiter:{pattern:/^\{\{\{?|\}\}\}?$/,alias:"punctuation"},string:/(["'])(?:\\.|(?!\1)[^\\\r\n])*\1/,number:/\b0x[\dA-Fa-f]+\b|(?:\b\d+(?:\.\d*)?|\B\.\d+)(?:[Ee][+-]?\d+)?/,boolean:/\b(?:false|true)\b/,block:{pattern:/^(\s*(?:~\s*)?)[#\/]\S+?(?=\s*(?:~\s*)?$|\s)/,lookbehind:!0,alias:"keyword"},brackets:{pattern:/\[[^\]]+\]/,inside:{punctuation:/\[|\]/,variable:/[\s\S]+/}},punctuation:/[!"#%&':()*+,.\/;<=>@\[\\\]^`{|}~]/,variable:/[^!"#%&'()*+,\/;<=>@\[\\\]^`{|}~\s]+/},e.hooks.add("before-tokenize",(function(t){e.languages["markup-templating"].buildPlaceholders(t,"handlebars",/\{\{\{[\s\S]+?\}\}\}|\{\{[\s\S]+?\}\}/g)})),e.hooks.add("after-tokenize",(function(t){e.languages["markup-templating"].tokenizePlaceholders(t,"handlebars")})),e.languages.hbs=e.languages.handlebars}(a),a.languages.json={property:{pattern:/(^|[^\\])"(?:\\.|[^\\"\r\n])*"(?=\s*:)/,lookbehind:!0,greedy:!0},string:{pattern:/(^|[^\\])"(?:\\.|[^\\"\r\n])*"(?!\s*:)/,lookbehind:!0,greedy:!0},comment:{pattern:/\/\/.*|\/\*[\s\S]*?(?:\*\/|$)/,greedy:!0},number:/-?\b\d+(?:\.\d+)?(?:e[+-]?\d+)?\b/i,punctuation:/[{}[\],]/,operator:/:/,boolean:/\b(?:false|true)\b/,null:{pattern:/\bnull\b/,alias:"keyword"}},a.languages.webmanifest=a.languages.json,a.languages.less=a.languages.extend("css",{comment:[/\/\*[\s\S]*?\*\//,{pattern:/(^|[^\\])\/\/.*/,lookbehind:!0}],atrule:{pattern:/@[\w-](?:\((?:[^(){}]|\([^(){}]*\))*\)|[^(){};\s]|\s+(?!\s))*?(?=\s*\{)/,inside:{punctuation:/[:()]/}},selector:{pattern:/(?:@\{[\w-]+\}|[^{};\s@])(?:@\{[\w-]+\}|\((?:[^(){}]|\([^(){}]*\))*\)|[^(){};@\s]|\s+(?!\s))*?(?=\s*\{)/,inside:{variable:/@+[\w-]+/}},property:/(?:@\{[\w-]+\}|[\w-])+(?:\+_?)?(?=\s*:)/,operator:/[+\-*\/]/}),a.languages.insertBefore("less","property",{variable:[{pattern:/@[\w-]+\s*:/,inside:{punctuation:/:/}},/@@?[\w-]+/],"mixin-usage":{pattern:/([{;]\s*)[.#](?!\d)[\w-].*?(?=[(;])/,lookbehind:!0,alias:"function"}}),a.languages.makefile={comment:{pattern:/(^|[^\\])#(?:\\(?:\r\n|[\s\S])|[^\\\r\n])*/,lookbehind:!0},string:{pattern:/(["'])(?:\\(?:\r\n|[\s\S])|(?!\1)[^\\\r\n])*\1/,greedy:!0},"builtin-target":{pattern:/\.[A-Z][^:#=\s]+(?=\s*:(?!=))/,alias:"builtin"},target:{pattern:/^(?:[^:=\s]|[ \t]+(?![\s:]))+(?=\s*:(?!=))/m,alias:"symbol",inside:{variable:/\$+(?:(?!\$)[^(){}:#=\s]+|(?=[({]))/}},variable:/\$+(?:(?!\$)[^(){}:#=\s]+|\([@*%<^+?][DF]\)|(?=[({]))/,keyword:/-include\b|\b(?:define|else|endef|endif|export|ifn?def|ifn?eq|include|override|private|sinclude|undefine|unexport|vpath)\b/,function:{pattern:/(\()(?:abspath|addsuffix|and|basename|call|dir|error|eval|file|filter(?:-out)?|findstring|firstword|flavor|foreach|guile|if|info|join|lastword|load|notdir|or|origin|patsubst|realpath|shell|sort|strip|subst|suffix|value|warning|wildcard|word(?:list|s)?)(?=[ \t])/,lookbehind:!0},operator:/(?:::|[?:+!])?=|[|@]/,punctuation:/[:;(){}]/},a.languages.objectivec=a.languages.extend("c",{string:{pattern:/@?"(?:\\(?:\r\n|[\s\S])|[^"\\\r\n])*"/,greedy:!0},keyword:/\b(?:asm|auto|break|case|char|const|continue|default|do|double|else|enum|extern|float|for|goto|if|in|inline|int|long|register|return|self|short|signed|sizeof|static|struct|super|switch|typedef|typeof|union|unsigned|void|volatile|while)\b|(?:@interface|@end|@implementation|@protocol|@class|@public|@protected|@private|@property|@try|@catch|@finally|@throw|@synthesize|@dynamic|@selector)\b/,operator:/-[->]?|\+\+?|!=?|<>?=?|==?|&&?|\|\|?|[~^%?*\/@]/}),delete 
a.languages.objectivec["class-name"],a.languages.objc=a.languages.objectivec,a.languages.ocaml={comment:{pattern:/\(\*[\s\S]*?\*\)/,greedy:!0},char:{pattern:/'(?:[^\\\r\n']|\\(?:.|[ox]?[0-9a-f]{1,3}))'/i,greedy:!0},string:[{pattern:/"(?:\\(?:[\s\S]|\r\n)|[^\\\r\n"])*"/,greedy:!0},{pattern:/\{([a-z_]*)\|[\s\S]*?\|\1\}/,greedy:!0}],number:[/\b(?:0b[01][01_]*|0o[0-7][0-7_]*)\b/i,/\b0x[a-f0-9][a-f0-9_]*(?:\.[a-f0-9_]*)?(?:p[+-]?\d[\d_]*)?(?!\w)/i,/\b\d[\d_]*(?:\.[\d_]*)?(?:e[+-]?\d[\d_]*)?(?!\w)/i],directive:{pattern:/\B#\w+/,alias:"property"},label:{pattern:/\B~\w+/,alias:"property"},"type-variable":{pattern:/\B'\w+/,alias:"function"},variant:{pattern:/`\w+/,alias:"symbol"},keyword:/\b(?:as|assert|begin|class|constraint|do|done|downto|else|end|exception|external|for|fun|function|functor|if|in|include|inherit|initializer|lazy|let|match|method|module|mutable|new|nonrec|object|of|open|private|rec|sig|struct|then|to|try|type|val|value|virtual|when|where|while|with)\b/,boolean:/\b(?:false|true)\b/,"operator-like-punctuation":{pattern:/\[[<>|]|[>|]\]|\{<|>\}/,alias:"punctuation"},operator:/\.[.~]|:[=>]|[=<>@^|&+\-*\/$%!?~][!$%&*+\-.\/:<=>?@^|~]*|\b(?:and|asr|land|lor|lsl|lsr|lxor|mod|or)\b/,punctuation:/;;|::|[(){}\[\].,:;#]|\b_\b/},a.languages.python={comment:{pattern:/(^|[^\\])#.*/,lookbehind:!0,greedy:!0},"string-interpolation":{pattern:/(?:f|fr|rf)(?:("""|''')[\s\S]*?\1|("|')(?:\\.|(?!\2)[^\\\r\n])*\2)/i,greedy:!0,inside:{interpolation:{pattern:/((?:^|[^{])(?:\{\{)*)\{(?!\{)(?:[^{}]|\{(?!\{)(?:[^{}]|\{(?!\{)(?:[^{}])+\})+\})+\}/,lookbehind:!0,inside:{"format-spec":{pattern:/(:)[^:(){}]+(?=\}$)/,lookbehind:!0},"conversion-option":{pattern:/![sra](?=[:}]$)/,alias:"punctuation"},rest:null}},string:/[\s\S]+/}},"triple-quoted-string":{pattern:/(?:[rub]|br|rb)?("""|''')[\s\S]*?\1/i,greedy:!0,alias:"string"},string:{pattern:/(?:[rub]|br|rb)?("|')(?:\\.|(?!\1)[^\\\r\n])*\1/i,greedy:!0},function:{pattern:/((?:^|\s)def[ \t]+)[a-zA-Z_]\w*(?=\s*\()/g,lookbehind:!0},"class-name":{pattern:/(\bclass\s+)\w+/i,lookbehind:!0},decorator:{pattern:/(^[\t 
]*)@\w+(?:\.\w+)*/m,lookbehind:!0,alias:["annotation","punctuation"],inside:{punctuation:/\./}},keyword:/\b(?:_(?=\s*:)|and|as|assert|async|await|break|case|class|continue|def|del|elif|else|except|exec|finally|for|from|global|if|import|in|is|lambda|match|nonlocal|not|or|pass|print|raise|return|try|while|with|yield)\b/,builtin:/\b(?:__import__|abs|all|any|apply|ascii|basestring|bin|bool|buffer|bytearray|bytes|callable|chr|classmethod|cmp|coerce|compile|complex|delattr|dict|dir|divmod|enumerate|eval|execfile|file|filter|float|format|frozenset|getattr|globals|hasattr|hash|help|hex|id|input|int|intern|isinstance|issubclass|iter|len|list|locals|long|map|max|memoryview|min|next|object|oct|open|ord|pow|property|range|raw_input|reduce|reload|repr|reversed|round|set|setattr|slice|sorted|staticmethod|str|sum|super|tuple|type|unichr|unicode|vars|xrange|zip)\b/,boolean:/\b(?:False|None|True)\b/,number:/\b0(?:b(?:_?[01])+|o(?:_?[0-7])+|x(?:_?[a-f0-9])+)\b|(?:\b\d+(?:_\d+)*(?:\.(?:\d+(?:_\d+)*)?)?|\B\.\d+(?:_\d+)*)(?:e[+-]?\d+(?:_\d+)*)?j?(?!\w)/i,operator:/[-+%=]=?|!=|:=|\*\*?=?|\/\/?=?|<[<=>]?|>[=>]?|[&|^~]/,punctuation:/[{}[\];(),.:]/},a.languages.python["string-interpolation"].inside.interpolation.inside.rest=a.languages.python,a.languages.py=a.languages.python,a.languages.reason=a.languages.extend("clike",{string:{pattern:/"(?:\\(?:\r\n|[\s\S])|[^\\\r\n"])*"/,greedy:!0},"class-name":/\b[A-Z]\w*/,keyword:/\b(?:and|as|assert|begin|class|constraint|do|done|downto|else|end|exception|external|for|fun|function|functor|if|in|include|inherit|initializer|lazy|let|method|module|mutable|new|nonrec|object|of|open|or|private|rec|sig|struct|switch|then|to|try|type|val|virtual|when|while|with)\b/,operator:/\.{3}|:[:=]|\|>|->|=(?:==?|>)?|<=?|>=?|[|^?'#!~`]|[+\-*\/]\.?|\b(?:asr|land|lor|lsl|lsr|lxor|mod)\b/}),a.languages.insertBefore("reason","class-name",{char:{pattern:/'(?:\\x[\da-f]{2}|\\o[0-3][0-7][0-7]|\\\d{3}|\\.|[^'\\\r\n])'/,greedy:!0},constructor:/\b[A-Z]\w*\b(?!\s*\.)/,label:{pattern:/\b[a-z]\w*(?=::)/,alias:"symbol"}}),delete a.languages.reason.function,function(e){e.languages.sass=e.languages.extend("css",{comment:{pattern:/^([ \t]*)\/[\/*].*(?:(?:\r?\n|\r)\1[ \t].+)*/m,lookbehind:!0,greedy:!0}}),e.languages.insertBefore("sass","atrule",{"atrule-line":{pattern:/^(?:[ \t]*)[@+=].+/m,greedy:!0,inside:{atrule:/(?:@[\w-]+|[+=])/}}}),delete e.languages.sass.atrule;var t=/\$[-\w]+|#\{\$[-\w]+\}/,n=[/[+*\/%]|[=!]=|<=?|>=?|\b(?:and|not|or)\b/,{pattern:/(\s)-(?=\s)/,lookbehind:!0}];e.languages.insertBefore("sass","property",{"variable-line":{pattern:/^[ \t]*\$.+/m,greedy:!0,inside:{punctuation:/:/,variable:t,operator:n}},"property-line":{pattern:/^[ \t]*(?:[^:\s]+ *:.*|:[^:\s].*)/m,greedy:!0,inside:{property:[/[^:\s]+(?=\s*:)/,{pattern:/(:)[^:\s]+/,lookbehind:!0}],punctuation:/:/,variable:t,operator:n,important:e.languages.sass.important}}}),delete e.languages.sass.property,delete e.languages.sass.important,e.languages.insertBefore("sass","punctuation",{selector:{pattern:/^([ \t]*)\S(?:,[^,\r\n]+|[^,\r\n]*)(?:,[^,\r\n]+)*(?:,(?:\r?\n|\r)\1[ 
\t]+\S(?:,[^,\r\n]+|[^,\r\n]*)(?:,[^,\r\n]+)*)*/m,lookbehind:!0,greedy:!0}})}(a),a.languages.scss=a.languages.extend("css",{comment:{pattern:/(^|[^\\])(?:\/\*[\s\S]*?\*\/|\/\/.*)/,lookbehind:!0},atrule:{pattern:/@[\w-](?:\([^()]+\)|[^()\s]|\s+(?!\s))*?(?=\s+[{;])/,inside:{rule:/@[\w-]+/}},url:/(?:[-a-z]+-)?url(?=\()/i,selector:{pattern:/(?=\S)[^@;{}()]?(?:[^@;{}()\s]|\s+(?!\s)|#\{\$[-\w]+\})+(?=\s*\{(?:\}|\s|[^}][^:{}]*[:{][^}]))/,inside:{parent:{pattern:/&/,alias:"important"},placeholder:/%[-\w]+/,variable:/\$[-\w]+|#\{\$[-\w]+\}/}},property:{pattern:/(?:[-\w]|\$[-\w]|#\{\$[-\w]+\})+(?=\s*:)/,inside:{variable:/\$[-\w]+|#\{\$[-\w]+\}/}}}),a.languages.insertBefore("scss","atrule",{keyword:[/@(?:content|debug|each|else(?: if)?|extend|for|forward|function|if|import|include|mixin|return|use|warn|while)\b/i,{pattern:/( )(?:from|through)(?= )/,lookbehind:!0}]}),a.languages.insertBefore("scss","important",{variable:/\$[-\w]+|#\{\$[-\w]+\}/}),a.languages.insertBefore("scss","function",{"module-modifier":{pattern:/\b(?:as|hide|show|with)\b/i,alias:"keyword"},placeholder:{pattern:/%[-\w]+/,alias:"selector"},statement:{pattern:/\B!(?:default|optional)\b/i,alias:"keyword"},boolean:/\b(?:false|true)\b/,null:{pattern:/\bnull\b/,alias:"keyword"},operator:{pattern:/(\s)(?:[-+*\/%]|[=!]=|<=?|>=?|and|not|or)(?=\s)/,lookbehind:!0}}),a.languages.scss.atrule.inside.rest=a.languages.scss,function(e){var t={pattern:/(\b\d+)(?:%|[a-z]+)/,lookbehind:!0},n={pattern:/(^|[^\w.-])-?(?:\d+(?:\.\d+)?|\.\d+)/,lookbehind:!0},r={comment:{pattern:/(^|[^\\])(?:\/\*[\s\S]*?\*\/|\/\/.*)/,lookbehind:!0},url:{pattern:/\burl\((["']?).*?\1\)/i,greedy:!0},string:{pattern:/("|')(?:(?!\1)[^\\\r\n]|\\(?:\r\n|[\s\S]))*\1/,greedy:!0},interpolation:null,func:null,important:/\B!(?:important|optional)\b/i,keyword:{pattern:/(^|\s+)(?:(?:else|for|if|return|unless)(?=\s|$)|@[\w-]+)/,lookbehind:!0},hexcode:/#[\da-f]{3,6}/i,color:[/\b(?:AliceBlue|AntiqueWhite|Aqua|Aquamarine|Azure|Beige|Bisque|Black|BlanchedAlmond|Blue|BlueViolet|Brown|BurlyWood|CadetBlue|Chartreuse|Chocolate|Coral|CornflowerBlue|Cornsilk|Crimson|Cyan|DarkBlue|DarkCyan|DarkGoldenRod|DarkGr[ae]y|DarkGreen|DarkKhaki|DarkMagenta|DarkOliveGreen|DarkOrange|DarkOrchid|DarkRed|DarkSalmon|DarkSeaGreen|DarkSlateBlue|DarkSlateGr[ae]y|DarkTurquoise|DarkViolet|DeepPink|DeepSkyBlue|DimGr[ae]y|DodgerBlue|FireBrick|FloralWhite|ForestGreen|Fuchsia|Gainsboro|GhostWhite|Gold|GoldenRod|Gr[ae]y|Green|GreenYellow|HoneyDew|HotPink|IndianRed|Indigo|Ivory|Khaki|Lavender|LavenderBlush|LawnGreen|LemonChiffon|LightBlue|LightCoral|LightCyan|LightGoldenRodYellow|LightGr[ae]y|LightGreen|LightPink|LightSalmon|LightSeaGreen|LightSkyBlue|LightSlateGr[ae]y|LightSteelBlue|LightYellow|Lime|LimeGreen|Linen|Magenta|Maroon|MediumAquaMarine|MediumBlue|MediumOrchid|MediumPurple|MediumSeaGreen|MediumSlateBlue|MediumSpringGreen|MediumTurquoise|MediumVioletRed|MidnightBlue|MintCream|MistyRose|Moccasin|NavajoWhite|Navy|OldLace|Olive|OliveDrab|Orange|OrangeRed|Orchid|PaleGoldenRod|PaleGreen|PaleTurquoise|PaleVioletRed|PapayaWhip|PeachPuff|Peru|Pink|Plum|PowderBlue|Purple|Red|RosyBrown|RoyalBlue|SaddleBrown|Salmon|SandyBrown|SeaGreen|SeaShell|Sienna|Silver|SkyBlue|SlateBlue|SlateGr[ae]y|Snow|SpringGreen|SteelBlue|Tan|Teal|Thistle|Tomato|Transparent|Turquoise|Violet|Wheat|White|WhiteSmoke|Yellow|YellowGreen)\b/i,{pattern:/\b(?:hsl|rgb)\(\s*\d{1,3}\s*,\s*\d{1,3}%?\s*,\s*\d{1,3}%?\s*\)\B|\b(?:hsl|rgb)a\(\s*\d{1,3}\s*,\s*\d{1,3}%?\s*,\s*\d{1,3}%?\s*,\s*(?:0|0?\.\d+|1)\s*\)\B/i,inside:{unit:t,number:n,function:/[\w-]+(?=\()/,punc
tuation:/[(),]/}}],entity:/\\[\da-f]{1,8}/i,unit:t,boolean:/\b(?:false|true)\b/,operator:[/~|[+!\/%<>?=]=?|[-:]=|\*[*=]?|\.{2,3}|&&|\|\||\B-\B|\b(?:and|in|is(?: a| defined| not|nt)?|not|or)\b/],number:n,punctuation:/[{}()\[\];:,]/};r.interpolation={pattern:/\{[^\r\n}:]+\}/,alias:"variable",inside:{delimiter:{pattern:/^\{|\}$/,alias:"punctuation"},rest:r}},r.func={pattern:/[\w-]+\([^)]*\).*/,inside:{function:/^[^(]+/,rest:r}},e.languages.stylus={"atrule-declaration":{pattern:/(^[ \t]*)@.+/m,lookbehind:!0,inside:{atrule:/^@[\w-]+/,rest:r}},"variable-declaration":{pattern:/(^[ \t]*)[\w$-]+\s*.?=[ \t]*(?:\{[^{}]*\}|\S.*|$)/m,lookbehind:!0,inside:{variable:/^\S+/,rest:r}},statement:{pattern:/(^[ \t]*)(?:else|for|if|return|unless)[ \t].+/m,lookbehind:!0,inside:{keyword:/^\S+/,rest:r}},"property-declaration":{pattern:/((?:^|\{)([ \t]*))(?:[\w-]|\{[^}\r\n]+\})+(?:\s*:\s*|[ \t]+)(?!\s)[^{\r\n]*(?:;|[^{\r\n,]$(?!(?:\r?\n|\r)(?:\{|\2[ \t])))/m,lookbehind:!0,inside:{property:{pattern:/^[^\s:]+/,inside:{interpolation:r.interpolation}},rest:r}},selector:{pattern:/(^[ \t]*)(?:(?=\S)(?:[^{}\r\n:()]|::?[\w-]+(?:\([^)\r\n]*\)|(?![\w-]))|\{[^}\r\n]+\})+)(?:(?:\r?\n|\r)(?:\1(?:(?=\S)(?:[^{}\r\n:()]|::?[\w-]+(?:\([^)\r\n]*\)|(?![\w-]))|\{[^}\r\n]+\})+)))*(?:,$|\{|(?=(?:\r?\n|\r)(?:\{|\1[ \t])))/m,lookbehind:!0,inside:{interpolation:r.interpolation,comment:r.comment,punctuation:/[{},]/}},func:r.func,string:r.string,comment:{pattern:/(^|[^\\])(?:\/\*[\s\S]*?\*\/|\/\/.*)/,lookbehind:!0,greedy:!0},interpolation:r.interpolation,punctuation:/[{}()\[\];:.]/}}(a),function(e){var t=e.util.clone(e.languages.typescript);e.languages.tsx=e.languages.extend("jsx",t),delete e.languages.tsx.parameter,delete e.languages.tsx["literal-property"];var n=e.languages.tsx.tag;n.pattern=RegExp(/(^|[^\w$]|(?=<\/))/.source+"(?:"+n.pattern.source+")",n.pattern.flags),n.lookbehind=!0}(a),a.languages.wasm={comment:[/\(;[\s\S]*?;\)/,{pattern:/;;.*/,greedy:!0}],string:{pattern:/"(?:\\[\s\S]|[^"\\])*"/,greedy:!0},keyword:[{pattern:/\b(?:align|offset)=/,inside:{operator:/=/}},{pattern:/\b(?:(?:f32|f64|i32|i64)(?:\.(?:abs|add|and|ceil|clz|const|convert_[su]\/i(?:32|64)|copysign|ctz|demote\/f64|div(?:_[su])?|eqz?|extend_[su]\/i32|floor|ge(?:_[su])?|gt(?:_[su])?|le(?:_[su])?|load(?:(?:8|16|32)_[su])?|lt(?:_[su])?|max|min|mul|neg?|nearest|or|popcnt|promote\/f32|reinterpret\/[fi](?:32|64)|rem_[su]|rot[lr]|shl|shr_[su]|sqrt|store(?:8|16|32)?|sub|trunc(?:_[su]\/f(?:32|64))?|wrap\/i64|xor))?|memory\.(?:grow|size))\b/,inside:{punctuation:/\./}},/\b(?:anyfunc|block|br(?:_if|_table)?|call(?:_indirect)?|data|drop|elem|else|end|export|func|get_(?:global|local)|global|if|import|local|loop|memory|module|mut|nop|offset|param|result|return|select|set_(?:global|local)|start|table|tee_local|then|type|unreachable)\b/],variable:/\$[\w!#$%&'*+\-./:<=>?@\\^`|~]+/,number:/[+-]?\b(?:\d(?:_?\d)*(?:\.\d(?:_?\d)*)?(?:[eE][+-]?\d(?:_?\d)*)?|0x[\da-fA-F](?:_?[\da-fA-F])*(?:\.[\da-fA-F](?:_?[\da-fA-D])*)?(?:[pP][+-]?\d(?:_?\d)*)?)\b|\binf\b|\bnan(?::0x[\da-fA-F](?:_?[\da-fA-D])*)?\b/,punctuation:/[()]/};const o=a},6043:()=>{Prism.languages.bicep={comment:[{pattern:/(^|[^\\])\/\*[\s\S]*?(?:\*\/|$)/,lookbehind:!0,greedy:!0},{pattern:/(^|[^\\:])\/\/.*/,lookbehind:!0,greedy:!0}],property:[{pattern:/([\r\n][ \t]*)[a-z_]\w*(?=[ \t]*:)/i,lookbehind:!0},{pattern:/([\r\n][ \t]*)'(?:\\.|\$(?!\{)|[^'\\\r\n$])*'(?=[ 
\t]*:)/,lookbehind:!0,greedy:!0}],string:[{pattern:/'''[^'][\s\S]*?'''/,greedy:!0},{pattern:/(^|[^\\'])'(?:\\.|\$(?!\{)|[^'\\\r\n$])*'/,lookbehind:!0,greedy:!0}],"interpolated-string":{pattern:/(^|[^\\'])'(?:\\.|\$(?:(?!\{)|\{[^{}\r\n]*\})|[^'\\\r\n$])*'/,lookbehind:!0,greedy:!0,inside:{interpolation:{pattern:/\$\{[^{}\r\n]*\}/,inside:{expression:{pattern:/(^\$\{)[\s\S]+(?=\}$)/,lookbehind:!0},punctuation:/^\$\{|\}$/}},string:/[\s\S]+/}},datatype:{pattern:/(\b(?:output|param)\b[ \t]+\w+[ \t]+)\w+\b/,lookbehind:!0,alias:"class-name"},boolean:/\b(?:false|true)\b/,keyword:/\b(?:existing|for|if|in|module|null|output|param|resource|targetScope|var)\b/,decorator:/@\w+\b/,function:/\b[a-z_]\w*(?=[ \t]*\()/i,number:/(?:\b\d+(?:\.\d*)?|\B\.\d+)(?:E[+-]?\d+)?/i,operator:/--|\+\+|\*\*=?|=>|&&=?|\|\|=?|[!=]==|<<=?|>>>?=?|[-+*/%&|^!=<>]=?|\.{3}|\?\?=?|\?\.?|[~:]/,punctuation:/[{}[\];(),.:]/},Prism.languages.bicep["interpolated-string"].inside.interpolation.inside.expression.inside=Prism.languages.bicep},9016:()=>{!function(e){function t(e,t){return e.replace(/<<(\d+)>>/g,(function(e,n){return"(?:"+t[+n]+")"}))}function n(e,n,r){return RegExp(t(e,n),r||"")}function r(e,t){for(var n=0;n>/g,(function(){return"(?:"+e+")"}));return e.replace(/<>/g,"[^\\s\\S]")}var a="bool byte char decimal double dynamic float int long object sbyte short string uint ulong ushort var void",o="class enum interface record struct",i="add alias and ascending async await by descending from(?=\\s*(?:\\w|$)) get global group into init(?=\\s*;) join let nameof not notnull on or orderby partial remove select set unmanaged value when where with(?=\\s*{)",l="abstract as base break case catch checked const continue default delegate do else event explicit extern finally fixed for foreach goto if implicit in internal is lock namespace new null operator out override params private protected public readonly ref return sealed sizeof stackalloc static switch this throw try typeof unchecked unsafe using virtual volatile while yield";function s(e){return"\\b(?:"+e.trim().replace(/ /g,"|")+")\\b"}var u=s(o),c=RegExp(s(a+" "+o+" "+i+" "+l)),d=s(o+" "+i+" "+l),f=s(a+" "+o+" 
"+l),p=r(/<(?:[^<>;=+\-*/%&|^]|<>)*>/.source,2),h=r(/\((?:[^()]|<>)*\)/.source,2),m=/@?\b[A-Za-z_]\w*\b/.source,g=t(/<<0>>(?:\s*<<1>>)?/.source,[m,p]),b=t(/(?!<<0>>)<<1>>(?:\s*\.\s*<<1>>)*/.source,[d,g]),v=/\[\s*(?:,\s*)*\]/.source,y=t(/<<0>>(?:\s*(?:\?\s*)?<<1>>)*(?:\s*\?)?/.source,[b,v]),w=t(/[^,()<>[\];=+\-*/%&|^]|<<0>>|<<1>>|<<2>>/.source,[p,h,v]),k=t(/\(<<0>>+(?:,<<0>>+)+\)/.source,[w]),E=t(/(?:<<0>>|<<1>>)(?:\s*(?:\?\s*)?<<2>>)*(?:\s*\?)?/.source,[k,b,v]),S={keyword:c,punctuation:/[<>()?,.:[\]]/},x=/'(?:[^\r\n'\\]|\\.|\\[Uux][\da-fA-F]{1,8})'/.source,_=/"(?:\\.|[^\\"\r\n])*"/.source,C=/@"(?:""|\\[\s\S]|[^\\"])*"(?!")/.source;e.languages.csharp=e.languages.extend("clike",{string:[{pattern:n(/(^|[^$\\])<<0>>/.source,[C]),lookbehind:!0,greedy:!0},{pattern:n(/(^|[^@$\\])<<0>>/.source,[_]),lookbehind:!0,greedy:!0}],"class-name":[{pattern:n(/(\busing\s+static\s+)<<0>>(?=\s*;)/.source,[b]),lookbehind:!0,inside:S},{pattern:n(/(\busing\s+<<0>>\s*=\s*)<<1>>(?=\s*;)/.source,[m,E]),lookbehind:!0,inside:S},{pattern:n(/(\busing\s+)<<0>>(?=\s*=)/.source,[m]),lookbehind:!0},{pattern:n(/(\b<<0>>\s+)<<1>>/.source,[u,g]),lookbehind:!0,inside:S},{pattern:n(/(\bcatch\s*\(\s*)<<0>>/.source,[b]),lookbehind:!0,inside:S},{pattern:n(/(\bwhere\s+)<<0>>/.source,[m]),lookbehind:!0},{pattern:n(/(\b(?:is(?:\s+not)?|as)\s+)<<0>>/.source,[y]),lookbehind:!0,inside:S},{pattern:n(/\b<<0>>(?=\s+(?!<<1>>|with\s*\{)<<2>>(?:\s*[=,;:{)\]]|\s+(?:in|when)\b))/.source,[E,f,m]),inside:S}],keyword:c,number:/(?:\b0(?:x[\da-f_]*[\da-f]|b[01_]*[01])|(?:\B\.\d+(?:_+\d+)*|\b\d+(?:_+\d+)*(?:\.\d+(?:_+\d+)*)?)(?:e[-+]?\d+(?:_+\d+)*)?)(?:[dflmu]|lu|ul)?\b/i,operator:/>>=?|<<=?|[-=]>|([-+&|])\1|~|\?\?=?|[-+*/%&|^!=<>]=?/,punctuation:/\?\.?|::|[{}[\];(),.:]/}),e.languages.insertBefore("csharp","number",{range:{pattern:/\.\./,alias:"operator"}}),e.languages.insertBefore("csharp","punctuation",{"named-parameter":{pattern:n(/([(,]\s*)<<0>>(?=\s*:)/.source,[m]),lookbehind:!0,alias:"punctuation"}}),e.languages.insertBefore("csharp","class-name",{namespace:{pattern:n(/(\b(?:namespace|using)\s+)<<0>>(?:\s*\.\s*<<0>>)*(?=\s*[;{])/.source,[m]),lookbehind:!0,inside:{punctuation:/\./}},"type-expression":{pattern:n(/(\b(?:default|sizeof|typeof)\s*\(\s*(?!\s))(?:[^()\s]|\s(?!\s)|<<0>>)*(?=\s*\))/.source,[h]),lookbehind:!0,alias:"class-name",inside:S},"return-type":{pattern:n(/<<0>>(?=\s+(?:<<1>>\s*(?:=>|[({]|\.\s*this\s*\[)|this\s*\[))/.source,[E,b]),inside:S,alias:"class-name"},"constructor-invocation":{pattern:n(/(\bnew\s+)<<0>>(?=\s*[[({])/.source,[E]),lookbehind:!0,inside:S,alias:"class-name"},"generic-method":{pattern:n(/<<0>>\s*<<1>>(?=\s*\()/.source,[m,p]),inside:{function:n(/^<<0>>/.source,[m]),generic:{pattern:RegExp(p),alias:"class-name",inside:S}}},"type-list":{pattern:n(/\b((?:<<0>>\s+<<1>>|record\s+<<1>>\s*<<5>>|where\s+<<2>>)\s*:\s*)(?:<<3>>|<<4>>|<<1>>\s*<<5>>|<<6>>)(?:\s*,\s*(?:<<3>>|<<4>>|<<6>>))*(?=\s*(?:where|[{;]|=>|$))/.source,[u,g,m,E,c.source,h,/\bnew\s*\(\s*\)/.source]),lookbehind:!0,inside:{"record-arguments":{pattern:n(/(^(?!new\s*\()<<0>>\s*)<<1>>/.source,[g,h]),lookbehind:!0,greedy:!0,inside:e.languages.csharp},keyword:c,"class-name":{pattern:RegExp(E),greedy:!0,inside:S},punctuation:/[,()]/}},preprocessor:{pattern:/(^[\t ]*)#.*/m,lookbehind:!0,alias:"property",inside:{directive:{pattern:/(#)\b(?:define|elif|else|endif|endregion|error|if|line|nullable|pragma|region|undef|warning)\b/,lookbehind:!0,alias:"keyword"}}}});var 
T=_+"|"+x,L=t(/\/(?![*/])|\/\/[^\r\n]*[\r\n]|\/\*(?:[^*]|\*(?!\/))*\*\/|<<0>>/.source,[T]),P=r(t(/[^"'/()]|<<0>>|\(<>*\)/.source,[L]),2),O=/\b(?:assembly|event|field|method|module|param|property|return|type)\b/.source,N=t(/<<0>>(?:\s*\(<<1>>*\))?/.source,[b,P]);e.languages.insertBefore("csharp","class-name",{attribute:{pattern:n(/((?:^|[^\s\w>)?])\s*\[\s*)(?:<<0>>\s*:\s*)?<<1>>(?:\s*,\s*<<1>>)*(?=\s*\])/.source,[O,N]),lookbehind:!0,greedy:!0,inside:{target:{pattern:n(/^<<0>>(?=\s*:)/.source,[O]),alias:"keyword"},"attribute-arguments":{pattern:n(/\(<<0>>*\)/.source,[P]),inside:e.languages.csharp},"class-name":{pattern:RegExp(b),inside:{punctuation:/\./}},punctuation:/[:,]/}}});var A=/:[^}\r\n]+/.source,I=r(t(/[^"'/()]|<<0>>|\(<>*\)/.source,[L]),2),R=t(/\{(?!\{)(?:(?![}:])<<0>>)*<<1>>?\}/.source,[I,A]),D=r(t(/[^"'/()]|\/(?!\*)|\/\*(?:[^*]|\*(?!\/))*\*\/|<<0>>|\(<>*\)/.source,[T]),2),M=t(/\{(?!\{)(?:(?![}:])<<0>>)*<<1>>?\}/.source,[D,A]);function F(t,r){return{interpolation:{pattern:n(/((?:^|[^{])(?:\{\{)*)<<0>>/.source,[t]),lookbehind:!0,inside:{"format-string":{pattern:n(/(^\{(?:(?![}:])<<0>>)*)<<1>>(?=\}$)/.source,[r,A]),lookbehind:!0,inside:{punctuation:/^:/}},punctuation:/^\{|\}$/,expression:{pattern:/[\s\S]+/,alias:"language-csharp",inside:e.languages.csharp}}},string:/[\s\S]+/}}e.languages.insertBefore("csharp","string",{"interpolation-string":[{pattern:n(/(^|[^\\])(?:\$@|@\$)"(?:""|\\[\s\S]|\{\{|<<0>>|[^\\{"])*"/.source,[R]),lookbehind:!0,greedy:!0,inside:F(R,I)},{pattern:n(/(^|[^@\\])\$"(?:\\.|\{\{|<<0>>|[^\\"{])*"/.source,[M]),lookbehind:!0,greedy:!0,inside:F(M,D)}],char:{pattern:RegExp(x),greedy:!0}}),e.languages.dotnet=e.languages.cs=e.languages.csharp}(Prism)},7158:()=>{!function(e){var t=/\\[\r\n](?:\s|\\[\r\n]|#.*(?!.))*(?![\s#]|\\[\r\n])/.source,n=/(?:[ \t]+(?![ \t])(?:)?|)/.source.replace(//g,(function(){return t})),r=/"(?:[^"\\\r\n]|\\(?:\r\n|[\s\S]))*"|'(?:[^'\\\r\n]|\\(?:\r\n|[\s\S]))*'/.source,a=/--[\w-]+=(?:|(?!["'])(?:[^\s\\]|\\.)+)/.source.replace(//g,(function(){return r})),o={pattern:RegExp(r),greedy:!0},i={pattern:/(^[ \t]*)#.*/m,lookbehind:!0,greedy:!0};function l(e,t){return e=e.replace(//g,(function(){return a})).replace(//g,(function(){return n})),RegExp(e,t)}e.languages.docker={instruction:{pattern:/(^[ \t]*)(?:ADD|ARG|CMD|COPY|ENTRYPOINT|ENV|EXPOSE|FROM|HEALTHCHECK|LABEL|MAINTAINER|ONBUILD|RUN|SHELL|STOPSIGNAL|USER|VOLUME|WORKDIR)(?=\s)(?:\\.|[^\r\n\\])*(?:\\$(?:\s|#.*$)*(?![\s#])(?:\\.|[^\r\n\\])*)*/im,lookbehind:!0,greedy:!0,inside:{options:{pattern:l(/(^(?:ONBUILD)?\w+)(?:)*/.source,"i"),lookbehind:!0,greedy:!0,inside:{property:{pattern:/(^|\s)--[\w-]+/,lookbehind:!0},string:[o,{pattern:/(=)(?!["'])(?:[^\s\\]|\\.)+/,lookbehind:!0}],operator:/\\$/m,punctuation:/=/}},keyword:[{pattern:l(/(^(?:ONBUILD)?HEALTHCHECK(?:)*)(?:CMD|NONE)\b/.source,"i"),lookbehind:!0,greedy:!0},{pattern:l(/(^(?:ONBUILD)?FROM(?:)*(?!--)[^ \t\\]+)AS/.source,"i"),lookbehind:!0,greedy:!0},{pattern:l(/(^ONBUILD)\w+/.source,"i"),lookbehind:!0,greedy:!0},{pattern:/^\w+/,greedy:!0}],comment:i,string:o,variable:/\$(?:\w+|\{[^{}"'\\]*\})/,operator:/\\$/m}},comment:i},e.languages.dockerfile=e.languages.docker}(Prism)},6862:()=>{!function(e){var 
t=e.languages.powershell={comment:[{pattern:/(^|[^`])<#[\s\S]*?#>/,lookbehind:!0},{pattern:/(^|[^`])#.*/,lookbehind:!0}],string:[{pattern:/"(?:`[\s\S]|[^`"])*"/,greedy:!0,inside:null},{pattern:/'(?:[^']|'')*'/,greedy:!0}],namespace:/\[[a-z](?:\[(?:\[[^\]]*\]|[^\[\]])*\]|[^\[\]])*\]/i,boolean:/\$(?:false|true)\b/i,variable:/\$\w+\b/,function:[/\b(?:Add|Approve|Assert|Backup|Block|Checkpoint|Clear|Close|Compare|Complete|Compress|Confirm|Connect|Convert|ConvertFrom|ConvertTo|Copy|Debug|Deny|Disable|Disconnect|Dismount|Edit|Enable|Enter|Exit|Expand|Export|Find|ForEach|Format|Get|Grant|Group|Hide|Import|Initialize|Install|Invoke|Join|Limit|Lock|Measure|Merge|Move|New|Open|Optimize|Out|Ping|Pop|Protect|Publish|Push|Read|Receive|Redo|Register|Remove|Rename|Repair|Request|Reset|Resize|Resolve|Restart|Restore|Resume|Revoke|Save|Search|Select|Send|Set|Show|Skip|Sort|Split|Start|Step|Stop|Submit|Suspend|Switch|Sync|Tee|Test|Trace|Unblock|Undo|Uninstall|Unlock|Unprotect|Unpublish|Unregister|Update|Use|Wait|Watch|Where|Write)-[a-z]+\b/i,/\b(?:ac|cat|chdir|clc|cli|clp|clv|compare|copy|cp|cpi|cpp|cvpa|dbp|del|diff|dir|ebp|echo|epal|epcsv|epsn|erase|fc|fl|ft|fw|gal|gbp|gc|gci|gcs|gdr|gi|gl|gm|gp|gps|group|gsv|gu|gv|gwmi|iex|ii|ipal|ipcsv|ipsn|irm|iwmi|iwr|kill|lp|ls|measure|mi|mount|move|mp|mv|nal|ndr|ni|nv|ogv|popd|ps|pushd|pwd|rbp|rd|rdr|ren|ri|rm|rmdir|rni|rnp|rp|rv|rvpa|rwmi|sal|saps|sasv|sbp|sc|select|set|shcm|si|sl|sleep|sls|sort|sp|spps|spsv|start|sv|swmi|tee|trcm|type|write)\b/i],keyword:/\b(?:Begin|Break|Catch|Class|Continue|Data|Define|Do|DynamicParam|Else|ElseIf|End|Exit|Filter|Finally|For|ForEach|From|Function|If|InlineScript|Parallel|Param|Process|Return|Sequence|Switch|Throw|Trap|Try|Until|Using|Var|While|Workflow)\b/i,operator:{pattern:/(^|\W)(?:!|-(?:b?(?:and|x?or)|as|(?:Not)?(?:Contains|In|Like|Match)|eq|ge|gt|is(?:Not)?|Join|le|lt|ne|not|Replace|sh[lr])\b|-[-=]?|\+[+=]?|[*\/%]=?)/i,lookbehind:!0},punctuation:/[|{}[\];(),.]/};t.string[0].inside={function:{pattern:/(^|[^`])\$\((?:\$\([^\r\n()]*\)|(?!\$\()[^\r\n)])*\)/,lookbehind:!0,inside:t},boolean:t.boolean,variable:t.variable}}(Prism)},3476:(e,t,n)=>{var r={"./prism-bicep":6043,"./prism-csharp":9016,"./prism-docker":7158,"./prism-powershell":6862};function a(e){var t=o(e);return n(t)}function o(e){if(!n.o(r,e)){var t=new Error("Cannot find module '"+e+"'");throw t.code="MODULE_NOT_FOUND",t}return r[e]}a.keys=function(){return Object.keys(r)},a.resolve=o,e.exports=a,a.id=3476},2703:(e,t,n)=>{"use strict";var r=n(414);function a(){}function o(){}o.resetWarningCache=a,e.exports=function(){function e(e,t,n,a,o,i){if(i!==r){var l=new Error("Calling PropTypes validators directly is not supported by the `prop-types` package. Use PropTypes.checkPropTypes() to call them. Read more at http://fb.me/use-check-prop-types");throw l.name="Invariant Violation",l}}function t(){return e}e.isRequired=e;var n={array:e,bigint:e,bool:e,func:e,number:e,object:e,string:e,symbol:e,any:e,arrayOf:t,element:e,elementType:e,instanceOf:t,node:e,objectOf:t,oneOf:t,oneOfType:t,shape:t,exact:t,checkPropTypes:o,resetWarningCache:a};return n.PropTypes=n,n}},5697:(e,t,n)=>{e.exports=n(2703)()},414:e=>{"use strict";e.exports="SECRET_DO_NOT_PASS_THIS_OR_YOU_WILL_BE_FIRED"},4448:(e,t,n)=>{"use strict";var r=n(7294),a=n(7418),o=n(3840);function i(e){for(var t="https://reactjs.org/docs/error-decoder.html?invariant="+e,n=1;n