Mirror of https://github.com/dragonchain/dragonchain.git, synced 2025-08-15 00:03:18 -04:00

Compare commits: 30 commits

1a25eda598, 495130a40f, a7151a6520, a184a850a9, a1a8bb887b, 4579be1fcd, 24a28ac609, 0d3fb63500, 0f60f78115, eb432480e9, 34d34e344b, 53ba26d629, 5ae03cc0b2, f1d7a88b48, 7bf7b756e0, b817ea1de6, 0aa8658c40, cbd7f0dea8, d3f1f808dc, 7855f47432, 9cc1439227, 7d3f59d756, 9f84f21f6c, 55b6fe4256, 9103ee4c42, 8e68babfd7, 6dc803cb27, f7e2227791, 2be3d2bcd3, 364fe2ee05
.gitignore (vendored, 1 line changed)

```diff
@@ -11,6 +11,7 @@ coverage.xml
 .envrc
 .direnv
 .venv
+venv
 
 # Installer logs
 pip-log.txt
```
.vscode/settings.json (vendored, 24 lines changed)

```diff
@@ -1,16 +1,12 @@
 {
     "files.watcherExclude": {
         "**/.mypy_cache": true,
         "**/.venv": true
     },
     "python.linting.flake8Enabled": true,
     "python.formatting.provider": "black",
-    "python.formatting.blackArgs": [
-        "-l",
-        "150",
-        "-t",
-        "py38"
-    ],
+    "python.formatting.blackArgs": ["-l", "150", "-t", "py38"],
     "editor.formatOnSave": true,
-    "restructuredtext.confPath": "${workspaceFolder}/docs"
+    "restructuredtext.confPath": "${workspaceFolder}/docs",
+    "cSpell.words": ["Dragonchain"]
 }
```
CHANGELOG.md (21 lines changed)

```diff
@@ -1,5 +1,26 @@
 # Changelog
 
+## 4.5.1
+
+- **Bugs**
+  - Fixed a bug where querying transactions that take longer than `10s` resulted in a replay-attack error
+  - Fixed a bug where retrieving transactions with payloads larger than `1mb` resulted in the S3 Select error `OverMaxRecordSize`
+
+## 4.5.0
+
+- **Feature:**
+  - Add a new endpoint `GET /v1/verifications/interchains/<block_id>` for getting the subsequent interchain broadcasts
+- **Packaging:**
+  - Update web3, docker, bit, requests, fastjsonschema, kubernetes, redis, redisearch, Flask, pycoin, base58, and boto3 dependencies
+  - Update redisearch in the helm chart to 1.6.13
+  - Update redis in the helm chart to 6.0.5
+- **Bugs**
+  - Fixed a bug in the broadcast processor causing failure when a block cannot be found in redis
+  - Fixed a bug in querying interchain broadcasts when the old L5 chain ID is a UUID and not a public key
+  - Fixed a bug in re-indexing of L5 verifications where redisearch throws a "document exists" error
+- **Development:**
+  - Use helm 3.2.4 and yq 3.3.2 for the dependency container and lint checking
+
 ## 4.4.0
 
 - **Feature:**
```
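As a quick illustration of the 4.5.0 feature line above, the new route is called like any other Dragonchain API endpoint. A hedged sketch only: the host and block id below are placeholders, and the usual Dragonchain authentication headers are omitted for brevity.

```sh
# Hypothetical call to the new interchain-broadcast endpoint added in 4.5.0
curl "https://my-chain.example.com/v1/verifications/interchains/27"
```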
```diff
@@ -17,9 +17,9 @@ RUN python3 -m pip install --no-cache-dir --upgrade -r dev_requirements.txt
 
 # Install Helm for chart linting and/or yq for doc builds if it doesn't exist
 RUN if ! command -v helm; then \
-    wget -O helm-v3.0.2-linux-amd64.tar.gz 'https://get.helm.sh/helm-v3.0.2-linux-amd64.tar.gz' && \
-    tar xzf helm-v3.0.2-linux-amd64.tar.gz && mv linux-amd64/helm /usr/local/bin/helm && \
-    rm -rf helm-v3.0.2-linux-amd64.tar.gz linux-amd64; fi && \
+    wget -O helm-v3.2.4-linux-amd64.tar.gz 'https://get.helm.sh/helm-v3.2.4-linux-amd64.tar.gz' && \
+    tar xzf helm-v3.2.4-linux-amd64.tar.gz && mv linux-amd64/helm /usr/local/bin/helm && \
+    rm -rf helm-v3.2.4-linux-amd64.tar.gz linux-amd64; fi && \
     if ! command -v yq; then \
-    wget -O yq 'https://github.com/mikefarah/yq/releases/download/2.4.1/yq_linux_amd64' && \
+    wget -O yq 'https://github.com/mikefarah/yq/releases/download/3.3.2/yq_linux_amd64' && \
     chmod +x yq && mv yq /usr/local/bin/; fi
```

```diff
@@ -17,9 +17,9 @@ RUN python3 -m pip install --no-cache-dir --upgrade -r dev_requirements.txt
 
 # Install Helm for chart linting and/or yq for doc builds if it doesn't exist
 RUN if ! command -v helm; then \
-    wget -O helm-v3.0.2-linux-arm64.tar.gz 'https://get.helm.sh/helm-v3.0.2-linux-arm64.tar.gz' && \
-    tar xzf helm-v3.0.2-linux-arm64.tar.gz && mv linux-arm64/helm /usr/local/bin/helm && \
-    rm -rf helm-v3.0.2-linux-arm64.tar.gz linux-arm64; fi && \
+    wget -O helm-v3.2.4-linux-arm64.tar.gz 'https://get.helm.sh/helm-v3.2.4-linux-arm64.tar.gz' && \
+    tar xzf helm-v3.2.4-linux-arm64.tar.gz && mv linux-arm64/helm /usr/local/bin/helm && \
+    rm -rf helm-v3.2.4-linux-arm64.tar.gz linux-arm64; fi && \
     if ! command -v yq; then \
-    wget -O yq 'https://github.com/mikefarah/yq/releases/download/2.4.1/yq_linux_arm64' && \
+    wget -O yq 'https://github.com/mikefarah/yq/releases/download/3.3.2/yq_linux_arm64' && \
     chmod +x yq && mv yq /usr/local/bin/; fi
```

```diff
@@ -17,11 +17,11 @@ RUN python3 -m pip install --no-cache-dir --upgrade -r dev_requirements.txt
 
 # Install Helm for chart linting and/or yq for doc builds if it doesn't exist
 RUN if ! command -v helm; then \
-    wget -O helm-v3.0.2-linux-amd64.tar.gz 'https://get.helm.sh/helm-v3.0.2-linux-amd64.tar.gz' && \
-    tar xzf helm-v3.0.2-linux-amd64.tar.gz && mv linux-amd64/helm /usr/local/bin/helm && \
-    rm -rf helm-v3.0.2-linux-amd64.tar.gz linux-amd64; fi && \
+    wget -O helm-v3.2.4-linux-amd64.tar.gz 'https://get.helm.sh/helm-v3.2.4-linux-amd64.tar.gz' && \
+    tar xzf helm-v3.2.4-linux-amd64.tar.gz && mv linux-amd64/helm /usr/local/bin/helm && \
+    rm -rf helm-v3.2.4-linux-amd64.tar.gz linux-amd64; fi && \
     if ! command -v yq; then \
-    wget -O yq 'https://github.com/mikefarah/yq/releases/download/2.4.1/yq_linux_amd64' && \
+    wget -O yq 'https://github.com/mikefarah/yq/releases/download/3.3.2/yq_linux_amd64' && \
     chmod +x yq && mv yq /usr/local/bin/; fi
 
 # Copy our actual application
```

```diff
@@ -17,11 +17,11 @@ RUN python3 -m pip install --no-cache-dir --upgrade -r dev_requirements.txt
 
 # Install Helm for chart linting and/or yq for doc builds if it doesn't exist
 RUN if ! command -v helm; then \
-    wget -O helm-v3.0.2-linux-arm64.tar.gz 'https://get.helm.sh/helm-v3.0.2-linux-arm64.tar.gz' && \
-    tar xzf helm-v3.0.2-linux-arm64.tar.gz && mv linux-arm64/helm /usr/local/bin/helm && \
-    rm -rf helm-v3.0.2-linux-arm64.tar.gz linux-arm64; fi && \
+    wget -O helm-v3.2.4-linux-arm64.tar.gz 'https://get.helm.sh/helm-v3.2.4-linux-arm64.tar.gz' && \
+    tar xzf helm-v3.2.4-linux-arm64.tar.gz && mv linux-arm64/helm /usr/local/bin/helm && \
+    rm -rf helm-v3.2.4-linux-arm64.tar.gz linux-arm64; fi && \
     if ! command -v yq; then \
-    wget -O yq 'https://github.com/mikefarah/yq/releases/download/2.4.1/yq_linux_arm64' && \
+    wget -O yq 'https://github.com/mikefarah/yq/releases/download/3.3.2/yq_linux_arm64' && \
     chmod +x yq && mv yq /usr/local/bin/; fi
 
 # Copy our actual application
```
````diff
@@ -72,7 +72,7 @@ Once the values are set, install the helm chart with:
 
 ```sh
 helm repo add dragonchain https://dragonchain-charts.s3.amazonaws.com
 helm repo update
-helm upgrade --install my-dragonchain --values opensource-config.yaml --namespace dragonchain dragonchain/dragonchain-k8s --version 1.0.8
+helm upgrade --install my-dragonchain --values opensource-config.yaml --namespace dragonchain dragonchain/dragonchain-k8s --version 1.0.9
 ```
 
 If you need to change any values AFTER the helm chart has already been
````
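If values need to change after the chart is installed, the same `helm upgrade` command can simply be re-run against the edited values file. A minimal sketch, assuming `opensource-config.yaml` was modified in place:

```sh
# Re-apply the chart with updated values (same command as the initial install)
helm upgrade --install my-dragonchain --values opensource-config.yaml --namespace dragonchain dragonchain/dragonchain-k8s --version 1.0.9
```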
```diff
@@ -1,18 +1,9 @@
 # Community
 
-Dragonchain has a slack for technical discussion and support.
-
-We highly recommend joining if you want to participate in technical discussion
-to try to understand or contribute to dragonchain, or need technical support.
-
-Please note that this slack is for technical purposes only, and not general
-Dragonchain (company/business) chat. For that, please use the
-[Dragonchain Telegram](https://t.me/dragontalk).
-
-## Form
-
-If you are interested in joining this slack, please fill out
-[this form](https://forms.gle/ec7sACnfnpLCv6tXA).
-
-After submitting the form, we will review the application and send an
-invitation via email.
+If you need technical support please email info@dragonchain.com.
+
+For general Dragonchain (company/business) chat please join us in the [Dragonchain Lair](https://den.social/l/Dragonchain/) on Den.social.
+
+## More Information
+
+More information about Dragonchain [can be found here](https://docs.dragonchain.com/).
```
```diff
@@ -99,7 +99,7 @@ their operation type, and whether or not their endpoint permission object has a
 custom schema:
 
 | API Resource | Endpoint Name | Operation Type | Endpoint Schema |
-| ------------------- | -------------------------------------- | -------------- | --------------- |
+|---------------------|----------------------------------------|----------------|-----------------|
 | `api_keys` | `create_api_key` | `create` | default |
 | `api_keys` | `get_api_key` | `read` | default |
 | `api_keys` | `list_api_keys` | `read` | default |
@@ -136,6 +136,7 @@ custom schema:
 | `transactions` | `get_transaction` | `read` | default |
 | `verifications` | `get_verifications` | `read` | default |
 | `verifications` | `get_pending_verifications` | `read` | default |
+| `verifications` | `query_interchain_verifications` | `read` | default |
 
 ### Custom Endpoint Permissions
```
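For context on how the new table row is consumed, a chain's permission document can reference `query_interchain_verifications` like any other default-schema endpoint. A hypothetical sketch only: the surrounding document keys and the `allowed` shape are assumptions based on the default endpoint schema, not taken from this diff.

```json
{
  "version": "1",
  "default_allow": true,
  "permissions": {
    "verifications": {
      "query_interchain_verifications": {"allowed": false}
    }
  }
}
```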
```diff
@@ -208,7 +208,7 @@ def schedule_notification_for_broadcast_sync(notification_location: str) -> None:
 
 
 def verification_storage_location(l1_block_id: str, level_received_from: int, chain_id: str) -> str:
-    """ Format the path for the storage of a verification object
+    """Format the path for the storage of a verification object
     Args:
         l1_block_id: the id of the L1 block which this verification verifies
         level_received_from: the level from which this verification was received
```
```diff
@@ -199,7 +199,7 @@ async def process_verification_notifications(session: aiohttp.ClientSession) -> None:
 async def send_notification_verification(
     session: aiohttp.ClientSession, url: str, verification_bytes: bytes, signature: str, redis_list_value: str
 ) -> None:
-    """ Send a notification verification to a preconfigured address
+    """Send a notification verification to a preconfigured address
 
     This is the actual async broadcast of a single notification at its most atomic
 
@@ -223,7 +223,7 @@ async def send_notification_verification(
         )
         _log.debug(f"Notification <- {resp.status} {url}")
     except Exception:
-        _log.exception(f"Unable to send verification notification.")
+        _log.exception("Unable to send verification notification.")
 
     await broadcast_functions.remove_notification_verification_for_broadcast_async(redis_list_value)
```
|
|||||||
for block_id, score in await broadcast_functions.get_blocks_to_process_for_broadcast_async():
|
for block_id, score in await broadcast_functions.get_blocks_to_process_for_broadcast_async():
|
||||||
_log.info(f"[BROADCAST PROCESSOR] Checking block {block_id}")
|
_log.info(f"[BROADCAST PROCESSOR] Checking block {block_id}")
|
||||||
current_level = await broadcast_functions.get_current_block_level_async(block_id)
|
current_level = await broadcast_functions.get_current_block_level_async(block_id)
|
||||||
|
if current_level == -1:
|
||||||
|
_log.warning(f"Failed to lookup current level for block {block_id}.")
|
||||||
|
continue
|
||||||
try:
|
try:
|
||||||
claim: Any = matchmaking.get_or_create_claim_check(block_id, _requirements)
|
claim: Any = matchmaking.get_or_create_claim_check(block_id, _requirements)
|
||||||
except exceptions.InsufficientFunds:
|
except exceptions.InsufficientFunds:
|
||||||
|
Four hunks in `ContractJob` where the visible text is identical on both sides (whitespace-only formatting cleanup):

```diff
@@ -285,11 +285,11 @@ class ContractJob(object):
     def create_openfaas_secrets(self) -> None:
         """Creates secrets for openfaas functions
 
         Args:
             existing_model (obj, optional): The existing model for this contract if action is update
 
         Returns:
             None
         """
         existing_secrets = self.model.existing_secrets or []
 
@@ -320,8 +320,8 @@ class ContractJob(object):
     def delete_openfaas_secrets(self) -> None:
         """Deletes secrets for an openfaas function
 
         Returns:
             None
         """
         _log.info(f"Deleting OpenFaaS secrets: {self.model.existing_secrets}")
         for secret in self.model.existing_secrets:
@@ -334,8 +334,8 @@ class ContractJob(object):
     def deploy_to_openfaas(self) -> None:
         """Deploy this job's smart contract to OpenFaaS and update the faas_spec
 
         Returns:
             None, or throws exceptions.InternalServerError
         """
         _log.info("Deploying to OpenFaaS cluster")
         spec = self.get_openfaas_spec()
@@ -356,8 +356,8 @@ class ContractJob(object):
     def delete_openfaas_function(self) -> None:
         """Delete this job's smart contract in OpenFaaS and remove the faas_spec
 
         Returns:
             None, or throws exceptions.InternalServerError
         """
         _log.info("Deleting OpenFaaS function")
         response = requests.delete(
```
```diff
@@ -356,7 +356,7 @@ def verify_request_authorization(  # noqa: C901
             # Signature is valid and key is allowed; Return the api key used on success
             return auth_key
         except Exception:
-            _log.exception(f"Uncaught exception checking if api key is allowed")
+            _log.exception("Uncaught exception checking if api key is allowed")
             raise exceptions.ActionForbidden(f"This key is not allowed to perform {api_name}")
     else:
         # HMAC doesn't match
```
```diff
@@ -32,7 +32,7 @@ def register_callback(txn_id: str, callback_url: str) -> None:
 
 
 def fire_if_exists(txn_id: str, transaction_model: transaction_model.TransactionModel) -> None:
-    """ Fires a callback with a given payload, then removes from redis"""
+    """Fires a callback with a given payload, then removes from redis"""
     url = redis.hget_sync(CALLBACK_REDIS_KEY, txn_id)
     if url is not None:
         try:
```
```diff
@@ -105,3 +105,9 @@ def insert_block(block: "model.BlockModel") -> None:
 
     # Upload ref
     storage.put_object_as_json(f"{FOLDER}/{LAST_CLOSED_KEY}", last_block_ref)
+
+
+def insert_l5_verification(storage_location: str, block: "model.BlockModel") -> None:
+    if redisearch.ENABLED:
+        index_id = storage_location.split("/")[1]
+        redisearch.put_document(redisearch.Indexes.verification.value, index_id, block.export_as_search_index(), upsert=True)
```
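The `index_id` above is simply the second segment of the verification's storage path. A tiny sketch of that derivation, with a hypothetical path (real L5 block paths follow the `BLOCK/<block_id>-l5...` pattern matched by the migration regex later in this diff):

```python
# Hypothetical L5 verification storage location
storage_location = "BLOCK/27-l5-somechain"
index_id = storage_location.split("/")[1]  # "27-l5-somechain" becomes the redisearch document id
```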
```diff
@@ -63,6 +63,7 @@ def store_full_txns(block_model: "l1_block_model.L1BlockModel") -> None:
     """
     _log.info("[TRANSACTION DAO] Putting transaction to storage")
     storage.put(f"{FOLDER}/{block_model.block_id}", block_model.export_as_full_transactions().encode("utf-8"))
+    block_model.store_transaction_payloads()
     txn_dict: Dict[str, Dict[str, Dict[str, Any]]] = {}
     txn_dict[redisearch.Indexes.transaction.value] = {}
     # O(N) loop where N = # of txn
```
```diff
@@ -114,7 +114,7 @@ def create_new_transaction_type(txn_type_model: transaction_type_model.TransactionTypeModel) -> None:
     txn_type_dto = txn_type_model.export_as_at_rest()
     _log.info(f"Adding transaction index for {txn_type_model.txn_type}")
     redisearch.create_transaction_index(txn_type_model.txn_type, txn_type_model.custom_indexes)
-    _log.debug(f"Queuing for activation")
+    _log.debug("Queuing for activation")
     redis.lpush_sync(QUEUED_TXN_TYPES, txn_type_model.txn_type)
-    _log.debug(f"Adding the transaction type to storage")
+    _log.debug("Adding the transaction type to storage")
     storage.put_object_as_json(f"{FOLDER}/{txn_type_model.txn_type}", txn_type_dto)
```
```diff
@@ -32,9 +32,9 @@ REDIS_ENDPOINT = os.environ["REDIS_ENDPOINT"]
 LRU_REDIS_ENDPOINT = os.environ["LRU_REDIS_ENDPOINT"]
 REDIS_PORT = int(os.environ["REDIS_PORT"]) or 6379
 
-redis_client: redis.Redis = cast(redis.Redis, None)
-redis_client_lru: redis.Redis = cast(redis.Redis, None)
-async_redis_client: aioredis.Redis = cast(aioredis.Redis, None)
+redis_client = cast(redis.Redis, None)
+redis_client_lru = cast(redis.Redis, None)
+async_redis_client = cast(aioredis.Redis, None)
 
 
 def _set_redis_client_if_necessary() -> None:
```
```diff
@@ -396,4 +396,4 @@ def hexists_sync(name: str, key: str) -> bool:
 
 def zadd_sync(name: str, mapping: Dict[str, int], nx: bool = False, xx: bool = False, ch: bool = False, incr: bool = False) -> int:
     _set_redis_client_if_necessary()
-    return redis_client.zadd(name, mapping, nx=nx, xx=xx, ch=ch, incr=incr)
+    return redis_client.zadd(name, mapping, nx=nx, xx=xx, ch=ch, incr=incr)  # noqa: T484
```
```diff
@@ -44,15 +44,19 @@ if TYPE_CHECKING:
 
 _log = logger.get_logger()
 
+BROADCAST_ENABLED = os.environ["BROADCAST"].lower() != "false"
 LEVEL = os.environ["LEVEL"]
 ENABLED = not (LEVEL != "1" and os.environ.get("USE_REDISEARCH") == "false")
 if ENABLED:
     REDISEARCH_ENDPOINT = os.environ["REDISEARCH_ENDPOINT"]
     REDIS_PORT = int(os.environ["REDIS_PORT"]) or 6379
 
+INDEX_L5_VERIFICATION_GENERATION_KEY = "dc:l5_index_generation_complete"
 INDEX_GENERATION_KEY = "dc:index_generation_complete"
+L5_BLOCK_MIGRATION_KEY = "dc:migrations:l5_block"
 BLOCK_MIGRATION_KEY = "dc:migrations:block"
 TXN_MIGRATION_KEY = "dc:migrations:txn"
+L5_NODES = "dc:nodes:l5"
 
 _escape_transformation = str.maketrans(
     {
```
```diff
@@ -91,6 +95,7 @@ class Indexes(enum.Enum):
     block = "bk"
     smartcontract = "sc"
     transaction = "tx"
+    verification = "ver"
 
 
 _redis_connection = None
```
```diff
@@ -299,23 +304,69 @@ def generate_indexes_if_necessary() -> None:
     """Initialize redisearch with necessary indexes and fill them from storage if migration has not been marked as complete"""
     redisearch_redis_client = _get_redisearch_index_client("").redis
     needs_generation = not bool(redisearch_redis_client.get(INDEX_GENERATION_KEY))
+    needs_l5_generation = not bool(redisearch_redis_client.get(INDEX_L5_VERIFICATION_GENERATION_KEY))
     # No-op if indexes are marked as already generated
-    if not needs_generation:
+    if not needs_generation and not needs_l5_generation:
         return
-    # Create block index
-    _log.info("Creating block indexes")
-    _generate_block_indexes()
-    # Create indexes for transactions
-    _log.info("Creating transaction indexes")
-    _generate_transaction_indexes()
-    # Create smart contract index
-    _log.info("Creating smart contract indexes")
-    _generate_smart_contract_indexes()
-    # Mark index generation as complete
-    _log.info("Marking redisearch index generation complete")
-    redisearch_redis_client.delete(BLOCK_MIGRATION_KEY)
-    redisearch_redis_client.delete(TXN_MIGRATION_KEY)
-    redisearch_redis_client.set(INDEX_GENERATION_KEY, "a")
+
+    if needs_l5_generation:
+        # Create L5 verification indexes
+        _generate_l5_verification_indexes()
+        # Mark index generation as complete
+        redisearch_redis_client.delete(L5_BLOCK_MIGRATION_KEY)
+        redisearch_redis_client.set(INDEX_L5_VERIFICATION_GENERATION_KEY, "a")
+
+    if needs_generation:
+        # Create block index
+        _log.info("Creating block indexes")
+        _generate_block_indexes()
+        # Create indexes for transactions
+        _log.info("Creating transaction indexes")
+        _generate_transaction_indexes()
+        # Create smart contract index
+        _log.info("Creating smart contract indexes")
+        _generate_smart_contract_indexes()
+        # Mark index generation as complete
+        _log.info("Marking redisearch index generation complete")
+        redisearch_redis_client.delete(BLOCK_MIGRATION_KEY)
+        redisearch_redis_client.delete(TXN_MIGRATION_KEY)
+        redisearch_redis_client.set(INDEX_GENERATION_KEY, "a")
+
+
+def _generate_l5_verification_indexes() -> None:
+    client = _get_redisearch_index_client(Indexes.verification.value)
+    try:
+        client.create_index(
+            [
+                redisearch.NumericField("block_id", sortable=True),
+                redisearch.NumericField("prev_id", sortable=True),
+                redisearch.NumericField("timestamp", sortable=True),
+                redisearch.TagField("dc_id"),
+            ]
+        )
+    except redis.exceptions.ResponseError as e:
+        if not str(e).startswith("Index already exists"):  # We don't care if index already exists
+            raise
+    _log.info("Listing all blocks in storage")
+    block_paths = storage.list_objects("BLOCK/")
+    pattern = re.compile(r"BLOCK\/([0-9]+)-([Ll])5(.*)$")
+    for block_path in block_paths:
+        if LEVEL == "1" and BROADCAST_ENABLED and re.search(pattern, block_path):
+            if not client.redis.sismember(L5_BLOCK_MIGRATION_KEY, block_path):
+                raw_block = storage.get_json_from_object(block_path)
+                block = l5_block_model.new_from_at_rest(raw_block)
+                storage_location = block_path.split("/")[1]
+                try:
+                    put_document(Indexes.verification.value, storage_location, block.export_as_search_index())
+                except redis.exceptions.ResponseError as e:
+                    if not str(e).startswith("Document already exists"):
+                        raise
+                    else:
+                        _log.info(f"Document {storage_location} already exists")
+                client.redis.sadd(L5_NODES, block.dc_id)
+                client.redis.sadd(L5_BLOCK_MIGRATION_KEY, block_path)
+            else:
+                _log.info(f"Skipping already indexed L5 block {block_path}")
 
 
 def _generate_block_indexes() -> None:
```
```diff
@@ -17,7 +17,7 @@
 
 import os
 import unittest
-from unittest.mock import patch, MagicMock
+from unittest.mock import patch, MagicMock, call
 
 import redis
 
@@ -194,6 +194,7 @@ class TestRedisearch(unittest.TestCase):
         redisearch._get_redisearch_index_client.assert_any_call("bk")
         redisearch._get_redisearch_index_client.assert_any_call("sc")
         redisearch._get_redisearch_index_client.assert_any_call("tx")
-        mock_redis.get.assert_called_once_with("dc:index_generation_complete")
-        mock_redis.set.assert_called_once()
+        redisearch._get_redisearch_index_client.assert_any_call("ver")
+        mock_redis.get.assert_has_calls([call("dc:index_generation_complete"), call("dc:l5_index_generation_complete")])
+        self.assertEqual(mock_redis.set.call_count, 2)
         mock_put_document.assert_called()
```
```diff
@@ -112,6 +112,7 @@ ENDPOINT_MAP = {
     "get_transaction": _check_default_endpoint_permission,
     "get_verifications": _check_default_endpoint_permission,
     "get_pending_verifications": _check_default_endpoint_permission,
+    "query_interchain_verifications": _check_default_endpoint_permission,
 }
```
```diff
@@ -163,7 +163,7 @@ class BinanceNetwork(model.InterchainModel):
         response_rpc = self._call_node_rpc("status", {}).json()
         response_api = self._call_node_api("tokens/BNB").json()
         if response_rpc.get("error") or response_api.get("error"):
-            raise exceptions.InterchainConnectionError(f"[BINANCE] Node ping checks failed!")
+            raise exceptions.InterchainConnectionError("[BINANCE] Node ping checks failed!")
 
     # https://docs.binance.org/api-reference/node-rpc.html#6114-query-tx
     def is_transaction_confirmed(self, transaction_hash: str) -> bool:
@@ -203,7 +203,7 @@ class BinanceNetwork(model.InterchainModel):
         # cannot check HTTP status codes, errors will return 200 :
         if response_json.get("error") is not None:
             if "interface is nil, not types.NamedAccount" in response_json["error"]["data"] and response.status_code == 500:
-                _log.warning(f"[BINANCE] Non 200 response from Binance node:")
+                _log.warning("[BINANCE] Non 200 response from Binance node:")
                 _log.warning(f"[BINANCE] response code: {response.status_code}")
                 _log.warning(f"[BINANCE] response error: {response_json['error']['data']}")
                 _log.warning("[BINANCE] This is actually expected for a zero balance address.")
```
```diff
@@ -18,6 +18,7 @@
 import base64
 from typing import Dict, Any
 
+from eth_typing import URI, ChecksumAddress, HexStr
 import secp256k1
 import web3
 import web3.gas_strategies.time_based
@@ -127,6 +128,8 @@ def new_from_at_rest(ethereum_network_at_rest: Dict[str, Any]) -> "EthereumNetwork":
 
 
 class EthereumNetwork(model.InterchainModel):
+    address: ChecksumAddress
+
     def __init__(self, name: str, rpc_address: str, chain_id: int, b64_private_key: str):
         self.blockchain = "ethereum"
         self.name = name
@@ -134,7 +137,7 @@ class EthereumNetwork(model.InterchainModel):
         self.chain_id = chain_id
         self.priv_key = eth_keys.keys.PrivateKey(base64.b64decode(b64_private_key))
         self.address = self.priv_key.public_key.to_checksum_address()
-        self.w3 = web3.Web3(web3.HTTPProvider(self.rpc_address))
+        self.w3 = web3.Web3(web3.HTTPProvider(URI(self.rpc_address)))
         # Set gas strategy
         self.w3.eth.setGasPriceStrategy(web3.gas_strategies.time_based.medium_gas_price_strategy)
 
@@ -181,12 +184,12 @@ class EthereumNetwork(model.InterchainModel):
         """
         _log.info(f"[ETHEREUM] Getting confirmations for {transaction_hash}")
         try:
-            transaction_block_number = self.w3.eth.getTransaction(transaction_hash)["blockNumber"]
+            transaction_block_number = self.w3.eth.getTransaction(HexStr(transaction_hash))["blockNumber"]
         except web3.exceptions.TransactionNotFound:
             raise exceptions.TransactionNotFound(f"Transaction {transaction_hash} not found")
         latest_block_number = self.get_current_block()
         _log.info(f"[ETHEREUM] Latest ethereum block number: {latest_block_number} | Block number of transaction: {transaction_block_number}")
-        return transaction_block_number and (latest_block_number - transaction_block_number) >= CONFIRMATIONS_CONSIDERED_FINAL
+        return bool(transaction_block_number) and (latest_block_number - transaction_block_number) >= CONFIRMATIONS_CONSIDERED_FINAL
 
     def check_balance(self) -> int:
         """Check the balance of the address for this network
@@ -242,7 +245,7 @@ class EthereumNetwork(model.InterchainModel):
             The hex string of the published transaction hash
         """
         _log.debug(f"[ETH] Publishing transaction {signed_transaction}")
-        return self.w3.toHex(self.w3.eth.sendRawTransaction(signed_transaction))
+        return self.w3.toHex(self.w3.eth.sendRawTransaction(HexStr(signed_transaction)))
 
     def _publish_l5_transaction(self, transaction_payload: str) -> str:
         """Publish a transaction to this network with a certain data payload
@@ -268,8 +271,8 @@ class EthereumNetwork(model.InterchainModel):
         Returns:
             Gas price estimate in wei
         """
-        _log.debug(f"[ETHEREUM] Getting estimated gas price")
-        gas_price = max(int(self.w3.eth.generateGasPrice()), 100000000)  # Calculate gas price, but set minimum to 0.1 gwei for safety
+        _log.debug("[ETHEREUM] Getting estimated gas price")
+        gas_price = max(int(self.w3.eth.generateGasPrice() or 0), 100000000)  # Calculate gas price, but set minimum to 0.1 gwei for safety
         _log.info(f"[ETHEREUM] Current estimated gas price: {gas_price}")
         return gas_price
```
```diff
@@ -22,6 +22,7 @@ from typing import Dict, Any, List, Set, TYPE_CHECKING
 
 import fastjsonschema
 
+from dragonchain.lib.interfaces import storage
 from dragonchain.lib.dto import transaction_model
 from dragonchain.lib.dto import schema
 from dragonchain.lib.dto import model
@@ -182,6 +183,11 @@ class L1BlockModel(model.BlockModel):
         """Export full transactions in block as NDJSON (for storage select when querying)"""
         txn_string = ""
         for transaction in self.transactions:
-            txn_string += '{"txn_id": "' + transaction.txn_id + '", '
+            txn_string += '{"txn_id": "' + transaction.txn_id + '", "stripped_payload": true, '
             txn_string += '"txn": ' + json.dumps(transaction.export_as_full(), separators=(",", ":")) + "}\n"
         return txn_string
 
+    def store_transaction_payloads(self) -> None:
+        """Stores full transaction payloads for block"""
+        for transaction in self.transactions:
+            storage.put(f"PAYLOADS/{transaction.txn_id}", json.dumps(transaction.payload, separators=(",", ":")).encode("utf-8"))
```
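With the `stripped_payload` flag added, each NDJSON line emitted by `export_as_full_transactions` now advertises that its payload lives out-of-band (in `PAYLOADS/<txn_id>`, written by `store_transaction_payloads`). An illustrative line with hypothetical values follows; note the absence of a `payload` key inside `txn`, which the `TransactionModel.export_as_full` change further down removes:

```json
{"txn_id": "abc123", "stripped_payload": true, "txn": {"version": "2", "dcrn": "Transaction::L1::FullTransaction", "header": {}, "proof": {"full": "<hash>", "stripped": "<signature>"}}}
```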
```diff
@@ -16,6 +16,7 @@
 # language governing permissions and limitations under the Apache License.
 
 import json
+from typing import Dict, Any
 
 import fastjsonschema
 
@@ -29,7 +30,7 @@ _validate_l5_block_at_rest = fastjsonschema.compile(schema.l5_block_at_rest_schema)
 def new_from_at_rest(block: dict) -> "L5BlockModel":
     """
     Used in querying from the DAO
-    Input: Block::L4::AtRest DTO
+    Input: Block::L5::AtRest DTO
     Returns: BlockModel object
     """
     # Validate inputted schema
@@ -139,3 +140,12 @@ class L5BlockModel(model.BlockModel):
             "l4-blocks": self.l4_blocks,
             "proof": proof,
         }
+
+    def export_as_search_index(self) -> Dict[str, Any]:
+        """Export as block search index DTO"""
+        return {
+            "block_id": int(self.block_id),
+            "timestamp": int(self.timestamp),
+            "prev_id": int(self.prev_id) if self.prev_id else 0,
+            "dc_id": self.dc_id if self.dc_id else "",
+        }
```
```diff
@@ -106,7 +106,7 @@ transaction_full_schema = {
         "payload": {"type": "string"},
         "proof": {"type": "object", "properties": {"full": {"type": "string"}, "stripped": {"type": "string"}}, "required": ["full", "stripped"]},
     },
-    "required": ["version", "dcrn", "header", "payload", "proof"],
+    "required": ["version", "dcrn", "header", "proof"],
 }
```
```diff
@@ -848,7 +848,11 @@ permission_document_schema_v1 = {
         "verifications": {
             "type": "object",
             "properties": add_crud_default_properties(
-                {"get_verifications": default_endpoint_property_schema, "get_pending_verifications": default_endpoint_property_schema}
+                {
+                    "get_verifications": default_endpoint_property_schema,
+                    "get_pending_verifications": default_endpoint_property_schema,
+                    "query_interchain_verifications": default_endpoint_property_schema,
+                }
             ),
             "additionalProperties": False,
         },
```
```diff
@@ -164,7 +164,6 @@ class TransactionModel(model.Model):
                 "tag": self.tag,
                 "invoker": self.invoker or "",
             },
-            "payload": self.payload,
             "proof": {"full": self.full_hash, "stripped": self.signature},
         }
```
```diff
@@ -234,7 +233,7 @@ class TransactionModel(model.Model):
             indexable_object = jsonpath.jsonpath(json_payload, path)
             _log.debug(f"indexable_object: {indexable_object}")
             # If we found a valid item at the specified indexable path
-            if indexable_object and isinstance(indexable_object, list) and len(indexable_object) == 1:
+            if indexable_object and indexable_object[0] and isinstance(indexable_object, list) and len(indexable_object) == 1:
                 index_item = indexable_object[0]
                 # Check that the item we extracted is a string for tag or text type custom indexes
                 if index["type"] == "tag" or index["type"] == "text":
```
```diff
@@ -91,7 +91,7 @@ def select_transaction(location: str, block_id: str, txn_id: str) -> dict:
     obj = s3.select_object_content(
         Bucket=location,
         Key=f"TRANSACTION/{block_id}",
-        Expression=f"select s.txn from s3object s where s.txn_id = '{txn_id}' limit 1",  # nosec (this s3 select query is safe)
+        Expression=f"select s.txn, s.stripped_payload from s3object s where s.txn_id = '{txn_id}' limit 1",  # nosec (this s3 select query is safe)
         ExpressionType="SQL",
         InputSerialization={"JSON": {"Type": "DOCUMENT"}},
         OutputSerialization={"JSON": {"RecordDelimiter": "\n"}},
@@ -104,7 +104,14 @@ def select_transaction(location: str, block_id: str, txn_id: str) -> dict:
         if event.get("Records"):
             txn_data = f'{txn_data}{event["Records"]["Payload"].decode("utf-8")}'
     if txn_data:
-        return json.loads(txn_data)["txn"]
+        loaded_txn = json.loads(txn_data)
+        if loaded_txn.get("stripped_payload"):
+            payload_key = f"PAYLOADS/{txn_id}"
+            if does_object_exist(location, payload_key):
+                loaded_txn["txn"]["payload"] = json.loads(get(location, payload_key).decode("utf-8"))
+            else:
+                loaded_txn["txn"]["payload"] = json.dumps({})
+        return loaded_txn["txn"]
     raise exceptions.NotFound
```
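Taken together with `store_transaction_payloads` and the `export_as_full_transactions` change above, this is the fix for the 4.5.1 `OverMaxRecordSize` bug noted in the changelog: S3 Select now only scans the slim stripped record, and the potentially multi-megabyte payload is fetched separately from `PAYLOADS/<txn_id>` (defaulting to an empty JSON object when the payload object is missing).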
```diff
@@ -62,7 +62,7 @@ class TestS3Interface(unittest.TestCase):
         mock_select_object_content.assert_called_once_with(
             Bucket="loc",
             Key="TRANSACTION/block",
-            Expression="select s.txn from s3object s where s.txn_id = 'txn' limit 1",
+            Expression="select s.txn, s.stripped_payload from s3object s where s.txn_id = 'txn' limit 1",
             ExpressionType="SQL",
             InputSerialization={"JSON": {"Type": "DOCUMENT"}},
             OutputSerialization={"JSON": {"RecordDelimiter": "\n"}},
```
```diff
@@ -100,6 +100,12 @@ def select_transaction(location: str, block_id: str, txn_id: str) -> dict:
         try:
             loaded_txn = json.loads(transaction)
             if loaded_txn["txn_id"] == txn_id:
+                if loaded_txn.get("stripped_payload"):
+                    payload_key = os.path.join("PAYLOADS", txn_id)
+                    if does_object_exist(location, payload_key):
+                        loaded_txn["txn"]["payload"] = json.loads(get(location, payload_key).decode("utf-8"))
+                    else:
+                        loaded_txn["txn"]["payload"] = json.dumps({})
                 return loaded_txn["txn"]
         except Exception:
             _log.exception("Error loading retrieved transaction from disk select_transaction")
```
```diff
@@ -191,4 +191,4 @@ class TestStorageInterface(unittest.TestCase):
     def test_save_error_message(self, mock_time):
         storage.put = MagicMock()
         storage.save_error_message("some message")
-        storage.put.assert_called_once_with(f"error_testing_123.log", b"some message", should_cache=False)
+        storage.put.assert_called_once_with("error_testing_123.log", b"some message", should_cache=False)
```
```diff
@@ -23,7 +23,7 @@ bind = f"0.0.0.0:{os.environ['WEB_PORT']}"
 reuse_port = True
 backlog = 4000
 worker_class = "gevent"
-workers = 4
+workers = 3
 keepalive = 90  # Make sure this is higher than the load balancer/ingress controller tcp keepalive timeout
 sendfile = False
 accesslog = "-"
```
```diff
@@ -27,6 +27,8 @@ from dragonchain.lib.dto import l4_block_model
 from dragonchain.lib.dto import l5_block_model
 from dragonchain.lib.dto import api_key_model
 from dragonchain.lib.dao import api_key_dao
+from dragonchain.lib.dao import block_dao
+from dragonchain.lib.database import redisearch
 from dragonchain.lib import matchmaking
 from dragonchain.lib import keys
 from dragonchain.lib import queue
@@ -89,6 +91,10 @@ def process_receipt_v1(block_dto: Dict[str, Any]) -> None:
             _log.exception("matchmaking add_receipt failed!")
         # Update the broadcast system about this receipt
         broadcast_functions.set_receieved_verification_for_block_from_chain_sync(l1_block_id, level_received_from, block_model.dc_id)
+        if level_received_from == 5:
+            client = redisearch._get_redisearch_index_client(redisearch.Indexes.verification.value)
+            client.redis.sadd(redisearch.L5_NODES, block_model.dc_id)
+            block_dao.insert_l5_verification(storage_location, block_model)
     else:
         _log.warning(
             f"Chain {block_model.dc_id} (level {level_received_from}) returned a receipt that wasn't expected (possibly expired?) for block {l1_block_id}. Rejecting receipt"  # noqa: B950
```
```diff
@@ -46,7 +46,7 @@ def get_id_by_txn_type_v1(txn_type: str) -> str:
 
 
 def list_contracts_v1() -> Dict[str, List[Dict[str, Any]]]:
-    """ Function used by the smartcontract endpoint with method GET.
+    """Function used by the smartcontract endpoint with method GET.
     Returns a list of all smart contracts.
     Returns:
         The search results of the query specified.
```
```diff
@@ -14,17 +14,20 @@
 # distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
 # KIND, either express or implied. See the Apache License for the specific
 # language governing permissions and limitations under the Apache License.
-from typing import Dict, Union, List, Any, Optional
+import re
+from typing import Dict, Union, List, Any, Optional, cast
 
 from dragonchain.broadcast_processor import broadcast_functions
 from dragonchain.lib.interfaces import storage
+from dragonchain.lib.database import redisearch
 from dragonchain.lib import matchmaking
 from dragonchain import exceptions
 from dragonchain import logger
 
 _log = logger.get_logger()
 
+_uuid_regex = re.compile(r"[a-fA-F0-9]{8}-[a-fA-F0-9]{4}-[a-fA-F0-9]{4}-[a-fA-F0-9]{4}-[a-fA-F0-9]{12}")
+
 
 def get_pending_verifications_v1(block_id: str) -> Dict[str, List[str]]:
     """Get any scheduled pending verifications"""
@@ -50,6 +53,38 @@ def query_verifications_v1(block_id: str, params: Optional[Dict[str, Any]] = None
     return _get_verification_records(block_id, level)
 
 
+def query_interchain_broadcasts_v1(block_id: str) -> List[Any]:
+    """Return the subsequent broadcasts to other L5 networks"""
+    _log.info(f"Getting subsequent L5 verifications for {block_id}")
+    results = []
+    l5_block = None
+    l5_verifications = _get_verification_records(block_id, 5)
+    if len(l5_verifications) > 0:
+        l5_block = cast(List[Any], l5_verifications)[0]
+        timestamp = l5_block["header"]["timestamp"]
+        dc_id = l5_block["header"]["dc_id"]
+        l5_nodes = redisearch._get_redisearch_index_client(redisearch.Indexes.verification.value).redis.smembers(redisearch.L5_NODES)
+
+        results = [
+            _query_l5_verification(l5_dc_id.decode("utf-8"), timestamp)
+            for l5_dc_id in l5_nodes
+            if l5_dc_id.decode("utf-8") != dc_id and not re.match(_uuid_regex, l5_dc_id.decode("utf-8"))
+        ]
+    return ([l5_block] if l5_block else []) + [storage.get_json_from_object(f"BLOCK/{x}") for x in results if x is not None]
+
+
+def _query_l5_verification(l5_dc_id: str, timestamp: str) -> str:
+    query_result = redisearch.search(
+        index=redisearch.Indexes.verification.value,
+        query_str=f"@dc_id:{{{l5_dc_id}}} @timestamp:[{int(timestamp)+1} +inf]",
+        only_id=True,
+        limit=1,
+        sort_by="timestamp",
+        sort_asc=True,
+    )
+    return query_result.docs[0].id if query_result and len(query_result.docs) > 0 else None
+
+
 def _get_verification_records(block_id: str, level: int = 0) -> Union[List[Any], Dict[str, List[Any]]]:
     if level:
         if level in [2, 3, 4, 5]:
```
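The redisearch query assembled by `_query_l5_verification` is easiest to read with concrete values. For `l5_dc_id="banana"` and `timestamp="123456789"` (the same inputs exercised by the unit tests below), the generated query string is:

```
@dc_id:{banana} @timestamp:[123456790 +inf]
```

i.e. an exact tag match on the L5 chain id combined with a numeric range that selects only verifications strictly newer than the given timestamp, sorted ascending and limited to one result.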
|
@ -14,13 +14,13 @@
|
|||||||
# distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
|
# distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
|
||||||
# KIND, either express or implied. See the Apache License for the specific
|
# KIND, either express or implied. See the Apache License for the specific
|
||||||
# language governing permissions and limitations under the Apache License.
|
# language governing permissions and limitations under the Apache License.
|
||||||
|
|
||||||
import unittest
|
import unittest
|
||||||
from unittest.mock import patch, call
|
from unittest.mock import patch, call, MagicMock
|
||||||
|
|
||||||
from dragonchain import test_env # noqa: F401
|
from dragonchain import test_env # noqa: F401
|
||||||
from dragonchain import exceptions
|
from dragonchain import exceptions
|
||||||
from dragonchain.webserver.lib import verifications
|
from dragonchain.webserver.lib import verifications
|
||||||
|
from dragonchain.lib.database import redisearch
|
||||||
|
|
||||||
|
|
||||||
class TestVerificationDAO(unittest.TestCase):
|
class TestVerificationDAO(unittest.TestCase):
|
||||||
@ -76,3 +76,60 @@ class TestVerificationDAO(unittest.TestCase):
|
|||||||
def test__all_records_returns_correctly(self, mock_level_records):
|
def test__all_records_returns_correctly(self, mock_level_records):
|
||||||
self.assertEqual(verifications._all_records(1), {"2": ["return"], "3": ["return"], "4": ["return"], "5": ["return"]})
|
self.assertEqual(verifications._all_records(1), {"2": ["return"], "3": ["return"], "4": ["return"], "5": ["return"]})
|
||||||
mock_level_records.assert_has_calls([call(1, 2), call(1, 3), call(1, 4), call(1, 5)])
|
mock_level_records.assert_has_calls([call(1, 2), call(1, 3), call(1, 4), call(1, 5)])
|
||||||
|
|
||||||
|
@patch("dragonchain.lib.database.redisearch.search", return_value=MagicMock(docs=[MagicMock(id="banana")]))
|
||||||
|
def test__query_l5_verifications_returns_correctly(self, mock_redisearch_search):
|
||||||
|
self.assertEqual(verifications._query_l5_verification("banana", "123456789"), "banana")
|
||||||
|
mock_redisearch_search.assert_called_once_with(
|
||||||
|
index="ver", query_str="@dc_id:{banana} @timestamp:[123456790 +inf]", only_id=True, limit=1, sort_by="timestamp", sort_asc=True
|
||||||
|
)
|
||||||
|
|
||||||
|
@patch("dragonchain.lib.database.redisearch.search", return_value=MagicMock(docs=[]))
|
||||||
|
def test__query_l5_verifications_returns_correctly_when_no_results_found(self, mock_redisearch_search):
|
||||||
|
self.assertIsNone(verifications._query_l5_verification("banana", "123456789"))
|
||||||
|
mock_redisearch_search.assert_called_once_with(
|
||||||
|
index="ver", query_str="@dc_id:{banana} @timestamp:[123456790 +inf]", only_id=True, limit=1, sort_by="timestamp", sort_asc=True
|
||||||
|
)
|
||||||
|
|
||||||
|
@patch("dragonchain.webserver.lib.verifications._get_verification_records", return_value=[])
|
||||||
|
def test_query_interchain_broadcasts_v1_returns_correctly_when_block_id_doesnt_exist(self, mock_get_verification_records):
|
||||||
|
self.assertEqual(verifications.query_interchain_broadcasts_v1("123"), [])
|
||||||
|
mock_get_verification_records.assert_called_once_with("123", 5)
|
||||||
|
|
||||||
|
@patch("dragonchain.webserver.lib.verifications._query_l5_verification", return_value="banana")
|
||||||
|
@patch(
|
||||||
|
"dragonchain.webserver.lib.verifications._get_verification_records", return_value=[{"header": {"dc_id": "banana", "timestamp": "12345987"}}]
|
||||||
|
)
|
||||||
|
@patch("dragonchain.webserver.lib.verifications.storage.get_json_from_object", return_value="return")
|
||||||
|
def test_query_interchain_broadcasts_v1_returns_correctly(self, mock_get_object, mock_get_verification_records, mock_query_l5_verifications):
|
||||||
|
mock_redis = MagicMock(smembers=MagicMock(return_value=[b"mydragonchain"]))
|
||||||
|
redisearch._get_redisearch_index_client = MagicMock(return_value=MagicMock(redis=mock_redis))
|
||||||
|
self.assertEqual(verifications.query_interchain_broadcasts_v1("12345"), [{"header": {"dc_id": "banana", "timestamp": "12345987"}}, "return"])
|
||||||
|
mock_query_l5_verifications.assert_called_once_with("mydragonchain", "12345987")
|
||||||
|
mock_get_object.assert_called_once_with("BLOCK/banana")
|
||||||
|
|
||||||
|
@patch("dragonchain.webserver.lib.verifications._query_l5_verification", return_value=None)
|
||||||
|
@patch(
|
||||||
|
"dragonchain.webserver.lib.verifications._get_verification_records", return_value=[{"header": {"dc_id": "banana", "timestamp": "12345987"}}]
|
||||||
|
)
|
||||||
|
@patch("dragonchain.webserver.lib.verifications.storage.get_json_from_object")
|
||||||
|
def test_query_interchain_broadcasts_v1_returns_correctly_when_verification_not_found(
|
||||||
|
self, mock_get_object, mock_get_verification_records, mock_query_l5_verifications
|
||||||
|
):
|
||||||
|
mock_redis = MagicMock(smembers=MagicMock(return_value=[b"mydragonchain"]))
|
||||||
|
redisearch._get_redisearch_index_client = MagicMock(return_value=MagicMock(redis=mock_redis))
|
||||||
|
self.assertEqual(verifications.query_interchain_broadcasts_v1("12345"), [{"header": {"dc_id": "banana", "timestamp": "12345987"}}])
|
||||||
|
mock_query_l5_verifications.assert_called_once_with("mydragonchain", "12345987")
|
||||||
|
mock_get_object.assert_not_called()
|
||||||
|
|
||||||
|
@patch("dragonchain.webserver.lib.verifications._query_l5_verification", return_value=None)
|
||||||
|
@patch("dragonchain.webserver.lib.verifications._get_verification_records", return_value=[])
|
||||||
|
@patch("dragonchain.webserver.lib.verifications.storage.get_json_from_object")
|
||||||
|
def test_query_interchain_broadcasts_v1_returns_correctly_when_block_doesnt_exist(
|
||||||
|
self, mock_get_object, mock_get_verification_records, mock_query_l5_verifications
|
||||||
|
):
|
||||||
|
mock_redis = MagicMock(smembers=MagicMock(return_value=[b"mydragonchain"]))
|
||||||
|
redisearch._get_redisearch_index_client = MagicMock(return_value=MagicMock(redis=mock_redis))
|
||||||
|
self.assertEqual(verifications.query_interchain_broadcasts_v1("12345"), [])
|
||||||
|
mock_query_l5_verifications.assert_not_called()
|
||||||
|
mock_get_object.assert_not_called()
|
||||||
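A note on the mocking pattern above: `redisearch.search` is replaced with a `MagicMock` whose only job is to mimic the result shape the production code touches, namely `.docs` and `.docs[0].id`. A minimal sketch of why that stand-in is sufficient (using the same illustrative values as the tests):

from unittest.mock import MagicMock

# Emulates a redisearch result: the code under test only reads .docs,
# len(.docs), and .docs[0].id, so nothing else needs to be faked.
fake_result = MagicMock(docs=[MagicMock(id="banana")])
assert len(fake_result.docs) == 1
assert fake_result.docs[0].id == "banana"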
@@ -28,6 +28,7 @@ def apply_routes(app: flask.Flask):
     app.add_url_rule("/verifications/<block_id>", "get_verification_v1", get_verification_v1, methods=["GET"])
     app.add_url_rule("/v1/verifications/<block_id>", "get_verification_v1", get_verification_v1, methods=["GET"])
     app.add_url_rule("/v1/verifications/pending/<block_id>", "get_pending_verifications_v1", get_pending_verifications_v1, methods=["GET"])
+    app.add_url_rule("/v1/verifications/interchains/<block_id>", "query_l5_verifications_v1", query_l5_verifications_v1, methods=["GET"])


 @request_authorizer.Authenticated(api_resource="verifications", api_operation="read", api_name="get_verifications")
@@ -39,3 +40,8 @@ def get_verification_v1(block_id: str, **kwargs) -> Tuple[str, int, Dict[str, str]]:
 @request_authorizer.Authenticated(api_resource="verifications", api_operation="read", api_name="get_pending_verifications")
 def get_pending_verifications_v1(block_id: str, **kwargs) -> Tuple[str, int, Dict[str, str]]:
     return helpers.flask_http_response(200, verifications.get_pending_verifications_v1(block_id))
+
+
+@request_authorizer.Authenticated(api_resource="verifications", api_operation="read", api_name="query_interchain_verifications")
+def query_l5_verifications_v1(block_id: str, **kwargs) -> Tuple[str, int, Dict[str, str]]:
+    return helpers.flask_http_response(200, verifications.query_interchain_broadcasts_v1(block_id))
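The new route wires `GET /v1/verifications/interchains/<block_id>` to the query helper behind the same `verifications`/`read` authorization used by the other verification endpoints. A minimal client sketch (the base URL is a placeholder, and a real chain also requires Dragonchain's signed HMAC `Authorization` header, omitted here; the official SDKs handle that signing):

import requests

base_url = "http://localhost:8080"  # placeholder chain address, not a real deployment
block_id = "12345"

# Real deployments reject unsigned requests; auth is elided in this sketch.
response = requests.get(f"{base_url}/v1/verifications/interchains/{block_id}")
response.raise_for_status()
# Expected body: the block's L5 verification records plus any subsequent
# interchain broadcasts that could be found.
print(response.json())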
@@ -1,9 +1,9 @@
 apiVersion: v1
 name: dragonchain-k8s
-version: 1.0.8
+version: 1.0.9
 icon: https://dragonchain-assets.s3.amazonaws.com/jojo.png
 description: Dragonchain on kubernetes
-appVersion: 4.4.0
+appVersion: 4.5.1
 keywords:
   - dragonchain
   - blockchain
@@ -61,6 +61,17 @@ redisearch:
     requests:
       cpu: 50m
       memory: 300Mi
+  image:
+    repository: docker.io/redislabs/redisearch
+    tag: 1.6.13
+  storage:
+    spec:
+      storageClassName: standard
+      accessModes:
+        - ReadWriteOnce
+      resources:
+        requests:
+          storage: 1Gi

 # Webserver configuration
 webserver:
@@ -142,7 +153,7 @@ dragonchain:
   image:
     pullPolicy: Always
     value: docker.io/dragonchain/dragonchain_core
-    version: 4.4.0
+    version: 4.5.1
   storage:
     spec:
       storageClassName: standard
@@ -155,20 +166,7 @@ dragonchain:
 redis:
   image:
     repository: docker.io/redis
-    tag: 5.0.7-alpine
+    tag: 6.0.5-alpine
-  storage:
-    spec:
-      storageClassName: standard
-      accessModes:
-        - ReadWriteOnce
-      resources:
-        requests:
-          storage: 1Gi
-
-redisearch:
-  image:
-    repository: docker.io/redislabs/redisearch
-    tag: 1.6.7
   storage:
     spec:
       storageClassName: standard
@@ -72,7 +72,7 @@ dragonchain:
   # -- dragonchain.image.version --
   # The tag to use with the dragonchain image
   # Check here for available values: https://hub.docker.com/r/dragonchain/dragonchain_core/tags
-  version: 4.4.0
+  version: 4.5.1

   # -- dragonchain.storage --
   # The mount path of a persistent volume to be used by all dragonchain microservices.
@@ -1,20 +1,20 @@
-boto3==1.11.9
+boto3==1.14.24
-redis==3.4.1
+redis==3.5.3
 apscheduler==3.6.3
 jsonpath==0.82
-Flask==1.1.1
+Flask==1.1.2
-requests==2.22.0
+requests==2.24.0
-fastjsonschema==2.14.2
+fastjsonschema==2.14.4
 secp256k1==0.13.2
-web3==5.5.0
+web3==5.12.0
-kubernetes==10.0.1
+kubernetes==11.0.0
-docker==4.1.0
+docker==4.2.2
 gunicorn[gevent]==20.0.4
 aiohttp[speedups]==3.6.2
 aioredis==1.3.1
-base58==2.0.0
+base58==2.0.1
-bit==0.6.0
+bit==0.6.2
-redisearch==0.8.3
+redisearch==1.0.0
 bnb-tx==0.0.4
-pycoin==0.90.20190728
+pycoin==0.90.20200322
 mnemonic==0.19
13 tools.sh
@@ -59,8 +59,17 @@ elif [ "$1" = "bandit" ]; then
 elif [ "$1" = "docs" ]; then
     rm -rf docs/static/chart && mkdir -p docs/static/chart
     cp -v helm/opensource-config.yaml docs/static/chart/
-    CHART_VERSION="$(yq r helm/dragonchain-k8s/Chart.yaml version)"
-    sed -i "s/--version [0-9]\\{1,\\}\\.[0-9]\\{1,\\}\\.[0-9]\\{1,\\}/--version $CHART_VERSION/" docs/deployment/deploying.md
+    ######################################
+    # yq version 3 syntax
+    ######################################
+    #CHART_VERSION="$(yq r helm/dragonchain-k8s/Chart.yaml version)"
+    ######################################
+    # yq version 4 syntax
+    ######################################
+    #NOT WORKING CHART_VERSION="$(yq '.version' helm/dragonchain-k8s/Chart.yaml | head -n 1)"
+    # use hard coded version
+    #sed -i '' "s/--version [0-9]\\{1,\\}\\.[0-9]\\{1,\\}\\.[0-9]\\{1,\\}/--version $CHART_VERSION/" docs/deployment/deploying.md
+    sed -i '' "s/--version [0-9]\\{1,\\}\\.[0-9]\\{1,\\}\\.[0-9]\\{1,\\}/--version 1.0.9/" docs/deployment/deploying.md
     (
         cd docs || exit 1
         make html
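The commented-out lines above record a real migration hazard: `yq r file key` is yq v3 syntax and errors under yq v4, while the v4 attempt was left marked not working, so the chart version ends up hard-coded to 1.0.9 in the sed call. One way to sidestep the yq major-version split entirely is to read the key in Python; a minimal sketch, assuming `version:` remains a top-level scalar in Chart.yaml:

import re

def chart_version(path: str = "helm/dragonchain-k8s/Chart.yaml") -> str:
    # Good enough for a flat "version: 1.0.9" line; not a general YAML parser.
    with open(path) as chart:
        for line in chart:
            match = re.match(r"^version:\s*(\S+)", line)
            if match:
                return match.group(1)
    raise ValueError(f"no top-level version key in {path}")

print(chart_version())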