Merge pull request #1681 from CounterpartyXCP/develop
v10.1.0
adamkrellenstein authored Apr 16, 2024
2 parents ae2ba6c + 1cfe415 commit a02a28e
Showing 25 changed files with 445 additions and 361 deletions.
2 changes: 1 addition & 1 deletion .github/workflows/test_compose.sh
@@ -32,7 +32,7 @@ docker rmi counterparty/counterparty:$VERSION || true
docker build -t counterparty/counterparty:$VERSION .

# remove the counterparty-core data
sudo rm -rf ~/.local/share/counterparty-docker-data/counterparty/*
# sudo rm -rf ~/.local/share/counterparty-docker-data/counterparty/*

# re-start containers
BITCOIN_CHAIN=test docker compose up -d
2 changes: 1 addition & 1 deletion .github/workflows/test_compose.yml
@@ -2,7 +2,7 @@ name: Docker Compose

on:
push:
branches: ['develop', 'master', 'readme']
branches: ['develop', 'master']
#branches: ['compose']

jobs:
10 changes: 10 additions & 0 deletions counterparty-core/counterpartycore/server.py
@@ -227,6 +227,14 @@
"help": "how long to keep a lock on a UTXO being tracked",
},
],
[
("--no-mempool",),
{"action": "store_true", "default": False, "help": "Disable mempool parsing"},
],
[
("--skip-db-check",),
{"action": "store_true", "default": False, "help": "Skip integrity check on the database"},
],
]


@@ -388,6 +396,8 @@ def main():
p2sh_dust_return_pubkey=args.p2sh_dust_return_pubkey,
utxo_locks_max_addresses=args.utxo_locks_max_addresses,
utxo_locks_max_age=args.utxo_locks_max_age,
no_mempool=args.no_mempool,
skip_db_check=args.skip_db_check,
)

server.initialise_log_config(
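Both new options are plain store_true flags that main() forwards alongside the existing UTXO-lock settings, so they only need to be appended to the usual start invocation. A minimal launch sketch, assuming the counterparty-server console script and its start subcommand (neither appears in this diff):

# Hedged sketch: launch the server with the two flags added in this release.
# The "counterparty-server" entry point and "start" subcommand are assumptions.
import subprocess

subprocess.run(
    [
        "counterparty-server",
        "start",
        "--no-mempool",     # disable mempool parsing
        "--skip-db-check",  # skip the database integrity check
    ],
    check=True,
)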
2 changes: 1 addition & 1 deletion counterparty-core/requirements.txt
@@ -5,4 +5,4 @@ colorlog==6.8.0
python-dateutil==2.8.2
requests==2.31.0
termcolor==2.4.0
counterparty-lib==10.1.0-rc.1
counterparty-lib==10.1.0
109 changes: 77 additions & 32 deletions counterparty-lib/counterpartylib/lib/api.py
@@ -68,6 +68,23 @@

logger = logging.getLogger(config.LOGGER_NAME)

if os.environ.get("SENTRY_DSN"):
import sentry_sdk

environment = os.environ.get("SENTRY_ENVIRONMENT", "development")

release = os.environ.get("SENTRY_RELEASE", config.__version__)

logger.info("Sentry DSN found, initializing Sentry")

sentry_sdk.init(
dsn=os.environ["SENTRY_DSN"],
environment=environment,
release=release,
traces_sample_rate=1.0,
)
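Sentry reporting is opt-in: the block added above runs only when SENTRY_DSN is set, with SENTRY_ENVIRONMENT and SENTRY_RELEASE falling back to "development" and config.__version__. A minimal sketch of turning it on, assuming the variables are exported before the module is imported (the DSN is a placeholder, not a real key):

# Hedged sketch: set the environment the api module reads at import time.
import os

os.environ["SENTRY_DSN"] = "https://examplekey@o0.ingest.sentry.io/0"  # placeholder
os.environ["SENTRY_ENVIRONMENT"] = "staging"  # optional, defaults to "development"
os.environ["SENTRY_RELEASE"] = "10.1.0"       # optional, defaults to config.__version__

# Importing counterpartylib.lib.api after this point triggers sentry_sdk.init().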


API_TABLES = [
"assets",
"balances",
@@ -227,7 +244,7 @@ def check_database_state(db, blockcount):
f"""Checks {config.XCP_NAME} database to see if is caught up with backend.""" # noqa: B021
if ledger.CURRENT_BLOCK_INDEX + 1 < blockcount:
raise DatabaseError(f"{config.XCP_NAME} database is behind backend.")
logger.debug("Database state check passed.")
# logger.debug("Database state check passed.")
return


Expand Down Expand Up @@ -327,7 +344,10 @@ def value_to_marker(value):
raise APIError(f"A specified filter is missing the '{field}' field")
if not isinstance(filter_["value"], (str, int, float, list)):
raise APIError(f"Invalid value for the field '{filter_['field']}'")
if isinstance(filter_["value"], list) and filter_["op"].upper() not in ["IN", "NOT IN"]:
if isinstance(filter_["value"], list) and filter_["op"].upper() not in [
"IN",
"NOT IN",
]:
raise APIError(f"Invalid value for the field '{filter_['field']}'")
if filter_["op"].upper() not in [
"=",
@@ -1015,7 +1035,7 @@ def get_running_info():
"last_block": last_block,
"indexd_caught_up": indexd_caught_up,
"indexd_blocks_behind": indexd_blocks_behind,
"last_message_index": last_message["message_index"] if last_message else -1,
"last_message_index": (last_message["message_index"] if last_message else -1),
"api_limit_rows": config.API_LIMIT_ROWS,
"running_testnet": config.TESTNET,
"running_regtest": config.REGTEST,
@@ -1131,9 +1151,17 @@ def getrawtransaction_batch(txhash_list, verbose=False, skip_missing=False):
def get_tx_info(tx_hex, block_index=None):
# block_index mandatory for transactions before block 335000
source, destination, btc_amount, fee, data, extra = gettxinfo.get_tx_info(
self.db, BlockchainParser().deserialize_tx(tx_hex), block_index=block_index
self.db,
BlockchainParser().deserialize_tx(tx_hex),
block_index=block_index,
)
return (
source,
destination,
btc_amount,
fee,
util.hexlify(data) if data else "",
)
return source, destination, btc_amount, fee, util.hexlify(data) if data else ""

@dispatcher.add_method
def unpack(data_hex):
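get_tx_info now always returns a five-element tuple, with an empty string standing in for the data field when the transaction carries none (block_index remains mandatory for transactions before block 335000). A hedged sketch of calling it over the v1 JSON-RPC interface, assuming the default http://127.0.0.1:4000/api/ endpoint and rpc/rpc credentials:

# Hedged sketch: endpoint, credentials and the raw transaction hex are assumptions.
import requests

payload = {
    "method": "get_tx_info",
    "params": {"tx_hex": "0100..."},  # placeholder raw transaction hex
    "jsonrpc": "2.0",
    "id": 0,
}
result = requests.post(
    "http://127.0.0.1:4000/api/",
    json=payload,
    auth=("rpc", "rpc"),
).json()["result"]

source, destination, btc_amount, fee, data_hex = result
print(source, destination, btc_amount, fee, data_hex or "<no data>")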
@@ -1157,16 +1185,14 @@ def search_pubkey(pubkeyhash, provided_pubkeys=None):

@dispatcher.add_method
def get_dispenser_info(tx_hash=None, tx_index=None):
cursor = self.db.cursor() # noqa: F841

if tx_hash is None and tx_index is None:
raise APIError("You must provided a tx hash or a tx index")

dispensers = []
if tx_hash is not None:
dispensers = get_dispenser_info(self.db, tx_hash=tx_hash)
dispensers = ledger.get_dispenser_info(self.db, tx_hash=tx_hash)
else:
dispensers = get_dispenser_info(self.db, tx_index=tx_index)
dispensers = ledger.get_dispenser_info(self.db, tx_index=tx_index)

if len(dispensers) == 1:
dispenser = dispensers[0]
Expand All @@ -1178,10 +1204,13 @@ def get_dispenser_info(tx_hash=None, tx_index=None):

if dispenser["oracle_address"] != None: # noqa: E711
fiat_price = util.satoshirate_to_fiat(dispenser["satoshirate"])
oracle_price, oracle_fee, oracle_fiat_label, oracle_price_last_updated = (
ledger.get_oracle_last_price(
self.db, dispenser["oracle_address"], ledger.CURRENT_BLOCK_INDEX
)
(
oracle_price,
oracle_fee,
oracle_fiat_label,
oracle_price_last_updated,
) = ledger.get_oracle_last_price(
self.db, dispenser["oracle_address"], ledger.CURRENT_BLOCK_INDEX
)

if oracle_price > 0:
@@ -1251,27 +1280,40 @@ def handle_order_matches(tx_hash):
def handle_healthz():
msg, code = "Healthy", 200

type_ = request.args.get("type", "heavy")
type_ = request.args.get("type", "light")

def light_check():
latest_block_index = backend.getblockcount()
check_database_state(self.db, latest_block_index)

def heavy_check():
compose_transaction(
self.db,
name="send",
params={
"source": config.UNSPENDABLE,
"destination": config.UNSPENDABLE,
"asset": config.XCP,
"quantity": 100000000,
},
allow_unconfirmed_inputs=True,
fee=1000,
)

try:
if type_ == "light":
logger.debug("Performing light healthz check.")
latest_block_index = backend.getblockcount()
check_database_state(self.db, latest_block_index)
else:
if type_ == "heavy":
# Perform a heavy healthz check.
# Do everything in light but also compose a
# send tx

logger.debug("Performing heavy healthz check.")
compose_transaction(
self.db,
name="send",
params={
"source": config.UNSPENDABLE,
"destination": config.UNSPENDABLE,
"asset": config.XCP,
"quantity": 100000000,
},
allow_unconfirmed_inputs=True,
fee=1000,
)

light_check()
heavy_check()
else:
logger.debug("Performing light healthz check.")
light_check()

except Exception:
msg, code = "Unhealthy", 503
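The healthz default has flipped from the heavy to the light probe: the handler now only compares the backend block count against the database state unless the heavy variant, which additionally composes a throwaway XCP send between unspendable addresses, is requested explicitly (and that variant runs the light check first). A probing sketch, assuming the handler is mounted at /healthz on the default API port:

# Hedged sketch: the /healthz route and port 4000 are assumptions about the deployment.
import requests

light = requests.get("http://127.0.0.1:4000/healthz")  # type defaults to "light"
heavy = requests.get("http://127.0.0.1:4000/healthz", params={"type": "heavy"})

for name, response in (("light", light), ("heavy", heavy)):
    print(name, response.status_code, response.text)  # 200 "Healthy" or 503 "Unhealthy"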

@@ -1446,7 +1488,10 @@ def handle_rest(path_args, flask_request):
# Run the query.
try:
query_data = get_rows(
self.db, table=query_type, filters=data_filter, filterop=operator
self.db,
table=query_type,
filters=data_filter,
filterop=operator,
)
except APIError as error: # noqa: F841
return flask.Response("API Error", 400, mimetype="application/json")
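handle_rest above routes REST queries into get_rows with the table name, filters and filter operator, and the filter validation shown earlier in this diff only accepts list values with the IN / NOT IN operators. A hedged sketch of an equivalent filtered table query over the v1 JSON-RPC interface; the get_balances method name, endpoint, credentials and addresses are assumptions or placeholders:

# Hedged sketch: method name, endpoint, credentials and addresses are assumptions.
import requests

payload = {
    "method": "get_balances",
    "params": {
        "filters": [
            {
                "field": "address",
                "op": "IN",  # list values are only valid with IN / NOT IN
                "value": [
                    "1CounterpartyXXXXXXXXXXXXXXXUWLpVr",
                    "1ExamplePlaceholderAddressXXXXXXXX",
                ],
            },
        ],
        "filterop": "AND",
    },
    "jsonrpc": "2.0",
    "id": 0,
}
rows = requests.post(
    "http://127.0.0.1:4000/api/",
    json=payload,
    auth=("rpc", "rpc"),
).json()["result"]
print(rows)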