From 9a5dad8e24e9e88b5abf74fecaca8ff25abd81a2 Mon Sep 17 00:00:00 2001 From: "github-actions[bot]" <41898282+github-actions[bot]@users.noreply.github.com> Date: Fri, 9 Jun 2023 14:19:01 +0800 Subject: [PATCH 1/7] Deploy to testnet (#1291) MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit Co-authored-by: Miles Zhang Co-authored-by: Chen Yu Co-authored-by: iamyates Co-authored-by: ShiningRay Co-authored-by: 代码之力 <9770+ShiningRay@users.noreply.github.com> Co-authored-by: NanZhang fix memory leak in udt transaction count (#1302) fix: update cota sync data (#1303) fix: rectify the cout error in the progress bar (#1304) --- .env.example | 4 +- .../workflows/request-to-deploy-mainnet.yml | 2 +- .../workflows/request-to-deploy-testnet.yml | 2 +- Gemfile | 2 + Gemfile.lock | 45 + Procfile | 1 + .../api/v1/address_transactions_controller.rb | 66 +- .../api/v1/block_transactions_controller.rb | 14 +- app/controllers/api/v1/blocks_controller.rb | 35 +- .../api/v1/ckb_transactions_controller.rb | 25 +- .../v1/contract_transactions_controller.rb | 7 +- .../api/v1/udt_transactions_controller.rb | 9 +- app/controllers/api/v1/udts_controller.rb | 62 +- .../api/v2/ckb_transactions_controller.rb | 104 +++ .../api/v2/nft/collections_controller.rb | 25 +- .../api/v2/nft/transfers_controller.rb | 43 + .../api/v2/pending_transactions_controller.rb | 43 +- .../api/v2/transactions_controller.rb | 33 +- app/jobs/commit_block_job.rb | 30 + app/jobs/export_address_transactions_job.rb | 71 ++ ...ocess_block_job.rb => import_block_job.rb} | 5 +- ...ction_job.rb => import_transaction_job.rb} | 47 +- app/jobs/revert_block_job.rb | 25 +- app/models/address.rb | 2 +- app/models/address_block_snapshot.rb | 22 + app/models/block.rb | 6 +- app/models/cell_datum.rb | 1 + app/models/cell_input.rb | 36 +- app/models/cell_output.rb | 55 +- .../ckb_sync/new_node_data_processor.rb | 36 +- app/models/ckb_sync/node_data_processor.rb | 805 ------------------ app/models/ckb_transaction.rb | 76 +- app/models/lock_script.rb | 9 + app/models/pool_transaction_entry.rb | 94 -- app/models/reject_reason.rb | 17 + app/models/statistic_info.rb | 13 +- app/models/suggest_query.rb | 7 +- app/models/type_script.rb | 9 + app/models/udt.rb | 54 +- app/serializers/ckb_transaction_serializer.rb | 21 +- app/utils/ckb_utils.rb | 11 +- ...ddress_unclaimed_compensation_generator.rb | 7 +- .../flush_inputs_outputs_cache_worker.rb | 2 +- app/workers/pool_transaction_check_worker.rb | 21 +- ...transaction_update_reject_reason_worker.rb | 8 +- ...4_ckb_transactions_count_on_udts_worker.rb | 10 + config/environments/test.rb | 2 +- config/routes.rb | 24 +- config/routes/v2.rb | 12 +- config/schedule.yml | 4 + ..._add_h24_ckb_transactions_count_to_udts.rb | 17 + ...61651_change_cell_input_previous_output.rb | 16 + .../20230526070328_create_reject_reasons.rb | 9 + .../20230526085258_address_block_snapshot.rb | 12 + .../20230526135653_migrate_reject_messages.rb | 19 + db/structure.sql | 142 ++- lib/scheduler.rb | 9 + lib/tasks/migration/fill_nrc_721_token.rake | 53 +- lib/tasks/migration/register_udt.rake | 215 ++--- lib/tasks/migration/update_cell_type.rake | 34 + .../update_token_transfer_action.rake | 33 + lib/websocket.rb | 67 ++ .../address_transactions_controller_test.rb | 203 ++++- .../v1/block_transactions_controller_test.rb | 11 +- .../api/v1/blocks_controller_test.rb | 17 + .../v1/ckb_transactions_controller_test.rb | 52 +- .../api/v1/udts_controller_test.rb | 11 +- .../api/v2/nft/transfers_controller_test.rb | 12 
+ .../pending_transactions_controller_test.rb | 16 +- .../api/v2/statistics_controller_test.rb | 12 +- .../api/v2/transactions_controller_test.rb | 29 + test/factories/address_block_snapshot.rb | 15 + test/factories/block.rb | 3 +- test/factories/cell_input.rb | 16 +- test/factories/cell_output.rb | 2 + test/factories/ckb_transaction.rb | 36 +- test/factories/lock_script.rb | 2 +- test/factories/pool_transaction_entry.rb | 5 - test/factories/reject_reasons.rb | 6 + test/factories/token_collection.rb | 1 - test/factories/token_item.rb | 1 - test/factories/token_transfer.rb | 2 - test/factories/udt_transaction.rb | 6 + test/jobs/import_transaction_job_test.rb | 138 +++ test/jobs/revert_block_job_test.rb | 34 + test/models/ckb_sync/dao_events_test.rb | 315 ++++--- .../ckb_sync/node_data_processor_test.rb | 457 +++++++--- test/models/ckb_transaction_test.rb | 188 ++-- test/models/pending_transaction_test.rb | 30 + test/models/pool_transaction_entry_test.rb | 37 - .../reject_reason_test.rb} | 2 +- test/models/suggest_query_test.rb | 12 +- .../tasks/migrations/update_cell_type_test.rb | 60 ++ .../migrations/update_token_transfer_test.rb | 38 + .../pool_transaction_check_worker_test.rb | 57 +- ...action_update_reject_reason_worker_test.rb | 8 +- ..._transactions_count_on_udts_worker_test.rb | 33 + 97 files changed, 2880 insertions(+), 1677 deletions(-) create mode 100644 app/controllers/api/v2/ckb_transactions_controller.rb create mode 100644 app/jobs/commit_block_job.rb create mode 100644 app/jobs/export_address_transactions_job.rb rename app/jobs/{process_block_job.rb => import_block_job.rb} (75%) rename app/jobs/{process_transaction_job.rb => import_transaction_job.rb} (87%) create mode 100644 app/models/address_block_snapshot.rb delete mode 100644 app/models/ckb_sync/node_data_processor.rb delete mode 100644 app/models/pool_transaction_entry.rb create mode 100644 app/models/reject_reason.rb create mode 100644 app/workers/update_h24_ckb_transactions_count_on_udts_worker.rb create mode 100644 db/migrate/20230504023535_add_h24_ckb_transactions_count_to_udts.rb create mode 100644 db/migrate/20230518061651_change_cell_input_previous_output.rb create mode 100644 db/migrate/20230526070328_create_reject_reasons.rb create mode 100644 db/migrate/20230526085258_address_block_snapshot.rb create mode 100644 db/migrate/20230526135653_migrate_reject_messages.rb create mode 100644 lib/tasks/migration/update_cell_type.rake create mode 100644 lib/tasks/migration/update_token_transfer_action.rake create mode 100644 lib/websocket.rb create mode 100644 test/controllers/api/v2/transactions_controller_test.rb create mode 100644 test/factories/address_block_snapshot.rb delete mode 100644 test/factories/pool_transaction_entry.rb create mode 100644 test/factories/reject_reasons.rb create mode 100644 test/factories/udt_transaction.rb create mode 100644 test/jobs/import_transaction_job_test.rb create mode 100644 test/jobs/revert_block_job_test.rb create mode 100644 test/models/pending_transaction_test.rb delete mode 100644 test/models/pool_transaction_entry_test.rb rename test/{jobs/process_transaction_job_test.rb => models/reject_reason_test.rb} (58%) create mode 100644 test/tasks/migrations/update_cell_type_test.rb create mode 100644 test/tasks/migrations/update_token_transfer_test.rb create mode 100644 test/workers/update_h24_ckb_transactions_count_on_udts_worker_test.rb diff --git a/.env.example b/.env.example index 7fea6cf09..72ae7259f 100644 --- a/.env.example +++ b/.env.example @@ -5,7 +5,7 @@ 
CKB_NET_MODE="mainnet" # ckb node url CKB_NODE_URL="http://localhost:8114" - +CKB_WS_URL="http://localhost:28114" # -------------------------------- Rails(database, redis, memcached) segment ---- # (optional if you use config/database.yml) @@ -112,4 +112,4 @@ ASSET_URL="" # (optional) # used in Rails test environment, setting to true enables SimpleCov::Formatter::Codecov # true | false -CI="false" \ No newline at end of file +CI="false" diff --git a/.github/workflows/request-to-deploy-mainnet.yml b/.github/workflows/request-to-deploy-mainnet.yml index 9e931908d..ebf3ca495 100644 --- a/.github/workflows/request-to-deploy-mainnet.yml +++ b/.github/workflows/request-to-deploy-mainnet.yml @@ -16,6 +16,6 @@ jobs: source_branch: 'testnet' destination_branch: 'master' pr_title: 'Deploy to mainnet' - pr_reviewer: 'ShiningRay,iamyates,zmcNotafraid,keith-cy' + pr_reviewer: 'ShiningRay,rabbitz,zmcNotafraid,keith-cy' pr_label: 'auto-pr' github_token: ${{ secrets.GITHUB_TOKEN }} diff --git a/.github/workflows/request-to-deploy-testnet.yml b/.github/workflows/request-to-deploy-testnet.yml index 16e254aef..87a3bae47 100644 --- a/.github/workflows/request-to-deploy-testnet.yml +++ b/.github/workflows/request-to-deploy-testnet.yml @@ -16,6 +16,6 @@ jobs: source_branch: 'develop' destination_branch: 'testnet' pr_title: 'Deploy to testnet' - pr_reviewer: 'ShiningRay,iamyates,zmcNotafraid,keith-cy' + pr_reviewer: 'ShiningRay,rabbitz,zmcNotafraid,keith-cy' pr_label: 'auto-pr' github_token: ${{ secrets.GITHUB_TOKEN }} diff --git a/Gemfile b/Gemfile index 9a9c7122c..06d49b66f 100644 --- a/Gemfile +++ b/Gemfile @@ -129,3 +129,5 @@ gem "rack-cache" gem "dalli" gem "after_commit_everywhere" gem "kredis" + +gem "async-websocket", "~> 0.22.1", require: false diff --git a/Gemfile.lock b/Gemfile.lock index 50216d439..b1edaf885 100644 --- a/Gemfile.lock +++ b/Gemfile.lock @@ -97,6 +97,27 @@ GEM rake (>= 10.4, < 14.0) ansi (1.5.0) ast (2.4.2) + async (2.3.1) + console (~> 1.10) + io-event (~> 1.1) + timers (~> 4.1) + async-http (0.59.5) + async (>= 1.25) + async-io (>= 1.28) + async-pool (>= 0.2) + protocol-http (~> 0.23) + protocol-http1 (~> 0.14.0) + protocol-http2 (~> 0.14.0) + traces (>= 0.8.0) + async-io (1.34.1) + async + async-pool (0.3.12) + async (>= 1.25) + async-websocket (0.22.1) + async-http (~> 0.54) + async-io (~> 1.23) + protocol-rack (~> 0.1) + protocol-websocket (~> 0.9.1) awesome_print (1.9.2) backport (1.2.0) benchmark (0.2.1) @@ -119,6 +140,8 @@ GEM deep_merge (~> 1.2, >= 1.2.1) dry-validation (~> 1.0, >= 1.0.0) connection_pool (2.4.0) + console (1.16.2) + fiber-local crack (0.4.5) rexml crass (1.0.6) @@ -192,6 +215,7 @@ GEM ffi-compiler (1.0.1) ffi (>= 1.0.0) rake + fiber-local (1.0.0) fugit (1.8.1) et-orbi (~> 1, >= 1.2.7) raabro (~> 1.4) @@ -211,6 +235,7 @@ GEM http-form_data (2.3.0) i18n (1.12.0) concurrent-ruby (~> 1.0) + io-event (1.1.6) jaro_winkler (1.5.4) jbuilder (2.11.5) actionview (>= 5.0.0) @@ -271,6 +296,9 @@ GEM net-protocol newrelic_rpm (8.12.0) nio4r (2.5.8) + nokogiri (1.14.3) + mini_portile2 (~> 2.8.0) + racc (~> 1.4) nokogiri (1.14.3-arm64-darwin) racc (~> 1.4) nokogiri (1.14.3-x86_64-linux) @@ -283,6 +311,19 @@ GEM ast (~> 2.4.1) pg (1.4.5) pkg-config (1.5.1) + protocol-hpack (1.4.2) + protocol-http (0.24.0) + protocol-http1 (0.14.6) + protocol-http (~> 0.22) + protocol-http2 (0.14.2) + protocol-hpack (~> 1.4) + protocol-http (~> 0.18) + protocol-rack (0.2.4) + protocol-http (~> 0.23) + rack (>= 1.0) + protocol-websocket (0.9.1) + protocol-http (~> 0.2) + protocol-http1 (~> 0.2) 
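The async gems above, the CKB_WS_URL variable added to .env.example, and the poolsyncer Procfile entry below all back the new lib/websocket.rb pool syncer added later in this patch. A minimal sketch of how such a listener can be wired with async-websocket, assuming the node serves the CKB subscription RPC at CKB_WS_URL and that the Rails environment is already loaded; payload handling and field names here are illustrative, not the actual lib/websocket.rb:

    require "async"
    require "async/http/endpoint"
    require "async/websocket/client"
    require "json"

    endpoint = Async::HTTP::Endpoint.parse(ENV.fetch("CKB_WS_URL", "http://localhost:28114"))

    Async do
      Async::WebSocket::Client.connect(endpoint) do |connection|
        # ask the node to push every transaction that enters its tx pool
        connection.write({ id: 1, jsonrpc: "2.0", method: "subscribe", params: ["new_transaction"] }.to_json)
        connection.flush

        while (message = connection.read)
          payload = JSON.parse(message.to_s)
          raw = payload.dig("params", "result")
          next unless raw

          # the result is a JSON-encoded pool entry; hand its transaction to the
          # import pipeline introduced in this patch (field name assumed)
          entry = JSON.parse(raw)
          ImportTransactionJob.perform_later(entry["transaction"]) if entry["transaction"]
        end
      end
    end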
pry (0.14.1) coderay (~> 1.1) method_source (~> 1.0) @@ -432,6 +473,8 @@ GEM thor (1.2.1) tilt (2.1.0) timeout (0.3.0) + timers (4.3.5) + traces (0.8.0) tzinfo (2.0.5) concurrent-ruby (~> 1.0) unf (0.1.4) @@ -453,12 +496,14 @@ GEM PLATFORMS arm64-darwin-21 + ruby x86_64-linux DEPENDENCIES activerecord-import after_commit_everywhere annotate + async-websocket (~> 0.22.1) awesome_print benchmark_methods bigdecimal diff --git a/Procfile b/Procfile index 5cf89a37a..26e8598b4 100644 --- a/Procfile +++ b/Procfile @@ -2,3 +2,4 @@ web: bundle exec puma -C config/puma.rb worker: bundle exec sidekiq -C config/sidekiq.yml -e production blocksyncer: bundle exec ruby lib/ckb_block_node_processor.rb scheduler: bundle exec ruby lib/scheduler.rb +poolsyncer: bundle exec ruby lib/websocket.rb diff --git a/app/controllers/api/v1/address_transactions_controller.rb b/app/controllers/api/v1/address_transactions_controller.rb index 60130650b..09fa00cb2 100644 --- a/app/controllers/api/v1/address_transactions_controller.rb +++ b/app/controllers/api/v1/address_transactions_controller.rb @@ -1,24 +1,68 @@ +require "csv" module Api module V1 class AddressTransactionsController < ApplicationController before_action :validate_query_params before_action :validate_pagination_params, :pagination_params + before_action :set_address_transactions, only: [:show, :download_csv] def show - @address = Address.find_address!(params[:id]) - raise Api::V1::Exceptions::AddressNotFoundError if @address.is_a?(NullAddress) + @tx_ids = AccountBook. + joins(:ckb_transaction). + where(address_id: @address.id) + + params[:sort] ||= "ckb_transaction_id.desc" + order_by, asc_or_desc = params[:sort].split(".", 2) + order_by = + case order_by + when "time" then "ckb_transactions.block_timestamp" + else order_by + end + + head :not_found and return unless order_by.in? %w[ + ckb_transaction_id block_timestamp + ckb_transactions.block_timestamp + ] + + @tx_ids = @tx_ids. + order(order_by => asc_or_desc). + select("ckb_transaction_id"). + page(@page).per(@page_size).fast_page + + order_by = "id" if order_by == "ckb_transaction_id" + @ckb_transactions = CkbTransaction.tx_committed.where(id: @tx_ids.map(&:ckb_transaction_id)). + select(:id, :tx_hash, :block_id, :block_number, :block_timestamp, :is_cellbase, :updated_at, :capacity_involved). + order(order_by => asc_or_desc) - @tx_ids = AccountBook.where(address_id: @address.id).order("ckb_transaction_id" => :desc).select("ckb_transaction_id").page(@page).per(@page_size).fast_page - @ckb_transactions = CkbTransaction.tx_committed.where(id: @tx_ids.map(&:ckb_transaction_id)).select(:id, :tx_hash, :block_id, :block_number, :block_timestamp, :is_cellbase, :updated_at).order(id: :desc) json = - Rails.cache.realize("#{@ckb_transactions.cache_key}/#{@address.query_address}", version: @ckb_transactions.cache_version) do - @options = FastJsonapi::PaginationMetaGenerator.new(request: request, records: @ckb_transactions, page: @page, page_size: @page_size, records_counter: @tx_ids).call + Rails.cache.realize("#{@ckb_transactions.cache_key}/#{@address.query_address}", + version: @ckb_transactions.cache_version) do + @options = FastJsonapi::PaginationMetaGenerator.new(request: request, records: @ckb_transactions, + page: @page, page_size: @page_size, records_counter: @tx_ids).call json_result end render json: json end + def download_csv + args = params.permit(:id, :start_date, :end_date, :start_number, :end_number, address_transaction: {}). 
+ merge(address_id: @address.id) + data = ExportAddressTransactionsJob.perform_now(args.to_h) + + file = + CSV.generate do |csv| + csv << [ + "TXn hash", "Blockno", "UnixTimestamp", "Method", "CKB In", "CKB OUT", "TxnFee(CKB)", + "date(UTC)" + ] + data.each { |row| csv << row } + end + + send_data file, type: "text/csv; charset=utf-8; header=present", + disposition: "attachment;filename=ckb_transactions.csv" + end + private def validate_query_params @@ -38,7 +82,10 @@ def pagination_params end def json_result - ckb_transaction_serializer = CkbTransactionsSerializer.new(@ckb_transactions, @options.merge(params: { previews: true, address: @address })) + ckb_transaction_serializer = CkbTransactionsSerializer.new(@ckb_transactions, + @options.merge(params: { + previews: true, + address: @address })) if QueryKeyUtils.valid_address?(params[:id]) if @address.address_hash == @address.query_address @@ -50,6 +97,11 @@ def json_result ckb_transaction_serializer.serialized_json end end + + def set_address_transactions + @address = Address.find_address!(params[:id]) + raise Api::V1::Exceptions::AddressNotFoundError if @address.is_a?(NullAddress) + end end end end diff --git a/app/controllers/api/v1/block_transactions_controller.rb b/app/controllers/api/v1/block_transactions_controller.rb index b637a693c..6c1162a9b 100644 --- a/app/controllers/api/v1/block_transactions_controller.rb +++ b/app/controllers/api/v1/block_transactions_controller.rb @@ -6,13 +6,17 @@ class BlockTransactionsController < ApplicationController include Pagy::Backend def show block = Block.find_by!(block_hash: params[:id]) + temp_transactions = block.ckb_transactions + .select(:id, :tx_hash, :block_id, :block_number, :block_timestamp, :is_cellbase, :updated_at) + .where(block_timestamp: block.timestamp) + temp_transactions = temp_transactions.where(tx_hash: params[:tx_hash]) if params[:tx_hash].present? + temp_transactions = temp_transactions.order(id: :desc) + @pagy, ckb_transactions = pagy( - block.ckb_transactions - .select(:id, :tx_hash, :block_id, :block_number, :block_timestamp, :is_cellbase, :updated_at) - .where(block_timestamp: block.timestamp) - .order(:id), + temp_transactions, items: params[:page_size] || 10, - overflow: :empty_page) + overflow: :empty_page + ) json = Rails.cache.realize(ckb_transactions.cache_key, version: ckb_transactions.cache_version) do diff --git a/app/controllers/api/v1/blocks_controller.rb b/app/controllers/api/v1/blocks_controller.rb index 3583e7476..0a42d0c4d 100644 --- a/app/controllers/api/v1/blocks_controller.rb +++ b/app/controllers/api/v1/blocks_controller.rb @@ -1,3 +1,4 @@ +require 'csv' module Api module V1 class BlocksController < ApplicationController @@ -12,7 +13,19 @@ def index BlockListSerializer.new(blocks).serialized_json end else - blocks = Block.recent.select(:id, :miner_hash, :number, :timestamp, :reward, :ckb_transactions_count, :live_cell_changes, :updated_at).page(@page).per(@page_size).fast_page + blocks = Block.select(:id, :miner_hash, :number, :timestamp, :reward, :ckb_transactions_count, :live_cell_changes, :updated_at) + params[:sort] ||= "timestamp.desc" + + order_by, asc_or_desc = params[:sort].split('.', 2) + order_by = case order_by + when 'height' then 'number' + when 'transactions' then 'ckb_transactions_count' + else order_by + end + + head :not_found and return unless order_by.in? 
%w[number reward timestamp ckb_transactions_count] + blocks = blocks.order(order_by => asc_or_desc).page(@page).per(@page_size).fast_page + json = Rails.cache.realize(blocks.cache_key, version: blocks.cache_version, race_condition_ttl: 3.seconds) do records_counter = RecordCounters::Blocks.new @@ -30,6 +43,26 @@ def show render json: json_block end + def download_csv + blocks = Block.select(:id, :miner_hash, :number, :timestamp, :reward, :ckb_transactions_count, :live_cell_changes, :updated_at) + + blocks = blocks.where('timestamp >= ?', DateTime.strptime(params[:start_date], '%Y-%m-%d').to_time.to_i * 1000 ) if params[:start_date].present? + blocks = blocks.where('timestamp <= ?', DateTime.strptime(params[:end_date], '%Y-%m-%d').to_time.to_i * 1000 ) if params[:end_date].present? + blocks = blocks.where('number >= ?', params[:start_number]) if params[:start_number].present? + blocks = blocks.where('number <= ?', params[:end_number]) if params[:end_number].present? + + blocks = blocks.order('number desc').limit(5000) + + file = CSV.generate do |csv| + csv << ["Blockno", "Transactions", "UnixTimestamp", "Reward(CKB)", "Miner", "date(UTC)"] + blocks.find_each.with_index do |block, index| + row = [block.number, block.ckb_transactions_count, (block.timestamp / 1000), block.reward, block.miner_hash, + Time.at((block.timestamp / 1000).to_i).in_time_zone('UTC').strftime('%Y-%m-%d %H:%M:%S') ] + csv << row + end + end + send_data file, :type => 'text/csv; charset=utf-8; header=present', :disposition => "attachment;filename=blocks.csv" + end private def from_home_page? diff --git a/app/controllers/api/v1/ckb_transactions_controller.rb b/app/controllers/api/v1/ckb_transactions_controller.rb index da2f07ce3..55ec69cc9 100644 --- a/app/controllers/api/v1/ckb_transactions_controller.rb +++ b/app/controllers/api/v1/ckb_transactions_controller.rb @@ -17,9 +17,24 @@ def index end render json: json else - ckb_transactions = CkbTransaction.tx_committed.recent.normal.select( + ckb_transactions = CkbTransaction.normal.select( :id, :tx_hash, :block_number, :block_timestamp, :live_cell_changes, :capacity_involved, :updated_at - ).page(@page).per(@page_size).fast_page + ) + + params[:sort] ||= "id.desc" + + order_by, asc_or_desc = params[:sort].split('.', 2) + order_by = case order_by + when 'height' then 'block_number' + when 'capacity' then 'capacity_involved' + else order_by + end + + head :not_found and return unless order_by.in? 
%w[id block_number block_timestamp transaction_fee capacity_involved] + + ckb_transactions = ckb_transactions.order(order_by => asc_or_desc) + .page(@page).per(@page_size).fast_page + json = Rails.cache.realize(ckb_transactions.cache_key, version: ckb_transactions.cache_version, race_condition_ttl: 3.seconds) do @@ -56,10 +71,10 @@ def query ).select( "ckb_transaction_id" ).page(@page).per(@page_size).fast_page - CkbTransaction.tx_committed.where(id: @tx_ids.map(&:ckb_transaction_id)).order(id: :desc) + CkbTransaction.where(id: @tx_ids.map(&:ckb_transaction_id)).order(id: :desc) else records_counter = RecordCounters::Transactions.new - CkbTransaction.tx_committed.recent.normal.page(@page).per(@page_size).fast_page + CkbTransaction.recent.normal.page(@page).per(@page_size).fast_page end ckb_transactions = ckb_transactions.select(:id, :tx_hash, :block_id, :block_number, :block_timestamp, :is_cellbase, :updated_at) @@ -81,7 +96,7 @@ def query end def show - ckb_transaction = CkbTransaction.tx_committed.cached_find(params[:id]) || PoolTransactionEntry.find_by(tx_hash: params[:id]) + ckb_transaction = CkbTransaction.cached_find(params[:id]) raise Api::V1::Exceptions::CkbTransactionNotFoundError if ckb_transaction.blank? diff --git a/app/controllers/api/v1/contract_transactions_controller.rb b/app/controllers/api/v1/contract_transactions_controller.rb index 7ab69e7e3..bfb079dec 100644 --- a/app/controllers/api/v1/contract_transactions_controller.rb +++ b/app/controllers/api/v1/contract_transactions_controller.rb @@ -7,7 +7,12 @@ def show raise Api::V1::Exceptions::ContractNotFoundError if params[:id] != DaoContract::CONTRACT_NAME dao_contract = DaoContract.default_contract - ckb_transactions = dao_contract.ckb_transactions.includes(:cell_inputs, :cell_outputs).tx_committed.select(:id, :tx_hash, :block_id, :block_number, :block_timestamp, :is_cellbase, :updated_at).recent.page(@page).per(@page_size).fast_page + ckb_transactions = dao_contract.ckb_transactions.includes(:cell_inputs, :cell_outputs).tx_committed + .select(:id, :tx_hash, :block_id, :block_number, :block_timestamp, :is_cellbase, :updated_at).recent + + ckb_transactions = ckb_transactions.where(tx_hash: params[:tx_hash]) if params[:tx_hash].present? + ckb_transactions = ckb_transactions + .page(@page).per(@page_size).fast_page json = Rails.cache.realize(ckb_transactions.cache_key, version: ckb_transactions.cache_version) do records_counter = RecordCounters::DaoTransactions.new(dao_contract) diff --git a/app/controllers/api/v1/udt_transactions_controller.rb b/app/controllers/api/v1/udt_transactions_controller.rb index 480ce56f5..9fef60888 100644 --- a/app/controllers/api/v1/udt_transactions_controller.rb +++ b/app/controllers/api/v1/udt_transactions_controller.rb @@ -6,7 +6,14 @@ class UdtTransactionsController < ApplicationController def show udt = Udt.find_by!(type_hash: params[:id], published: true) - ckb_transactions = udt.ckb_transactions.tx_committed.select(:id, :tx_hash, :block_id, :block_number, :block_timestamp, :is_cellbase, :updated_at).recent.page(@page).per(@page_size).fast_page + ckb_transactions = udt.ckb_transactions.tx_committed + .select(:id, :tx_hash, :block_id, :block_number, :block_timestamp, :is_cellbase, :updated_at).recent + + # TODO minted? burn? transfer? + ckb_transactions = ckb_transactions.where(tx_hash: params[:tx_hash]) if params[:tx_hash].present? 
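# As with the tx_hash filter above, several list endpoints in this patch (blocks,
# ckb_transactions, udts, NFT collections, pending transactions) gain a "sort"
# parameter of the form "field.direction": the value is split on the first dot,
# aliased to a real column, checked against a whitelist, and applied as ORDER BY.
# An illustrative walk-through of that convention, mirroring the blocks controller
# shown earlier (not part of the patch itself):
#
#   GET /api/v1/blocks?sort=height.desc
#   order_by, asc_or_desc = "height.desc".split(".", 2)  # => ["height", "desc"]
#   order_by = "number" if order_by == "height"          # controller alias mapping
#   blocks.order(number: :desc)                          # after the whitelist check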
+ ckb_transactions = ckb_transactions + .page(@page).per(@page_size).fast_page + json = Rails.cache.realize("#{udt.symbol}/#{ckb_transactions.cache_key}", version: ckb_transactions.cache_version) do records_counter = RecordCounters::UdtTransactions.new(udt) diff --git a/app/controllers/api/v1/udts_controller.rb b/app/controllers/api/v1/udts_controller.rb index f086d752b..873c86322 100644 --- a/app/controllers/api/v1/udts_controller.rb +++ b/app/controllers/api/v1/udts_controller.rb @@ -1,9 +1,27 @@ +require 'csv' class Api::V1::UdtsController < ApplicationController before_action :validate_query_params, only: :show before_action :validate_pagination_params, :pagination_params, only: :index def index - udts = Udt.sudt.order(addresses_count: :desc, id: :asc).page(@page).per(@page_size).fast_page + udts = Udt.sudt + + params[:sort] ||= "id.desc" + + order_by, asc_or_desc = params[:sort].split('.', 2) + order_by = case order_by + when 'created_time' then 'block_timestamp' + # current we don't support this in DB + # need a new PR https://github.com/nervosnetwork/ckb-explorer/pull/1266/ + # when 'transactions' then 'h24_ckb_transactions_count' + else order_by + end + + head :not_found and return unless order_by.in? %w[id addresses_count block_timestamp ] + + udts = udts.order(order_by => asc_or_desc) + .page(@page).per(@page_size).fast_page + options = FastJsonapi::PaginationMetaGenerator.new(request: request, records: udts, page: @page, page_size: @page_size).call render json: UdtSerializer.new(udts, options) end @@ -16,6 +34,48 @@ def show raise Api::V1::Exceptions::UdtNotFoundError end + def download_csv + udt = Udt.find_by!(type_hash: params[:id], published: true) + + ckb_transactions = CkbTransaction.joins(:contained_udts).where("udt_transactions.udt_id = ?", udt.id) + ckb_transactions = ckb_transactions.where('ckb_transactions.block_timestamp >= ?', DateTime.strptime(params[:start_date], '%Y-%m-%d').to_time.to_i * 1000 ) if params[:start_date].present? + ckb_transactions = ckb_transactions.where('ckb_transactions.block_timestamp <= ?', DateTime.strptime(params[:end_date], '%Y-%m-%d').to_time.to_i * 1000 ) if params[:end_date].present? + ckb_transactions = ckb_transactions.where('ckb_transactions.block_number >= ?', params[:start_number]) if params[:start_number].present? + ckb_transactions = ckb_transactions.where('ckb_transactions.block_number <= ?', params[:end_number]) if params[:end_number].present? + ckb_transactions = ckb_transactions.order('ckb_transactions.block_timestamp desc').limit(5000) + + file = CSV.generate do |csv| + csv << ["Txn hash", "Blockno", "UnixTimestamp", "Method", "Token In", "Token In Name", "Token OUT", "Token OUT Name", "Token From", "Token To", "TxnFee(CKB)", "date(UTC)" ] + + ckb_transactions.find_each do |ckb_transaction| + + token_inputs = ckb_transaction.display_inputs.select { |e| e[:cell_type] == 'udt' } + token_outputs = ckb_transaction.display_outputs.select { |e| e[:cell_type] == 'udt' } + + max = token_inputs.size > token_outputs.size ? token_inputs.size : token_outputs.size + next if max == 0 + + (0 .. 
(max-1) ).each do |i| + token_input = token_inputs[i] + token_output = token_outputs[i] + operation_type = "Transfer" + row = [ + ckb_transaction.tx_hash, ckb_transaction.block_number, ckb_transaction.block_timestamp, operation_type, + (token_input[:udt_info][:amount].to_d / token_input[:udt_info][:decimal] rescue '/'), + (token_input[:udt_info][:symbol] rescue '/'), + (token_output[:udt_info][:amount].to_d / token_input[:udt_info][:decimal] rescue '/'), + (token_output[:udt_info][:symbol] rescue '/'), + (token_input[:addresses_hash] rescue '/'), + (token_output[:addresses_hash] rescue '/'), + ckb_transaction.transaction_fee, ckb_transaction.block_timestamp + ] + csv << row + end + end + end + send_data file, :type => 'text/csv; charset=utf-8; header=present', :disposition => "attachment;filename=udt_transactions.csv" + end + private def validate_query_params diff --git a/app/controllers/api/v2/ckb_transactions_controller.rb b/app/controllers/api/v2/ckb_transactions_controller.rb new file mode 100644 index 000000000..c50e9e4d8 --- /dev/null +++ b/app/controllers/api/v2/ckb_transactions_controller.rb @@ -0,0 +1,104 @@ +module Api + module V2 + class CkbTransactionsController < ApplicationController + before_action :find_transaction, only: :details + before_action :set_page_and_page_size, only: :details + + def details + capacities = {} + @ckb_transaction.display_inputs.select{ |e| e[:cell_type] == 'normal' }.each {|input| + capacities[input[:address_hash]] ||= 0 + capacities[input[:address_hash]] -= input[:capacity].to_d + } + + @ckb_transaction.display_outputs.select{ |e| e[:cell_type] == 'normal' }.each {|output| + capacities[output[:address_hash]] ||= 0 + capacities[output[:address_hash]] += output[:capacity].to_d + } + json = capacities.map { |address, value| + { + address: address, + transfers: [ + { + asset: "CKB", + capacity: value, + token_name: "CKB", + entity_type: "CKB", + transfer_type: "ordinary_transfer" + } + ] + } + } + + render json: {data: json} + end + + private + def find_transaction + @ckb_transaction = CkbTransaction.find_by(tx_hash: params[:id]) + end + + def set_page_and_page_size + @page = params[:page] || 1 + @page_size = params[:page_size] || 10 + end + + def get_transaction_content address_ids, cell_outputs + transaction_data = [] + transfers = [] + address_ids.each do |address_id| + cell_outputs.where(address_id: address_id.address_id).each do |cell_output| + entity_type = "CKB" + transfer_type = "ordinary_transfer" + if cell_output.nervos_dao_deposit? + transfer_type = "nervos_dao_deposit" + elsif cell_output.nervos_dao_withdrawing? 
+ transfer_type = "nervos_dao_withdrawing" + interest_data = get_nervos_dao_withdrawing_data(cell_output) + elsif cell_output.cell_type.in?(%w(nrc_721_token nrc_721_factory)) + entity_type = "nft" + transfer_type = "nft_transfer" + elsif cell_output.cell_type.in?(%w(m_nft_issuer m_nft_class m_nft_token)) + transfer_type = "nft_mint" + entity_type = "nft" + nft_token = "NFT" # token abbreviation + nft_id = "001" # NFT ID + end + transfer = { + asset: "unknown #62bc", + capacity: cell_output.capacity.to_s, + entity_type: entity_type, + transfer_type: transfer_type, + } + transfer.merge!(interest_data) if interest_data + transfers.push(transfer) + end + address = Address.find address_id.address_id + data = { + address: address.address_hash, + transfers: transfers + } + transaction_data.push(data) + end + return transaction_data + end + + def get_nervos_dao_withdrawing_data(cell_output) + nervos_dao_deposit_cell = @transaction.cell_inputs.order(:id)[cell_output.cell_index].previous_cell_output + compensation_started_block = Block.find(nervos_dao_deposit_cell.block.id) + compensation_ended_block = Block.select(:number, :timestamp).find(@transaction.block_id) + interest = CkbUtils.dao_interest(cell_output) + interest_data = { + compensation_started_block_number: compensation_started_block.number.to_s, + compensation_ended_block_number: compensation_ended_block.number.to_s, + compensation_started_timestamp: compensation_started_block.timestamp.to_s, + compensation_ended_timestamp: compensation_ended_block.timestamp.to_s, + interest: interest, + locked_until_block_timestamp: @transaction.block.timestamp, + locked_until_block_number: @transaction.block.number, + } + return interest_data + end + end + end +end diff --git a/app/controllers/api/v2/nft/collections_controller.rb b/app/controllers/api/v2/nft/collections_controller.rb index cadc93a24..ee6660a33 100644 --- a/app/controllers/api/v2/nft/collections_controller.rb +++ b/app/controllers/api/v2/nft/collections_controller.rb @@ -2,10 +2,29 @@ module Api module V2 class NFT::CollectionsController < BaseController def index - pagy, collections = pagy(TokenCollection.order(id: :desc)) + params[:sort] ||= "id.desc" + + order_by, asc_or_desc = params[:sort].split('.', 2) + order_by = case order_by + # TODO need to merge PR: https://github.com/nervosnetwork/ckb-explorer/pull/1266 + when 'transactions' then 'h24_transactions_count' + when 'holder' then 'holders_count' + when 'minted' then 'items_count' + else order_by + end + + head :not_found and return unless order_by.in? %w[id holders_count items_count] + + collections = TokenCollection + collections = collections.where(standard: params[:type]) if params[:type].present? + collections = collections + .order(order_by => asc_or_desc) + .page(@page).per(@page_size).fast_page + + @pagy, @collections = pagy(collections) render json: { - data: collections, - pagination: pagy_metadata(pagy) + data: @collections, + pagination: pagy_metadata(@pagy) } end diff --git a/app/controllers/api/v2/nft/transfers_controller.rb b/app/controllers/api/v2/nft/transfers_controller.rb index ef471e3d1..2f4c50c8e 100644 --- a/app/controllers/api/v2/nft/transfers_controller.rb +++ b/app/controllers/api/v2/nft/transfers_controller.rb @@ -1,6 +1,8 @@ +require 'csv' module Api module V2 class NFT::TransfersController < BaseController + before_action :set_token_transfer, only: [:download_csv] def index if params[:collection_id].present?
@@ -40,6 +42,47 @@ def show token_transfer = TokenTransfer.find(params[:id]) render json: token_transfer end + + def download_csv + + token_transfers = TokenTransfer + .joins(:item, :ckb_transaction) + .includes(:ckb_transaction, :from, :to) + .where('token_items.collection_id = ?', @collection.id ) + + token_transfers = token_transfers.where('ckb_transactions.block_timestamp >= ?', DateTime.strptime(params[:start_date], '%Y-%m-%d').to_time.to_i * 1000 ) if params[:start_date].present? + token_transfers = token_transfers.where('ckb_transactions.block_timestamp <= ?', DateTime.strptime(params[:end_date], '%Y-%m-%d').to_time.to_i * 1000 ) if params[:end_date].present? + token_transfers = token_transfers.where('ckb_transactions.block_number >= ?', params[:start_number]) if params[:start_number].present? + token_transfers = token_transfers.where('ckb_transactions.block_number <= ?', params[:end_number]) if params[:end_number].present? + + token_transfers = token_transfers + .order('token_transfers.id desc') + .limit(5000) + + file = CSV.generate do |csv| + csv << ['Txn hash', 'Blockno', 'UnixTimestamp', 'NFT ID', 'Method', 'NFT from', 'NFT to', 'TxnFee(CKB)', 'date(UTC)'] + token_transfers.find_each do |transfer| + ckb_transaction = transfer.ckb_transaction + row = [ckb_transaction.tx_hash, ckb_transaction.block_number, ckb_transaction.block_timestamp, + transfer.item.token_id, transfer.action, transfer.from.address_hash, transfer.to.address_hash, + ckb_transaction.transaction_fee, ckb_transaction.block_timestamp] + csv << row + end + end + send_data file, :type => 'text/csv; charset=utf-8; header=present', :disposition => "attachment;filename=token_transfers.csv" + end + + private + def set_token_transfer + if params[:collection_id].present? + if /\A\d+\z/.match?(params[:collection_id]) + @collection = TokenCollection.find params[:collection_id] + else + @collection = TokenCollection.find_by_sn params[:collection_id] + end + end + end + end end end diff --git a/app/controllers/api/v2/pending_transactions_controller.rb b/app/controllers/api/v2/pending_transactions_controller.rb index ccfe6956e..0ee3650d9 100644 --- a/app/controllers/api/v2/pending_transactions_controller.rb +++ b/app/controllers/api/v2/pending_transactions_controller.rb @@ -2,25 +2,46 @@ module Api::V2 class PendingTransactionsController < BaseController before_action :set_page_and_page_size def index - pending_transactions = PoolTransactionEntry.pool_transaction_pending.order('id desc').page(@page).per(@page_size).fast_page - head :not_found and return if pending_transactions.blank? + pending_transactions = CkbTransaction.tx_pending + + params[:sort] ||= "id.desc" + order_by, asc_or_desc = params[:sort].split('.', 2) + order_by = case order_by + when 'time' then 'created_at' + when 'fee' then 'transaction_fee' + # current we don't support this in DB + #when 'capacity' then 'capacity_involved' + else order_by + end + + head :not_found and return unless order_by.in? 
%w[id created_at transaction_fee] + + pending_transactions = pending_transactions.order(order_by => asc_or_desc) + .page(@page).per(@page_size).fast_page render json: { - data: pending_transactions.map {|tx| - tx.as_json - .merge({ - transaction_hash: tx.tx_hash, - capacity_involved: tx.display_inputs.sum{|e| e["capacity"] }, - create_timestamp: (tx.created_at.to_f * 1000).to_i - }) - }, + data: pending_transactions.map do |tx| + { + transaction_hash: tx.tx_hash, + capacity_involved: tx.capacity_involved, + transaction_fee: tx.transaction_fee, + created_at: tx.created_at, + create_timestamp: (tx.created_at.to_f * 1000).to_i + } + end, meta: { - total: PoolTransactionEntry.pool_transaction_pending.count, + total: CkbTransaction.tx_pending.count, page_size: @page_size.to_i } } end + def count + render json: { + data: CkbTransaction.tx_pending.count + } + end + def set_page_and_page_size @page = params[:page] || 1 @page_size = params[:page_size] || 10 diff --git a/app/controllers/api/v2/transactions_controller.rb b/app/controllers/api/v2/transactions_controller.rb index 65cdcae54..756c3faf8 100644 --- a/app/controllers/api/v2/transactions_controller.rb +++ b/app/controllers/api/v2/transactions_controller.rb @@ -1,7 +1,7 @@ module Api module V2 class TransactionsController < BaseController - before_action :find_transaction, only: [:raw] + before_action :find_transaction, only: [:raw, :details] def raw if stale?(etag: @transaction.tx_hash, public: true) expires_in 1.day @@ -9,10 +9,39 @@ def raw end end + def details + capacities = {} + @transaction.display_inputs.select{ |e| e[:cell_type] == 'normal' }.each {|input| + capacities[input[:address_hash]] ||= 0 + capacities[input[:address_hash]] -= input[:capacity].to_d + } + + @transaction.display_outputs.select{ |e| e[:cell_type] == 'normal' }.each {|output| + capacities[output[:address_hash]] ||= 0 + capacities[output[:address_hash]] += output[:capacity].to_d + } + json = capacities.map { |address, value| + { + address: address, + transfers: [ + { + asset: "CKB", + capacity: value, + token_name: "CKB", + entity_type: "CKB", + transfer_type: "simple_transfer" + } + ] + } + } + + render json: {data: json} + end + protected def find_transaction - @transaction = CkbTransaction.cached_find(params[:id]) || PoolTransactionEntry.find_by(tx_hash: params[:id]) + @transaction = CkbTransaction.cached_find(params[:id]) end end end diff --git a/app/jobs/commit_block_job.rb b/app/jobs/commit_block_job.rb new file mode 100644 index 000000000..1e600baa7 --- /dev/null +++ b/app/jobs/commit_block_job.rb @@ -0,0 +1,30 @@ +# Make specific block committed +# this will make the block's transactions committed +# make cell outputs generated to live +# set related statistic info, address info to latest state +class CommitBlockJob < ApplicationJob + def perform(block) + case block + when Integer + block = Block.find_by id: block + when String + block = Block.find_by block_hash: block + when Block + else + raise ArgumentError + end + + # binding.pry + block.contained_transactions.each do |tx| + end + + # reject all the other cell output that is consumed by + # transactions in current block + + # collect all the previous cell outupts + # select related cell inputs which are not in current block + # mark the transaction contains these cell input to be rejected + + # update some statistics and state + end +end diff --git a/app/jobs/export_address_transactions_job.rb b/app/jobs/export_address_transactions_job.rb new file mode 100644 index 000000000..fe758017b --- /dev/null +++ 
b/app/jobs/export_address_transactions_job.rb @@ -0,0 +1,71 @@ +class ExportAddressTransactionsJob < ApplicationJob + def perform(args) + tx_ids = AccountBook.joins(:ckb_transaction). + where(address_id: args[:address_id]). + order(ckb_transaction_id: :asc). + limit(5000) + + if args[:start_date].present? + start_date = DateTime.strptime(args[:start_date], "%Y-%m-%d").to_time.to_i * 1000 + tx_ids = tx_ids.where("ckb_transactions.block_timestamp >= ?", start_date) + end + + if args[:end_date].present? + end_date = DateTime.strptime(args[:end_date], "%Y-%m-%d").to_time.to_i * 1000 + tx_ids = tx_ids.where("ckb_transactions.block_timestamp <= ?", end_date) + end + + if args[:start_number].present? + tx_ids = tx_ids.where("ckb_transactions.block_number >= ?", args[:start_number]) + end + + if args[:end_number].present? + tx_ids = tx_ids.where("ckb_transactions.block_number <= ?", args[:end_number]) + end + + rows = [] + ckb_transactions = CkbTransaction.includes(:inputs, :outputs). + select(:id, :tx_hash, :transaction_fee, :block_id, :block_number, :block_timestamp, :is_cellbase, :updated_at). + where(id: tx_ids.pluck(:ckb_transaction_id)) + + ckb_transactions.find_in_batches(batch_size: 1000, order: :desc) do |transactions| + transactions.each do |transaction| + rows += generate_data(transaction) + end + end + + rows + end + + private + + def generate_data(transaction) + inputs = + if transaction.is_cellbase + return [nil] + else + cell_inputs_for_display = transaction.inputs.sort_by(&:id) + cell_inputs_for_display.map(&:capacity) + end + + cell_outputs_for_display = transaction.outputs.sort_by(&:id) + outputs = cell_outputs_for_display.map(&:capacity) + + rows = [] + max = [inputs.size, outputs.size].max + (0..max - 1).each do |i| + rows << [ + transaction.tx_hash, + transaction.block_number, + transaction.block_timestamp, + "Transfer", + (inputs[i].to_d / 1e8 rescue "/"), + (outputs[i].to_d / 1e8 rescue "/"), + transaction.transaction_fee, + transaction.updated_at + ] + end + + rows + end +end diff --git a/app/jobs/process_block_job.rb b/app/jobs/import_block_job.rb similarity index 75% rename from app/jobs/process_block_job.rb rename to app/jobs/import_block_job.rb index 1db893895..d39e9cd0f 100644 --- a/app/jobs/process_block_job.rb +++ b/app/jobs/import_block_job.rb @@ -1,4 +1,4 @@ -class ProcessBlockJob < ApplicationJob +class ImportBlockJob < ApplicationJob def perform(block_hash) if block_hash.is_a?(Integer) block = Block.fetch_raw_hash_by_number(block_hash) @@ -8,8 +8,9 @@ def perform(block_hash) block = Block.fetch_raw_hash(block_hash) block_number = block["header"]["number"] end + block["transactions"].each do |tx| - ProcessTransactionJob.perform_later tx + ImportTransactionJob.perform_later tx, { block_hash: block["hash"] } end end end diff --git a/app/jobs/process_transaction_job.rb b/app/jobs/import_transaction_job.rb similarity index 87% rename from app/jobs/process_transaction_job.rb rename to app/jobs/import_transaction_job.rb index e518df47f..92bab7851 100644 --- a/app/jobs/process_transaction_job.rb +++ b/app/jobs/import_transaction_job.rb @@ -1,53 +1,81 @@ # process a raw transaction and save related records to database -class ProcessTransactionJob < ApplicationJob +class ImportTransactionJob < ApplicationJob queue_as :default attr_accessor :tx, :txid, :sdk_tx, :cell_dependencies_attrs, :by_type_hash, :by_data_hash, :deployed_cells_attrs, + :addresses, :address_changes # @param tx_hash [String] - def perform(tx_hash) + def perform(tx_hash, extra_data = {}) 
self.address_changes = {} if tx_hash.is_a?(Hash) CkbTransaction.write_raw_hash_cache tx_hash["hash"], tx_hash tx_hash = tx_hash["hash"] end # raw = CkbTransaction.fetch_raw_hash(tx_hash) - @sdk_tx = CkbTransaction.fetch_sdk_transaction(tx_hash) @tx = CkbTransaction.unscoped.create_with(tx_status: :pending).find_or_create_by! tx_hash: tx_hash return unless tx.tx_pending? + Rails.logger.info "Importing #{tx.tx_hash}" + @sdk_tx = CkbTransaction.fetch_sdk_transaction(tx_hash) + unless @sdk_tx + Rails.logger.info "Cannot fetch transaction details for #{tx_hash}" + return + end + @tx.cycles = extra_data[:cycles] + if extra_data[:timestamp] + @tx.created_at = Time.at(extra_data[:timestamp].to_d / 1000).utc + end + @tx.transaction_fee = extra_data[:fee] + @tx.bytes = extra_data[:size] || @sdk_tx.serialized_size_in_block + @tx.version = @sdk_tx.version + @tx.live_cell_changes = sdk_tx.outputs.count - sdk_tx.inputs.count + if extra_data[:block_hash] + block = Block.find_by block_hash: extra_data["block_hash"] + @tx.included_block_ids << block.id + end + @tx.save @txid = tx.id @deployed_cells_attrs = [] @cell_dependencies_attrs = [] @by_type_hash = {} @by_data_hash = {} + capacity_involved = 0 + # process inputs sdk_tx.inputs.each_with_index do |input, index| if input.previous_output.tx_hash == CellOutput::SYSTEM_TX_HASH tx.cell_inputs.create_with(index: index).create_or_find_by(previous_cell_output_id: nil, from_cell_base: true) else cell = CellOutput.find_by(tx_hash: input.previous_output.tx_hash, cell_index: input.previous_output.index) + if cell process_input tx.cell_inputs.create_with(previous_cell_output_id: cell.id).create_or_find_by!( ckb_transaction_id: txid, index: index ) process_deployed_cell(cell.lock_script) process_deployed_cell(cell.type_script) if cell.type_script + capacity_involved += cell.capacity else + tx.cell_inputs.create_or_find_by!( + previous_tx_hash: input.previous_output.tx_hash, + previous_index: input.previous_output.index, + since: input.since + ) puts "Missing input #{input.previous_output.to_h} in #{tx_hash}" # cannot find corresponding cell output, # maybe the transaction contains the cell output has not been processed, # so add current transaction to pending list, and wait for future processing + list = Kredis.unique_list "pending_transactions_for_input:#{input.previous_output.tx_hash}" list << tx_hash - return end end end - + @tx.update_column :capacity_involved, capacity_involved # process outputs sdk_tx.outputs.each_with_index do |output, index| output_data = sdk_tx.outputs_data[index] @@ -58,10 +86,11 @@ def perform(tx_hash) ) cell.lock_script = lock cell.type_script = t + cell.data = output_data cell.update!( address_id: lock.address_id, capacity: output.capacity, - occupied_capacity: CkbUtils.calculate_cell_min_capacity(output, output_data), + occupied_capacity: cell.calculate_min_capacity, status: "pending" ) @@ -82,7 +111,7 @@ def perform(tx_hash) # notify pending transaction to reprocess again pending_list = Kredis.unique_list "pending_transactions_for_input:#{tx_hash}" pending_list.elements.each do |_tx| - ProcessTransactionJob.perform_later _tx + ImportTransactionJob.perform_later _tx end pending_list.clear end @@ -244,6 +273,7 @@ def process_witnesses # @param cell_input [CellInput] def process_input(cell_input) cell_output = cell_input.previous_cell_output + address_id = cell_output.address_id changes = address_changes[address_id] ||= { @@ -251,7 +281,7 @@ def process_input(cell_input) balance_occupied: 0 } changes[:balance] -= cell_output.capacity - 
changes[:balance_occupied] -= cell_output.occupied_capacity + changes[:balance_occupied] -= cell_output.occupied_capacity if cell_output.occupied_capacity end # # calculate address and balance change for each cell output @@ -284,6 +314,7 @@ def save_changes "changes = transaction_address_changes.changes || excluded.changes" ) ) + AccountBook.upsert_all address_changes.keys.map{|address_id| {ckb_transaction_id: tx.id, address_id:}} end end end diff --git a/app/jobs/revert_block_job.rb b/app/jobs/revert_block_job.rb index 846f30e21..d5761cfd3 100644 --- a/app/jobs/revert_block_job.rb +++ b/app/jobs/revert_block_job.rb @@ -11,8 +11,8 @@ def perform(local_tip_block = nil) end ApplicationRecord.transaction do - PoolTransactionEntry.pool_transaction_pending. - where(tx_hash: local_tip_block.ckb_transactions.pluck(:tx_hash)).delete_all + CkbTransaction.tx_pending. + where(tx_hash: local_tip_block.ckb_transactions.pluck(:tx_hash)).update_all(tx_status: "pending") benchmark :revert_dao_contract_related_operations, local_tip_block benchmark :revert_mining_info, local_tip_block @@ -43,14 +43,23 @@ def perform(local_tip_block = nil) end def update_address_balance_and_ckb_transactions_count(local_tip_block) + snapshots = AddressBlockSnapshot.where.not(block_id: local_tip_block.id).where(address_id: local_tip_block.address_ids).order(block_number: :desc).distinct.group_by(&:address_id) local_tip_block.contained_addresses.each do |address| - address.live_cells_count = address.cell_outputs.live.count - # address.ckb_transactions_count = address.custom_ckb_transactions.count - address.ckb_transactions_count = AccountBook.where(address_id: address.id).count - address.dao_transactions_count = AddressDaoTransaction.where(address_id: address.id).count - address.cal_balance! - address.save! + snapshot = snapshots.fetch(address.id, []).first + if snapshot.present? + attrs = snapshot.final_state + address.update!(attrs) + else + address.live_cells_count = address.cell_outputs.live.count + # address.ckb_transactions_count = address.custom_ckb_transactions.count + address.ckb_transactions_count = AccountBook.where(address_id: address.id).count + address.dao_transactions_count = AddressDaoTransaction.where(address_id: address.id).count + address.cal_balance! + address.save! 
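# The snapshot branch above restores state captured by save_address_block_snapshot!,
# which is added to NewNodeDataProcessor later in this patch: every processed block
# records each touched address's aggregate state, so reverting a forked block can
# roll an address back without recounting its cells. An illustrative final_state
# payload (values invented for the example):
#
#   {
#     "balance" => 1_000 * 10**8,
#     "balance_occupied" => 61 * 10**8,
#     "ckb_transactions_count" => 12,
#     "live_cells_count" => 3,
#     "dao_transactions_count" => 0
#   }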
+ end end + + AddressBlockSnapshot.where(block_id: local_tip_block.id).delete_all end def recalculate_dao_contract_transactions_count(local_tip_block) diff --git a/app/models/address.rb b/app/models/address.rb index 38d491248..0ecbc34f5 100644 --- a/app/models/address.rb +++ b/app/models/address.rb @@ -170,7 +170,7 @@ def cached_lock_script def flush_cache $redis.pipelined do - $redis.del(*cache_keys) + Rails.cache.delete_multi(cache_keys) end end diff --git a/app/models/address_block_snapshot.rb b/app/models/address_block_snapshot.rb new file mode 100644 index 000000000..6379d5fe7 --- /dev/null +++ b/app/models/address_block_snapshot.rb @@ -0,0 +1,22 @@ +# save the relationship of dao transactions in address +class AddressBlockSnapshot < ApplicationRecord + belongs_to :block + belongs_to :address +end + +# == Schema Information +# +# Table name: address_block_snapshots +# +# id :bigint not null, primary key +# address_id :bigint +# block_id :bigint +# block_number :bigint +# final_state :jsonb +# +# Indexes +# +# index_address_block_snapshots_on_address_id (address_id) +# index_address_block_snapshots_on_block_id (block_id) +# index_address_block_snapshots_on_block_id_and_address_id (block_id,address_id) UNIQUE +# diff --git a/app/models/block.rb b/app/models/block.rb index 3dca3b088..079bcbc22 100644 --- a/app/models/block.rb +++ b/app/models/block.rb @@ -21,7 +21,9 @@ class Block < ApplicationRecord has_many :ckb_transactions has_many :block_transactions # the transactions included in the block no matter if the block is included in chain - has_many :contained_transactions, through: :block_transactions + has_many :contained_transactions, class_name: "CkbTransaction", + through: :block_transactions, + inverse_of: :included_blocks has_many :uncle_blocks has_many :cell_outputs has_many :cell_inputs @@ -264,7 +266,7 @@ def miner_address def flush_cache $redis.pipelined do - $redis.del(*cache_keys) + Rails.cache.delete_multi(cache_keys) end end diff --git a/app/models/cell_datum.rb b/app/models/cell_datum.rb index bccaaa49c..ec8ed3892 100644 --- a/app/models/cell_datum.rb +++ b/app/models/cell_datum.rb @@ -3,6 +3,7 @@ class CellDatum < ApplicationRecord validates :data, presence: true, length: { minimum: 1 } after_create :update_data_hash + def update_data_hash cell_output.update(data_hash: CKB::Utils.bin_to_hex(CKB::Blake2b.digest(data))) end diff --git a/app/models/cell_input.rb b/app/models/cell_input.rb index 65d1a2cff..6a64fc275 100644 --- a/app/models/cell_input.rb +++ b/app/models/cell_input.rb @@ -1,7 +1,9 @@ class CellInput < ApplicationRecord + attr_accessor :previous_output + belongs_to :ckb_transaction belongs_to :previous_cell_output, class_name: "CellOutput", optional: true - belongs_to :block + belongs_to :block, optional: true delegate :lock_script, :type_script, to: :previous_cell_output, allow_nil: true @@ -20,8 +22,8 @@ def to_raw if previous_cell_output { previous_output: { - index: "0x#{previous_cell_output.cell_index.to_s(16)}", - tx_hash: previous_cell_output.tx_hash + index: "0x#{(previous_index || previous_cell_output.cell_index).to_s(16)}", + tx_hash: previous_tx_hash || previous_cell_output.tx_hash }, since: hex_since } @@ -31,12 +33,12 @@ def to_raw index: "0xffffffff", tx_hash: "0x0000000000000000000000000000000000000000000000000000000000000000" }, - since: "0x#{since.to_s(16)}" + since: hex_since } end end - after_validation :match_cell_output + before_validation :match_cell_output def cache_keys %W(CellInput/#{id}/lock_script CellInput/#{id}/type_script) @@ 
-44,14 +46,20 @@ def cache_keys def flush_cache $redis.pipelined do - $redis.del(*cache_keys) + Rails.cache.delete_multi(cache_keys) end end def match_cell_output - if previous_output.present? && previous_output["tx_hash"] != CellOutput::SYSTEM_TX_HASH - self.previous_cell_output = CellOutput.find_by(tx_hash: previous_output["tx_hash"], - cell_index: previous_output["index"]) + if previous_output + self.previous_output = previous_output.with_indifferent_access + self.previous_tx_hash = previous_output["tx_hash"] + index = previous_output["index"] + self.previous_index = index.is_a?(String) ? index.hex : index + end + if previous_tx_hash && previous_tx_hash != CellOutput::SYSTEM_TX_HASH + self.previous_cell_output = CellOutput.find_by(tx_hash: previous_tx_hash, + cell_index: previous_index) end end @@ -64,7 +72,6 @@ def self.clean_data # Table name: cell_inputs # # id :bigint not null, primary key -# previous_output :jsonb # ckb_transaction_id :bigint # created_at :datetime not null # updated_at :datetime not null @@ -74,10 +81,13 @@ def self.clean_data # since :decimal(30, ) default(0) # cell_type :integer default("normal") # index :integer +# previous_tx_hash :binary +# previous_index :integer # # Indexes # -# index_cell_inputs_on_block_id (block_id) -# index_cell_inputs_on_ckb_transaction_id (ckb_transaction_id) -# index_cell_inputs_on_previous_cell_output_id (previous_cell_output_id) +# index_cell_inputs_on_block_id (block_id) +# index_cell_inputs_on_ckb_transaction_id (ckb_transaction_id) +# index_cell_inputs_on_previous_cell_output_id (previous_cell_output_id) +# index_cell_inputs_on_previous_tx_hash_and_previous_index (previous_tx_hash,previous_index) # diff --git a/app/models/cell_output.rb b/app/models/cell_output.rb index 7501d2e04..926f875ad 100644 --- a/app/models/cell_output.rb +++ b/app/models/cell_output.rb @@ -23,7 +23,7 @@ class CellOutput < ApplicationRecord # the inputs which consumes this cell output # but one cell may be included by many pending transactions, # the cell_inputs won't always be the same as `consumed_by`.`cell_inputs` - has_many :cell_inputs, foreign_key: :previous_output_id + has_many :cell_inputs, foreign_key: :previous_cell_output_id belongs_to :deployed_cell, optional: true # the block_id is actually the same as ckb_transaction.block_id, must be on chain # but one cell may be included by pending transactions, so block_id may be null @@ -101,6 +101,10 @@ def binary_data [data[2..]].pack("H*") end + def dao + self[:dao] || block.dao + end + # find cell output according to the out point( tx_hash and output index ) # @param [String] tx_hash # @param [Integer] index @@ -121,6 +125,18 @@ def node_output CKB::Types::Output.new(capacity: capacity.to_i, lock: lock, type: type) end + # @param data [String] 0x... 
+ def calculate_bytesize + data ||= self.data || "0x" + bytesize = 8 + CKB::Utils.hex_to_bin(data).bytesize + lock_script.calculate_bytesize + bytesize += type_script.calculate_bytesize if type_script + bytesize + end + + def calculate_min_capacity + CKB::Utils.byte_to_shannon(calculate_bytesize) + end + def to_raw { capacity: "0x#{capacity.to_i.to_s(16)}", @@ -180,11 +196,28 @@ def nrc_721_nft_info factory_cell_type_script = self.type_script factory_cell = NrcFactoryCell.find_by(code_hash: factory_cell_type_script.code_hash, hash_type: factory_cell_type_script.hash_type, args: factory_cell_type_script.args, verified: true) - value = { symbol: factory_cell&.symbol } + value = { + symbol: factory_cell&.symbol, + amount: self.udt_amount, + decimal: '', + type_hash: self.type_hash, + published: factory_cell.verified, + display_name: factory_cell.name, + nan: '' + } when "nrc_721_token" udt = Udt.find_by(type_hash: type_hash) factory_cell = NrcFactoryCell.where(id: udt.nrc_factory_cell_id, verified: true).first - value = { symbol: factory_cell&.symbol, amount: UdtAccount.where(udt_id: udt.id).first.nft_token_id } + udt_account = UdtAccount.where(udt_id: udt.id).first + value = { + symbol: factory_cell&.symbol, + amount: udt_account.nft_token_id, + decimal: udt_account.decimal, + type_hash: type_hash, + published: true, + display_name: udt_account.full_name, + uan: '' + } else raise "invalid cell type" end @@ -284,6 +317,22 @@ def self.update_cell_types_for_cota end end end + + def cota_registry_info + return unless cota_registry? + + code_hash = CkbSync::Api.instance.cota_registry_code_hash + CkbUtils.hash_value_to_s( symbol: '', amount: self.udt_amount, decimal: '', type_hash: self.type_hash, + published: 'true', display_name: '', uan: '', code_hash: self.code_hash) + end + + def cota_regular_info + return unless cota_regular? + + code_hash = CkbSync::Api.instance.cota_regular_code_hash + CkbUtils.hash_value_to_s( symbol: '', amount: self.udt_amount, decimal: '', type_hash: self.type_hash, + published: 'true', display_name: '', uan: '', code_hash: self.code_hash) + end end # == Schema Information diff --git a/app/models/ckb_sync/new_node_data_processor.rb b/app/models/ckb_sync/new_node_data_processor.rb index b68618b4f..4bfa7d69b 100644 --- a/app/models/ckb_sync/new_node_data_processor.rb +++ b/app/models/ckb_sync/new_node_data_processor.rb @@ -92,7 +92,7 @@ def process_block(node_block) # maybe can be changed to asynchronous update update_udt_info(local_block) process_dao_events!(local_block) - update_addresses_info(addrs_changes) + update_addresses_info(addrs_changes, local_block) end flush_inputs_outputs_caches(local_block) @@ -282,13 +282,13 @@ def process_withdraw_dao_events!(local_block, new_dao_depositors, dao_contract) def process_interest_dao_events!(local_block, dao_contract) addrs_withdraw_info = {} claimed_compensation = 0 - local_block.cell_inputs.nervos_dao_withdrawing.select(:id, :ckb_transaction_id, + local_block.cell_inputs.nervos_dao_withdrawing.select(:id, :ckb_transaction_id, :block_id, :previous_cell_output_id).find_in_batches do |dao_inputs| dao_events_attributes = [] dao_inputs.each do |dao_input| previous_cell_output = CellOutput. where(id: dao_input.previous_cell_output_id). - select(:address_id, :ckb_transaction_id, :dao, :cell_index, :capacity, :occupied_capacity). + select(:address_id, :block_id, :ckb_transaction_id, :dao, :cell_index, :capacity, :occupied_capacity). take! 
address = previous_cell_output.address interest = CkbUtils.dao_interest(previous_cell_output) @@ -393,7 +393,6 @@ def update_addresses_dao_info(addrs_deposit_info) def update_pool_tx_status(local_block) hashes = local_block.ckb_transactions.pluck(:tx_hash) - PoolTransactionEntry.pool_transaction_pending.where(tx_hash: hashes).update_all(tx_status: "committed") CkbTransaction.tx_pending.where(tx_hash: hashes).update_all(tx_status: "committed") end @@ -534,7 +533,7 @@ def update_mining_info(local_block) CkbUtils.update_current_block_mining_info(local_block) end - def update_addresses_info(addrs_change) + def update_addresses_info(addrs_change, local_block) ### Backup the old upsert code # addrs = [] # attributes = @@ -581,9 +580,26 @@ def update_addresses_info(addrs_change) live_cells_count: addr.live_cells_count + live_cells_diff, dao_transactions_count: addr.dao_transactions_count + dao_txs_count ) + + save_address_block_snapshot!(addr, local_block) end end + def save_address_block_snapshot!(addr, local_block) + AddressBlockSnapshot.create!( + address_id: addr.id, + block_id: local_block.id, + block_number: local_block.number, + final_state: { + balance: addr.balance, + balance_occupied: addr.balance_occupied, + ckb_transactions_count: addr.ckb_transactions_count, + live_cells_count: addr.live_cells_count, + dao_transactions_count: addr.dao_transactions_count + } + ) + end + def update_block_info!(local_block) local_block.update!(total_transaction_fee: local_block.ckb_transactions.sum(:transaction_fee), ckb_transactions_count: local_block.ckb_transactions.count, @@ -768,7 +784,7 @@ def build_cells_and_locks!( prev_outputs = nil build_cell_inputs(inputs, ckb_txs, local_block.id, cell_inputs_attributes, prev_cell_outputs_attributes, input_capacities, tags, udt_address_ids, dao_address_ids, contained_udt_ids, contained_addr_ids, prev_outputs, addrs_changes) - # binding.pry + CellInput.insert_all!(cell_inputs_attributes) CellOutput.upsert_all(prev_cell_outputs_attributes) if prev_cell_outputs_attributes.present? @@ -1044,7 +1060,7 @@ def udt_amount(cell_type, output_data, type_script_args) when "udt" CkbUtils.parse_udt_cell_data(output_data) when "m_nft_token" - "0x#{type_script_args[-8..-1]}".hex + "0x#{type_script_args[-8..]}".hex end end @@ -1053,7 +1069,8 @@ def cell_input_attributes(input, ckb_transaction_id, local_block_id, prev_output { cell_input: { ckb_transaction_id: ckb_transaction_id, - previous_output: input.previous_output, + previous_tx_hash: nil, + previous_index: 0, since: input.since, block_id: local_block_id, from_cell_base: from_cell_base?(input), @@ -1071,7 +1088,8 @@ def cell_input_attributes(input, ckb_transaction_id, local_block_id, prev_output { cell_input: { ckb_transaction_id: ckb_transaction_id, - previous_output: input.previous_output, + previous_tx_hash: input.previous_output.tx_hash, + previous_index: input.previous_output.index, since: input.since, block_id: local_block_id, from_cell_base: from_cell_base?(input), diff --git a/app/models/ckb_sync/node_data_processor.rb b/app/models/ckb_sync/node_data_processor.rb deleted file mode 100644 index 527199200..000000000 --- a/app/models/ckb_sync/node_data_processor.rb +++ /dev/null @@ -1,805 +0,0 @@ -module CkbSync - class NodeDataProcessor - def call - local_tip_block = Block.recent.first - tip_block_number = CkbSync::Api.instance.get_tip_block_number - target_block_number = local_tip_block.present? ? 
local_tip_block.number + 1 : 0 - return if target_block_number > tip_block_number - - target_block = CkbSync::Api.instance.get_block_by_number(target_block_number) - if !forked?(target_block, local_tip_block) - process_block(target_block) - else - invalid_block(local_tip_block) - end - end - - def process_block(node_block) - local_block = build_block(node_block) - node_block.uncles.each do |uncle_block| - build_uncle_block(uncle_block, local_block) - end - - ApplicationRecord.transaction do - outputs = [] - udt_infos = Set.new - new_dao_depositor_events = {} - local_block.save! - ckb_transactions = build_ckb_transactions(local_block, node_block.transactions, outputs, - new_dao_depositor_events, udt_infos) - local_block.ckb_transactions_count = ckb_transactions.size - local_block.live_cell_changes = ckb_transactions.sum(&:live_cell_changes) - CkbTransaction.import!(ckb_transactions, recursive: true, batch_size: 3500, validate: false) - update_pool_tx_status(ckb_transactions) - input_capacities = ckb_transactions.reject(&:is_cellbase).pluck(:id).index_with { |_id| [] } - update_tx_fee_related_data(local_block, input_capacities, udt_infos) - calculate_tx_fee(local_block, ckb_transactions, input_capacities, outputs.group_by(&:ckb_transaction_id)) - update_current_block_mining_info(local_block) - update_block_contained_address_info(local_block) - update_block_reward_info(local_block) - update_udt_accounts(udt_infos, local_block.timestamp) - update_udt_info(udt_infos) - dao_events = build_new_dao_depositor_events(new_dao_depositor_events) - DaoEvent.import!(dao_events, validate: false) - update_dao_contract_related_info(local_block) - increase_records_count(ckb_transactions) - cache_address_txs(local_block.ckb_transactions) - end - local_block - end - - private - - def cache_address_txs(ckb_transactions) - address_txs = Hash.new - ckb_transactions.each do |tx| - tx.contained_address_ids.each do |id| - if address_txs[id].present? - address_txs[id] << tx.id - else - address_txs[id] = [tx.id] - end - end - end - - AddressTxsCacheUpdateWorker.perform_async(address_txs) - end - - def update_pool_tx_status(ckb_transactions) - PoolTransactionEntry.pool_transaction_pending.where(tx_hash: ckb_transactions.pluck(:tx_hash)).update_all(tx_status: "committed") - end - - def increase_records_count(ckb_transactions) - block_counter = TableRecordCount.find_or_initialize_by(table_name: "blocks") - block_counter.increment!(:count) - ckb_transaction_counter = TableRecordCount.find_or_initialize_by(table_name: "ckb_transactions") - normal_transactions = ckb_transactions.reject { |tx| tx.is_cellbase } - ckb_transaction_counter.increment!(:count, normal_transactions.count) if normal_transactions.present? - end - - def update_udt_info(udt_infos) - return if udt_infos.blank? - - type_hashes = udt_infos.map { |udt_info| udt_info[:type_script].compute_hash }.uniq - columns = %i(type_hash total_amount addresses_count) - amount_hashes = UdtAccount.where(type_hash: type_hashes).group(:type_hash).sum(:amount) - addresses_count_hashes = UdtAccount.where(type_hash: type_hashes).group(:type_hash).count(:address_id) - import_values = - type_hashes.map do |type_hash| - [type_hash, amount_hashes[type_hash], addresses_count_hashes[type_hash]] - end - - Udt.import columns, import_values, validate: false, - on_duplicate_key_update: { conflict_target: [:type_hash], columns: [:total_amount, :addresses_count] } - end - - def update_udt_accounts(udt_infos, block_timestamp) - return if udt_infos.blank? 
- - udt_infos.each do |udt_output| - address = udt_output[:address] - udt_type_script = udt_output[:type_script] - udt_type = udt_output[:udt_type] - type_hash = udt_type_script.compute_hash - udt_account = address.udt_accounts.find_by(type_hash: type_hash, udt_type: udt_type) - if udt_type == "sudt" - amount = address.cell_outputs.live.udt.where(type_hash: type_hash).sum(:udt_amount) - elsif udt_type == "m_nft_token" - amount = address.cell_outputs.live.m_nft_token.where(type_hash: type_hash).sum(:udt_amount) - else - amount = 0 - end - - if udt_account.present? - case udt_type - when "sudt" - udt_account.update!(amount: amount) - when "m_nft_token" - udt_account.destroy unless address.cell_outputs.live.m_nft_token.where(type_hash: type_hash).exists? - end - else - udt = Udt.find_or_create_by!(type_hash: type_hash, udt_type: udt_type) - udt.update(block_timestamp: block_timestamp) if udt.block_timestamp.blank? - if udt.udt_type == "sudt" && udt.issuer_address.blank? - issuer_address = Address.where(lock_hash: udt_type_script.args).pick(:address_hash) - udt.issuer_address = issuer_address - end - udt.update(args: udt_type_script.args, hash_type: udt_type_script.hash_type, - issuer_address: udt.issuer_address) - address.udt_accounts.create!(udt_type: udt.udt_type, full_name: udt.full_name, symbol: udt.symbol, - decimal: udt.decimal, published: udt.published, code_hash: udt.code_hash, type_hash: udt.type_hash, amount: amount, udt: udt) - end - end - end - - def build_new_dao_depositor_events(new_dao_depositor_events) - new_dao_depositor_events.map do |address_id, tx_hash| - ckb_transaction = CkbTransaction.find_by(tx_hash: tx_hash) - ckb_transaction.dao_events.build(block: ckb_transaction.block, address_id: address_id, event_type: "new_dao_depositor", - value: 1, contract_id: DaoContract.default_contract.id, block_timestamp: ckb_transaction.block_timestamp) - end - end - - def update_dao_contract_related_info(local_block) - dao_contract = DaoContract.default_contract - dao_events = DaoEvent.where(block: local_block).pending - process_deposit_to_dao(dao_contract, dao_events) - process_new_dao_depositor(dao_contract, dao_events) - process_withdraw_from_dao(dao_contract, dao_events) - process_issue_interest(dao_contract, dao_events) - process_take_away_all_deposit(dao_contract, dao_events) - dao_contract.touch - end - - def process_take_away_all_deposit(dao_contract, dao_events) - take_away_all_deposit_dao_events = dao_events.where(event_type: "take_away_all_deposit") - take_away_all_deposit_dao_events.each do |event| - dao_contract.decrement!(:depositors_count) - event.address.update(is_depositor: false) - event.processed! - end - end - - def process_issue_interest(dao_contract, dao_events) - issue_interest_dao_events = dao_events.where(event_type: "issue_interest") - issue_interest_dao_events.each do |event| - dao_contract.increment!(:claimed_compensation, event.value) - address = event.address - address.increment!(:interest, event.value) - event.processed! - end - end - - def process_withdraw_from_dao(dao_contract, dao_events) - withdraw_from_dao_events = dao_events.where(event_type: "withdraw_from_dao") - withdraw_from_dao_events.each do |event| - dao_contract.increment!(:withdraw_transactions_count) - dao_contract.decrement!(:total_deposit, event.value) - address = event.address - address.decrement!(:dao_deposit, event.value) - event.processed! 
- end - end - - def process_new_dao_depositor(dao_contract, dao_events) - new_dao_depositor_events = dao_events.where(event_type: "new_dao_depositor") - new_dao_depositor_events.each do |event| - dao_contract.increment!(:depositors_count) - dao_contract.increment!(:total_depositors_count) - event.address.update(is_depositor: true) - event.processed! - end - end - - def process_deposit_to_dao(dao_contract, dao_events) - deposit_to_dao_events = dao_events.where(event_type: "deposit_to_dao") - deposit_to_dao_events.each do |event| - address = event.address - address.increment!(:dao_deposit, event.value) - dao_contract.increment!(:total_deposit, event.value) - dao_contract.increment!(:deposit_transactions_count) - event.processed! - end - end - - def update_block_reward_info(current_block) - target_block_number = current_block.target_block_number - target_block = current_block.target_block - return if target_block_number < 1 || target_block.blank? - - ApplicationRecord.transaction do - issue_block_reward!(current_block) - end - end - - def issue_block_reward!(current_block) - CkbUtils.update_block_reward!(current_block) - CkbUtils.calculate_received_tx_fee!(current_block) - end - - def revert_block_rewards(local_tip_block) - target_block = local_tip_block.target_block - target_block_number = local_tip_block.target_block_number - return if target_block_number < 1 || target_block.blank? - - revert_reward_status(target_block) - revert_received_tx_fee(target_block) - end - - def revert_received_tx_fee(target_block) - target_block.update!(received_tx_fee: 0) - end - - def revert_reward_status(target_block) - target_block.update!(reward_status: "pending") - target_block.update!(received_tx_fee_status: "pending") - end - - def invalid_block(local_tip_block) - ApplicationRecord.transaction do - revert_dao_contract_related_operations(local_tip_block) - revert_mining_info(local_tip_block) - udt_type_hashes = local_tip_block.cell_outputs.udt.pluck(:type_hash).uniq.concat(local_tip_block.cell_outputs.m_nft_token.pluck(:type_hash).uniq) - recalculate_udt_transactions_count(local_tip_block) - recalculate_dao_contract_transactions_count(local_tip_block) - decrease_records_count(local_tip_block) - local_tip_block.invalid! - recalculate_udt_accounts(udt_type_hashes, local_tip_block) - local_tip_block.contained_addresses.each(&method(:update_address_balance_and_ckb_transactions_count)) - revert_block_rewards(local_tip_block) - ForkedEvent.create!(block_number: local_tip_block.number, epoch_number: local_tip_block.epoch, - block_timestamp: local_tip_block.timestamp) - Charts::BlockStatisticGenerator.new(local_tip_block.number).call - - local_tip_block - end - end - - def decrease_records_count(local_tip_block) - block_counter = TableRecordCount.find_or_initialize_by(table_name: "blocks") - block_counter.decrement!(:count) - ckb_transaction_counter = TableRecordCount.find_or_initialize_by(table_name: "ckb_transactions") - normal_transactions = local_tip_block.ckb_transactions.normal - ckb_transaction_counter.decrement!(:count, normal_transactions.count) if normal_transactions.present? 
- end - - def recalculate_dao_contract_transactions_count(local_tip_block) - dao_transactions_count = local_tip_block.ckb_transactions.where("tags @> array[?]::varchar[]", ["dao"]).count - if dao_transactions_count > 0 - DaoContract.default_contract.decrement!(:ckb_transactions_count, - dao_transactions_count) - end - end - - def recalculate_udt_transactions_count(local_tip_block) - udt_ids = local_tip_block.ckb_transactions.where("tags @> array[?]::varchar[]", - ["udt"]).pluck(:contained_udt_ids).flatten - udt_counts = udt_ids.each_with_object(Hash.new(0)) { |udt_id, counts| counts[udt_id] += 1 } - udt_counts_value = - udt_counts.map do |udt_id, count| - udt = Udt.find(udt_id) - { - id: udt_id, ckb_transactions_count: udt.ckb_transactions_count - count, created_at: udt.created_at, - updated_at: Time.current } - end - - Udt.upsert_all(udt_counts_value) if udt_counts_value.present? - end - - def recalculate_udt_accounts(udt_type_hashes, local_tip_block) - return if udt_type_hashes.blank? - - local_tip_block.contained_addresses.find_each do |address| - udt_type_hashes.each do |type_hash| - udt_account = address.udt_accounts.find_by(type_hash: type_hash) - next if udt_account.blank? - - if udt_account.udt_type == "sudt" - amount = address.cell_outputs.live.udt.where(type_hash: type_hash).sum(:udt_amount) - udt_account.update!(amount: amount) - elsif udt_account.udt_type == "m_nft_token" - udt_account.destroy - end - end - end - end - - def revert_mining_info(local_tip_block) - local_tip_block.mining_infos.first.reverted! - miner_address = local_tip_block.miner_address - miner_address.decrement!(:mined_blocks_count) - end - - def revert_dao_contract_related_operations(local_tip_block) - dao_events = DaoEvent.where(block: local_tip_block).processed - dao_contract = DaoContract.default_contract - revert_withdraw_from_dao(dao_contract, dao_events) - revert_issue_interest(dao_contract, dao_events) - revert_deposit_to_dao(dao_contract, dao_events) - revert_new_dao_depositor(dao_contract, dao_events) - revert_take_away_all_deposit(dao_contract, dao_events) - end - - def revert_take_away_all_deposit(dao_contract, dao_events) - take_away_all_deposit_dao_events = dao_events.where(event_type: "take_away_all_deposit") - take_away_all_deposit_dao_events.each do |event| - dao_contract.increment!(:depositors_count) - event.reverted! - end - end - - def revert_issue_interest(dao_contract, dao_events) - issue_interest_dao_events = dao_events.where(event_type: "issue_interest") - issue_interest_dao_events.each do |event| - dao_contract.decrement!(:claimed_compensation, event.value) - address = event.address - address.decrement!(:interest, event.value) - event.reverted! - end - end - - def revert_withdraw_from_dao(dao_contract, dao_events) - withdraw_from_dao_events = dao_events.where(event_type: "withdraw_from_dao") - withdraw_from_dao_events.each do |event| - dao_contract.decrement!(:withdraw_transactions_count) - dao_contract.increment!(:total_deposit, event.value) - address = event.address - address.increment!(:dao_deposit, event.value) - event.reverted! - end - end - - def revert_new_dao_depositor(dao_contract, dao_events) - new_dao_depositor_events = dao_events.where(event_type: "new_dao_depositor") - new_dao_depositor_events.each do |event| - dao_contract.decrement!(:depositors_count) - dao_contract.decrement!(:total_depositors_count) - event.reverted! 
- end - end - - def revert_deposit_to_dao(dao_contract, dao_events) - deposit_to_dao_events = dao_events.where(event_type: "deposit_to_dao") - deposit_to_dao_events.each do |event| - address = event.address - address.decrement!(:dao_deposit, event.value) - dao_contract.decrement!(:total_deposit, event.value) - dao_contract.decrement!(:deposit_transactions_count) - event.reverted! - end - end - - def update_block_contained_address_info(local_block) - ApplicationRecord.transaction do - local_block.address_ids = local_block.ckb_transactions.pluck(:contained_address_ids).flatten.uniq - local_block.save! - local_block.contained_addresses.each(&method(:update_address_balance_and_ckb_transactions_count)) - end - end - - def forked?(target_block, local_tip_block) - return false if local_tip_block.blank? - - target_block.header.parent_hash != local_tip_block.block_hash - end - - def uncle_block_hashes(node_block_uncles) - node_block_uncles.map { |uncle| uncle.header.hash } - end - - def generate_address_in_advance(cellbase, block_timestamp) - return if cellbase.witnesses.blank? - - lock_script = CkbUtils.generate_lock_script_from_cellbase(cellbase) - address = Address.find_or_create_address(lock_script, block_timestamp) - LockScript.create_or_find_by( - args: lock_script.args, - code_hash: lock_script.code_hash, - hash_type: lock_script.hash_type, - address: address - ) - end - - def build_block(node_block) - header = node_block.header - epoch_info = CkbUtils.parse_epoch_info(header) - cellbase = node_block.transactions.first - - generate_address_in_advance(cellbase, header.timestamp) - - Block.new( - compact_target: header.compact_target, - block_hash: header.hash, - number: header.number, - parent_hash: header.parent_hash, - nonce: header.nonce, - timestamp: header.timestamp, - transactions_root: header.transactions_root, - proposals_hash: header.proposals_hash, - uncles_count: node_block.uncles.count, - uncles_hash: header.uncles_hash, - uncle_block_hashes: uncle_block_hashes(node_block.uncles), - version: header.version, - proposals: node_block.proposals, - proposals_count: node_block.proposals.count, - cell_consumed: CkbUtils.block_cell_consumed(node_block.transactions), - total_cell_capacity: CkbUtils.total_cell_capacity(node_block.transactions), - miner_hash: CkbUtils.miner_hash(cellbase), - miner_lock_hash: CkbUtils.miner_lock_hash(cellbase), - reward: CkbUtils.base_reward(header.number, epoch_info.number), - primary_reward: CkbUtils.base_reward(header.number, epoch_info.number), - secondary_reward: 0, - reward_status: header.number.to_i == 0 ? "issued" : "pending", - total_transaction_fee: 0, - epoch: epoch_info.number, - start_number: epoch_info.start_number, - length: epoch_info.length, - dao: header.dao, - block_time: block_time(header.timestamp, header.number), - block_size: 0 - ) - end - - def block_time(timestamp, number) - target_block_number = [number - 1, 0].max - return 0 if target_block_number.zero? 
- - previous_block_timestamp = Block.find_by(number: target_block_number).timestamp - timestamp - previous_block_timestamp - end - - def build_uncle_block(uncle_block, local_block) - header = uncle_block.header - epoch_info = CkbUtils.parse_epoch_info(header) - local_block.uncle_blocks.build( - compact_target: header.compact_target, - block_hash: header.hash, - number: header.number, - parent_hash: header.parent_hash, - nonce: header.nonce, - timestamp: header.timestamp, - transactions_root: header.transactions_root, - proposals_hash: header.proposals_hash, - uncles_hash: header.uncles_hash, - version: header.version, - proposals: uncle_block.proposals, - proposals_count: uncle_block.proposals.count, - epoch: epoch_info.number, - dao: header.dao - ) - end - - def build_ckb_transactions(local_block, transactions, outputs, new_dao_depositor_events, udt_infos) - transactions.each_with_index.map do |transaction, transaction_index| - addresses = Set.new - address_ids = Set.new - tags = Set.new - udt_ids = Set.new - dao_address_ids = Set.new - udt_address_ids = Set.new - ckb_transaction = build_ckb_transaction(local_block, transaction, transaction_index) - build_cell_inputs(transaction.inputs, ckb_transaction) - build_cell_outputs(transaction.outputs, ckb_transaction, addresses, transaction.outputs_data, outputs, - new_dao_depositor_events, udt_infos, address_ids, tags, udt_ids, dao_address_ids, udt_address_ids) - ckb_transaction.addresses << addresses.to_a - ckb_transaction.contained_address_ids += address_ids.to_a - ckb_transaction.tags += tags.to_a - ckb_transaction.contained_udt_ids += udt_ids.to_a - ckb_transaction.dao_address_ids += dao_address_ids.to_a - ckb_transaction.udt_address_ids += udt_address_ids.to_a - - ckb_transaction - end - end - - def build_ckb_transaction(local_block, transaction, transaction_index) - local_block.ckb_transactions.build( - tx_hash: transaction.hash, - cell_deps: transaction.cell_deps, - header_deps: transaction.header_deps, - version: transaction.version, - block_number: local_block.number, - block_timestamp: local_block.timestamp, - transaction_fee: 0, - witnesses: transaction.witnesses, - is_cellbase: transaction_index.zero?, - live_cell_changes: live_cell_changes(transaction, transaction_index) - ) - end - - def live_cell_changes(transaction, transaction_index) - transaction_index.zero? ? 1 : transaction.outputs.count - transaction.inputs.count - end - - def build_cell_inputs(node_inputs, ckb_transaction) - node_inputs.each do |node_input| - build_cell_input(ckb_transaction, node_input) - end - end - - def build_cell_input(ckb_transaction, node_input) - ckb_transaction.cell_inputs.build( - previous_output: node_input.previous_output, - since: node_input.since, - block: ckb_transaction.block, - from_cell_base: from_cell_base?(node_input) - ) - end - - def from_cell_base?(node_input) - node_input.previous_output.tx_hash == CellOutput::SYSTEM_TX_HASH - end - - def build_cell_outputs( - node_outputs, ckb_transaction, addresses, outputs_data, outputs, new_dao_depositor_events, -udt_infos, address_ids, tags, udt_ids, dao_address_ids, udt_address_ids - ) - node_outputs.each_with_index.map do |output, cell_index| - address = Address.find_or_create_address(output.lock, ckb_transaction.block_timestamp) - addresses << address - address_ids << address.id - cell_output = build_cell_output(ckb_transaction, output, address, cell_index, outputs_data[cell_index]) - outputs << cell_output - if cell_output.udt? 
- udt_infos << { type_script: output.type, address: address, udt_type: "sudt" } - tags << "udt" - udt = Udt.find_or_create_by!(type_hash: output.type.compute_hash, udt_type: "sudt") - udt_ids << udt.id - udt_address_ids << address.id - end - if cell_output.m_nft_token? - udt_infos << { type_script: output.type, address: address, udt_type: "m_nft_token" } - udt = Udt.find_or_create_by!(type_hash: output.type.compute_hash, udt_type: "m_nft_token") - m_nft_class_type = TypeScript.where(code_hash: CkbSync::Api.instance.token_class_script_code_hash, - args: output.type.args[0..49]).select { |type| - type.cell_output&.live? - }.first - if m_nft_class_type.present? - m_nft_class_cell = m_nft_class_type.cell_output - parsed_class_data = CkbUtils.parse_token_class_data(m_nft_class_cell.data) - udt.update(full_name: parsed_class_data.name, icon_file: parsed_class_data.renderer) - end - udt.update(published: true) - end - if cell_output.nervos_dao_deposit? || cell_output.nervos_dao_withdrawing? - tags << "dao" - dao_address_ids << address.id - end - - build_deposit_dao_events(address, cell_output, ckb_transaction, new_dao_depositor_events) - build_lock_script(cell_output, output.lock, address) - build_type_script(cell_output, output.type) - - cell_output - end - end - - def build_deposit_dao_events(address, cell_output, ckb_transaction, new_dao_depositor_events) - if cell_output.nervos_dao_deposit? - dao_contract = DaoContract.find_or_create_by(id: 1) - ckb_transaction.dao_events.build(block: ckb_transaction.block, address_id: address.id, event_type: "deposit_to_dao", - value: cell_output.capacity, contract_id: dao_contract.id, block_timestamp: ckb_transaction.block_timestamp) - if address.dao_deposit.zero? && !new_dao_depositor_events.key?(address.id) - new_dao_depositor_events[address.id] = ckb_transaction.tx_hash - end - end - end - - def build_withdraw_dao_events(address_id, ckb_transaction_id, local_block, previous_cell_output) - if previous_cell_output.nervos_dao_withdrawing? - withdraw_amount = previous_cell_output.capacity - ckb_transaction = CkbTransaction.find(ckb_transaction_id) - ckb_transaction.dao_events.create!(block: local_block, block_timestamp: local_block.timestamp, - address_id: address_id, event_type: "withdraw_from_dao", value: withdraw_amount, contract_id: DaoContract.default_contract.id) - interest = CkbUtils.dao_interest(previous_cell_output) - ckb_transaction.dao_events.create!(block: local_block, block_timestamp: local_block.timestamp, - address_id: address_id, event_type: "issue_interest", value: interest, contract_id: DaoContract.default_contract.id) - address = Address.find(address_id) - if (address.dao_deposit - withdraw_amount).zero? 
- ckb_transaction.dao_events.create!(block: local_block, block_timestamp: local_block.timestamp, - address_id: address_id, event_type: "take_away_all_deposit", value: 1, contract_id: DaoContract.default_contract.id) - end - end - end - - def build_cell_output(ckb_transaction, output, address, cell_index, output_data) - cell_output = ckb_transaction.cell_outputs.build( - capacity: output.capacity, - data: output_data, - data_size: CKB::Utils.hex_to_bin(output_data).bytesize, - occupied_capacity: CkbUtils.calculate_cell_min_capacity(output, output_data), - address: address, - block: ckb_transaction.block, - tx_hash: ckb_transaction.tx_hash, - cell_index: cell_index, - cell_type: CkbUtils.cell_type(output.type, output_data), - block_timestamp: ckb_transaction.block_timestamp, - type_hash: output.type&.compute_hash, - dao: ckb_transaction.block.dao - ) - - cell_output.udt_amount = CkbUtils.parse_udt_cell_data(output_data) if cell_output.udt? - cell_output.udt_amount = "0x#{output.type.args[-8..-1]}".hex if cell_output.m_nft_token? - - cell_output - end - - def build_lock_script(cell_output, lock_script, address) - cell_output.build_lock_script( - args: lock_script.args, - code_hash: lock_script.code_hash, - address: address, - hash_type: lock_script.hash_type - ) - end - - def build_type_script(cell_output, type_script) - return if type_script.blank? - - cell_output.build_type_script( - args: type_script.args, - code_hash: type_script.code_hash, - hash_type: type_script.hash_type - ) - end - - def update_tx_fee_related_data(local_block, input_capacities, udt_infos) - local_block.cell_inputs.where(from_cell_base: false, - previous_cell_output_id: nil).find_in_batches(batch_size: 3500) do |cell_inputs| - updated_inputs = [] - updated_outputs = [] - updated_ckb_transactions = [] - account_books = [] - ApplicationRecord.transaction do - cell_inputs.each do |cell_input| - consumed_tx = cell_input.ckb_transaction - ckb_transaction_id = consumed_tx.id - previous_cell_output = cell_input.previous_cell_output - address_id = previous_cell_output.address_id - input_capacities[ckb_transaction_id] << previous_cell_output.capacity - if previous_cell_output.udt? - udt_infos << { - type_script: previous_cell_output.node_output.type, address: previous_cell_output.address, - udt_type: "sudt" } - end - if previous_cell_output.m_nft_token? 
- udt_infos << { - type_script: previous_cell_output.node_output.type, address: previous_cell_output.address, - udt_type: "m_nft_token" } - end - - link_previous_cell_output_to_cell_input(cell_input, previous_cell_output) - update_previous_cell_output_status(ckb_transaction_id, previous_cell_output, consumed_tx.block_timestamp) - account_book = link_payer_address_to_ckb_transaction(ckb_transaction_id, address_id) - update_ckb_transaction(consumed_tx, address_id, previous_cell_output, updated_ckb_transactions) - build_withdraw_dao_events(address_id, ckb_transaction_id, local_block, previous_cell_output) - - updated_inputs << cell_input - updated_outputs << previous_cell_output - account_books << account_book - end - - udt_counts_value = udt_counts_value(updated_ckb_transactions) - - dao_tx_count = updated_ckb_transactions.select { |tx| tx[:tags].include?("dao") }.count - DaoContract.default_contract.increment!(:ckb_transactions_count, dao_tx_count) - - CellInput.import!(updated_inputs, validate: false, on_duplicate_key_update: [:previous_cell_output_id]) - CellOutput.import!(updated_outputs, validate: false, - on_duplicate_key_update: [:consumed_by_id, :status, :consumed_block_timestamp]) - AccountBook.import!(account_books, validate: false) - CkbTransaction.upsert_all(updated_ckb_transactions.uniq { |tx| tx[:id] }) - Udt.upsert_all(udt_counts_value) if udt_counts_value.present? - end - input_cache_keys = updated_inputs.map(&:cache_keys) - output_cache_keys = updated_outputs.map(&:cache_keys) - - flush_caches(input_cache_keys + output_cache_keys) - end - end - - def udt_counts_value(updated_ckb_transactions) - udt_ids = updated_ckb_transactions.pluck(:contained_udt_ids).flatten - udt_counts = udt_ids.each_with_object(Hash.new(0)) { |udt_id, counts| counts[udt_id] += 1 } - udt_counts.map do |udt_id, count| - udt = Udt.find(udt_id) - { - id: udt_id, ckb_transactions_count: udt.ckb_transactions_count + count, created_at: udt.created_at, - updated_at: Time.current } - end - end - - def flush_caches(cache_keys) - cache_keys.each_slice(400) do |keys| - $redis.pipelined do - $redis.del(*keys) - end - end - end - - def link_previous_cell_output_to_cell_input(cell_input, previous_cell_output) - cell_input.previous_cell_output_id = previous_cell_output.id - end - - def link_payer_address_to_ckb_transaction(ckb_transaction_id, address_id) - { ckb_transaction_id: ckb_transaction_id, address_id: address_id } - end - - def update_ckb_transaction(consumed_tx, address_id, previous_cell_output, updated_ckb_transactions) - tx = updated_ckb_transactions.select { |tx| tx[:id] == consumed_tx.id }.first - consumed_tx.contained_address_ids << address_id - if previous_cell_output.udt? - consumed_tx.tags << "udt" - consumed_tx.contained_udt_ids << Udt.find_or_create_by!(type_hash: previous_cell_output.type_hash, - udt_type: "sudt").id - consumed_tx.udt_address_ids << previous_cell_output.address_id - end - if previous_cell_output.nervos_dao_withdrawing? - consumed_tx.tags << "dao" - consumed_tx.dao_address_ids << previous_cell_output.address_id - end - if tx.present? 
- tx[:contained_address_ids] = (tx[:contained_address_ids] << consumed_tx.contained_address_ids).flatten.uniq - tx[:tags] = (tx[:tags] << consumed_tx.tags).flatten.uniq - tx[:contained_udt_ids] = (tx[:contained_udt_ids] << consumed_tx.contained_udt_ids).flatten.uniq - else - updated_ckb_transactions << { - id: consumed_tx.id, - contained_dao_address_ids: consumed_tx.dao_address_ids.uniq, - contained_udt_address_ids: consumed_tx.udt_address_ids.uniq, - contained_udt_ids: consumed_tx.contained_udt_ids.uniq, - contained_address_ids: consumed_tx.contained_address_ids.uniq, - tags: consumed_tx.tags.uniq, - created_at: consumed_tx.created_at, - updated_at: Time.current - } - end - end - - def update_previous_cell_output_status(ckb_transaction_id, previous_cell_output, consumed_block_timestamp) - previous_cell_output.consumed_by_id = ckb_transaction_id - previous_cell_output.consumed_block_timestamp = consumed_block_timestamp - previous_cell_output.status = "dead" - end - - def update_address_balance_and_ckb_transactions_count(address) - address.balance = address.cell_outputs.live.sum(:capacity) - address.ckb_transactions_count = address.custom_ckb_transactions.count - address.live_cells_count = address.cell_outputs.live.count - address.dao_transactions_count = address.ckb_dao_transactions.count - address.save! - end - - def calculate_tx_fee(local_block, ckb_transactions, input_capacities, outputs) - output_capacities = outputs.each { |k, v| outputs[k] = v.map(&:capacity) } - ckb_transactions = ckb_transactions.reject(&:is_cellbase) - return if ckb_transactions.blank? - - txs = [] - ckb_transactions.each do |ckb_transaction| - update_transaction_fee(ckb_transaction, input_capacities[ckb_transaction.id].sum, - output_capacities[ckb_transaction.id].sum) - ckb_transaction.capacity_involved = input_capacities[ckb_transaction.id].sum unless ckb_transaction.is_cellbase - txs << ckb_transaction - end - - CkbTransaction.import!(txs, validate: false, on_duplicate_key_update: [:transaction_fee, :capacity_involved]) - local_block.total_transaction_fee = local_block.ckb_transactions.sum(:transaction_fee) - local_block.save! 
- rescue ActiveRecord::RecordInvalid - local_block.update(total_transaction_fee: 0) - Rails.logger.error "block number: #{local_block.number}, tx_fee is negative" - end - - def update_transaction_fee(ckb_transaction, input_capacities, output_capacities) - transaction_fee = CkbUtils.ckb_transaction_fee(ckb_transaction, input_capacities, output_capacities) - Rails.logger.error "ckb_transaction_id: #{ckb_transaction.id}, tx_fee is negative" if transaction_fee < 0 - - ckb_transaction.transaction_fee = [transaction_fee, 0].max - end - - def update_current_block_mining_info(block) - CkbUtils.update_current_block_mining_info(block) - end - end -end diff --git a/app/models/ckb_transaction.rb b/app/models/ckb_transaction.rb index 660da2faa..29386d8a5 100644 --- a/app/models/ckb_transaction.rb +++ b/app/models/ckb_transaction.rb @@ -9,6 +9,10 @@ class CkbTransaction < ApplicationRecord enum tx_status: { pending: 0, proposed: 1, committed: 2, rejected: 3 }, _prefix: :tx belongs_to :block, optional: true # when a transaction is pending, it does not belongs to any block + has_many :block_transactions + has_many :included_blocks, class_name: "Block", + through: :block_transactions, + inverse_of: :contained_transactions has_many :account_books, dependent: :delete_all has_many :addresses, through: :account_books has_many :cell_inputs, dependent: :delete_all @@ -27,6 +31,8 @@ class CkbTransaction < ApplicationRecord has_many :header_dependencies, dependent: :delete_all has_many :witnesses, dependent: :delete_all + has_one :reject_reason + has_and_belongs_to_many :contained_addresses, class_name: "Address", join_table: "account_books" has_and_belongs_to_many :contained_udts, class_name: "Udt", join_table: :udt_transactions has_and_belongs_to_many :contained_dao_addresses, class_name: "Address", join_table: "address_dao_transactions" @@ -53,6 +59,14 @@ def self.cached_find(query_key) end end + def self.clean_pending + tx_pending.find_each do |t| + if where(tx_hash: t.tx_hash).where.not(tx_status: :pending).exists? 
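+          # the same tx_hash already exists with a proposed/committed/rejected
+          # status, so this pending row is a stale duplicate and can be removed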
+ t.destroy + end + end + end + def self.largest_in_epoch(epoch_number) Rails.cache.fetch(["epoch", epoch_number, "largest_tx"]) do tx = CkbTransaction.where(block: { epoch_number: epoch_number }).order(bytes: :desc).first @@ -73,7 +87,7 @@ def self.write_raw_hash_cache(tx_hash, raw_hash = nil) raw_hash = tx_hash tx_hash = raw_hash["hash"] end - Rails.cache.write([self.class.name, tx_hash, "raw_hash"], raw_hash, expires_in: 1.day) + Rails.cache.write([name, tx_hash, "raw_hash"], raw_hash, expires_in: 1.day) end # fetch using rpc method "get_transaction" @@ -82,7 +96,7 @@ def self.write_raw_hash_cache(tx_hash, raw_hash = nil) # @param write_raw_hash_cache [Boolean] if we should write raw hash of transaction without status to cache # @return [Hash] def self.fetch_raw_hash_with_status(tx_hash, write_raw_hash_cache: true) - Rails.cache.fetch([name, tx_hash, "raw_hash_with_status"], expires_in: 1.day) do + Rails.cache.fetch([name, tx_hash, "raw_hash_with_status"], expires_in: 1.day, skip_nil: true) do res = CkbSync::Api.instance.directly_single_call_rpc method: "get_transaction", params: [tx_hash] h = res["result"].with_indifferent_access self.write_raw_hash_cache(tx_hash, h["transaction"]) if write_raw_hash_cache @@ -95,7 +109,7 @@ def self.fetch_raw_hash_with_status(tx_hash, write_raw_hash_cache: true) # @param tx_hash [String] # @return [Hash] def self.fetch_raw_hash(tx_hash) - Rails.cache.fetch([name, tx_hash, "raw_hash"], expires_in: 1.day) do + Rails.cache.fetch([name, tx_hash, "raw_hash"], expires_in: 1.day, skip_nil: true) do fetch_raw_hash_with_status(tx_hash, write_raw_hash_cache: false)["transaction"] end end @@ -104,9 +118,9 @@ def self.fetch_raw_hash(tx_hash) # @param tx_hash [String] # @return [CKB::Types::TransactionWithStatus] def self.fetch_sdk_transaction_with_status(tx_hash, write_object_cache: true) - Rails.cache.fetch([name, tx_hash, "object_with_status"], expires_in: 1.day) do + Rails.cache.fetch([name, tx_hash, "object_with_status"], expires_in: 1.day, skip_nil: true) do tx = CKB::Types::TransactionWithStatus.from_h fetch_raw_hash_with_status(tx_hash) - Rails.cache.write([name, tx_hash, "object"], tx.transaction) if write_object_cache + Rails.cache.write([name, tx_hash, "object"], tx.transaction, expires_in: 1.day) if write_object_cache tx end end @@ -115,8 +129,15 @@ def self.fetch_sdk_transaction_with_status(tx_hash, write_object_cache: true) # @param tx_hash [String] # @return [CKB::Types::Transaction] def self.fetch_sdk_transaction(tx_hash) - Rails.cache.fetch([name, tx_hash, "object"], expires_in: 1.day) do - fetch_sdk_transaction_with_status(tx_hash, write_object_cache: false).transaction + Rails.cache.fetch([name, tx_hash, "object"], expires_in: 1.day, skip_nil: true) do + sdk_tx_with_status = Rails.cache.read([name, tx_hash, "object_with_status"]) + if sdk_tx_with_status + return sdk_transaction_with_status.transaction + else + tx = CKB::Types::Transaction.from_h fetch_raw_hash(tx_hash).with_indifferent_access + Rails.cache.write([name, tx_hash, "object"], tx, expires_in: 1.day) + tx + end end end @@ -207,7 +228,7 @@ def cell_info end def detailed_message - @detailed_message ||= PoolTransactionEntry.find_by(tx_hash: tx_hash)&.detailed_message + reject_reason&.message end # convert current record to raw hash with standard RPC json data structure @@ -247,11 +268,19 @@ def normal_tx_display_outputs(previews) cell_outputs_for_display = cell_outputs_for_display[0, 10] end cell_outputs_for_display.map do |output| - consumed_tx_hash = output.live? ? 
nil : output.consumed_by.tx_hash + consumed_tx_hash = output.live? ? nil : output.consumed_by&.tx_hash display_output = { - id: output.id, capacity: output.capacity, address_hash: output.address_hash, - status: output.status, consumed_tx_hash: consumed_tx_hash, cell_type: output.cell_type } + id: output.id, + capacity: output.capacity, + address_hash: output.address_hash, + status: output.status, + consumed_tx_hash: consumed_tx_hash, + cell_type: output.cell_type + } display_output.merge!(attributes_for_udt_cell(output)) if output.udt? + display_output.merge!(attributes_for_cota_registry_cell(output)) if output.cota_registry? + display_output.merge!(attributes_for_cota_regular_cell(output)) if output.cota_regular? + display_output.merge!(attributes_for_m_nft_cell(output)) if output.cell_type.in?(%w( m_nft_issuer m_nft_class m_nft_token @@ -282,6 +311,19 @@ def normal_tx_display_inputs(previews) end cell_inputs_for_display.each_with_index.map do |cell_input, index| previous_cell_output = cell_input.previous_cell_output + unless previous_cell_output + next({ + from_cellbase: false, + capacity: "", + address_hash: "", + generated_tx_hash: cell_input.previous_tx_hash, + cell_index: cell_input.previous_index, + since: { + raw: hex_since(cell_input.since.to_i), + median_timestamp: cell_input.block&.median_timestamp.to_i + } + }) + end display_input = { id: previous_cell_output.id, @@ -293,7 +335,7 @@ def normal_tx_display_inputs(previews) cell_type: previous_cell_output.cell_type, since: { raw: hex_since(cell_input.since.to_i), - median_timestamp: cell_input.block.median_timestamp.to_i + median_timestamp: cell_input.block&.median_timestamp.to_i } } display_input.merge!(attributes_for_dao_input(previous_cell_output)) if previous_cell_output.nervos_dao_withdrawing? 
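For a pending input whose previous output has not been indexed yet, the guard added above returns a placeholder entry built from the raw out point instead of raising. A rough sketch of that shape, with illustrative values only (the hash and index come from cell_input.previous_tx_hash / previous_index):

    {
      from_cellbase: false,
      capacity: "",
      address_hash: "",
      generated_tx_hash: "0x6f0b...",   # illustrative out-point tx hash
      cell_index: 1,                    # illustrative out-point index
      since: {
        raw: "0x0000000000000000",      # CkbUtils.hex_since(0): 16 hex chars, zero-padded
        median_timestamp: 0             # block is nil for a pending tx; &. plus to_i yields 0
      }
    }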
@@ -330,6 +372,16 @@ def attributes_for_m_nft_cell(m_nft_cell) { m_nft_info: info, extra_info: info } end + def attributes_for_cota_registry_cell(cota_cell) + info = cota_cell.cota_registry_info + { cota_registry_info: info, extra_info: info } + end + + def attributes_for_cota_regular_cell(cota_cell) + info = cota_cell.cota_regular_info + { cota_regular_info: info, extra_info: info } + end + def attributes_for_nrc_721_cell(nrc_721_cell) info = nrc_721_cell.nrc_721_nft_info { nrc_721_token_info: info, extra_info: info } diff --git a/app/models/lock_script.rb b/app/models/lock_script.rb index d84fc612d..2df44740f 100644 --- a/app/models/lock_script.rb +++ b/app/models/lock_script.rb @@ -90,6 +90,15 @@ def lock_info end end + # @return [Integer] Byte + def calculate_bytesize + bytesize = 1 + bytesize += CKB::Utils.hex_to_bin(code_hash).bytesize if code_hash + bytesize += CKB::Utils.hex_to_bin(args).bytesize if args + + bytesize + end + private def set_since_epoch_number_and_index(since_value) diff --git a/app/models/pool_transaction_entry.rb b/app/models/pool_transaction_entry.rb deleted file mode 100644 index cc7d475e7..000000000 --- a/app/models/pool_transaction_entry.rb +++ /dev/null @@ -1,94 +0,0 @@ -class PoolTransactionEntry < ApplicationRecord - enum tx_status: { pending: 0, proposed: 1, committed: 2, rejected: 3 }, - _prefix: :pool_transaction - - def block - nil - end - - def is_cellbase - false - end - - def income(address = nil) - nil - end - - def display_outputs(previews: false) - self.attributes["display_outputs"] - end - - def display_inputs(previews: false) - self.attributes["display_inputs"] - end - - def proposal_short_id - tx_hash[0...12] - end - - def display_inputs_info; end - - def to_raw - { - hash: tx_hash, - header_deps: Array.wrap(header_deps), - cell_deps: Array.wrap(cell_deps).map do |d| - d["out_point"]["index"] = "0x#{d['out_point']['index'].to_s(16)}" - d - end, - inputs: Array.wrap(inputs).map do |i| - i["since"] = "0x#{i['since'].to_s(16)}" - i - end, - outputs: Array.wrap(outputs).map do |i| - i["capacity"] = "0x#{i['capacity'].to_s(16)}" - i - end, - outputs_data: Array.wrap(outputs_data), - version: "0x#{(version || 0).to_s(16)}", - witnesses: Array.wrap(witnesses) - } - end - - def update_detailed_message_for_rejected_transaction - response_string = CkbSync::Api.instance.directly_single_call_rpc method: "get_transaction", - params: [tx_hash] - reason = response_string["result"]["tx_status"] - self.update detailed_message: response_string["result"]["tx_status"]["reason"] - return self - end -end - -# == Schema Information -# -# Table name: pool_transaction_entries -# -# id :bigint not null, primary key -# cell_deps :jsonb -# tx_hash :binary -# header_deps :jsonb -# inputs :jsonb -# outputs :jsonb -# outputs_data :jsonb -# version :integer -# witnesses :jsonb -# transaction_fee :decimal(30, ) -# block_number :decimal(30, ) -# block_timestamp :decimal(30, ) -# cycles :decimal(30, ) -# tx_size :decimal(30, ) -# display_inputs :jsonb -# display_outputs :jsonb -# tx_status :integer default("pending") -# created_at :datetime not null -# updated_at :datetime not null -# detailed_message :text -# bytes :integer default(0) -# -# Indexes -# -# index_pool_transaction_entries_on_id_and_tx_status (id,tx_status) -# index_pool_transaction_entries_on_tx_hash (tx_hash) USING hash -# index_pool_transaction_entries_on_tx_status (tx_status) -# unique_tx_hash (tx_hash) UNIQUE -# diff --git a/app/models/reject_reason.rb b/app/models/reject_reason.rb new file mode 100644 index 
000000000..d683f0049 --- /dev/null +++ b/app/models/reject_reason.rb @@ -0,0 +1,17 @@ +class RejectReason < ApplicationRecord + belongs_to :ckb_transaction + validates :ckb_transaction_id, uniqueness: true +end + +# == Schema Information +# +# Table name: reject_reasons +# +# id :bigint not null, primary key +# ckb_transaction_id :bigint not null +# message :text +# +# Indexes +# +# index_reject_reasons_on_ckb_transaction_id (ckb_transaction_id) UNIQUE +# diff --git a/app/models/statistic_info.rb b/app/models/statistic_info.rb index 47dd39aa3..60552bee3 100644 --- a/app/models/statistic_info.rb +++ b/app/models/statistic_info.rb @@ -105,7 +105,7 @@ def self.hash_rate(block_number) txs = CkbTransaction.tx_committed. where("bytes > 0 and transaction_fee > 0"). order("id desc").limit(10000). - pluck(:id, :created_at, :transaction_fee, :bytes, :confirmation_time, :block_timestamp) + pluck(:id, :created_at, :transaction_fee, :bytes, :confirmation_time, :block_timestamp, :created_at) txs.map do |id, created_at, transaction_fee, bytes, confirmation_time, block_timestamp| if confirmation_time && confirmation_time >= 0 { @@ -119,15 +119,6 @@ def self.hash_rate(block_number) a = created_at.to_i c = b - a - if c <= 0 - # because the transaction will be deleted when block rollback - # so the original created_at will be lost, so we fallback to the record in PoolTransactionEntry - d = PoolTransactionEntry.find_by(tx_hash: CkbTransaction.where(id: id).pick(:tx_hash))&.created_at - if d - c = b - d.to_i - end - end - CkbTransaction.where(id: id).update_all(confirmation_time: c) { id: id, @@ -141,7 +132,7 @@ def self.hash_rate(block_number) define_logic :pending_transaction_fee_rates do # select from database - fee_rates = PoolTransactionEntry.pool_transaction_pending. + fee_rates = CkbTransaction.tx_pending. where("transaction_fee > 0"). order("id desc").limit(100) diff --git a/app/models/suggest_query.rb b/app/models/suggest_query.rb index 69142c682..d53e613ef 100644 --- a/app/models/suggest_query.rb +++ b/app/models/suggest_query.rb @@ -55,12 +55,7 @@ def find_udt_by_type_hash UdtSerializer.new(udt) if udt.present? end - def find_pool_tx_by_hash - tx = PoolTransactionEntry.find_by(tx_hash: query_key) - CkbTransactionSerializer.new(tx) if tx.present? 
- end - def find_by_hex - Block.cached_find(query_key) || find_ckb_transaction_by_hash || find_pool_tx_by_hash || find_address_by_lock_hash || find_udt_by_type_hash + Block.cached_find(query_key) || find_ckb_transaction_by_hash || find_address_by_lock_hash || find_udt_by_type_hash end end diff --git a/app/models/type_script.rb b/app/models/type_script.rb index f3d54add0..d9007c188 100644 --- a/app/models/type_script.rb +++ b/app/models/type_script.rb @@ -52,6 +52,15 @@ def generate_script_hash self.hash_type ||= "type" self.script_hash ||= CKB::Types::Script.new(**to_node).compute_hash rescue nil end + + # @return [Integer] Byte + def calculate_bytesize + bytesize = 1 + bytesize += CKB::Utils.hex_to_bin(code_hash).bytesize if code_hash + bytesize += CKB::Utils.hex_to_bin(args).bytesize if args + + bytesize + end end # == Schema Information diff --git a/app/models/udt.rb b/app/models/udt.rb index 9f75b62dd..e1b3ae59a 100644 --- a/app/models/udt.rb +++ b/app/models/udt.rb @@ -14,9 +14,10 @@ class Udt < ApplicationRecord has_and_belongs_to_many :ckb_transactions, join_table: :udt_transactions - def h24_ckb_transactions_count - Rails.cache.realize("udt_h24_ckb_transactions_count_#{id}", expires_in: 1.hour) do - ckb_transactions.where("block_timestamp >= ?", CkbUtils.time_in_milliseconds(24.hours.ago)).count + def update_h24_ckb_transactions_count + if self.ckb_transactions.exists? + update(h24_ckb_transactions_count: self.ckb_transactions.where("block_timestamp >= ?", + CkbUtils.time_in_milliseconds(24.hours.ago)).count) end end @@ -35,29 +36,30 @@ def type_script # # Table name: udts # -# id :bigint not null, primary key -# code_hash :binary -# hash_type :string -# args :string -# type_hash :string -# full_name :string -# symbol :string -# decimal :integer -# description :string -# icon_file :string -# operator_website :string -# addresses_count :bigint default(0) -# total_amount :decimal(40, ) default(0) -# udt_type :integer -# published :boolean default(FALSE) -# created_at :datetime not null -# updated_at :datetime not null -# block_timestamp :bigint -# issuer_address :binary -# ckb_transactions_count :bigint default(0) -# nrc_factory_cell_id :bigint -# display_name :string -# uan :string +# id :bigint not null, primary key +# code_hash :binary +# hash_type :string +# args :string +# type_hash :string +# full_name :string +# symbol :string +# decimal :integer +# description :string +# icon_file :string +# operator_website :string +# addresses_count :bigint default(0) +# total_amount :decimal(40, ) default(0) +# udt_type :integer +# published :boolean default(FALSE) +# created_at :datetime not null +# updated_at :datetime not null +# block_timestamp :bigint +# issuer_address :binary +# ckb_transactions_count :bigint default(0) +# nrc_factory_cell_id :bigint +# display_name :string +# uan :string +# h24_ckb_transactions_count :bigint default(0) # # Indexes # diff --git a/app/serializers/ckb_transaction_serializer.rb b/app/serializers/ckb_transaction_serializer.rb index 3b8529051..7c8e742fb 100644 --- a/app/serializers/ckb_transaction_serializer.rb +++ b/app/serializers/ckb_transaction_serializer.rb @@ -1,36 +1,21 @@ # notice: -# this class would serialize 2 models: CkbTransaction and PoolTransactionEntry # class CkbTransactionSerializer include FastJsonapi::ObjectSerializer # for the tx_status, - # CkbTransaction will always be "commited" - # PoolTransactionEntry will give: 0, 1, 2, 3 attributes :is_cellbase, :tx_status attribute :witnesses do |o| - if o.is_a?(PoolTransactionEntry) - 
o.witnesses - else - o.witnesses&.map(&:data) || [] - end + o.witnesses&.map(&:data) || [] end attribute :cell_deps do |o| - if o.is_a?(PoolTransactionEntry) - o.cell_deps - else - o.cell_dependencies.explicit.includes(:cell_output).to_a.map(&:to_raw) - end + o.cell_dependencies.explicit.includes(:cell_output).to_a.map(&:to_raw) end attribute :header_deps do |o| - if o.is_a?(PoolTransactionEntry) - o.header_deps - else - o.header_dependencies.map(&:header_hash) - end + o.header_dependencies.map(&:header_hash) end attribute :detailed_message do |object| diff --git a/app/utils/ckb_utils.rb b/app/utils/ckb_utils.rb index 1868e73d0..c9d80e6da 100644 --- a/app/utils/ckb_utils.rb +++ b/app/utils/ckb_utils.rb @@ -1,4 +1,8 @@ class CkbUtils + def self.int_to_hex(i) + "0x#{i.to_s(16)}" + end + def self.calculate_cell_min_capacity(output, data) output.calculate_min_capacity(data) end @@ -375,7 +379,8 @@ def self.cell_type(type_script, output_data) return "normal" unless ([ Settings.dao_code_hash, Settings.dao_type_hash, Settings.sudt_cell_type_hash, Settings.sudt1_cell_type_hash, CkbSync::Api.instance.issuer_script_code_hash, CkbSync::Api.instance.token_class_script_code_hash, - CkbSync::Api.instance.token_script_code_hash + CkbSync::Api.instance.token_script_code_hash, CkbSync::Api.instance.cota_registry_code_hash, + CkbSync::Api.instance.cota_regular_code_hash ].include?(type_script&.code_hash) && type_script&.hash_type == "type") || is_nrc_721_token_cell?(output_data) || is_nrc_721_factory_cell?(output_data) case type_script&.code_hash @@ -520,4 +525,8 @@ def self.address_to_lock_hash(address) parsed.script.compute_hash end end + + def self.hex_since(int_since_value) + return "0x#{int_since_value.to_s(16).rjust(16, '0')}" + end end diff --git a/app/workers/address_unclaimed_compensation_generator.rb b/app/workers/address_unclaimed_compensation_generator.rb index 97ae55587..3d935acfd 100644 --- a/app/workers/address_unclaimed_compensation_generator.rb +++ b/app/workers/address_unclaimed_compensation_generator.rb @@ -5,7 +5,12 @@ def perform Address.where(is_depositor: true).find_in_batches do |addresses| values = addresses.map do |address| - { id: address.id, unclaimed_compensation: address.cal_unclaimed_compensation, created_at: address.created_at, updated_at: Time.current } + { + id: address.id, + unclaimed_compensation: address.cal_unclaimed_compensation, + created_at: address.created_at, + updated_at: Time.current + } end if values.present? diff --git a/app/workers/flush_inputs_outputs_cache_worker.rb b/app/workers/flush_inputs_outputs_cache_worker.rb index 128722c61..b6f822996 100644 --- a/app/workers/flush_inputs_outputs_cache_worker.rb +++ b/app/workers/flush_inputs_outputs_cache_worker.rb @@ -11,7 +11,7 @@ def perform(block_id) cache_keys << cell_input.cache_keys end $redis.pipelined do - $redis.del(*cache_keys) + Rails.cache.delete_multi(cache_keys) end end end diff --git a/app/workers/pool_transaction_check_worker.rb b/app/workers/pool_transaction_check_worker.rb index d2628d830..71a668ac4 100644 --- a/app/workers/pool_transaction_check_worker.rb +++ b/app/workers/pool_transaction_check_worker.rb @@ -1,3 +1,4 @@ +# Check every pending transaction in the pool if rejected class PoolTransactionCheckWorker include Sidekiq::Worker sidekiq_options retry: 0 @@ -11,14 +12,13 @@ def perform # If any of the input or dependency cells is used, the transaction will never be valid. # Thus we can directly mark this transaction rejected without requesting to CKB Node. 
# Only request the CKB Node for reject reason after we find the transaction is rejeceted. - PoolTransactionEntry.pool_transaction_pending.where(block_timestamp: ..latest_block.timestamp).select( - :id, :tx_hash, :inputs, :created_at, :cell_deps - ).find_each do |tx| + CkbTransaction.tx_pending. + where(block_timestamp: ..latest_block.timestamp).includes(:cell_dependencies, cell_inputs: :previous_cell_output).find_each do |tx| is_rejected = false rejected_transaction = nil - tx.inputs.each do |input| - if CellOutput.where(tx_hash: input["previous_output"]["tx_hash"], - cell_index: input["previous_output"]["index"], status: "dead").exists? + # check if any input is used by other transactions + tx.cell_inputs.each do |input| + if input.previous_cell_output && input.previous_cell_output.dead? rejected_transaction = { id: tx.id, tx_status: "rejected", @@ -29,10 +29,11 @@ def perform break end end + unless is_rejected - tx.cell_deps.each do |input| - if CellOutput.where(tx_hash: input["out_point"]["tx_hash"], - cell_index: input["out_point"]["index"], status: "dead").exists? + # check if any dependency cell(contract) is consumed by other transactions + tx.cell_dependencies.each do |dep| + if dep.cell_output && dep.cell_output.dead? rejected_transaction = { id: tx.id, tx_status: "rejected", @@ -44,8 +45,10 @@ def perform end end end + if is_rejected AfterCommitEverywhere.after_commit do + # fetch the reason from node PoolTransactionUpdateRejectReasonWorker.perform_async tx.tx_hash end CkbTransaction.where(tx_hash: tx.tx_hash).update_all tx_status: :rejected # , detailed_message: reason diff --git a/app/workers/pool_transaction_update_reject_reason_worker.rb b/app/workers/pool_transaction_update_reject_reason_worker.rb index d8bb06681..70bf56696 100644 --- a/app/workers/pool_transaction_update_reject_reason_worker.rb +++ b/app/workers/pool_transaction_update_reject_reason_worker.rb @@ -5,10 +5,10 @@ def perform(tx_hash) params: [tx_hash] reason = response_string["result"]["tx_status"] if reason["status"] == "rejected" - PoolTransactionEntry. - where(tx_hash: tx_hash). - update_all tx_status: "rejected", - detailed_message: reason["reason"] + tx = CkbTransaction.find_by tx_hash: tx_hash + reject_reason = tx.reject_reason || tx.build_reject_reason + tx.update tx_status: "rejected" + reject_reason.update message: reason["reason"] end end end diff --git a/app/workers/update_h24_ckb_transactions_count_on_udts_worker.rb b/app/workers/update_h24_ckb_transactions_count_on_udts_worker.rb new file mode 100644 index 000000000..f5d747b4e --- /dev/null +++ b/app/workers/update_h24_ckb_transactions_count_on_udts_worker.rb @@ -0,0 +1,10 @@ +class UpdateH24CkbTransactionsCountOnUdtsWorker + include Sidekiq::Worker + sidekiq_options queue: "low" + + def perform + Udt.find_each do |udt| + udt.update_h24_ckb_transactions_count + end + end +end diff --git a/config/environments/test.rb b/config/environments/test.rb index 5746019c0..0b2e38951 100644 --- a/config/environments/test.rb +++ b/config/environments/test.rb @@ -25,7 +25,7 @@ # Show full error reports and disable caching. config.consider_all_requests_local = true config.action_controller.perform_caching = false - config.cache_store = :null_store + config.cache_store = :memory_store # Raise exceptions instead of rendering exception templates. 
config.action_dispatch.show_exceptions = false diff --git a/config/routes.rb b/config/routes.rb index 93282b461..ea270ab6d 100644 --- a/config/routes.rb +++ b/config/routes.rb @@ -13,8 +13,12 @@ namespace :external do resources :stats, only: :show end - resources :blocks, only: %i(index show) - resources :address_transactions, only: :show + resources :blocks, only: %i(index show) do + collection do + get :download_csv + end + end + resources :address_dao_transactions, only: :show resources :block_transactions, only: :show resources :addresses, only: :show get "/transactions/:id", to: "ckb_transactions#show", as: "ckb_transaction" @@ -33,22 +37,26 @@ resources :contract_transactions, only: :show resources :contracts, only: :show resources :dao_contract_transactions, only: :show - resources :address_dao_transactions, only: :show + resources :address_transactions, only: :show do + collection do + get :download_csv + end + end resources :dao_depositors, only: :index resources :daily_statistics, only: :show resources :block_statistics, only: :show resources :epoch_statistics, only: :show resources :market_data, only: :show - resources :udts, only: %i(index show) + resources :udts, only: %i(index show) do + collection do + get :download_csv + end + end resources :udt_transactions, only: :show resources :address_udt_transactions, only: :show resources :distribution_data, only: :show resources :monetary_data, only: :show end - - namespace :v2 do - post "/das_accounts" => "das_accounts#query", as: :das_accounts - end end draw "v2" match "/:anything" => "errors#routing_error", via: :all, constraints: { anything: /.*/ } diff --git a/config/routes/v2.rb b/config/routes/v2.rb index 7c681e88b..68ff5362e 100644 --- a/config/routes/v2.rb +++ b/config/routes/v2.rb @@ -1,11 +1,16 @@ namespace :api do namespace :v2 do + post "/das_accounts" => "das_accounts#query", as: :das_accounts resources :transactions, only: [:index, :show] do member do + get :details get :raw end end resources :pending_transactions, only: [:index] do + collection do + get :count + end end namespace :monitors do resources :daily_statistics, only: :index @@ -35,7 +40,12 @@ end end resources :items, only: :index - resources :transfers, only: [:index, :show] + resources :transfers, only: [:index, :show] do + collection do + get :download_csv + end + end + end resources :dao_events, only: [:index] diff --git a/config/schedule.yml b/config/schedule.yml index 829edc3c2..e18ec3922 100644 --- a/config/schedule.yml +++ b/config/schedule.yml @@ -41,3 +41,7 @@ update_pool_txs_status: clean_up_worker: cron: "0 * * * *" class: 'CleanUpWorker' + +update_h24_ckb_transactioins_count_on_udts: + cron: "0 * * * *" + class: 'UpdateH24CkbTransactionsCountOnUdtsWorker' diff --git a/db/migrate/20230504023535_add_h24_ckb_transactions_count_to_udts.rb b/db/migrate/20230504023535_add_h24_ckb_transactions_count_to_udts.rb new file mode 100644 index 000000000..c10a8aa36 --- /dev/null +++ b/db/migrate/20230504023535_add_h24_ckb_transactions_count_to_udts.rb @@ -0,0 +1,17 @@ +class AddH24CkbTransactionsCountToUdts < ActiveRecord::Migration[7.0] + def up + add_column :udts, :h24_ckb_transactions_count, :bigint, default: 0 + + Udt.all.each do |udt| + h24_ckb_transactions_count = Rails.cache.realize("udt_h24_ckb_transactions_count_#{udt.id}", expires_in: 1.hour) do + udt.ckb_transactions.where("block_timestamp >= ?", CkbUtils.time_in_milliseconds(24.hours.ago)).count + end + + udt.update_columns(h24_ckb_transactions_count: h24_ckb_transactions_count) + end + end + + 
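  # An illustrative alternative (not part of this migration) for the backfill in
  # `up` above, assuming `Rails.cache.realize` was meant to be ActiveSupport's
  # read-through `Rails.cache.fetch` (ActiveSupport::Cache defines no `realize`),
  # and reusing the Udt#update_h24_ckb_transactions_count helper this patch adds:
  #
  #   def up
  #     add_column :udts, :h24_ckb_transactions_count, :bigint, default: 0
  #     Udt.reset_column_information
  #     Udt.find_each(&:update_h24_ckb_transactions_count)
  #   end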
def down + remove_column :udts, :h24_ckb_transactions_count, :bigint, default: 0 + end +end diff --git a/db/migrate/20230518061651_change_cell_input_previous_output.rb b/db/migrate/20230518061651_change_cell_input_previous_output.rb new file mode 100644 index 000000000..bd9fdc57e --- /dev/null +++ b/db/migrate/20230518061651_change_cell_input_previous_output.rb @@ -0,0 +1,16 @@ +class ChangeCellInputPreviousOutput < ActiveRecord::Migration[7.0] + def change + change_table :cell_inputs, bulk: true do |t| + t.binary :previous_tx_hash + t.integer :previous_index + end + execute <<~SQL + UPDATE cell_inputs + SET previous_tx_hash = decode(substring(previous_output ->> 'tx_hash',3), 'hex') , + previous_index = ('x' || lpad(substring(previous_output ->> 'index' from 3), 8, '0'))::bit(32)::int + where previous_cell_output_id is null and previous_output is not null + SQL + add_index :cell_inputs, [:previous_tx_hash, :previous_index] + remove_column :cell_inputs, :previous_output + end +end diff --git a/db/migrate/20230526070328_create_reject_reasons.rb b/db/migrate/20230526070328_create_reject_reasons.rb new file mode 100644 index 000000000..219865349 --- /dev/null +++ b/db/migrate/20230526070328_create_reject_reasons.rb @@ -0,0 +1,9 @@ +class CreateRejectReasons < ActiveRecord::Migration[7.0] + def change + create_table :reject_reasons do |t| + t.bigint :ckb_transaction_id, null: false + t.text :message + end + add_index :reject_reasons, :ckb_transaction_id, unique: true + end +end diff --git a/db/migrate/20230526085258_address_block_snapshot.rb b/db/migrate/20230526085258_address_block_snapshot.rb new file mode 100644 index 000000000..735fc07cd --- /dev/null +++ b/db/migrate/20230526085258_address_block_snapshot.rb @@ -0,0 +1,12 @@ +class AddressBlockSnapshot < ActiveRecord::Migration[7.0] + def change + create_table :address_block_snapshots do |t| + t.belongs_to :address + t.belongs_to :block + t.bigint :block_number + t.jsonb :final_state + + t.index [:block_id, :address_id], unique: true + end + end +end diff --git a/db/migrate/20230526135653_migrate_reject_messages.rb b/db/migrate/20230526135653_migrate_reject_messages.rb new file mode 100644 index 000000000..952d5fd6a --- /dev/null +++ b/db/migrate/20230526135653_migrate_reject_messages.rb @@ -0,0 +1,19 @@ +class MigrateRejectMessages < ActiveRecord::Migration[7.0] + def change + execute <<~SQL + INSERT INTO reject_reasons (ckb_transaction_id, message) + SELECT ckb_transactions.id, pool_transaction_entries.detailed_message + FROM ckb_transactions + JOIN pool_transaction_entries + ON ckb_transactions.tx_hash = pool_transaction_entries.tx_hash + where pool_transaction_entries.tx_status = 3 + SQL + execute <<~SQL + UPDATE ckb_transactions + SET tx_status = pool_transaction_entries.tx_status + FROM pool_transaction_entries + WHERE ckb_transactions.tx_hash = pool_transaction_entries.tx_hash + AND pool_transaction_entries.tx_status = 3 + SQL + end +end diff --git a/db/structure.sql b/db/structure.sql index a66d20cbf..09227aec3 100644 --- a/db/structure.sql +++ b/db/structure.sql @@ -347,6 +347,38 @@ CREATE SEQUENCE public.account_books_id_seq ALTER SEQUENCE public.account_books_id_seq OWNED BY public.account_books.id; +-- +-- Name: address_block_snapshots; Type: TABLE; Schema: public; Owner: - +-- + +CREATE TABLE public.address_block_snapshots ( + id bigint NOT NULL, + address_id bigint, + block_id bigint, + block_number bigint, + final_state jsonb +); + + +-- +-- Name: address_block_snapshots_id_seq; Type: SEQUENCE; Schema: public; Owner: - +-- + 
+CREATE SEQUENCE public.address_block_snapshots_id_seq + START WITH 1 + INCREMENT BY 1 + NO MINVALUE + NO MAXVALUE + CACHE 1; + + +-- +-- Name: address_block_snapshots_id_seq; Type: SEQUENCE OWNED BY; Schema: public; Owner: - +-- + +ALTER SEQUENCE public.address_block_snapshots_id_seq OWNED BY public.address_block_snapshots.id; + + -- -- Name: address_dao_transactions; Type: TABLE; Schema: public; Owner: - -- @@ -752,7 +784,6 @@ ALTER SEQUENCE public.cell_dependencies_id_seq OWNED BY public.cell_dependencies CREATE TABLE public.cell_inputs ( id bigint NOT NULL, - previous_output jsonb, ckb_transaction_id bigint, created_at timestamp without time zone NOT NULL, updated_at timestamp without time zone NOT NULL, @@ -761,7 +792,9 @@ CREATE TABLE public.cell_inputs ( block_id numeric(30,0), since numeric(30,0) DEFAULT 0.0, cell_type integer DEFAULT 0, - index integer + index integer, + previous_tx_hash bytea, + previous_index integer ); @@ -1673,6 +1706,36 @@ CREATE SEQUENCE public.referring_cells_id_seq ALTER SEQUENCE public.referring_cells_id_seq OWNED BY public.referring_cells.id; +-- +-- Name: reject_reasons; Type: TABLE; Schema: public; Owner: - +-- + +CREATE TABLE public.reject_reasons ( + id bigint NOT NULL, + ckb_transaction_id bigint NOT NULL, + message text +); + + +-- +-- Name: reject_reasons_id_seq; Type: SEQUENCE; Schema: public; Owner: - +-- + +CREATE SEQUENCE public.reject_reasons_id_seq + START WITH 1 + INCREMENT BY 1 + NO MINVALUE + NO MAXVALUE + CACHE 1; + + +-- +-- Name: reject_reasons_id_seq; Type: SEQUENCE OWNED BY; Schema: public; Owner: - +-- + +ALTER SEQUENCE public.reject_reasons_id_seq OWNED BY public.reject_reasons.id; + + -- -- Name: rolling_avg_block_time; Type: MATERIALIZED VIEW; Schema: public; Owner: - -- @@ -2138,7 +2201,8 @@ CREATE TABLE public.udts ( ckb_transactions_count bigint DEFAULT 0.0, nrc_factory_cell_id bigint, display_name character varying, - uan character varying + uan character varying, + h24_ckb_transactions_count bigint DEFAULT 0 ); @@ -2272,6 +2336,13 @@ ALTER TABLE ONLY public.ckb_transactions ATTACH PARTITION public.ckb_transaction ALTER TABLE ONLY public.account_books ALTER COLUMN id SET DEFAULT nextval('public.account_books_id_seq'::regclass); +-- +-- Name: address_block_snapshots id; Type: DEFAULT; Schema: public; Owner: - +-- + +ALTER TABLE ONLY public.address_block_snapshots ALTER COLUMN id SET DEFAULT nextval('public.address_block_snapshots_id_seq'::regclass); + + -- -- Name: addresses id; Type: DEFAULT; Schema: public; Owner: - -- @@ -2461,6 +2532,13 @@ ALTER TABLE ONLY public.pool_transaction_entries ALTER COLUMN id SET DEFAULT nex ALTER TABLE ONLY public.referring_cells ALTER COLUMN id SET DEFAULT nextval('public.referring_cells_id_seq'::regclass); +-- +-- Name: reject_reasons id; Type: DEFAULT; Schema: public; Owner: - +-- + +ALTER TABLE ONLY public.reject_reasons ALTER COLUMN id SET DEFAULT nextval('public.reject_reasons_id_seq'::regclass); + + -- -- Name: script_transactions id; Type: DEFAULT; Schema: public; Owner: - -- @@ -2567,6 +2645,14 @@ ALTER TABLE ONLY public.account_books ADD CONSTRAINT account_books_pkey PRIMARY KEY (id); +-- +-- Name: address_block_snapshots address_block_snapshots_pkey; Type: CONSTRAINT; Schema: public; Owner: - +-- + +ALTER TABLE ONLY public.address_block_snapshots + ADD CONSTRAINT address_block_snapshots_pkey PRIMARY KEY (id); + + -- -- Name: addresses addresses_pkey; Type: CONSTRAINT; Schema: public; Owner: - -- @@ -2863,6 +2949,14 @@ ALTER TABLE ONLY public.referring_cells ADD CONSTRAINT 
referring_cells_pkey PRIMARY KEY (id); +-- +-- Name: reject_reasons reject_reasons_pkey; Type: CONSTRAINT; Schema: public; Owner: - +-- + +ALTER TABLE ONLY public.reject_reasons + ADD CONSTRAINT reject_reasons_pkey PRIMARY KEY (id); + + -- -- Name: schema_migrations schema_migrations_pkey; Type: CONSTRAINT; Schema: public; Owner: - -- @@ -3212,6 +3306,27 @@ CREATE UNIQUE INDEX index_account_books_on_address_id_and_ckb_transaction_id ON CREATE INDEX index_account_books_on_ckb_transaction_id ON public.account_books USING btree (ckb_transaction_id); +-- +-- Name: index_address_block_snapshots_on_address_id; Type: INDEX; Schema: public; Owner: - +-- + +CREATE INDEX index_address_block_snapshots_on_address_id ON public.address_block_snapshots USING btree (address_id); + + +-- +-- Name: index_address_block_snapshots_on_block_id; Type: INDEX; Schema: public; Owner: - +-- + +CREATE INDEX index_address_block_snapshots_on_block_id ON public.address_block_snapshots USING btree (block_id); + + +-- +-- Name: index_address_block_snapshots_on_block_id_and_address_id; Type: INDEX; Schema: public; Owner: - +-- + +CREATE UNIQUE INDEX index_address_block_snapshots_on_block_id_and_address_id ON public.address_block_snapshots USING btree (block_id, address_id); + + -- -- Name: index_address_dao_transactions_on_ckb_transaction_id; Type: INDEX; Schema: public; Owner: - -- @@ -3373,6 +3488,13 @@ CREATE INDEX index_cell_inputs_on_ckb_transaction_id ON public.cell_inputs USING CREATE INDEX index_cell_inputs_on_previous_cell_output_id ON public.cell_inputs USING btree (previous_cell_output_id); +-- +-- Name: index_cell_inputs_on_previous_tx_hash_and_previous_index; Type: INDEX; Schema: public; Owner: - +-- + +CREATE INDEX index_cell_inputs_on_previous_tx_hash_and_previous_index ON public.cell_inputs USING btree (previous_tx_hash, previous_index); + + -- -- Name: index_cell_outputs_on_address_id_and_status; Type: INDEX; Schema: public; Owner: - -- @@ -3730,6 +3852,13 @@ CREATE INDEX index_pool_transaction_entries_on_tx_hash ON public.pool_transactio CREATE INDEX index_pool_transaction_entries_on_tx_status ON public.pool_transaction_entries USING btree (tx_status); +-- +-- Name: index_reject_reasons_on_ckb_transaction_id; Type: INDEX; Schema: public; Owner: - +-- + +CREATE UNIQUE INDEX index_reject_reasons_on_ckb_transaction_id ON public.reject_reasons USING btree (ckb_transaction_id); + + -- -- Name: index_rolling_avg_block_time_on_timestamp; Type: INDEX; Schema: public; Owner: - -- @@ -4469,6 +4598,11 @@ INSERT INTO "schema_migrations" (version) VALUES ('20230425114436'), ('20230425162318'), ('20230426133543'), -('20230427025007'); +('20230427025007'), +('20230504023535'), +('20230518061651'), +('20230526070328'), +('20230526085258'), +('20230526135653'); diff --git a/lib/scheduler.rb b/lib/scheduler.rb index 70aed5da6..22bb52562 100644 --- a/lib/scheduler.rb +++ b/lib/scheduler.rb @@ -94,4 +94,13 @@ def call_worker(clz) StatisticInfo.default.reset! 
:address_balance_ranking, :miner_ranking, :last_n_days_transaction_fee_rates end +s.every "1h", overlap: false do + puts "update h24 transaction count" + call_worker UpdateH24CkbTransactionsCountOnUdtsWorker +end + +s.every "1h", overlap: false do + CkbTransaction.clean_pending +end + s.join diff --git a/lib/tasks/migration/fill_nrc_721_token.rake b/lib/tasks/migration/fill_nrc_721_token.rake index 467de4cf9..6ba7e1199 100644 --- a/lib/tasks/migration/fill_nrc_721_token.rake +++ b/lib/tasks/migration/fill_nrc_721_token.rake @@ -1,22 +1,27 @@ namespace :migration do desc "Usage: RAILS_ENV=production bundle exec rake migration:update_nrc_721_token_info[factory_code_hash, factory_hash_type, factory_args]" task :update_nrc_721_token_info, [:factory_code_hash, :factory_hash_type, :factory_args] => :environment do |_, args| - factory_cell = NrcFactoryCell.find_by(code_hash: args[:factory_code_hash], hash_type: args[:factory_hash_type], args: args[:factory_args]) + factory_cell = NrcFactoryCell.find_by(code_hash: args[:factory_code_hash], hash_type: args[:factory_hash_type], + args: args[:factory_args]) if factory_cell.nil? puts "No Factory Cell Found!" return end - nrc_721_factory_cell_type = TypeScript.where(code_hash: factory_cell.code_hash, hash_type: factory_cell.hash_type, args: factory_cell.args).last - factory_data = CellOutput.where(type_script_id: nrc_721_factory_cell_type.id, cell_type: "nrc_721_factory").last.data + nrc_721_factory_cell_type = TypeScript.where(code_hash: factory_cell.code_hash, hash_type: factory_cell.hash_type, + args: factory_cell.args).last + factory_data = CellOutput.where(type_script_id: nrc_721_factory_cell_type.id, + cell_type: "nrc_721_factory").last.data parsed_factory_data = CkbUtils.parse_nrc_721_factory_data(factory_data) - factory_cell.update(verified: true, name: parsed_factory_data.name, symbol: parsed_factory_data.symbol, base_token_uri: parsed_factory_data.base_token_uri, extra_data: parsed_factory_data.extra_data) + factory_cell.update(verified: true, name: parsed_factory_data.name, symbol: parsed_factory_data.symbol, + base_token_uri: parsed_factory_data.base_token_uri, extra_data: parsed_factory_data.extra_data) udts = Udt.where(nrc_factory_cell_id: factory_cell.id) udts.each do |udt| udt_account = UdtAccount.where(udt_id: udt.id, udt_type: "nrc_721_token").first udt_account.update(full_name: parsed_factory_data.name, symbol: parsed_factory_data.symbol) - udt.update(full_name: parsed_factory_data.name, symbol: parsed_factory_data.symbol, icon_file: "#{parsed_factory_data.base_token_uri}/#{udt_account.nft_token_id}") + udt.update(full_name: parsed_factory_data.name, symbol: parsed_factory_data.symbol, + icon_file: "#{parsed_factory_data.base_token_uri}/#{udt_account.nft_token_id}") tx_ids = udt.ckb_transactions.pluck(:id) tx_ids.each do |tx_id| @@ -28,14 +33,16 @@ namespace :migration do end TxDisplayInfoGeneratorWorker.new.perform(tx_ids) # update udt transaction page cache - ckb_transactions = udt.ckb_transactions.select(:id, :tx_hash, :block_id, :block_number, :block_timestamp, :is_cellbase, :updated_at).recent.page(1).per(CkbTransaction.default_per_page) + ckb_transactions = udt.ckb_transactions.select(:id, :tx_hash, :block_id, :block_number, :block_timestamp, + :is_cellbase, :updated_at).recent.page(1).per(CkbTransaction.default_per_page) Rails.cache.delete(ckb_transactions.cache_key) # update addresses transaction page cache CkbTransaction.where(id: tx_ids).find_each do |ckb_tx| Address.where(id: ckb_tx.contained_address_ids).find_each do 
|address| - ckb_transactions = address.custom_ckb_transactions.select(:id, :tx_hash, :block_id, :block_number, :block_timestamp, :is_cellbase, :updated_at).recent.page(1).per(CkbTransaction.default_per_page) - $redis.del("#{ckb_transactions.cache_key}/#{address.query_address}") + ckb_transactions = address.custom_ckb_transactions.select(:id, :tx_hash, :block_id, :block_number, + :block_timestamp, :is_cellbase, :updated_at).recent.page(1).per(CkbTransaction.default_per_page) + Rails.cache.delete("#{ckb_transactions.cache_key}/#{address.query_address}") end end end @@ -51,13 +58,17 @@ namespace :migration do nrc_tokens.each do |output| factory_cell = CkbUtils.parse_nrc_721_args(output.type_script.args) type_hash = output.type_script.script_hash - nrc_721_factory_cell_type = TypeScript.where(code_hash: factory_cell.code_hash, hash_type: factory_cell.hash_type, args: factory_cell.args).first - factory_data = CellOutput.where(type_script_id: nrc_721_factory_cell_type.id, cell_type: "nrc_721_factory").last.data + nrc_721_factory_cell_type = TypeScript.where(code_hash: factory_cell.code_hash, + hash_type: factory_cell.hash_type, args: factory_cell.args).first + factory_data = CellOutput.where(type_script_id: nrc_721_factory_cell_type.id, + cell_type: "nrc_721_factory").last.data nft_token_attr = {} if nrc_721_factory_cell_type.present? parsed_factory_data = CkbUtils.parse_nrc_721_factory_data(factory_data) - nrc_721_factory_cell = NrcFactoryCell.find_or_create_by(code_hash: factory_cell.code_hash, hash_type: factory_cell.hash_type, args: factory_cell.args) - nrc_721_factory_cell.update(verified: true, name: parsed_factory_data.name, symbol: parsed_factory_data.symbol, base_token_uri: parsed_factory_data.base_token_uri, extra_data: parsed_factory_data.extra_data) + nrc_721_factory_cell = NrcFactoryCell.find_or_create_by(code_hash: factory_cell.code_hash, + hash_type: factory_cell.hash_type, args: factory_cell.args) + nrc_721_factory_cell.update(verified: true, name: parsed_factory_data.name, symbol: parsed_factory_data.symbol, + base_token_uri: parsed_factory_data.base_token_uri, extra_data: parsed_factory_data.extra_data) nft_token_attr[:full_name] = parsed_factory_data.name nft_token_attr[:symbol] = parsed_factory_data.symbol nft_token_attr[:icon_file] = "#{parsed_factory_data.base_token_uri}/#{factory_cell.token_id}" @@ -68,23 +79,33 @@ namespace :migration do type_hash: type_hash, udt_type: "nrc_721_token", block_timestamp: output.block_timestamp, args: output.type_script.args, code_hash: output.type_script.code_hash, hash_type: output.type_script.hash_type }.merge(nft_token_attr) end - Udt.insert_all(udts_attributes.map! { |attr| attr.merge!(created_at: Time.current, updated_at: Time.current) }) if udts_attributes.present? + if udts_attributes.present? + Udt.insert_all(udts_attributes.map! 
do |attr| + attr.merge!(created_at: Time.current, updated_at: Time.current) + end) + end # update udt account new_udt_accounts_attributes = [] nrc_tokens.live.select(:id, :address_id, :type_hash, :cell_type, :type_script_id).each do |udt_output| address = Address.find(udt_output.address_id) - udt_account = address.udt_accounts.where(type_hash: udt_output.type_hash, udt_type: "nrc_721_token").select(:id, :created_at).first + udt_account = address.udt_accounts.where(type_hash: udt_output.type_hash, udt_type: "nrc_721_token").select(:id, + :created_at).first amount = 0 nft_token_id = CkbUtils.parse_nrc_721_args(udt_output.type_script.args).token_id - udt = Udt.where(type_hash: udt_output.type_hash, udt_type: "nrc_721_token").select(:id, :udt_type, :full_name, :symbol, :decimal, :published, :code_hash, :type_hash, :created_at).take! + udt = Udt.where(type_hash: udt_output.type_hash, udt_type: "nrc_721_token").select(:id, :udt_type, :full_name, + :symbol, :decimal, :published, :code_hash, :type_hash, :created_at).take! if udt_account.blank? new_udt_accounts_attributes << { address_id: udt_output.address_id, udt_type: udt.udt_type, full_name: udt.full_name, symbol: udt.symbol, decimal: udt.decimal, published: udt.published, code_hash: udt.code_hash, type_hash: udt.type_hash, amount: amount, udt_id: udt.id, nft_token_id: nft_token_id } end end - UdtAccount.insert_all(new_udt_accounts_attributes.map! { |attr| attr.merge!(created_at: Time.current, updated_at: Time.current) }) if new_udt_accounts_attributes.present? + if new_udt_accounts_attributes.present? + UdtAccount.insert_all(new_udt_accounts_attributes.map! do |attr| + attr.merge!(created_at: Time.current, updated_at: Time.current) + end) + end puts "done" end diff --git a/lib/tasks/migration/register_udt.rake b/lib/tasks/migration/register_udt.rake index a0a4d232f..9d02e154e 100644 --- a/lib/tasks/migration/register_udt.rake +++ b/lib/tasks/migration/register_udt.rake @@ -5,7 +5,8 @@ class UdtRegister namespace :migration do desc "Usage: RAILS_ENV=production bundle exec rake 'migration:register_udt[nil]'" # mode can be normal or forcebridge - task :register_udt, [:mode, :args, :symbol, :full_name, :decimal, :description, :operator_website] => :environment do |_, args| + task :register_udt, + [:mode, :args, :symbol, :full_name, :decimal, :description, :operator_website] => :environment do |_, args| if args[:mode] == "forcebridge" register_forcebridge_udt elsif args[:mode] == "url" @@ -19,128 +20,136 @@ class UdtRegister private - def register_udts_from_url - assets = assets_from_url - puts "assets_from_url_counts: #{assets.size}" - - non_exist_udt_infos = [] - assets.each do |asset| - args = { args: asset[:sudtArgs], symbol: asset[:symbol], full_name: asset[:name], decimal: asset[:decimal], icon_file: asset[:logoURI] } - puts args - type_script = build_udt_type_script(args) - udt = Udt.find_by(type_hash: type_script.compute_hash) - if udt.blank? - puts "udt not exist, args: #{args}" - non_exist_udt_infos << asset - next - end - register_udt(args) + def register_udts_from_url + assets = assets_from_url + puts "assets_from_url_counts: #{assets.size}" + + non_exist_udt_infos = [] + assets.each do |asset| + args = { + args: asset[:sudtArgs], symbol: asset[:symbol], full_name: asset[:name], decimal: asset[:decimal], + icon_file: asset[:logoURI] } + puts args + type_script = build_udt_type_script(args) + udt = Udt.find_by(type_hash: type_script.compute_hash) + if udt.blank? 
+ puts "udt not exist, args: #{args}" + non_exist_udt_infos << asset + next end + register_udt(args) end + end - def assets_from_url - uri = URI(ENV["ASSET_URL"]) - http = Net::HTTP::Persistent.new - request = Net::HTTP::Get.new(uri) - response = http.request(uri, request) - parse_response(response) - end + def assets_from_url + uri = URI(ENV["ASSET_URL"]) + http = Net::HTTP::Persistent.new + request = Net::HTTP::Get.new(uri) + response = http.request(uri, request) + parse_response(response) + end - def register_forcebridge_udt - puts "forcebridge_asset_counts: #{forcebridge_assets.size}" - non_exist_udt_infos = [] - forcebridge_assets.each do |asset| - next if asset[:network] != "Ethereum" - - args = { args: asset[:info][:shadow][:ident], symbol: asset[:info][:symbol], full_name: asset[:info][:name], decimal: asset[:info][:decimals], icon_file: asset[:info][:logoURI] } - puts args - type_script = build_udt_type_script(args) - udt = Udt.find_by(type_hash: type_script.compute_hash) - if udt.blank? - puts "udt not exist, args: #{args[:args]}" - non_exist_udt_infos << asset[:info] - next - end - register_udt(args) + def register_forcebridge_udt + puts "forcebridge_asset_counts: #{forcebridge_assets.size}" + non_exist_udt_infos = [] + forcebridge_assets.each do |asset| + next if asset[:network] != "Ethereum" + + args = { + args: asset[:info][:shadow][:ident], symbol: asset[:info][:symbol], full_name: asset[:info][:name], + decimal: asset[:info][:decimals], icon_file: asset[:info][:logoURI] } + puts args + type_script = build_udt_type_script(args) + udt = Udt.find_by(type_hash: type_script.compute_hash) + if udt.blank? + puts "udt not exist, args: #{args[:args]}" + non_exist_udt_infos << asset[:info] + next end - puts "non exist udts count: #{non_exist_udt_infos.size}" - puts "non exist udts: #{non_exist_udt_infos}" + register_udt(args) end + puts "non exist udts count: #{non_exist_udt_infos.size}" + puts "non exist udts: #{non_exist_udt_infos}" + end - def forcebridge_assets - uri = URI(ENV["FORCE_BRIDGE_HOST"]) - http = Net::HTTP::Persistent.new - request = Net::HTTP::Post.new(uri) - request.body = { id: SecureRandom.uuid, jsonrpc: "2.0", method: "getAssetList", params: { asset: "all" } }.to_json - request["Content-Type"] = "application/json" - response = http.request(uri, request) - parse_response(response)[:result] + def forcebridge_assets + uri = URI(ENV["FORCE_BRIDGE_HOST"]) + http = Net::HTTP::Persistent.new + request = Net::HTTP::Post.new(uri) + request.body = { id: SecureRandom.uuid, jsonrpc: "2.0", method: "getAssetList", params: { asset: "all" } }.to_json + request["Content-Type"] = "application/json" + response = http.request(uri, request) + parse_response(response)[:result] + end + + def parse_response(response) + if response.code == "200" + JSON.parse(response.body, symbolize_names: true) + else + error_messages = { body: response.body, code: response.code } + raise error_messages end + end - def parse_response(response) - if response.code == "200" - JSON.parse(response.body, symbolize_names: true) - else - error_messages = { body: response.body, code: response.code } - raise error_messages - end + def register_udt(args) + unless params_valid?(args) + puts "params invalid must exists" + return end - def register_udt(args) - unless params_valid?(args) - puts "params invalid must exists" + ApplicationRecord.transaction do + type_script = build_udt_type_script(args) + issuer_address = Address.where(lock_hash: type_script.args).pick(:address_hash) + udt = Udt.find_by(type_hash: 
type_script.compute_hash) + if udt.blank? + puts "udt not exist, args: #{args[:args]}" return end - - ApplicationRecord.transaction do - type_script = build_udt_type_script(args) - issuer_address = Address.where(lock_hash: type_script.args).pick(:address_hash) - udt = Udt.find_by(type_hash: type_script.compute_hash) - if udt.blank? - puts "udt not exist, args: #{args[:args]}" - return - end - udt.update!(code_hash: type_script.code_hash, hash_type: type_script.hash_type, args: type_script.args, symbol: args[:symbol], full_name: args[:full_name], decimal: args[:decimal], description: args[:description], operator_website: args[:operator_website], icon_file: args[:icon_file], issuer_address: issuer_address) - udt.update!(published: true) if args[:icon_file].present? - UdtAccount.where(udt_id: udt.id).update(symbol: udt.symbol, full_name: udt.full_name, decimal: udt.decimal, published: udt.published) - flush_caches(udt) - puts "UDT type_hash: #{udt.type_hash}" - end + udt.update!(code_hash: type_script.code_hash, hash_type: type_script.hash_type, args: type_script.args, + symbol: args[:symbol], full_name: args[:full_name], decimal: args[:decimal], description: args[:description], operator_website: args[:operator_website], icon_file: args[:icon_file], issuer_address: issuer_address) + udt.update!(published: true) if args[:icon_file].present? + UdtAccount.where(udt_id: udt.id).update(symbol: udt.symbol, full_name: udt.full_name, decimal: udt.decimal, + published: udt.published) + flush_caches(udt) + puts "UDT type_hash: #{udt.type_hash}" end + end - def build_udt_type_script(args) - code_hash = CkbSync::Api.instance.mode == "mainnet" ? "0x5e7a36a77e68eecc013dfa2fe6a23f3b6c344b04005808694ae6dd45eea4cfd5" : "0xc5e5dcf215925f7ef4dfaf5f4b4f105bc321c02776d6e7d52a1db3fcd9d011a4" - CKB::Types::Script.new(args: args[:args], code_hash: code_hash, hash_type: "type") - end + def build_udt_type_script(args) + code_hash = CkbSync::Api.instance.mode == "mainnet" ? 
"0x5e7a36a77e68eecc013dfa2fe6a23f3b6c344b04005808694ae6dd45eea4cfd5" : "0xc5e5dcf215925f7ef4dfaf5f4b4f105bc321c02776d6e7d52a1db3fcd9d011a4" + CKB::Types::Script.new(args: args[:args], code_hash: code_hash, hash_type: "type") + end - def flush_caches(udt) - tx_ids = udt.ckb_transactions.pluck(:id) - tx_ids.each do |tx_id| - Rails.cache.delete("normal_tx_display_outputs_previews_false_#{tx_id}") - Rails.cache.delete("normal_tx_display_outputs_previews_true_#{tx_id}") - Rails.cache.delete("normal_tx_display_inputs_previews_false_#{tx_id}") - Rails.cache.delete("normal_tx_display_inputs_previews_true_#{tx_id}") - Rails.cache.delete("TxDisplayInfo/#{tx_id}") - end - TxDisplayInfoGeneratorWorker.new.perform(tx_ids) - # update udt transaction page cache - ckb_transactions = udt.ckb_transactions.select(:id, :tx_hash, :block_id, :block_number, :block_timestamp, :is_cellbase, :updated_at).recent.page(1).per(CkbTransaction.default_per_page) - Rails.cache.delete(ckb_transactions.cache_key) - - # update addresses transaction page cache - CkbTransaction.where(id: tx_ids).find_each do |ckb_tx| - Address.where(id: ckb_tx.contained_address_ids).find_each do |address| - ckb_transactions = address.custom_ckb_transactions.select(:id, :tx_hash, :block_id, :block_number, :block_timestamp, :is_cellbase, :updated_at).recent.page(1).per(CkbTransaction.default_per_page) - $redis.del("#{ckb_transactions.cache_key}/#{address.query_address}") - end + def flush_caches(udt) + tx_ids = udt.ckb_transactions.pluck(:id) + tx_ids.each do |tx_id| + Rails.cache.delete("normal_tx_display_outputs_previews_false_#{tx_id}") + Rails.cache.delete("normal_tx_display_outputs_previews_true_#{tx_id}") + Rails.cache.delete("normal_tx_display_inputs_previews_false_#{tx_id}") + Rails.cache.delete("normal_tx_display_inputs_previews_true_#{tx_id}") + Rails.cache.delete("TxDisplayInfo/#{tx_id}") + end + TxDisplayInfoGeneratorWorker.new.perform(tx_ids) + # update udt transaction page cache + ckb_transactions = udt.ckb_transactions.select(:id, :tx_hash, :block_id, :block_number, :block_timestamp, + :is_cellbase, :updated_at).recent.page(1).per(CkbTransaction.default_per_page) + Rails.cache.delete(ckb_transactions.cache_key) + + # update addresses transaction page cache + CkbTransaction.where(id: tx_ids).find_each do |ckb_tx| + Address.where(id: ckb_tx.contained_address_ids).find_each do |address| + ckb_transactions = address.custom_ckb_transactions.select(:id, :tx_hash, :block_id, :block_number, + :block_timestamp, :is_cellbase, :updated_at).recent.page(1).per(CkbTransaction.default_per_page) + Rails.cache.delete("#{ckb_transactions.cache_key}/#{address.query_address}") end end + end - def params_valid?(args) - return false if args[:args].blank? || args[:symbol].blank? || args[:full_name].blank? || args[:decimal].blank? + def params_valid?(args) + return false if args[:args].blank? || args[:symbol].blank? || args[:full_name].blank? || args[:decimal].blank? 
- true - end + true + end end UdtRegister.new diff --git a/lib/tasks/migration/update_cell_type.rake b/lib/tasks/migration/update_cell_type.rake new file mode 100644 index 000000000..adeb30cd7 --- /dev/null +++ b/lib/tasks/migration/update_cell_type.rake @@ -0,0 +1,34 @@ +namespace :migration do + desc "Usage: RAILS_ENV=production bundle exec rake migration:update_cell_type" + task update_cell_type: :environment do + cota_registry_scripts = TypeScript.where(code_hash: CkbSync::Api.instance.cota_registry_code_hash) + cota_regular_scripts = TypeScript.where(code_hash: CkbSync::Api.instance.cota_regular_code_hash) + total_count = cota_registry_scripts.count + cota_regular_scripts.count + + progress_bar = ProgressBar.create({ total: total_count, format: "%e %B %p%% %c/%C" }) + + cota_registry_scripts.each do |ts| + cell_type = CkbUtils.cell_type(ts, "0x") + CellOutput.where(type_script_id: ts.id).in_batches do |cell_outputs| + cell_outputs.update_all(cell_type: cell_type) + cell_output_ids = cell_outputs.pluck(:id) + CellInput.where(previous_cell_output_id: cell_output_ids).update_all(cell_type: cell_type) + end + + progress_bar.increment + end + + cota_regular_scripts.each do |ts| + cell_type = CkbUtils.cell_type(ts, "0x") + CellOutput.where(type_script_id: ts.id).in_batches do |cell_outputs| + cell_outputs.update_all(cell_type: cell_type) + cell_output_ids = cell_outputs.pluck(:id) + CellInput.where(previous_cell_output_id: cell_output_ids).update_all(cell_type: cell_type) + end + + progress_bar.increment + end + + puts "done" + end +end diff --git a/lib/tasks/migration/update_token_transfer_action.rake b/lib/tasks/migration/update_token_transfer_action.rake new file mode 100644 index 000000000..66f108097 --- /dev/null +++ b/lib/tasks/migration/update_token_transfer_action.rake @@ -0,0 +1,33 @@ +namespace :migration do + desc "Usage: RAILS_ENV=production bundle exec rake migration:update_token_transfer_action" + task update_token_transfer_action: :environment do + total_count = TokenCollection.where(standard: "cota").count + progress_bar = ProgressBar.create({ total: total_count, format: "%e %B %p%% %c/%C" }) + + TokenCollection.where(standard: "cota").find_each do |collection| + collection.transfers.each do |transfer| + block_number = transfer.ckb_transaction.block_number + data = CotaAggregator.instance.get_transactions_by_block_number(block_number) + data["transactions"].each do |t| + action = + case t["tx_type"] + when "mint" + "mint" + when "transfer" + "normal" + end + token_id = t["token_index"].hex + + next if token_id != transfer.item.token_id + next if action == transfer.action + + transfer.update(action: action) + end + end + + progress_bar.increment + rescue => e + puts e + end + end +end diff --git a/lib/websocket.rb b/lib/websocket.rb new file mode 100644 index 000000000..35f778e1d --- /dev/null +++ b/lib/websocket.rb @@ -0,0 +1,67 @@ +require_relative "../config/environment" +require "new_relic/agent" +NewRelic::Agent.manual_start(sync_startup: true) + +Rails.logger = Logger.new(STDERR) +Rails.logger.level = ENV.fetch("LOG_LEVEL") { "info" } +ActiveRecord::Base.logger = Rails.logger + +at_exit do + puts "exiting & clearing" +end + +require "async" +require "async/http" +require "async/websocket" +require "protocol/websocket/json_message" +URL = ENV.fetch("CKB_WS_URL", "http://localhost:28114") +$message_id = 0 + +def subscribe(connection, topic) + $message_id += 1 + message = Protocol::WebSocket::JSONMessage.generate({ + "id": $message_id, + "jsonrpc": "2.0", + "method": 
"subscribe", + "params": [topic] + }) + message.send(connection) + connection.flush +end + +queue = Queue.new + +persister = + Thread.new do + Rails.application.executor.wrap do + loop do + data = queue.pop + + ImportTransactionJob.new.perform(data["transaction"], { + cycles: data["cycles"].hex, + fee: data["fee"].hex, + size: data["size"].hex, + timestamp: data["timestamp"].hex + }) + end + end + end + +Async do |_task| + endpoint = Async::HTTP::Endpoint.parse(URL, alpn_protocols: Async::HTTP::Protocol::HTTP11.names) + + Async::WebSocket::Client.connect(endpoint) do |connection| + subscribe connection, "new_transaction" + + while message = connection.read + message = Protocol::WebSocket::JSONMessage.wrap(message) + res = message.to_h + if res[:method] == "subscribe" + data = JSON.parse res[:params][:result] + # binding.pry + puts data["transaction"]["hash"] + queue.push(data) + end + end + end +end diff --git a/test/controllers/api/v1/address_transactions_controller_test.rb b/test/controllers/api/v1/address_transactions_controller_test.rb index e7157c7eb..5adc1d4c5 100644 --- a/test/controllers/api/v1/address_transactions_controller_test.rb +++ b/test/controllers/api/v1/address_transactions_controller_test.rb @@ -46,7 +46,8 @@ class AddressTransactionsControllerTest < ActionDispatch::IntegrationTest get api_v1_address_transaction_url(address.address_hash), headers: { "Content-Type": "application/vnd.api+json", - "Accept": "application/json" } + "Accept": "application/json" + } assert_equal 406, response.status end @@ -60,7 +61,8 @@ class AddressTransactionsControllerTest < ActionDispatch::IntegrationTest get api_v1_address_transaction_url(address.address_hash), headers: { "Content-Type": "application/vnd.api+json", - "Accept": "application/json" } + "Accept": "application/json" + } assert_equal response_json, response.body end @@ -84,11 +86,23 @@ class AddressTransactionsControllerTest < ActionDispatch::IntegrationTest valid_get api_v1_address_transaction_url(address.address_hash) records_counter = RecordCounters::AddressTransactions.new(address) - options = FastJsonapi::PaginationMetaGenerator.new(request: request, - records: ckb_transactions, page: page, page_size: page_size, records_counter: records_counter).call - - assert_equal CkbTransactionsSerializer.new(ckb_transactions, options.merge(params: { previews: true, address: address })).serialized_json, - response.body + options = FastJsonapi::PaginationMetaGenerator.new( + request: request, + records: ckb_transactions, + page: page, + page_size: page_size, + records_counter: records_counter + ).call + + assert_equal CkbTransactionsSerializer.new( + ckb_transactions, + options.merge( + params: { + previews: true, + address: address + } + ) + ).serialized_json, response.body end test "should return corresponding ckb transactions with given lock hash" do @@ -100,11 +114,23 @@ class AddressTransactionsControllerTest < ActionDispatch::IntegrationTest valid_get api_v1_address_transaction_url(address.lock_hash) records_counter = RecordCounters::AddressTransactions.new(address) - options = FastJsonapi::PaginationMetaGenerator.new(request: request, - records: ckb_transactions, page: page, page_size: page_size, records_counter: records_counter).call - - assert_equal CkbTransactionsSerializer.new(ckb_transactions, options.merge(params: { previews: true, address: address })).serialized_json, - response.body + options = FastJsonapi::PaginationMetaGenerator.new( + request: request, + records: ckb_transactions, + page: page, + page_size: page_size, 
+ records_counter: records_counter + ).call + + assert_equal CkbTransactionsSerializer.new( + ckb_transactions, + options.merge( + params: { + previews: true, + address: address + } + ) + ).serialized_json, response.body end test "should contain right keys in the serialized object when call show" do @@ -114,8 +140,17 @@ class AddressTransactionsControllerTest < ActionDispatch::IntegrationTest response_tx_transaction = json["data"].first - assert_equal %w(block_number block_timestamp display_inputs display_inputs_count display_outputs display_outputs_count income is_cellbase transaction_hash).sort, - response_tx_transaction["attributes"].keys.sort + assert_equal %w( + block_number + block_timestamp + display_inputs + display_inputs_count + display_outputs + display_outputs_count + income + is_cellbase + transaction_hash + ).sort, response_tx_transaction["attributes"].keys.sort end test "should return correct income" do @@ -123,7 +158,8 @@ class AddressTransactionsControllerTest < ActionDispatch::IntegrationTest block = create(:block, :with_block_hash) generated_ckb_transaction = create(:ckb_transaction, block: block, - block_timestamp: "1567131126594", contained_address_ids: [address.id]) + block_timestamp: "1567131126594", + contained_address_ids: [address.id]) create(:cell_output, capacity: 10**8 * 8, ckb_transaction: generated_ckb_transaction, block: generated_ckb_transaction.block, @@ -226,12 +262,16 @@ class AddressTransactionsControllerTest < ActionDispatch::IntegrationTest page = 2 page_size = 10 address = create(:address, :with_transactions, transactions_count: 30) - address_ckb_transactions = address.custom_ckb_transactions.order("block_timestamp desc nulls last, id desc").page(page).per(page_size) - valid_get api_v1_address_transaction_url(address.address_hash), - params: { page: page } + address_ckb_transactions = address.custom_ckb_transactions. + order("block_timestamp desc nulls last, id desc"). + page(page). 
+ per(page_size) + valid_get api_v1_address_transaction_url(address.address_hash), params: { page: page } options = FastJsonapi::PaginationMetaGenerator.new(request: request, - records: address_ckb_transactions, page: page, page_size: page_size).call + records: address_ckb_transactions, + page: page, + page_size: page_size).call response_transaction = CkbTransactionsSerializer.new( address_ckb_transactions, options.merge(params: { previews: true, @@ -253,11 +293,15 @@ class AddressTransactionsControllerTest < ActionDispatch::IntegrationTest records_counter = RecordCounters::AddressTransactions.new(address) options = FastJsonapi::PaginationMetaGenerator.new(request: request, - records: address_ckb_transactions, page: page, page_size: page_size, records_counter: records_counter).call + records: address_ckb_transactions, + page: page, + page_size: page_size, + records_counter: records_counter).call response_transaction = CkbTransactionsSerializer.new( address_ckb_transactions, options.merge(params: { previews: true, - address: address }) + address: address + }) ).serialized_json assert_equal response_transaction, response.body @@ -275,11 +319,15 @@ class AddressTransactionsControllerTest < ActionDispatch::IntegrationTest records_counter = RecordCounters::AddressTransactions.new(address) options = FastJsonapi::PaginationMetaGenerator.new(request: request, - records: address_ckb_transactions, page: page, page_size: page_size, records_counter: records_counter).call + records: address_ckb_transactions, + page: page, + page_size: page_size, + records_counter: records_counter).call response_transaction = CkbTransactionsSerializer.new( address_ckb_transactions, options.merge(params: { previews: true, - address: address }) + address: address + }) ).serialized_json assert_equal response_transaction, response.body @@ -296,11 +344,15 @@ class AddressTransactionsControllerTest < ActionDispatch::IntegrationTest records_counter = RecordCounters::AddressTransactions.new(address) options = FastJsonapi::PaginationMetaGenerator.new(request: request, - records: address_ckb_transactions, page: page, page_size: page_size, records_counter: records_counter).call + records: address_ckb_transactions, + page: page, + page_size: page_size, + records_counter: records_counter).call response_transaction = CkbTransactionsSerializer.new( address_ckb_transactions, options.merge(params: { previews: true, - address: address }) + address: address + }) ).serialized_json assert_equal [], json["data"] @@ -318,9 +370,8 @@ class AddressTransactionsControllerTest < ActionDispatch::IntegrationTest test "should return up to ten display_inputs" do address = create(:address) block = create(:block, :with_block_hash) - ckb_transaction = create(:ckb_transaction, - :with_multiple_inputs_and_outputs, block: block, contained_address_ids: [address.id]) - # address.ckb_transactions << ckb_transaction + create(:ckb_transaction, + :with_multiple_inputs_and_outputs, block: block, contained_address_ids: [address.id]) valid_get api_v1_address_transaction_url(address.address_hash) @@ -335,9 +386,8 @@ class AddressTransactionsControllerTest < ActionDispatch::IntegrationTest test "should return up to ten display_outputs" do address = create(:address) block = create(:block, :with_block_hash) - ckb_transaction = create(:ckb_transaction, - :with_multiple_inputs_and_outputs, block: block, contained_address_ids: [address.id]) - # address.ckb_transactions << ckb_transaction + create(:ckb_transaction, + :with_multiple_inputs_and_outputs, block: block, 
contained_address_ids: [address.id]) valid_get api_v1_address_transaction_url(address.address_hash) @@ -348,6 +398,97 @@ class AddressTransactionsControllerTest < ActionDispatch::IntegrationTest input.key?("from_cellbase") }.uniq end + + test "should get success code when call download csv" do + address = create(:address, :with_transactions) + + valid_get download_csv_api_v1_address_transactions_url(id: address.address_hash) + + assert_response :success + end + + test "should set right content type when call download csv" do + address = create(:address, :with_transactions) + + valid_get download_csv_api_v1_address_transactions_url(id: address.address_hash) + + assert_equal "text/csv; charset=utf-8", response.headers["Content-Type"] + end + + test "should respond with 415 Unsupported Media Type when call download csv Content-Type is wrong" do + address = create(:address, :with_transactions) + + get download_csv_api_v1_address_transactions_url(address.address_hash), + headers: { "Content-Type": "text/plain" } + + assert_equal 415, response.status + end + + test "should respond with error object when call download csv Content-Type is wrong" do + address = create(:address, :with_transactions) + error_object = Api::V1::Exceptions::InvalidContentTypeError.new + response_json = RequestErrorSerializer.new([error_object], + message: error_object.title).serialized_json + + get download_csv_api_v1_address_transactions_url(address.address_hash), + headers: { "Content-Type": "text/plain" } + + assert_equal response_json, response.body + end + + test "should respond with 406 Not Acceptable when call download csv Accept is wrong" do + address = create(:address, :with_transactions) + + get download_csv_api_v1_address_transactions_url(address.address_hash), + headers: { + "Content-Type": "application/vnd.api+json", + "Accept": "application/json" + } + + assert_equal 406, response.status + end + + test "should respond with error object when call download csv Accept is wrong" do + address = create(:address, :with_transactions) + error_object = Api::V1::Exceptions::InvalidAcceptError.new + response_json = RequestErrorSerializer.new([error_object], + message: error_object.title).serialized_json + + get download_csv_api_v1_address_transactions_url(address.address_hash), + headers: { + "Content-Type": "application/vnd.api+json", + "Accept": "application/json" + } + + assert_equal response_json, response.body + end + + test "should return error object when call download csv id is not a address hash" do + error_object = Api::V1::Exceptions::AddressHashInvalidError.new + response_json = RequestErrorSerializer.new([error_object], + message: error_object.title).serialized_json + + valid_get download_csv_api_v1_address_transactions_url(id: "9034fwefwef") + + assert_equal response_json, response.body + end + + test "should get download_csv" do + address = create(:address) + block = create(:block, :with_block_hash) + create(:ckb_transaction, :with_multiple_inputs_and_outputs, block: block, + contained_address_ids: [address.id], + transaction_fee: 1000) + valid_get download_csv_api_v1_address_transactions_url( + id: address.address_hash, + start_date: (Date.today - 2.days).to_s, + end_date: Date.today.to_s + ) + + csv_data = CSV.parse(response.body) + + assert_equal csv_data.length, 16 + end end end end diff --git a/test/controllers/api/v1/block_transactions_controller_test.rb b/test/controllers/api/v1/block_transactions_controller_test.rb index 7b6c48377..f06d92399 100644 --- 
a/test/controllers/api/v1/block_transactions_controller_test.rb +++ b/test/controllers/api/v1/block_transactions_controller_test.rb @@ -80,7 +80,7 @@ class BlockTransactionsControllerTest < ActionDispatch::IntegrationTest valid_get api_v1_block_transaction_url(block.block_hash) - ckb_transactions = block.ckb_transactions.order(:id).page(page).per(page_size) + ckb_transactions = block.ckb_transactions.order('id desc').page(page).per(page_size) records_counter = RecordCounters::BlockTransactions.new(block) options = FastJsonapi::PaginationMetaGenerator.new(request: request, records: ckb_transactions, page: page, page_size: page_size, records_counter: records_counter).call @@ -157,7 +157,7 @@ class BlockTransactionsControllerTest < ActionDispatch::IntegrationTest page = 2 page_size = 10 block = create(:block, :with_ckb_transactions, transactions_count: 30) - block_ckb_transactions = block.ckb_transactions.order(:id).page(page).per(page_size) + block_ckb_transactions = block.ckb_transactions.order('id desc').page(page).per(page_size) valid_get api_v1_block_transaction_url(block.block_hash), params: { page: page } @@ -173,7 +173,7 @@ class BlockTransactionsControllerTest < ActionDispatch::IntegrationTest page = 1 page_size = 12 block = create(:block, :with_ckb_transactions, transactions_count: 15) - block_ckb_transactions = block.ckb_transactions.order(:id).page(page).per(page_size) + block_ckb_transactions = block.ckb_transactions.order('id desc').page(page).per(page_size) valid_get api_v1_block_transaction_url(block.block_hash), params: { page_size: page_size } @@ -186,10 +186,11 @@ class BlockTransactionsControllerTest < ActionDispatch::IntegrationTest end test "should return the corresponding transactions when page and page_size are set" do + CkbTransaction.delete_all page = 2 page_size = 5 block = create(:block, :with_ckb_transactions, transactions_count: 30) - block_ckb_transactions = block.ckb_transactions.order(:id).page(page).per(page_size) + block_ckb_transactions = block.ckb_transactions.order('id desc').page(page).per(page_size) valid_get api_v1_block_transaction_url(block.block_hash), params: { page: page, page_size: page_size } @@ -204,7 +205,7 @@ class BlockTransactionsControllerTest < ActionDispatch::IntegrationTest page = 2 page_size = 5 block = create(:block) - block_ckb_transactions = block.ckb_transactions.order(:id).page(page).per(page_size) + block_ckb_transactions = block.ckb_transactions.order('id desc').page(page).per(page_size) valid_get api_v1_block_transaction_url(block.block_hash), params: { page: page, page_size: page_size } diff --git a/test/controllers/api/v1/blocks_controller_test.rb b/test/controllers/api/v1/blocks_controller_test.rb index 14f1679a9..08b5a6a48 100644 --- a/test/controllers/api/v1/blocks_controller_test.rb +++ b/test/controllers/api/v1/blocks_controller_test.rb @@ -299,6 +299,23 @@ class BlocksControllerTest < ActionDispatch::IntegrationTest assert_equal response_json, response.body end + + test "should get download_csv, by date" do + + block = create :block, timestamp: Time.now.to_i * 1000, miner_hash: 'ckb1qzda0cr08m85hc8jlnfp3zer7xulejywt49kt2rr0vthywaa50xwsqwau7qpcpealv6xf3a37pdcq6ajhwuyaxgs5g955' + + valid_get download_csv_api_v1_blocks_url(start_date: 1.day.ago.strftime("%Y-%m-%d")) + assert_response :success + end + + test "should get download_csv, by block number" do + + block = create :block, timestamp: Time.now.to_i * 1000, miner_hash: 'ckb1qzda0cr08m85hc8jlnfp3zer7xulejywt49kt2rr0vthywaa50xwsqwau7qpcpealv6xf3a37pdcq6ajhwuyaxgs5g955' + 
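        # The CSV export endpoints added in this patch filter either by a date
        # window (start_date/end_date, as in the test above) or by a block-number
        # window (start_number, as below). An illustrative date-window request,
        # with parameter names assumed to match the address and UDT CSV tests:
        #
        #   valid_get download_csv_api_v1_blocks_url(
        #     start_date: 2.days.ago.strftime("%Y-%m-%d"),
        #     end_date: Date.today.to_s
        #   )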
+ valid_get download_csv_api_v1_blocks_url(start_number: block.number - 1) + puts response.body + assert_response :success + end end end end diff --git a/test/controllers/api/v1/ckb_transactions_controller_test.rb b/test/controllers/api/v1/ckb_transactions_controller_test.rb index 444a347d6..20984d54c 100644 --- a/test/controllers/api/v1/ckb_transactions_controller_test.rb +++ b/test/controllers/api/v1/ckb_transactions_controller_test.rb @@ -3,7 +3,10 @@ module Api module V1 class CkbTransactionsControllerTest < ActionDispatch::IntegrationTest - TransactionKeys = %w(block_number transaction_hash block_timestamp transaction_fee bytes version display_inputs display_outputs is_cellbase income witnesses cell_deps header_deps tx_status detailed_message largest_tx largest_tx_in_epoch cycles max_cycles_in_epoch max_cycles).sort + TransactionKeys = %w( + block_number transaction_hash block_timestamp transaction_fee bytes version display_inputs + display_outputs is_cellbase income witnesses cell_deps header_deps tx_status detailed_message largest_tx largest_tx_in_epoch cycles max_cycles_in_epoch max_cycles + ).sort setup do CkbSync::Api.any_instance.stubs(:get_blockchain_info).returns(OpenStruct.new(chain: "ckb_testnet")) end @@ -45,7 +48,8 @@ class CkbTransactionsControllerTest < ActionDispatch::IntegrationTest test "should respond with 406 Not Acceptable when Accept is wrong" do ckb_transaction = create(:ckb_transaction) - get api_v1_ckb_transaction_url(ckb_transaction.tx_hash), headers: { "Content-Type": "application/vnd.api+json", "Accept": "application/json" } + get api_v1_ckb_transaction_url(ckb_transaction.tx_hash), + headers: { "Content-Type": "application/vnd.api+json", "Accept": "application/json" } assert_equal 406, response.status end @@ -55,7 +59,8 @@ class CkbTransactionsControllerTest < ActionDispatch::IntegrationTest error_object = Api::V1::Exceptions::InvalidAcceptError.new response_json = RequestErrorSerializer.new([error_object], message: error_object.title).serialized_json - get api_v1_ckb_transaction_url(ckb_transaction.tx_hash), headers: { "Content-Type": "application/vnd.api+json", "Accept": "application/json" } + get api_v1_ckb_transaction_url(ckb_transaction.tx_hash), + headers: { "Content-Type": "application/vnd.api+json", "Accept": "application/json" } assert_equal response_json, response.body end @@ -95,14 +100,14 @@ class CkbTransactionsControllerTest < ActionDispatch::IntegrationTest assert_equal CkbTransactionSerializer.new(ckb_transaction).serialized_json, response.body end - # test "should return pool tx when tx is in the pool" do - # tx = create(:pool_transaction_entry) + test "should return pool tx when tx is in the pool" do + tx = create(:pending_transaction) - # valid_get api_v1_ckb_transaction_url(tx.tx_hash) + valid_get api_v1_ckb_transaction_url(tx.tx_hash) - # expected_response = CkbTransactionSerializer.new(tx).serialized_json - # assert_equal expected_response, response.body - # end + expected_response = CkbTransactionSerializer.new(tx).serialized_json + assert_equal expected_response, response.body + end test "should contain right keys in the serialized object when call show" do create(:table_record_count, :block_counter) @@ -134,7 +139,9 @@ class CkbTransactionsControllerTest < ActionDispatch::IntegrationTest valid_get api_v1_ckb_transaction_url(ckb_transaction.tx_hash) assert_equal 15, json["data"].dig("attributes", "display_inputs").count - assert_equal [true], json["data"].dig("attributes", "display_inputs").map { |input| input.key?("from_cellbase") 
}.uniq + assert_equal [true], json["data"].dig("attributes", "display_inputs").map { |input| + input.key?("from_cellbase") + }.uniq end test "should return all display_outputs" do @@ -144,7 +151,9 @@ class CkbTransactionsControllerTest < ActionDispatch::IntegrationTest valid_get api_v1_ckb_transaction_url(ckb_transaction.tx_hash) assert_equal 15, json["data"].dig("attributes", "display_outputs").count - assert_equal [false], json["data"].dig("attributes", "display_outputs").map { |input| input.key?("from_cellbase") }.uniq + assert_equal [false], json["data"].dig("attributes", "display_outputs").map { |input| + input.key?("from_cellbase") + }.uniq end test "should get success code when call index" do @@ -175,7 +184,8 @@ class CkbTransactionsControllerTest < ActionDispatch::IntegrationTest end test "should respond with 406 Not Acceptable when call index and Accept is wrong" do - get api_v1_ckb_transactions_url, headers: { "Content-Type": "application/vnd.api+json", "Accept": "application/json" } + get api_v1_ckb_transactions_url, + headers: { "Content-Type": "application/vnd.api+json", "Accept": "application/json" } assert_equal 406, response.status end @@ -184,7 +194,8 @@ class CkbTransactionsControllerTest < ActionDispatch::IntegrationTest error_object = Api::V1::Exceptions::InvalidAcceptError.new response_json = RequestErrorSerializer.new([error_object], message: error_object.title).serialized_json - get api_v1_ckb_transactions_url, headers: { "Content-Type": "application/vnd.api+json", "Accept": "application/json" } + get api_v1_ckb_transactions_url, + headers: { "Content-Type": "application/vnd.api+json", "Accept": "application/json" } assert_equal response_json, response.body end @@ -209,7 +220,8 @@ class CkbTransactionsControllerTest < ActionDispatch::IntegrationTest first_ckb_transaction = json["data"].first last_ckb_transaction = json["data"].last - assert_operator first_ckb_transaction.dig("attributes", "block_timestamp"), :>=, last_ckb_transaction.dig("attributes", "block_timestamp") + assert_operator first_ckb_transaction.dig("attributes", "block_timestamp"), :>=, + last_ckb_transaction.dig("attributes", "block_timestamp") end test "should contain right keys in the serialized object" do @@ -219,7 +231,8 @@ class CkbTransactionsControllerTest < ActionDispatch::IntegrationTest valid_get api_v1_ckb_transactions_url response_ckb_transaction = json["data"].first - assert_equal %w(block_number transaction_hash block_timestamp capacity_involved live_cell_changes).sort, response_ckb_transaction["attributes"].keys.sort + assert_equal %w(block_number transaction_hash block_timestamp capacity_involved live_cell_changes).sort, + response_ckb_transaction["attributes"].keys.sort end test "should return the corresponding number of ckb transactions " do @@ -330,7 +343,8 @@ class CkbTransactionsControllerTest < ActionDispatch::IntegrationTest valid_get api_v1_ckb_transactions_url, params: { page: page, page_size: page_size } records_counter = RecordCounters::Transactions.new - options = FastJsonapi::PaginationMetaGenerator.new(request: request, records: ckb_transactions, page: page, page_size: page_size, records_counter: records_counter).call + options = FastJsonapi::PaginationMetaGenerator.new(request: request, records: ckb_transactions, page: page, + page_size: page_size, records_counter: records_counter).call response_ckb_transactions = CkbTransactionListSerializer.new(ckb_transactions, options).serialized_json assert_equal response_ckb_transactions, response.body end @@ -344,9 +358,11 @@ 
class CkbTransactionsControllerTest < ActionDispatch::IntegrationTest valid_post api_v1_query_ckb_transactions_url, params: { address: address.address_hash } records_counter = RecordCounters::AddressTransactions.new(address) - options = FastJsonapi::PaginationMetaGenerator.new(request: request, records: ckb_transactions, page: page, page_size: page_size, records_counter: records_counter).call + options = FastJsonapi::PaginationMetaGenerator.new(request: request, records: ckb_transactions, page: page, + page_size: page_size, records_counter: records_counter).call - assert_equal CkbTransactionsSerializer.new(ckb_transactions, options.merge(params: { previews: true, address: address })).serialized_json, response.body + assert_equal CkbTransactionsSerializer.new(ckb_transactions, options.merge(params: { previews: true, address: address })).serialized_json, + response.body end end end diff --git a/test/controllers/api/v1/udts_controller_test.rb b/test/controllers/api/v1/udts_controller_test.rb index dee660d6c..2cb62543e 100644 --- a/test/controllers/api/v1/udts_controller_test.rb +++ b/test/controllers/api/v1/udts_controller_test.rb @@ -162,13 +162,22 @@ class UdtsControllerTest < ActionDispatch::IntegrationTest create(:udt, addresses_count: 2) create(:udt, addresses_count: 3) - valid_get api_v1_udts_url + valid_get api_v1_udts_url(addresses_count_desc: true) records = Udt.sudt.order(addresses_count: :desc).page(1).per(25) options = FastJsonapi::PaginationMetaGenerator.new(request: request, records: records, page: 1, page_size: 25).call expected_udts = UdtSerializer.new(records, options).serialized_json assert_equal expected_udts, response.body end + + + test "should get download_csv" do + udt = create(:udt, :with_transactions, published: true) + + valid_get download_csv_api_v1_udts_url(id: udt.type_hash, start_date: Time.now.strftime("%Y-%m-%d"), end_date: Time.now.strftime("%Y-%m-%d")) + + assert_response :success + end end end end diff --git a/test/controllers/api/v2/nft/transfers_controller_test.rb b/test/controllers/api/v2/nft/transfers_controller_test.rb index 74a424037..af9e45e1f 100644 --- a/test/controllers/api/v2/nft/transfers_controller_test.rb +++ b/test/controllers/api/v2/nft/transfers_controller_test.rb @@ -34,6 +34,18 @@ def setup assert_equal @token_transfer.id, JSON.parse(response.body)['id'] end + test "should get download_csv, by date" do + + valid_get download_csv_api_v2_nft_transfers_url(collection_id: @token_collection.sn, start_date: 1.day.ago.strftime("%Y-%m-%d")) + assert_response :success + end + + test "should get download_csv, by block_number" do + + valid_get download_csv_api_v2_nft_transfers_url(collection_id: @token_collection.sn, start_number: 8, end_number: 12) + assert_response :success + end + end end end diff --git a/test/controllers/api/v2/pending_transactions_controller_test.rb b/test/controllers/api/v2/pending_transactions_controller_test.rb index 4d8ad0459..ce0f651bc 100644 --- a/test/controllers/api/v2/pending_transactions_controller_test.rb +++ b/test/controllers/api/v2/pending_transactions_controller_test.rb @@ -5,17 +5,21 @@ module V2 class PendingTransactionsControllerTest < ActionDispatch::IntegrationTest def setup super - create(:pool_transaction_entry, display_inputs: [{capacity: 100}, {capacity: 200}]) - create(:pool_transaction_entry, display_inputs: [{capacity: 300}]) - create(:pool_transaction_entry, display_inputs: [{capacity: 400}]) + create(:pending_transaction) + create(:pending_transaction) + create(:pending_transaction) end test "should 
get index " do get api_v2_pending_transactions_url assert_response :success body = JSON.parse response.body - assert_equal 3, body['data'].size - assert_equal 3, body['meta']['total'] - #assert_equal 400, body['data'][0]['capacity_of_inputs'] + assert_equal 3, body["data"].size + assert_equal 3, body["meta"]["total"] + # assert_equal 400, body['data'][0]['capacity_of_inputs'] + end + test "should get count" do + get count_api_v2_pending_transactions_url + assert_response :success end end end diff --git a/test/controllers/api/v2/statistics_controller_test.rb b/test/controllers/api/v2/statistics_controller_test.rb index fb88d46ef..42c7746c6 100644 --- a/test/controllers/api/v2/statistics_controller_test.rb +++ b/test/controllers/api/v2/statistics_controller_test.rb @@ -17,10 +17,10 @@ class StatisticsControllerTest < ActionDispatch::IntegrationTest transaction_fee: 30000, bytes: 20, confirmation_time: confirmation_time, block: block, tx_hash: tx_hash1 create :ckb_transaction, created_at: Time.at(tx_created_at), transaction_fee: 30000, bytes: 20, confirmation_time: confirmation_time, block: block, tx_hash: tx_hash2 - create :pool_transaction_entry, transaction_fee: 30000, bytes: 20, - tx_hash: tx_hash1 - create :pool_transaction_entry, transaction_fee: 13000, bytes: 15, - tx_hash: tx_hash2 + create :pending_transaction, transaction_fee: 30000, bytes: 20, + tx_hash: tx_hash1 + create :pending_transaction, transaction_fee: 13000, bytes: 15, + tx_hash: tx_hash2 end test "should get transaction_fees, for committed tx" do @@ -31,7 +31,7 @@ class StatisticsControllerTest < ActionDispatch::IntegrationTest "Content-Type": "application/vnd.api+json", "Accept": "application/json" } data = JSON.parse(response.body) - assert_equal PoolTransactionEntry.all.size, + assert_equal CkbTransaction.tx_pending.count, data["transaction_fee_rates"].size assert data["transaction_fee_rates"].first["fee_rate"] > 0 assert data["transaction_fee_rates"].first["confirmation_time"] > 0 @@ -47,7 +47,7 @@ class StatisticsControllerTest < ActionDispatch::IntegrationTest "Content-Type": "application/vnd.api+json", "Accept": "application/json" } data = JSON.parse(response.body) - assert_equal PoolTransactionEntry.all.size, + assert_equal CkbTransaction.tx_pending.count, data["transaction_fee_rates"].size assert data["pending_transaction_fee_rates"].first["fee_rate"] > 0 diff --git a/test/controllers/api/v2/transactions_controller_test.rb b/test/controllers/api/v2/transactions_controller_test.rb new file mode 100644 index 000000000..a3c720a84 --- /dev/null +++ b/test/controllers/api/v2/transactions_controller_test.rb @@ -0,0 +1,29 @@ +require "test_helper" + +module Api + module V2 + class TransactionsControllerTest < ActionDispatch::IntegrationTest + + test "should get raw" do + + ckb_transaction = create :ckb_transaction, :with_multiple_inputs_and_outputs + + valid_get raw_api_v2_transaction_url(id: ckb_transaction.tx_hash) + assert_response :success + end + + test "should get details" do + ckb_transaction = create :ckb_transaction, :with_multiple_inputs_and_outputs + + valid_get details_api_v2_transaction_url(id: ckb_transaction.tx_hash) + assert_response :success + json = JSON.parse response.body + + assert_equal 30, json['data'].size + assert_equal "-800000000.0", json['data'][0]['transfers'][0]['capacity'] + assert_equal 'simple_transfer', json['data'][0]['transfers'][0]['transfer_type'] + end + + end + end +end diff --git a/test/factories/address_block_snapshot.rb b/test/factories/address_block_snapshot.rb new file mode 
100644 index 000000000..c004f0dfb --- /dev/null +++ b/test/factories/address_block_snapshot.rb @@ -0,0 +1,15 @@ +FactoryBot.define do + factory :address_block_snapshot do + final_state do + { + balance: 10_000_000 * 10**8, + balance_occupied: 1_000_000 * 10**8, + live_cells_count: 100, + ckb_transactions_count: 233, + dao_transactions_count: 39 + } + end + address + block + end +end diff --git a/test/factories/block.rb b/test/factories/block.rb index 32a1e456e..145268889 100644 --- a/test/factories/block.rb +++ b/test/factories/block.rb @@ -5,8 +5,7 @@ number { 10 } parent_hash { "0xcba2d1a70602a1def80efbd59629c37a9d6c36f9de7a8ed6d1ca4f76389365e1" } nonce { 1757392074788233522 } - # FIXME: the timestamp in blocks table is in milliseconds but here is in seconds - timestamp { Faker::Time.between(from: 2.days.ago, to: Date.today).to_i } + timestamp { Faker::Time.between(from: 2.days.ago, to: Date.today).to_i * 1000 } transactions_root { "0xe08894ef0ed80481448f7a584438a76b6bdbea178c02b4c3b40863d75c5aed3c" } proposals_hash { "0x0000000000000000000000000000000000000000000000000000000000000000" } uncles_count { 1 } diff --git a/test/factories/cell_input.rb b/test/factories/cell_input.rb index 0fe59b4f6..fdc7a35e5 100644 --- a/test/factories/cell_input.rb +++ b/test/factories/cell_input.rb @@ -1,6 +1,5 @@ FactoryBot.define do factory :cell_input do - previous_output {} trait :from_cellbase do before(:create) do |cell_input, _evaluator| ckb_transaction = create(:ckb_transaction, :with_cell_output_and_lock_script) @@ -13,7 +12,11 @@ before(:create) do |cell_input, _evaluator| ckb_transaction = create(:ckb_transaction, :with_cell_output_and_lock_script) previous_output_id = ckb_transaction.cell_outputs.where(cell_index: 0).pick(:id) - cell_input.update(ckb_transaction: ckb_transaction, previous_cell_output_id: previous_output_id, block: ckb_transaction.block) + cell_input.update( + ckb_transaction: ckb_transaction, + previous_cell_output_id: previous_output_id, + block: ckb_transaction.block + ) end end @@ -21,12 +24,17 @@ before(:create) do |cell_input, _evaluator| ckb_transaction = create(:ckb_transaction, :with_cell_output_and_lock_and_type_script) previous_output_id = ckb_transaction.cell_outputs.where(cell_index: 0).pick(:id) - cell_input.update(ckb_transaction: ckb_transaction, previous_cell_output_id: previous_output_id, block: ckb_transaction.block) + cell_input.update( + ckb_transaction: ckb_transaction, + previous_cell_output_id: previous_output_id, + block: ckb_transaction.block + ) end end after(:create) do |cell_input, _evaluator| if cell_input.previous_cell_output_id.blank? && cell_input.previous_output.present? && cell_input.previous_output["tx_hash"] != CellOutput::SYSTEM_TX_HASH - output = CellOutput.find_by(tx_hash: cell_input.previous_output["tx_hash"], cell_index: cell_input.previous_output["index"]) + output = CellOutput.find_by(tx_hash: cell_input.previous_output["tx_hash"], + cell_index: cell_input.previous_output["index"]) unless output tx = create :ckb_transaction, :with_single_output, tx_hash: cell_input.previous_output["tx_hash"] output = tx.cell_outputs.first diff --git a/test/factories/cell_output.rb b/test/factories/cell_output.rb index 6ff0356ca..b27d8550d 100644 --- a/test/factories/cell_output.rb +++ b/test/factories/cell_output.rb @@ -34,6 +34,8 @@ cell.address.increment! :balance_occupied, cell.capacity if cell.occupied? cell.address.increment! 
:live_cells_count end + AccountBook.upsert({ ckb_transaction_id: cell.ckb_transaction_id, address_id: cell.address_id }, + unique_by: [:address_id, :ckb_transaction_id]) end end end diff --git a/test/factories/ckb_transaction.rb b/test/factories/ckb_transaction.rb index b8586a2e3..c7f1a2627 100644 --- a/test/factories/ckb_transaction.rb +++ b/test/factories/ckb_transaction.rb @@ -48,7 +48,13 @@ end factory :pending_transaction do + tx_hash { "0x#{SecureRandom.hex(32)}" } tx_status { "pending" } + block_number { nil } + block_timestamp { nil } + transaction_fee { 100 } + version { 0 } + bytes { 2000 } end trait :with_cell_output_and_lock_script do @@ -78,11 +84,20 @@ trait :with_cell_output_and_lock_and_type_script do after(:create) do |ckb_transaction, _evaluator| output1 = create(:cell_output, capacity: 10**8 * 8, - ckb_transaction: ckb_transaction, block: ckb_transaction.block, tx_hash: ckb_transaction.tx_hash, cell_index: 0) + ckb_transaction: ckb_transaction, + block: ckb_transaction.block, + tx_hash: ckb_transaction.tx_hash, + cell_index: 0) output2 = create(:cell_output, capacity: 10**8 * 8, - ckb_transaction: ckb_transaction, block: ckb_transaction.block, tx_hash: ckb_transaction.tx_hash, cell_index: 1) + ckb_transaction: ckb_transaction, + block: ckb_transaction.block, + tx_hash: ckb_transaction.tx_hash, + cell_index: 1) output3 = create(:cell_output, capacity: 10**8 * 8, - ckb_transaction: ckb_transaction, block: ckb_transaction.block, tx_hash: ckb_transaction.tx_hash, cell_index: 2) + ckb_transaction: ckb_transaction, + block: ckb_transaction.block, + tx_hash: ckb_transaction.tx_hash, + cell_index: 2) lock1 = create(:lock_script, cell_output_id: output1.id) type1 = create(:type_script, cell_output: output1) lock2 = create(:lock_script, cell_output_id: output2.id) @@ -116,7 +131,10 @@ trait :with_single_output do after(:create) do |ckb_transaction| create(:cell_output, capacity: 10**8 * 8, - ckb_transaction: ckb_transaction, block: ckb_transaction.block, tx_hash: ckb_transaction.tx_hash, cell_index: 0) + ckb_transaction: ckb_transaction, + block: ckb_transaction.block, + tx_hash: ckb_transaction.tx_hash, + cell_index: 0) end end @@ -124,12 +142,18 @@ after(:create) do |ckb_transaction| 15.times do |index| create(:cell_output, capacity: 10**8 * 8, - ckb_transaction: ckb_transaction, block: ckb_transaction.block, tx_hash: ckb_transaction.tx_hash, cell_index: index) + ckb_transaction: ckb_transaction, + block: ckb_transaction.block, + tx_hash: ckb_transaction.tx_hash, + cell_index: index) previous_output = { tx_hash: ckb_transaction.tx_hash, index: 1 } create(:cell_input, previous_output: previous_output, - ckb_transaction: ckb_transaction, block: ckb_transaction.block) + ckb_transaction: ckb_transaction, + block: ckb_transaction.block) end end end + factory :cell_base_transaction do + end end end diff --git a/test/factories/lock_script.rb b/test/factories/lock_script.rb index bf1a45e32..becd0133a 100644 --- a/test/factories/lock_script.rb +++ b/test/factories/lock_script.rb @@ -3,6 +3,6 @@ address hash_type { "type" } args { "0x#{SecureRandom.hex(20)}" } - code_hash { "0x#{SecureRandom.hex(32)}" } + code_hash { Settings.secp_cell_type_hash } end end diff --git a/test/factories/pool_transaction_entry.rb b/test/factories/pool_transaction_entry.rb deleted file mode 100644 index 2a472e449..000000000 --- a/test/factories/pool_transaction_entry.rb +++ /dev/null @@ -1,5 +0,0 @@ -FactoryBot.define do - factory :pool_transaction_entry do - tx_hash { "0x#{SecureRandom.hex(32)}" } - end -end 
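With the pool_transaction_entry factory deleted above, tests build pool transactions through the new :pending_transaction factory on :ckb_transaction instead. A minimal sketch of the migration, assuming FactoryBot syntax methods are available as in the rest of the suite; the attribute overrides are illustrative and mirror the statistics controller test:

    # Old: create(:pool_transaction_entry, transaction_fee: 30_000, bytes: 20)
    # New: a CkbTransaction row with tx_status "pending" and nil block fields.
    pending_tx = create(:pending_transaction, transaction_fee: 30_000, bytes: 20)

    # Pool transactions are now queried via the tx_pending scope on CkbTransaction
    # rather than through the removed PoolTransactionEntry model.
    CkbTransaction.tx_pending.count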
diff --git a/test/factories/reject_reasons.rb b/test/factories/reject_reasons.rb new file mode 100644 index 000000000..590a76678 --- /dev/null +++ b/test/factories/reject_reasons.rb @@ -0,0 +1,6 @@ +FactoryBot.define do + factory :reject_reason do + ckb_transaction { nil } + message { "MyText" } + end +end diff --git a/test/factories/token_collection.rb b/test/factories/token_collection.rb index 8b37889ff..b190f0248 100644 --- a/test/factories/token_collection.rb +++ b/test/factories/token_collection.rb @@ -10,4 +10,3 @@ sn {"sn-#{SecureRandom.hex(32)}"} end end - diff --git a/test/factories/token_item.rb b/test/factories/token_item.rb index 74ad5deb3..236accfc7 100644 --- a/test/factories/token_item.rb +++ b/test/factories/token_item.rb @@ -6,4 +6,3 @@ status { 1 } end end - diff --git a/test/factories/token_transfer.rb b/test/factories/token_transfer.rb index d2828f9d9..39ae48a1e 100644 --- a/test/factories/token_transfer.rb +++ b/test/factories/token_transfer.rb @@ -3,5 +3,3 @@ action { :normal } end end - - diff --git a/test/factories/udt_transaction.rb b/test/factories/udt_transaction.rb new file mode 100644 index 000000000..bfb3d2ec0 --- /dev/null +++ b/test/factories/udt_transaction.rb @@ -0,0 +1,6 @@ +FactoryBot.define do + factory :udt_transaction do + udt + ckb_transaction + end +end diff --git a/test/jobs/import_transaction_job_test.rb b/test/jobs/import_transaction_job_test.rb new file mode 100644 index 000000000..dd535afe2 --- /dev/null +++ b/test/jobs/import_transaction_job_test.rb @@ -0,0 +1,138 @@ +require "test_helper" + +class ImportTransactionJobTest < ActiveJob::TestCase + # test "the truth" do + # assert true + # end + + # setup the previous cell outputs and contract that the raw tx requires + setup do + end + + test "import normal ckb transaction" do + @cell_base_transaction = create :ckb_transaction, :with_single_output + @cell_base = @cell_base_transaction.cell_outputs.first + @raw_tx = { + "cell_deps" => + [ + { + "dep_type" => "code", + "out_point" => { + "index" => "0x3", + "tx_hash" => "0x8f8c79eb6671709633fe6a46de93c0fedc9c1b8a6527a18d3983879542635c9f" } }, + { + "dep_type" => "code", + "out_point" => { + "index" => "0x1", + "tx_hash" => "0x8f8c79eb6671709633fe6a46de93c0fedc9c1b8a6527a18d3983879542635c9f" } } + ], + "hash" => "0xf8de3bb47d055cdf460d93a2a6e1b05f7432f9777c8c474abf4eec1d4aee5d37", + "header_deps" => [], + "inputs" => [ + { + "previous_output" => { + "index" => CkbUtils.int_to_hex(@cell_base.cell_index), + "tx_hash" => @cell_base_transaction.tx_hash + }, + "since" => "0x0" + } + ], + "outputs" => + [ + { + "capacity" => CkbUtils.int_to_hex(10**8 * 4), + "lock" => { + "args" => "0x57ccb07be6875f61d93636b0ee11b675494627d2", + "code_hash" => "0x9bd7e06f3ecf4be0f2fcd2188b23f1b9fcc88e5d4b65a8637b17723bbda3cce8", + "hash_type" => "type" + }, + "type" => nil + }, + { + "capacity" => CkbUtils.int_to_hex(10**8 * 4 - 1), + "lock" => { + "args" => "0x64257f00b6b63e987609fa9be2d0c86d351020fb", + "code_hash" => "0x9bd7e06f3ecf4be0f2fcd2188b23f1b9fcc88e5d4b65a8637b17723bbda3cce8", + "hash_type" => "type" + }, + "type" => nil + } + ], + "outputs_data" => [], + "version" => "0x0", + "witnesses" => [ + "0x55f49d7979ba246aa2f05a6e9afd25a23dc39ed9085a0b1e33b6b3bb80d34dbd4031a04ea389d6d8ff5604828889aa06a827e930a7e89411b80f6c3e1404951f00" + ] + } + ImportTransactionJob.new.perform @raw_tx + assert_equal 2, CkbTransaction.count + assert_equal 1, CellInput.count + assert_equal 3, CellOutput.count + assert_equal 4, Address.count + assert_equal 4, AccountBook.count + end + 
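The test above calls ImportTransactionJob.new.perform directly with a raw transaction hash in the node's JSON structure (cell_deps, inputs, outputs, outputs_data, witnesses, ...). A brief usage sketch, assuming ImportTransactionJob is a standard ActiveJob (the test subclasses ActiveJob::TestCase); perform_later here is the generic ActiveJob enqueue call, not something introduced by this patch:

    # Run the import inline, exactly as the test does:
    ImportTransactionJob.new.perform(raw_tx)

    # Or enqueue it for background processing; ActiveJob serializes the plain
    # string-keyed hash as the job argument.
    ImportTransactionJob.perform_later(raw_tx)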
+ test "import transaction which wants to consume non-exists cells" do + # this will halt the import process, only leave a pending transaction + raw_tx = { + "cell_deps" => + [ + { + "dep_type" => "code", + "out_point" => { + "index" => "0x3", + "tx_hash" => "0x8f8c79eb6671709633fe6a46de93c0fedc9c1b8a6527a18d3983879542635c9f" } }, + { + "dep_type" => "code", + "out_point" => { + "index" => "0x1", + "tx_hash" => "0x8f8c79eb6671709633fe6a46de93c0fedc9c1b8a6527a18d3983879542635c9f" } } + ], + "hash" => "0xf8de3bb47d055cdf460d93a2a6e1b05f7432f9777c8c474abf4eec1d4aee5d37", + "header_deps" => [], + "inputs" => [ + { + "previous_output" => { + "index" => "0x0", + "tx_hash" => "0x519c09b28e1170b8ee89523b75965dae2f7dd209e88c98008286e996bad46e07" + }, + "since" => "0x0" + } + ], + "outputs" => + [ + { + "capacity" => CkbUtils.int_to_hex(10**8 * 4), + "lock" => { + "args" => "0x57ccb07be6875f61d93636b0ee11b675494627d2", + "code_hash" => "0x9bd7e06f3ecf4be0f2fcd2188b23f1b9fcc88e5d4b65a8637b17723bbda3cce8", + "hash_type" => "type" + }, + "type" => nil + }, + { + "capacity" => CkbUtils.int_to_hex(10**8 * 4 - 1), + "lock" => { + "args" => "0x64257f00b6b63e987609fa9be2d0c86d351020fb", + "code_hash" => "0x9bd7e06f3ecf4be0f2fcd2188b23f1b9fcc88e5d4b65a8637b17723bbda3cce8", + "hash_type" => "type" + }, + "type" => nil + } + ], + "outputs_data" => [], + "version" => "0x0", + "witnesses" => [ + "0x55f49d7979ba246aa2f05a6e9afd25a23dc39ed9085a0b1e33b6b3bb80d34dbd4031a04ea389d6d8ff5604828889aa06a827e930a7e89411b80f6c3e1404951f00" + ] + } + + assert_difference -> { CkbTransaction.count } => 1, + -> { CellInput.count } => 1, + -> { CellOutput.count } => 2, + -> { AccountBook.count } => 2, + -> { Address.count } => 2 do + ImportTransactionJob.new.perform raw_tx + end + end +end diff --git a/test/jobs/revert_block_job_test.rb b/test/jobs/revert_block_job_test.rb new file mode 100644 index 000000000..c66bff1e5 --- /dev/null +++ b/test/jobs/revert_block_job_test.rb @@ -0,0 +1,34 @@ +require "test_helper" + +class RevertBlockJobTest < ActiveJob::TestCase + setup do + @address = create(:address) + first_block = create(:block) + parent_block = create(:block, parent_hash: first_block.hash, address_ids: [@address.id], number: 11) + @local_block = create(:block, parent_hash: parent_block.hash, address_ids: [@address.id], number: 12) + _first_block_snapshot = create(:address_block_snapshot, block: first_block, block_number: first_block.number, + address: @address) + @local_block_snapshot = create(:address_block_snapshot, block: @local_block, block_number: @local_block.number, + address: @address) + @address.update(@local_block_snapshot.final_state) + @parent_block_snapshot = create(:address_block_snapshot, block: parent_block, block_number: parent_block.number, + address: @address) + end + test "rollback address final_state with parent block" do + assert_equal @address.reload.live_cells_count, @local_block_snapshot.final_state["live_cells_count"] + assert_equal @address.reload.ckb_transactions_count, @local_block_snapshot.final_state["ckb_transactions_count"] + assert_equal @address.reload.dao_transactions_count, @local_block_snapshot.final_state["dao_transactions_count"] + assert_equal @address.reload.balance, @local_block_snapshot.final_state["balance"] + assert_equal @address.reload.balance_occupied, @local_block_snapshot.final_state["balance_occupied"] + + RevertBlockJob.new(@local_block).update_address_balance_and_ckb_transactions_count(@local_block) + + assert_equal @address.reload.live_cells_count, 
@parent_block_snapshot.final_state["live_cells_count"] + assert_equal @address.reload.ckb_transactions_count, @parent_block_snapshot.final_state["ckb_transactions_count"] + assert_equal @address.reload.dao_transactions_count, @parent_block_snapshot.final_state["dao_transactions_count"] + assert_equal @address.reload.balance, @parent_block_snapshot.final_state["balance"] + assert_equal @address.reload.balance_occupied, @parent_block_snapshot.final_state["balance_occupied"] + + assert_nil AddressBlockSnapshot.find_by(id: @local_block_snapshot.id) + end +end diff --git a/test/models/ckb_sync/dao_events_test.rb b/test/models/ckb_sync/dao_events_test.rb index fdf35d4f1..37f08d42e 100644 --- a/test/models/ckb_sync/dao_events_test.rb +++ b/test/models/ckb_sync/dao_events_test.rb @@ -70,7 +70,9 @@ class DaoEventsTest < ActiveSupport::TestCase deposit_to_dao_events = Block.find_by(number: node_block.header.number).dao_events.where(event_type: "withdraw_from_dao") assert_equal ["processed"], deposit_to_dao_events.pluck(:status).uniq - assert_equal %w(block_id ckb_transaction_id address_id contract_id event_type value status block_timestamp), deposit_to_dao_events.first.attribute_names.reject { |attribute| attribute.in?(%w(created_at updated_at id)) } + assert_equal %w(block_id ckb_transaction_id address_id contract_id event_type value status block_timestamp), deposit_to_dao_events.first.attribute_names.reject { |attribute| + attribute.in?(%w(created_at updated_at id)) + } end end @@ -102,7 +104,9 @@ class DaoEventsTest < ActiveSupport::TestCase tx = fake_dao_interest_transaction(node_block) withdraw_amount = tx.cell_outputs.nervos_dao_withdrawing.first.capacity - assert_difference -> { DaoContract.default_contract.reload.claimed_compensation }, "0x174876ebe8".hex - withdraw_amount do + assert_difference -> { + DaoContract.default_contract.reload.claimed_compensation + }, "0x174876ebe8".hex - withdraw_amount do node_data_processor.process_block(node_block) end @@ -136,9 +140,11 @@ class DaoEventsTest < ActiveSupport::TestCase block1 = create(:block, :with_block_hash, number: DEFAULT_NODE_BLOCK_NUMBER - 2) tx1 = create(:ckb_transaction, block: block1) block2 = create(:block, :with_block_hash, number: DEFAULT_NODE_BLOCK_NUMBER - 1) - deposit_block = create(:block, :with_block_hash, number: DEFAULT_NODE_BLOCK_NUMBER - 5, dao: "0x1c3a5eac4286070025e0edf5ca8823001c957f5b5000000000e3bad4847a0100") + deposit_block = create(:block, :with_block_hash, number: DEFAULT_NODE_BLOCK_NUMBER - 5, + dao: "0x1c3a5eac4286070025e0edf5ca8823001c957f5b5000000000e3bad4847a0100") deposit_tx = create(:ckb_transaction, block: deposit_block) - deposit_block1 = create(:block, :with_block_hash, number: DEFAULT_NODE_BLOCK_NUMBER - 6, dao: "0x185369bb078607007224be7987882300517774e04400000000e3bad4847a0100") + deposit_block1 = create(:block, :with_block_hash, number: DEFAULT_NODE_BLOCK_NUMBER - 6, + dao: "0x185369bb078607007224be7987882300517774e04400000000e3bad4847a0100") deposit_tx1 = create(:ckb_transaction, block: deposit_block1) tx2 = create(:ckb_transaction, block: block2) tx3 = create(:ckb_transaction, block: block2) @@ -149,22 +155,37 @@ class DaoEventsTest < ActiveSupport::TestCase input_address3 = create(:address) input_address4 = create(:address) input_address5 = create(:address) - create(:cell_output, ckb_transaction: deposit_tx, block: deposit_block, capacity: 50000 * 10**8, occupied_capacity: 61 * 10**8, tx_hash: deposit_tx.tx_hash, cell_index: 0, address: input_address1, cell_type: "nervos_dao_deposit", dao: 
"0x1c3a5eac4286070025e0edf5ca8823001c957f5b5000000000e3bad4847a0100") - create(:cell_output, ckb_transaction: deposit_tx, block: deposit_block, capacity: 40000 * 10**8, occupied_capacity: 61 * 10**8, tx_hash: deposit_tx.tx_hash, cell_index: 1, address: input_address1, cell_type: "nervos_dao_deposit", dao: "0x1c3a5eac4286070025e0edf5ca8823001c957f5b5000000000e3bad4847a0100") - create(:cell_input, ckb_transaction: tx1, block: block1, previous_output: { tx_hash: deposit_tx.tx_hash, index: 0 }) - create(:cell_input, ckb_transaction: tx2, block: block2, previous_output: { tx_hash: deposit_tx.tx_hash, index: 0 }) - create(:cell_input, ckb_transaction: tx2, block: block2, previous_output: { tx_hash: deposit_tx.tx_hash, index: 1 }) - create(:cell_output, ckb_transaction: tx1, block: block1, capacity: 50000 * 10**8, tx_hash: tx1.tx_hash, cell_index: 0, address: input_address1, cell_type: "nervos_dao_withdrawing", dao: "0x28ef3c7ff3860700d88b1a61958923008ae424cd7200000000e3bad4847a0100", occupied_capacity: 6100000000) - create(:cell_output, ckb_transaction: tx2, block: block2, capacity: 60000 * 10**8, tx_hash: tx2.tx_hash, cell_index: 1, address: input_address2, cell_type: "nervos_dao_withdrawing", dao: "0x2cd631702e870700b3df08d7d889230036f787487e00000000e3bad4847a0100", occupied_capacity: 6100000000) - create(:cell_output, ckb_transaction: tx3, block: block2, capacity: 70000 * 10**8, tx_hash: tx3.tx_hash, cell_index: 2, address: input_address3, occupied_capacity: 6100000000) - - create(:cell_output, ckb_transaction: deposit_tx1, block: deposit_block1, capacity: 50000 * 10**8, occupied_capacity: 61 * 10**8, tx_hash: deposit_tx1.tx_hash, cell_index: 0, address: input_address4, cell_type: "nervos_dao_deposit", dao: "0x1c3a5eac4286070025e0edf5ca8823001c957f5b5000000000e3bad4847a0100") - create(:cell_output, ckb_transaction: deposit_tx1, block: deposit_block1, capacity: 40000 * 10**8, occupied_capacity: 61 * 10**8, tx_hash: deposit_tx1.tx_hash, cell_index: 1, address: input_address5, cell_type: "nervos_dao_deposit", dao: "0x1c3a5eac4286070025e0edf5ca8823001c957f5b5000000000e3bad4847a0100") - create(:cell_input, ckb_transaction: tx4, block: block1, previous_output: { tx_hash: deposit_tx1.tx_hash, index: 0 }) - create(:cell_input, ckb_transaction: tx5, block: block2, previous_output: { tx_hash: deposit_tx1.tx_hash, index: 1 }) - create(:cell_output, ckb_transaction: tx4, block: block1, capacity: 150000 * 10**8, tx_hash: tx4.tx_hash, cell_index: 0, address: input_address4, cell_type: "nervos_dao_withdrawing", dao: "0x28ef3c7ff3860700d88b1a61958923008ae424cd7200000000e3bad4847a0100", occupied_capacity: 6100000000) - create(:cell_output, ckb_transaction: tx5, block: block2, capacity: 60000 * 10**8, tx_hash: tx5.tx_hash, cell_index: 0, address: input_address5, cell_type: "nervos_dao_withdrawing", dao: "0x2cd631702e870700b3df08d7d889230036f787487e00000000e3bad4847a0100", occupied_capacity: 6100000000) - header = CKB::Types::BlockHeader.new(compact_target: "0x1000", hash: "0x#{SecureRandom.hex(32)}", number: DEFAULT_NODE_BLOCK_NUMBER, parent_hash: "0x#{SecureRandom.hex(32)}", nonce: 1757392074788233522, timestamp: CkbUtils.time_in_milliseconds(Time.current), transactions_root: "0x#{SecureRandom.hex(32)}", proposals_hash: "0x#{SecureRandom.hex(32)}", extra_hash: "0x#{SecureRandom.hex(32)}", version: 0, epoch: 1, dao: "0x01000000000000000000c16ff286230000a3a65e97fd03000057c138586f0000") + create(:cell_output, ckb_transaction: deposit_tx, block: deposit_block, capacity: 50000 * 10**8, + occupied_capacity: 61 * 
10**8, tx_hash: deposit_tx.tx_hash, cell_index: 0, address: input_address1, cell_type: "nervos_dao_deposit", dao: "0x1c3a5eac4286070025e0edf5ca8823001c957f5b5000000000e3bad4847a0100") + create(:cell_output, ckb_transaction: deposit_tx, block: deposit_block, capacity: 40000 * 10**8, + occupied_capacity: 61 * 10**8, tx_hash: deposit_tx.tx_hash, cell_index: 1, address: input_address1, cell_type: "nervos_dao_deposit", dao: "0x1c3a5eac4286070025e0edf5ca8823001c957f5b5000000000e3bad4847a0100") + create(:cell_input, ckb_transaction: tx1, block: block1, + previous_output: { tx_hash: deposit_tx.tx_hash, index: 0 }) + create(:cell_input, ckb_transaction: tx2, block: block2, + previous_output: { tx_hash: deposit_tx.tx_hash, index: 0 }) + create(:cell_input, ckb_transaction: tx2, block: block2, + previous_output: { tx_hash: deposit_tx.tx_hash, index: 1 }) + create(:cell_output, ckb_transaction: tx1, block: block1, capacity: 50000 * 10**8, tx_hash: tx1.tx_hash, + cell_index: 0, address: input_address1, cell_type: "nervos_dao_withdrawing", dao: "0x28ef3c7ff3860700d88b1a61958923008ae424cd7200000000e3bad4847a0100", occupied_capacity: 6100000000) + create(:cell_output, ckb_transaction: tx2, block: block2, capacity: 60000 * 10**8, tx_hash: tx2.tx_hash, + cell_index: 1, address: input_address2, cell_type: "nervos_dao_withdrawing", dao: "0x2cd631702e870700b3df08d7d889230036f787487e00000000e3bad4847a0100", occupied_capacity: 6100000000) + create(:cell_output, ckb_transaction: tx3, block: block2, capacity: 70000 * 10**8, tx_hash: tx3.tx_hash, + cell_index: 2, address: input_address3, occupied_capacity: 6100000000) + + create(:cell_output, ckb_transaction: deposit_tx1, block: deposit_block1, capacity: 50000 * 10**8, + occupied_capacity: 61 * 10**8, tx_hash: deposit_tx1.tx_hash, cell_index: 0, address: input_address4, cell_type: "nervos_dao_deposit", dao: "0x1c3a5eac4286070025e0edf5ca8823001c957f5b5000000000e3bad4847a0100") + create(:cell_output, ckb_transaction: deposit_tx1, block: deposit_block1, capacity: 40000 * 10**8, + occupied_capacity: 61 * 10**8, tx_hash: deposit_tx1.tx_hash, cell_index: 1, address: input_address5, cell_type: "nervos_dao_deposit", dao: "0x1c3a5eac4286070025e0edf5ca8823001c957f5b5000000000e3bad4847a0100") + create(:cell_input, ckb_transaction: tx4, block: block1, + previous_output: { tx_hash: deposit_tx1.tx_hash, index: 0 }) + create(:cell_input, ckb_transaction: tx5, block: block2, + previous_output: { tx_hash: deposit_tx1.tx_hash, index: 1 }) + create(:cell_output, ckb_transaction: tx4, block: block1, capacity: 150000 * 10**8, tx_hash: tx4.tx_hash, + cell_index: 0, address: input_address4, cell_type: "nervos_dao_withdrawing", dao: "0x28ef3c7ff3860700d88b1a61958923008ae424cd7200000000e3bad4847a0100", occupied_capacity: 6100000000) + create(:cell_output, ckb_transaction: tx5, block: block2, capacity: 60000 * 10**8, tx_hash: tx5.tx_hash, + cell_index: 0, address: input_address5, cell_type: "nervos_dao_withdrawing", dao: "0x2cd631702e870700b3df08d7d889230036f787487e00000000e3bad4847a0100", occupied_capacity: 6100000000) + header = CKB::Types::BlockHeader.new(compact_target: "0x1000", hash: "0x#{SecureRandom.hex(32)}", + number: DEFAULT_NODE_BLOCK_NUMBER, parent_hash: "0x#{SecureRandom.hex(32)}", nonce: 1757392074788233522, timestamp: CkbUtils.time_in_milliseconds(Time.current), transactions_root: "0x#{SecureRandom.hex(32)}", proposals_hash: "0x#{SecureRandom.hex(32)}", extra_hash: "0x#{SecureRandom.hex(32)}", version: 0, epoch: 1, dao: 
"0x01000000000000000000c16ff286230000a3a65e97fd03000057c138586f0000") inputs = [ CKB::Types::Input.new(previous_output: CKB::Types::OutPoint.new(tx_hash: tx1.tx_hash, index: 0)), CKB::Types::Input.new(previous_output: CKB::Types::OutPoint.new(tx_hash: tx2.tx_hash, index: 1)), @@ -174,9 +195,12 @@ class DaoEventsTest < ActiveSupport::TestCase CKB::Types::Input.new(previous_output: CKB::Types::OutPoint.new(tx_hash: tx4.tx_hash, index: 0)), CKB::Types::Input.new(previous_output: CKB::Types::OutPoint.new(tx_hash: tx5.tx_hash, index: 0)) ] - lock1 = CKB::Types::Script.new(code_hash: Settings.secp_cell_type_hash, hash_type: "type", args: "0x#{SecureRandom.hex(20)}") - lock2 = CKB::Types::Script.new(code_hash: Settings.secp_cell_type_hash, hash_type: "type", args: "0x#{SecureRandom.hex(20)}") - lock3 = CKB::Types::Script.new(code_hash: Settings.secp_cell_type_hash, hash_type: "type", args: "0x#{SecureRandom.hex(20)}") + lock1 = CKB::Types::Script.new(code_hash: Settings.secp_cell_type_hash, hash_type: "type", + args: "0x#{SecureRandom.hex(20)}") + lock2 = CKB::Types::Script.new(code_hash: Settings.secp_cell_type_hash, hash_type: "type", + args: "0x#{SecureRandom.hex(20)}") + lock3 = CKB::Types::Script.new(code_hash: Settings.secp_cell_type_hash, hash_type: "type", + args: "0x#{SecureRandom.hex(20)}") outputs = [ CKB::Types::Output.new(capacity: 50000 * 10**8, lock: lock1), CKB::Types::Output.new(capacity: 60000 * 10**8, lock: lock2), @@ -187,17 +211,25 @@ class DaoEventsTest < ActiveSupport::TestCase CKB::Types::Output.new(capacity: 60000 * 10**8, lock: lock2), CKB::Types::Output.new(capacity: 40000 * 10**8, lock: lock3) ] - miner_lock = CKB::Types::Script.new(code_hash: Settings.secp_cell_type_hash, hash_type: "type", args: "0x#{SecureRandom.hex(20)}") + miner_lock = CKB::Types::Script.new(code_hash: Settings.secp_cell_type_hash, hash_type: "type", + args: "0x#{SecureRandom.hex(20)}") cellbase_inputs = [ - CKB::Types::Input.new(previous_output: CKB::Types::OutPoint.new(tx_hash: "0x0000000000000000000000000000000000000000000000000000000000000000", index: 4294967295), since: 3000) + CKB::Types::Input.new( + previous_output: CKB::Types::OutPoint.new( + tx_hash: "0x0000000000000000000000000000000000000000000000000000000000000000", index: 4294967295 + ), since: 3000 + ) ] cellbase_outputs = [ CKB::Types::Output.new(capacity: 200986682127, lock: miner_lock) ] transactions = [ - CKB::Types::Transaction.new(hash: "0x#{SecureRandom.hex(32)}", cell_deps: [], header_deps: [], inputs: cellbase_inputs, outputs: cellbase_outputs, outputs_data: %w[0x], witnesses: ["0x590000000c00000055000000490000001000000030000000310000009bd7e06f3ecf4be0f2fcd2188b23f1b9fcc88e5d4b65a8637b17723bbda3cce801140000003954acece65096bfa81258983ddb83915fc56bd800000000"]), - CKB::Types::Transaction.new(hash: "0x#{SecureRandom.hex(32)}", cell_deps: [], header_deps: [block1.block_hash], inputs: inputs, outputs: outputs, outputs_data: %w[0x 0x 0x], witnesses: ["0x5d0000000c00000055000000490000001000000030000000310000009bd7e06f3ecf4be0f2fcd2188b23f1b9fcc88e5d4b65a8637b17723bbda3cce801140000003954acece65096bfa81258983ddb83915fc56bd804000000123456780000000000000000"]), - CKB::Types::Transaction.new(hash: "0x#{SecureRandom.hex(32)}", cell_deps: [], header_deps: [block1.block_hash], inputs: inputs1, outputs: outputs1, outputs_data: %w[0x 0x 0x], witnesses: 
["0x5d0000000c00000055000000490000001000000030000000310000009bd7e06f3ecf4be0f2fcd2188b23f1b9fcc88e5d4b65a8637b17723bbda3cce801140000003954acece65096bfa81258983ddb83915fc56bd804000000123456780000000000000000"]) + CKB::Types::Transaction.new(hash: "0x#{SecureRandom.hex(32)}", cell_deps: [], header_deps: [], + inputs: cellbase_inputs, outputs: cellbase_outputs, outputs_data: %w[0x], witnesses: ["0x590000000c00000055000000490000001000000030000000310000009bd7e06f3ecf4be0f2fcd2188b23f1b9fcc88e5d4b65a8637b17723bbda3cce801140000003954acece65096bfa81258983ddb83915fc56bd800000000"]), + CKB::Types::Transaction.new(hash: "0x#{SecureRandom.hex(32)}", cell_deps: [], header_deps: [block1.block_hash], + inputs: inputs, outputs: outputs, outputs_data: %w[0x 0x 0x], witnesses: ["0x5d0000000c00000055000000490000001000000030000000310000009bd7e06f3ecf4be0f2fcd2188b23f1b9fcc88e5d4b65a8637b17723bbda3cce801140000003954acece65096bfa81258983ddb83915fc56bd804000000123456780000000000000000"]), + CKB::Types::Transaction.new(hash: "0x#{SecureRandom.hex(32)}", cell_deps: [], header_deps: [block1.block_hash], + inputs: inputs1, outputs: outputs1, outputs_data: %w[0x 0x 0x], witnesses: ["0x5d0000000c00000055000000490000001000000030000000310000009bd7e06f3ecf4be0f2fcd2188b23f1b9fcc88e5d4b65a8637b17723bbda3cce801140000003954acece65096bfa81258983ddb83915fc56bd804000000123456780000000000000000"]) ] node_block = CKB::Types::Block.new(uncles: [], proposals: [], transactions: transactions, header: header) block = node_data_processor.process_block(node_block) @@ -214,7 +246,7 @@ class DaoEventsTest < ActiveSupport::TestCase DaoContract.default_contract.update(total_deposit: 100000000000000, depositors_count: 1) DaoCompensationCalculator.any_instance.stubs(:call).returns(100800000000) node_block = fake_node_block("0x3307186493c5da8b91917924253a5ffd35231151649d0c7e2941aa8801815063") - create(:block, :with_block_hash, number: node_block.header.number - 1) + create(:block, :with_block_hash, number: node_block.header.number - 1, timestamp: 1557282351075) target_address = nil VCR.use_cassette("blocks/#{DEFAULT_NODE_BLOCK_NUMBER}") do tx = fake_dao_withdraw_transaction(node_block) @@ -242,7 +274,7 @@ class DaoEventsTest < ActiveSupport::TestCase DaoCompensationCalculator.any_instance.stubs(:call).returns(100800000000) DaoContract.default_contract.update(total_deposit: 100000000000000, depositors_count: 1) node_block = fake_node_block("0x3307186493c5da8b91917924253a5ffd35231151649d0c7e2941aa8801815063") - create(:block, :with_block_hash, number: node_block.header.number - 1) + create(:block, :with_block_hash, number: node_block.header.number - 1, timestamp: 1557282351075) VCR.use_cassette("blocks/#{DEFAULT_NODE_BLOCK_NUMBER}") do tx = fake_dao_withdraw_transaction(node_block) output = tx.cell_outputs.first @@ -269,7 +301,7 @@ class DaoEventsTest < ActiveSupport::TestCase DaoCompensationCalculator.any_instance.stubs(:call).returns(1000) DaoContract.default_contract.update(total_deposit: 100000000000000, depositors_count: 1) node_block = fake_node_block("0x3307186493c5da8b91917924253a5ffd35231151649d0c7e2941aa8801815063") - create(:block, :with_block_hash, number: node_block.header.number - 1) + create(:block, :with_block_hash, number: node_block.header.number - 1, timestamp: 1557282351075) VCR.use_cassette("blocks/#{DEFAULT_NODE_BLOCK_NUMBER}") do tx = fake_dao_withdraw_transaction(node_block) output = tx.cell_outputs.first @@ -296,7 +328,7 @@ class DaoEventsTest < ActiveSupport::TestCase DaoContract.default_contract.update(total_deposit: 
100000000000000, depositors_count: 1) DaoCompensationCalculator.any_instance.stubs(:call).returns(1000) node_block = fake_node_block("0x3307186493c5da8b91917924253a5ffd35231151649d0c7e2941aa8801815063") - create(:block, :with_block_hash, number: node_block.header.number - 1) + create(:block, :with_block_hash, timestamp: 1557282351075, number: node_block.header.number - 1) VCR.use_cassette("blocks/#{DEFAULT_NODE_BLOCK_NUMBER}") do tx = fake_dao_interest_transaction(node_block) @@ -324,7 +356,7 @@ class DaoEventsTest < ActiveSupport::TestCase DaoContract.default_contract.update(total_deposit: 100000000000000, depositors_count: 1) DaoCompensationCalculator.any_instance.stubs(:call).returns(1000) node_block = fake_node_block("0x3307186493c5da8b91917924253a5ffd35231151649d0c7e2941aa8801815063") - create(:block, :with_block_hash, number: node_block.header.number - 1) + create(:block, :with_block_hash, number: node_block.header.number - 1, timestamp: 1557282351075) target_address = nil VCR.use_cassette("blocks/#{DEFAULT_NODE_BLOCK_NUMBER}") do tx = fake_dao_interest_transaction(node_block) @@ -352,7 +384,7 @@ class DaoEventsTest < ActiveSupport::TestCase DaoCompensationCalculator.any_instance.stubs(:call).returns(1000) DaoContract.default_contract.update(total_deposit: 100000000000000, depositors_count: 1) node_block = fake_node_block("0x3307186493c5da8b91917924253a5ffd35231151649d0c7e2941aa8801815063") - create(:block, :with_block_hash, number: node_block.header.number - 1) + create(:block, :with_block_hash, number: node_block.header.number - 1, timestamp: 1557282351075) VCR.use_cassette("blocks/#{DEFAULT_NODE_BLOCK_NUMBER}") do tx = fake_dao_withdraw_transaction(node_block) output = tx.cell_outputs.first @@ -387,7 +419,9 @@ class DaoEventsTest < ActiveSupport::TestCase deposit_to_dao_events = Block.find_by(number: node_block.header.number).dao_events.where(event_type: "withdraw_from_dao") assert_equal ["processed"], deposit_to_dao_events.pluck(:status).uniq - assert_equal %w(block_id ckb_transaction_id address_id contract_id event_type value status block_timestamp), deposit_to_dao_events.first.attribute_names.reject { |attribute| attribute.in?(%w(created_at updated_at id)) } + assert_equal %w(block_id ckb_transaction_id address_id contract_id event_type value status block_timestamp), deposit_to_dao_events.first.attribute_names.reject { |attribute| + attribute.in?(%w(created_at updated_at id)) + } end end @@ -472,7 +506,8 @@ class DaoEventsTest < ActiveSupport::TestCase init_deposit_transactions_count = 2 init_withdraw_transactions_count = 1 init_total_depositors_count = 2 - dao_contract.update(total_deposit: init_total_deposit, depositors_count: init_depositors_count, claimed_compensation: init_interest_granted, deposit_transactions_count: init_deposit_transactions_count, withdraw_transactions_count: init_withdraw_transactions_count, total_depositors_count: init_total_depositors_count) + dao_contract.update(total_deposit: init_total_deposit, depositors_count: init_depositors_count, + claimed_compensation: init_interest_granted, deposit_transactions_count: init_deposit_transactions_count, withdraw_transactions_count: init_withdraw_transactions_count, total_depositors_count: init_total_depositors_count) prepare_node_data(HAS_UNCLES_BLOCK_NUMBER) local_block = Block.find_by(number: HAS_UNCLES_BLOCK_NUMBER) local_block.update(block_hash: "0x419c632366c8eb9635acbb39ea085f7552ae62e1fdd480893375334a0f37d1bx") @@ -508,7 +543,7 @@ class DaoEventsTest < ActiveSupport::TestCase test "should revert address 
dao deposit when block is invalid and there is dao cell" do CkbSync::Api.any_instance.stubs(:calculate_dao_maximum_withdraw).returns("0x2faf0be8") node_block = fake_node_block - create(:block, :with_block_hash, number: node_block.header.number - 1) + create(:block, :with_block_hash, number: node_block.header.number - 1, timestamp: 1557282351075) target_address = nil VCR.use_cassette("blocks/#{DEFAULT_NODE_BLOCK_NUMBER}") do tx = fake_dao_deposit_transaction(node_block) @@ -536,7 +571,7 @@ class DaoEventsTest < ActiveSupport::TestCase test "should revert dao contract total deposit when block is invalid and there is dao cell" do CkbSync::Api.any_instance.stubs(:calculate_dao_maximum_withdraw).returns("0x2faf0be8") node_block = fake_node_block - create(:block, :with_block_hash, number: node_block.header.number - 1) + create(:block, :with_block_hash, number: node_block.header.number - 1, timestamp: 1557282351075) VCR.use_cassette("blocks/#{DEFAULT_NODE_BLOCK_NUMBER}") do fake_dao_deposit_transaction(node_block) node_data_processor.process_block(node_block) @@ -558,7 +593,7 @@ class DaoEventsTest < ActiveSupport::TestCase test "should revert dao contract deposit transactions count when block is invalid and there is dao cell" do CkbSync::Api.any_instance.stubs(:calculate_dao_maximum_withdraw).returns("0x2faf0be8") node_block = fake_node_block - create(:block, :with_block_hash, number: node_block.header.number - 1) + create(:block, :with_block_hash, number: node_block.header.number - 1, timestamp: 1557282351075) VCR.use_cassette("blocks/#{DEFAULT_NODE_BLOCK_NUMBER}") do fake_dao_deposit_transaction(node_block) node_data_processor.process_block(node_block) @@ -580,7 +615,7 @@ class DaoEventsTest < ActiveSupport::TestCase test "should revert dao contract depositors count when block is invalid and there is dao cell" do CkbSync::Api.any_instance.stubs(:calculate_dao_maximum_withdraw).returns("0x2faf0be8") node_block = fake_node_block - create(:block, :with_block_hash, number: node_block.header.number - 1) + create(:block, :with_block_hash, number: node_block.header.number - 1, timestamp: 1557282351075) VCR.use_cassette("blocks/#{DEFAULT_NODE_BLOCK_NUMBER}") do fake_dao_deposit_transaction(node_block) node_data_processor.process_block(node_block) @@ -696,12 +731,15 @@ class DaoEventsTest < ActiveSupport::TestCase deposit_to_dao_events = Block.find_by(number: node_block.header.number).dao_events.where(event_type: "new_dao_depositor") assert_equal ["processed"], deposit_to_dao_events.pluck(:status).uniq - assert_equal %w(block_id ckb_transaction_id address_id contract_id event_type value status block_timestamp), deposit_to_dao_events.first.attribute_names.reject { |attribute| attribute.in?(%w(created_at updated_at id)) } + assert_equal %w(block_id ckb_transaction_id address_id contract_id event_type value status block_timestamp), deposit_to_dao_events.first.attribute_names.reject { |attribute| + attribute.in?(%w(created_at updated_at id)) + } end end test "#process_block should not update dao contract total depositors count when depositors is already has been recorded" do - DaoContract.default_contract.update(total_deposit: 100000000000000, depositors_count: 1, total_depositors_count: 1) + DaoContract.default_contract.update(total_deposit: 100000000000000, depositors_count: 1, + total_depositors_count: 1) CkbSync::Api.any_instance.stubs(:calculate_dao_maximum_withdraw).returns("0x2faf0be8") node_block = fake_node_block("0x3307186493c5da8b91917924253a5ffd35231151649d0c7e2941aa8801815063") @@ -711,13 
+749,18 @@ class DaoEventsTest < ActiveSupport::TestCase address = Address.find_or_create_address(lock, node_block.header.timestamp, lock_script.id) address.update(dao_deposit: 100000 * 10**8) block = create(:block, :with_block_hash) - ckb_transaction1 = create(:ckb_transaction, tx_hash: "0x498315db9c7ba144cca74d2e9122ac9b3a3da1641b2975ae321d91ec34f1c0e3", block: block) - ckb_transaction2 = create(:ckb_transaction, tx_hash: "0x598315db9c7ba144cca74d2e9122ac9b3a3da1641b2975ae321d91ec34f1c0e3", block: block) - create(:cell_output, ckb_transaction: ckb_transaction1, cell_index: 1, tx_hash: "0x498315db9c7ba144cca74d2e9122ac9b3a3da1641b2975ae321d91ec34f1c0e3", block: block, capacity: 10**8 * 1000, address: address) - create(:cell_output, ckb_transaction: ckb_transaction2, cell_index: 2, tx_hash: "0x598315db9c7ba144cca74d2e9122ac9b3a3da1641b2975ae321d91ec34f1c0e3", block: block, capacity: 10**8 * 1000, address: address) + ckb_transaction1 = create(:ckb_transaction, + tx_hash: "0x498315db9c7ba144cca74d2e9122ac9b3a3da1641b2975ae321d91ec34f1c0e3", block: block) + ckb_transaction2 = create(:ckb_transaction, + tx_hash: "0x598315db9c7ba144cca74d2e9122ac9b3a3da1641b2975ae321d91ec34f1c0e3", block: block) + create(:cell_output, ckb_transaction: ckb_transaction1, cell_index: 1, + tx_hash: "0x498315db9c7ba144cca74d2e9122ac9b3a3da1641b2975ae321d91ec34f1c0e3", block: block, capacity: 10**8 * 1000, address: address) + create(:cell_output, ckb_transaction: ckb_transaction2, cell_index: 2, + tx_hash: "0x598315db9c7ba144cca74d2e9122ac9b3a3da1641b2975ae321d91ec34f1c0e3", block: block, capacity: 10**8 * 1000, address: address) tx1 = node_block.transactions.first output1 = tx1.outputs.first - output1.type = CKB::Types::Script.new(args: "0xb2e61ff569acf041b3c2c17724e2379c581eeac3", hash_type: "type", code_hash: Settings.dao_type_hash) + output1.type = CKB::Types::Script.new(args: "0xb2e61ff569acf041b3c2c17724e2379c581eeac3", hash_type: "type", + code_hash: Settings.dao_type_hash) output1.capacity = 10**8 * 1000 tx1.outputs << output1 tx1.outputs_data << CKB::Utils.bin_to_hex("\x00" * 8) @@ -733,13 +776,18 @@ class DaoEventsTest < ActiveSupport::TestCase create(:block, :with_block_hash, number: node_block.header.number - 1) address = create(:address) block = create(:block, :with_block_hash) - ckb_transaction1 = create(:ckb_transaction, tx_hash: "0x498315db9c7ba144cca74d2e9122ac9b3a3da1641b2975ae321d91ec34f1c0e3", block: block) - ckb_transaction2 = create(:ckb_transaction, tx_hash: "0x598315db9c7ba144cca74d2e9122ac9b3a3da1641b2975ae321d91ec34f1c0e3", block: block) - create(:cell_output, ckb_transaction: ckb_transaction1, cell_index: 1, tx_hash: "0x498315db9c7ba144cca74d2e9122ac9b3a3da1641b2975ae321d91ec34f1c0e3", block: block, capacity: 10**8 * 1000, address: address) - create(:cell_output, ckb_transaction: ckb_transaction2, cell_index: 2, tx_hash: "0x598315db9c7ba144cca74d2e9122ac9b3a3da1641b2975ae321d91ec34f1c0e3", block: block, capacity: 10**8 * 1000, address: address) + ckb_transaction1 = create(:ckb_transaction, + tx_hash: "0x498315db9c7ba144cca74d2e9122ac9b3a3da1641b2975ae321d91ec34f1c0e3", block: block) + ckb_transaction2 = create(:ckb_transaction, + tx_hash: "0x598315db9c7ba144cca74d2e9122ac9b3a3da1641b2975ae321d91ec34f1c0e3", block: block) + create(:cell_output, ckb_transaction: ckb_transaction1, cell_index: 1, + tx_hash: "0x498315db9c7ba144cca74d2e9122ac9b3a3da1641b2975ae321d91ec34f1c0e3", block: block, capacity: 10**8 * 1000, address: address) + create(:cell_output, ckb_transaction: ckb_transaction2, 
cell_index: 2, + tx_hash: "0x598315db9c7ba144cca74d2e9122ac9b3a3da1641b2975ae321d91ec34f1c0e3", block: block, capacity: 10**8 * 1000, address: address) tx1 = node_block.transactions.first output1 = tx1.outputs.first - output1.type = CKB::Types::Script.new(args: "0xb2e61ff569acf041b3c2c17724e2379c581eeac3", hash_type: "type", code_hash: Settings.dao_type_hash) + output1.type = CKB::Types::Script.new(args: "0xb2e61ff569acf041b3c2c17724e2379c581eeac3", hash_type: "type", + code_hash: Settings.dao_type_hash) output1.capacity = 10**8 * 1000 tx1.outputs << output1 tx1.outputs_data << CKB::Utils.bin_to_hex("\x00" * 8) @@ -763,12 +811,18 @@ class DaoEventsTest < ActiveSupport::TestCase input_address3 = create(:address) input_address4 = create(:address) input_address5 = create(:address) - create(:cell_output, ckb_transaction: tx1, block: block1, capacity: 50000 * 10**8, tx_hash: tx1.tx_hash, cell_index: 0, address: input_address1, cell_type: "nervos_dao_deposit") - create(:cell_output, ckb_transaction: tx2, block: block2, capacity: 60000 * 10**8, tx_hash: tx2.tx_hash, cell_index: 1, address: input_address2, cell_type: "nervos_dao_deposit") - create(:cell_output, ckb_transaction: tx3, block: block2, capacity: 70000 * 10**8, tx_hash: tx3.tx_hash, cell_index: 2, address: input_address3) - create(:cell_output, ckb_transaction: tx4, block: block2, capacity: 70000 * 10**8, tx_hash: tx4.tx_hash, cell_index: 0, address: input_address4, cell_type: "nervos_dao_deposit") - create(:cell_output, ckb_transaction: tx5, block: block2, capacity: 70000 * 10**8, tx_hash: tx5.tx_hash, cell_index: 0, address: input_address5, cell_type: "nervos_dao_deposit") - header = CKB::Types::BlockHeader.new(compact_target: "0x1000", hash: "0x#{SecureRandom.hex(32)}", number: DEFAULT_NODE_BLOCK_NUMBER, parent_hash: "0x#{SecureRandom.hex(32)}", nonce: 1757392074788233522, timestamp: CkbUtils.time_in_milliseconds(Time.current), transactions_root: "0x#{SecureRandom.hex(32)}", proposals_hash: "0x#{SecureRandom.hex(32)}", extra_hash: "0x#{SecureRandom.hex(32)}", version: 0, epoch: 1, dao: "0x01000000000000000000c16ff286230000a3a65e97fd03000057c138586f0000") + create(:cell_output, ckb_transaction: tx1, block: block1, capacity: 50000 * 10**8, tx_hash: tx1.tx_hash, + cell_index: 0, address: input_address1, cell_type: "nervos_dao_deposit") + create(:cell_output, ckb_transaction: tx2, block: block2, capacity: 60000 * 10**8, tx_hash: tx2.tx_hash, + cell_index: 1, address: input_address2, cell_type: "nervos_dao_deposit") + create(:cell_output, ckb_transaction: tx3, block: block2, capacity: 70000 * 10**8, tx_hash: tx3.tx_hash, + cell_index: 2, address: input_address3) + create(:cell_output, ckb_transaction: tx4, block: block2, capacity: 70000 * 10**8, tx_hash: tx4.tx_hash, + cell_index: 0, address: input_address4, cell_type: "nervos_dao_deposit") + create(:cell_output, ckb_transaction: tx5, block: block2, capacity: 70000 * 10**8, tx_hash: tx5.tx_hash, + cell_index: 0, address: input_address5, cell_type: "nervos_dao_deposit") + header = CKB::Types::BlockHeader.new(compact_target: "0x1000", hash: "0x#{SecureRandom.hex(32)}", + number: DEFAULT_NODE_BLOCK_NUMBER, parent_hash: "0x#{SecureRandom.hex(32)}", nonce: 1757392074788233522, timestamp: CkbUtils.time_in_milliseconds(Time.current), transactions_root: "0x#{SecureRandom.hex(32)}", proposals_hash: "0x#{SecureRandom.hex(32)}", extra_hash: "0x#{SecureRandom.hex(32)}", version: 0, epoch: 1, dao: "0x01000000000000000000c16ff286230000a3a65e97fd03000057c138586f0000") inputs = [ 
CKB::Types::Input.new(previous_output: CKB::Types::OutPoint.new(tx_hash: tx1.tx_hash, index: 0)), CKB::Types::Input.new(previous_output: CKB::Types::OutPoint.new(tx_hash: tx2.tx_hash, index: 1)), @@ -778,9 +832,12 @@ class DaoEventsTest < ActiveSupport::TestCase CKB::Types::Input.new(previous_output: CKB::Types::OutPoint.new(tx_hash: tx4.tx_hash, index: 0)), CKB::Types::Input.new(previous_output: CKB::Types::OutPoint.new(tx_hash: tx5.tx_hash, index: 0)) ] - lock1 = CKB::Types::Script.new(code_hash: Settings.secp_cell_type_hash, hash_type: "type", args: "0x#{SecureRandom.hex(20)}") - lock2 = CKB::Types::Script.new(code_hash: Settings.secp_cell_type_hash, hash_type: "type", args: "0x#{SecureRandom.hex(20)}") - lock3 = CKB::Types::Script.new(code_hash: Settings.secp_cell_type_hash, hash_type: "type", args: "0x#{SecureRandom.hex(20)}") + lock1 = CKB::Types::Script.new(code_hash: Settings.secp_cell_type_hash, hash_type: "type", + args: "0x#{SecureRandom.hex(20)}") + lock2 = CKB::Types::Script.new(code_hash: Settings.secp_cell_type_hash, hash_type: "type", + args: "0x#{SecureRandom.hex(20)}") + lock3 = CKB::Types::Script.new(code_hash: Settings.secp_cell_type_hash, hash_type: "type", + args: "0x#{SecureRandom.hex(20)}") dao_type = CKB::Types::Script.new(code_hash: Settings.dao_type_hash, hash_type: "type", args: "0x") outputs = [ CKB::Types::Output.new(capacity: 50000 * 10**8, lock: lock1, type: dao_type), @@ -792,18 +849,26 @@ class DaoEventsTest < ActiveSupport::TestCase CKB::Types::Output.new(capacity: 60000 * 10**8, lock: lock2, type: dao_type), CKB::Types::Output.new(capacity: 40000 * 10**8, lock: lock3) ] - miner_lock = CKB::Types::Script.new(code_hash: Settings.secp_cell_type_hash, hash_type: "type", args: "0x#{SecureRandom.hex(20)}") + miner_lock = CKB::Types::Script.new(code_hash: Settings.secp_cell_type_hash, hash_type: "type", + args: "0x#{SecureRandom.hex(20)}") cellbase_inputs = [ - CKB::Types::Input.new(previous_output: CKB::Types::OutPoint.new(tx_hash: "0x0000000000000000000000000000000000000000000000000000000000000000", index: 4294967295), since: 3000) + CKB::Types::Input.new( + previous_output: CKB::Types::OutPoint.new( + tx_hash: "0x0000000000000000000000000000000000000000000000000000000000000000", index: 4294967295 + ), since: 3000 + ) ] cellbase_outputs = [ CKB::Types::Output.new(capacity: 200986682127, lock: miner_lock) ] deposit_block_number = CKB::Utils.bin_to_hex([block1.number].pack("Q<")) transactions = [ - CKB::Types::Transaction.new(hash: "0x#{SecureRandom.hex(32)}", cell_deps: [], header_deps: [], inputs: cellbase_inputs, outputs: cellbase_outputs, outputs_data: %w[0x], witnesses: ["0x590000000c00000055000000490000001000000030000000310000009bd7e06f3ecf4be0f2fcd2188b23f1b9fcc88e5d4b65a8637b17723bbda3cce801140000003954acece65096bfa81258983ddb83915fc56bd800000000"]), - CKB::Types::Transaction.new(hash: "0x#{SecureRandom.hex(32)}", cell_deps: [], header_deps: [block1.block_hash], inputs: inputs, outputs: outputs, outputs_data: %W[#{deposit_block_number} #{deposit_block_number} 0x], witnesses: ["0x5d0000000c00000055000000490000001000000030000000310000009bd7e06f3ecf4be0f2fcd2188b23f1b9fcc88e5d4b65a8637b17723bbda3cce801140000003954acece65096bfa81258983ddb83915fc56bd804000000123456780000000000000000"]), - CKB::Types::Transaction.new(hash: "0x#{SecureRandom.hex(32)}", cell_deps: [], header_deps: [block1.block_hash], inputs: inputs1, outputs: outputs1, outputs_data: %W[#{deposit_block_number} #{deposit_block_number} 0x], witnesses: 
["0x5d0000000c00000055000000490000001000000030000000310000009bd7e06f3ecf4be0f2fcd2188b23f1b9fcc88e5d4b65a8637b17723bbda3cce801140000003954acece65096bfa81258983ddb83915fc56bd804000000123456780000000000000000"]) + CKB::Types::Transaction.new(hash: "0x#{SecureRandom.hex(32)}", cell_deps: [], header_deps: [], + inputs: cellbase_inputs, outputs: cellbase_outputs, outputs_data: %w[0x], witnesses: ["0x590000000c00000055000000490000001000000030000000310000009bd7e06f3ecf4be0f2fcd2188b23f1b9fcc88e5d4b65a8637b17723bbda3cce801140000003954acece65096bfa81258983ddb83915fc56bd800000000"]), + CKB::Types::Transaction.new(hash: "0x#{SecureRandom.hex(32)}", cell_deps: [], header_deps: [block1.block_hash], + inputs: inputs, outputs: outputs, outputs_data: %W[#{deposit_block_number} #{deposit_block_number} 0x], witnesses: ["0x5d0000000c00000055000000490000001000000030000000310000009bd7e06f3ecf4be0f2fcd2188b23f1b9fcc88e5d4b65a8637b17723bbda3cce801140000003954acece65096bfa81258983ddb83915fc56bd804000000123456780000000000000000"]), + CKB::Types::Transaction.new(hash: "0x#{SecureRandom.hex(32)}", cell_deps: [], header_deps: [block1.block_hash], + inputs: inputs1, outputs: outputs1, outputs_data: %W[#{deposit_block_number} #{deposit_block_number} 0x], witnesses: ["0x5d0000000c00000055000000490000001000000030000000310000009bd7e06f3ecf4be0f2fcd2188b23f1b9fcc88e5d4b65a8637b17723bbda3cce801140000003954acece65096bfa81258983ddb83915fc56bd804000000123456780000000000000000"]) ] node_block = CKB::Types::Block.new(uncles: [], proposals: [], transactions: transactions, header: header) @@ -826,19 +891,27 @@ class DaoEventsTest < ActiveSupport::TestCase input_address1 = create(:address) input_address2 = create(:address) input_address3 = create(:address) - create(:cell_output, ckb_transaction: tx1, block: block1, capacity: 50000 * 10**8, tx_hash: tx1.tx_hash, cell_index: 0, address: input_address1, cell_type: "nervos_dao_deposit") - create(:cell_output, ckb_transaction: tx2, block: block2, capacity: 60000 * 10**8, tx_hash: tx2.tx_hash, cell_index: 1, address: input_address2, cell_type: "nervos_dao_deposit") - create(:cell_output, ckb_transaction: tx3, block: block2, capacity: 70000 * 10**8, tx_hash: tx3.tx_hash, cell_index: 2, address: input_address3) - header = CKB::Types::BlockHeader.new(compact_target: "0x1000", hash: "0x#{SecureRandom.hex(32)}", number: DEFAULT_NODE_BLOCK_NUMBER, parent_hash: "0x#{SecureRandom.hex(32)}", nonce: 1757392074788233522, timestamp: CkbUtils.time_in_milliseconds(Time.current), transactions_root: "0x#{SecureRandom.hex(32)}", proposals_hash: "0x#{SecureRandom.hex(32)}", extra_hash: "0x#{SecureRandom.hex(32)}", version: 0, epoch: 1, dao: "0x01000000000000000000c16ff286230000a3a65e97fd03000057c138586f0000") + create(:cell_output, ckb_transaction: tx1, block: block1, capacity: 50000 * 10**8, tx_hash: tx1.tx_hash, + cell_index: 0, address: input_address1, cell_type: "nervos_dao_deposit") + create(:cell_output, ckb_transaction: tx2, block: block2, capacity: 60000 * 10**8, tx_hash: tx2.tx_hash, + cell_index: 1, address: input_address2, cell_type: "nervos_dao_deposit") + create(:cell_output, ckb_transaction: tx3, block: block2, capacity: 70000 * 10**8, tx_hash: tx3.tx_hash, + cell_index: 2, address: input_address3) + header = CKB::Types::BlockHeader.new(compact_target: "0x1000", hash: "0x#{SecureRandom.hex(32)}", + number: DEFAULT_NODE_BLOCK_NUMBER, parent_hash: "0x#{SecureRandom.hex(32)}", nonce: 1757392074788233522, timestamp: CkbUtils.time_in_milliseconds(Time.current), transactions_root: 
"0x#{SecureRandom.hex(32)}", proposals_hash: "0x#{SecureRandom.hex(32)}", extra_hash: "0x#{SecureRandom.hex(32)}", version: 0, epoch: 1, dao: "0x01000000000000000000c16ff286230000a3a65e97fd03000057c138586f0000") inputs = [ CKB::Types::Input.new(previous_output: CKB::Types::OutPoint.new(tx_hash: tx1.tx_hash, index: 0)), CKB::Types::Input.new(previous_output: CKB::Types::OutPoint.new(tx_hash: tx2.tx_hash, index: 1)), CKB::Types::Input.new(previous_output: CKB::Types::OutPoint.new(tx_hash: tx3.tx_hash, index: 2)) ] - lock1 = CKB::Types::Script.new(code_hash: Settings.secp_cell_type_hash, hash_type: "type", args: "0x#{SecureRandom.hex(20)}") - lock2 = CKB::Types::Script.new(code_hash: Settings.secp_cell_type_hash, hash_type: "type", args: "0x#{SecureRandom.hex(20)}") - lock3 = CKB::Types::Script.new(code_hash: Settings.secp_cell_type_hash, hash_type: "type", args: "0x#{SecureRandom.hex(20)}") - udt_script = CKB::Types::Script.new(code_hash: Settings.sudt_cell_type_hash, hash_type: "type", args: "0x#{SecureRandom.hex(32)}") + lock1 = CKB::Types::Script.new(code_hash: Settings.secp_cell_type_hash, hash_type: "type", + args: "0x#{SecureRandom.hex(20)}") + lock2 = CKB::Types::Script.new(code_hash: Settings.secp_cell_type_hash, hash_type: "type", + args: "0x#{SecureRandom.hex(20)}") + lock3 = CKB::Types::Script.new(code_hash: Settings.secp_cell_type_hash, hash_type: "type", + args: "0x#{SecureRandom.hex(20)}") + udt_script = CKB::Types::Script.new(code_hash: Settings.sudt_cell_type_hash, hash_type: "type", + args: "0x#{SecureRandom.hex(32)}") Address.create(lock_hash: udt_script.args, address_hash: "0x#{SecureRandom.hex(32)}") dao_type = CKB::Types::Script.new(code_hash: Settings.dao_type_hash, hash_type: "type", args: "0x") outputs = [ @@ -846,17 +919,24 @@ class DaoEventsTest < ActiveSupport::TestCase CKB::Types::Output.new(capacity: 60000 * 10**8, lock: lock2, type: dao_type), CKB::Types::Output.new(capacity: 40000 * 10**8, lock: lock3, type: udt_script) ] - miner_lock = CKB::Types::Script.new(code_hash: Settings.secp_cell_type_hash, hash_type: "type", args: "0x#{SecureRandom.hex(20)}") + miner_lock = CKB::Types::Script.new(code_hash: Settings.secp_cell_type_hash, hash_type: "type", + args: "0x#{SecureRandom.hex(20)}") cellbase_inputs = [ - CKB::Types::Input.new(previous_output: CKB::Types::OutPoint.new(tx_hash: "0x0000000000000000000000000000000000000000000000000000000000000000", index: 4294967295), since: 3000) + CKB::Types::Input.new( + previous_output: CKB::Types::OutPoint.new( + tx_hash: "0x0000000000000000000000000000000000000000000000000000000000000000", index: 4294967295 + ), since: 3000 + ) ] cellbase_outputs = [ CKB::Types::Output.new(capacity: 200986682127, lock: miner_lock) ] deposit_block_number = CKB::Utils.bin_to_hex([block1.number].pack("Q<")) transactions = [ - CKB::Types::Transaction.new(hash: "0x#{SecureRandom.hex(32)}", cell_deps: [], header_deps: [], inputs: cellbase_inputs, outputs: cellbase_outputs, outputs_data: %w[0x], witnesses: ["0x590000000c00000055000000490000001000000030000000310000009bd7e06f3ecf4be0f2fcd2188b23f1b9fcc88e5d4b65a8637b17723bbda3cce801140000003954acece65096bfa81258983ddb83915fc56bd800000000"]), - CKB::Types::Transaction.new(hash: "0x#{SecureRandom.hex(32)}", cell_deps: [], header_deps: [block1.block_hash], inputs: inputs, outputs: outputs, outputs_data: %W[#{deposit_block_number} #{deposit_block_number} #{CKB::Utils.generate_sudt_amount(1000)}], witnesses: 
["0x5d0000000c00000055000000490000001000000030000000310000009bd7e06f3ecf4be0f2fcd2188b23f1b9fcc88e5d4b65a8637b17723bbda3cce801140000003954acece65096bfa81258983ddb83915fc56bd804000000123456780000000000000000"]) + CKB::Types::Transaction.new(hash: "0x#{SecureRandom.hex(32)}", cell_deps: [], header_deps: [], + inputs: cellbase_inputs, outputs: cellbase_outputs, outputs_data: %w[0x], witnesses: ["0x590000000c00000055000000490000001000000030000000310000009bd7e06f3ecf4be0f2fcd2188b23f1b9fcc88e5d4b65a8637b17723bbda3cce801140000003954acece65096bfa81258983ddb83915fc56bd800000000"]), + CKB::Types::Transaction.new(hash: "0x#{SecureRandom.hex(32)}", cell_deps: [], header_deps: [block1.block_hash], + inputs: inputs, outputs: outputs, outputs_data: %W[#{deposit_block_number} #{deposit_block_number} #{CKB::Utils.generate_sudt_amount(1000)}], witnesses: ["0x5d0000000c00000055000000490000001000000030000000310000009bd7e06f3ecf4be0f2fcd2188b23f1b9fcc88e5d4b65a8637b17723bbda3cce801140000003954acece65096bfa81258983ddb83915fc56bd804000000123456780000000000000000"]) ] node_block = CKB::Types::Block.new(uncles: [], proposals: [], transactions: transactions, header: header) block = node_data_processor.process_block(node_block) @@ -873,10 +953,12 @@ def node_data_processor end def fake_dao_withdraw_transaction(node_block) - block = create(:block, :with_block_hash) + block = create(:block, :with_block_hash, timestamp: 1557382351075) lock = create(:lock_script) - ckb_transaction1 = create(:ckb_transaction, tx_hash: "0x498315db9c7ba144cca74d2e9122ac9b3a3da1641b2975ae321d91ec34f1c0e3", block: block) - ckb_transaction2 = create(:ckb_transaction, tx_hash: "0x598315db9c7ba144cca74d2e9122ac9b3a3da1641b2975ae321d91ec34f1c0e3", block: block) + ckb_transaction1 = create(:ckb_transaction, + tx_hash: "0x498315db9c7ba144cca74d2e9122ac9b3a3da1641b2975ae321d91ec34f1c0e3", block: block) + ckb_transaction2 = create(:ckb_transaction, + tx_hash: "0x598315db9c7ba144cca74d2e9122ac9b3a3da1641b2975ae321d91ec34f1c0e3", block: block) cell_output1 = create(:cell_output, ckb_transaction: ckb_transaction1, cell_index: 1, @@ -896,7 +978,8 @@ def fake_dao_withdraw_transaction(node_block) cell_output2.address.update(balance: 10**8 * 1000) tx = node_block.transactions.last output = tx.outputs.first - output.type = CKB::Types::Script.new(args: "0xb2e61ff569acf041b3c2c17724e2379c581eeac3", hash_type: "type", code_hash: Settings.dao_type_hash) + output.type = CKB::Types::Script.new(args: "0xb2e61ff569acf041b3c2c17724e2379c581eeac3", hash_type: "type", + code_hash: Settings.dao_type_hash) tx.outputs_data[0] = CKB::Utils.bin_to_hex("\x02" * 8) output.capacity = 10**8 * 1000 tx.header_deps = ["0x0b3e980e4e5e59b7d478287e21cd89ffdc3ff5916ee26cf2aa87910c6a504d61"] @@ -905,9 +988,13 @@ def fake_dao_withdraw_transaction(node_block) end def fake_dao_interest_transaction(node_block) - block = create(:block, :with_block_hash) - ckb_transaction1 = create(:ckb_transaction, tx_hash: "0x498315db9c7ba144cca74d2e9122ac9b3a3da1641b2975ae321d91ec34f1c0e3", block: block) - ckb_transaction2 = create(:ckb_transaction, tx_hash: "0x598315db9c7ba144cca74d2e9122ac9b3a3da1641b2975ae321d91ec34f1c0e3", block: block) + block = create(:block, :with_block_hash, timestamp: 1557382351075) + ckb_transaction1 = create(:ckb_transaction, + tx_hash: "0x498315db9c7ba144cca74d2e9122ac9b3a3da1641b2975ae321d91ec34f1c0e3", + block: block) + ckb_transaction2 = create(:ckb_transaction, + tx_hash: "0x598315db9c7ba144cca74d2e9122ac9b3a3da1641b2975ae321d91ec34f1c0e3", + block: block) lock = 
create(:lock_script) cell_output1 = create(:cell_output, ckb_transaction: ckb_transaction1, cell_index: 1, @@ -918,7 +1005,8 @@ def fake_dao_interest_transaction(node_block) data: CKB::Utils.bin_to_hex("\x02" * 8), lock_script_id: lock.id) cell_output2 = create(:cell_output, ckb_transaction: ckb_transaction2, - cell_index: 1, tx_hash: "0x398315db9c7ba144cca74d2e9122ac9b3a3da1641b2975ae321d91ec34f1c0e2", + cell_index: 1, + tx_hash: "0x398315db9c7ba144cca74d2e9122ac9b3a3da1641b2975ae321d91ec34f1c0e2", block: block, consumed_by: ckb_transaction2, status: "dead", @@ -936,36 +1024,67 @@ def fake_dao_interest_transaction(node_block) cell_output3.address.update(balance: 10**8 * 1000) create(:cell_input, block: ckb_transaction2.block, ckb_transaction: ckb_transaction2, - previous_output: { "tx_hash": "0x398315db9c7ba144cca74d2e9122ac9b3a3da1641b2975ae321d91ec34f1c0e2", "index": "1" }) + previous_output: { + "tx_hash": "0x398315db9c7ba144cca74d2e9122ac9b3a3da1641b2975ae321d91ec34f1c0e2", + "index": 1 + }) create(:cell_input, block: ckb_transaction2.block, ckb_transaction: ckb_transaction2, - previous_output: { "tx_hash": "0x598315db9c7ba144cca74d2e9122ac9b3a3da1641b2975ae321d91ec34f1c0e3", "index": "2" }) + previous_output: { + "tx_hash": "0x598315db9c7ba144cca74d2e9122ac9b3a3da1641b2975ae321d91ec34f1c0e3", + "index": 2 + }) create(:cell_input, block: ckb_transaction1.block, ckb_transaction: ckb_transaction1, - previous_output: { "tx_hash": "0x498315db9c7ba144cca74d2e9122ac9b3a3da1641b2975ae321d91ec34f1c0e3", "index": "1" }) + previous_output: { + "tx_hash": "0x498315db9c7ba144cca74d2e9122ac9b3a3da1641b2975ae321d91ec34f1c0e3", + "index": 1 + }) create(:cell_input, block: ckb_transaction1.block, ckb_transaction: ckb_transaction1, - previous_output: { "tx_hash": "0x498315db9c7ba144cca74d2e9122ac9b3a3da1641b2975ae321d91ec34f1c0e3", "index": "1" }) + previous_output: { + "tx_hash": "0x498315db9c7ba144cca74d2e9122ac9b3a3da1641b2975ae321d91ec34f1c0e3", + "index": 1 + }) tx = node_block.transactions.last - tx.header_deps = ["0x0b3e980e4e5e59b7d478287e21cd89ffdc3ff5916ee26cf2aa87910c6a504d61"] - tx.witnesses = %w(0x8ae8061ec879d66c0f3996ab60d7c2a21094b8739817beddaea1e28d3620a70a21497a692581ca352631a67f3f6659a7c47d9a0c6c2def79d3e39440918a66fef 0x4e52933358ae2f26863b8c1c71bf20f17489328820f8f2cd84a070069f10ceef784bc3693c3c51b93475a7b5dbf652ba6532d0580ecc1faf909f9fd53c5f6405000000000000000000) + tx.header_deps = [ + "0x0b3e980e4e5e59b7d478287e21cd89ffdc3ff5916ee26cf2aa87910c6a504d61" + ] + tx.witnesses = %w( + 0x8ae8061ec879d66c0f3996ab60d7c2a21094b8739817beddaea1e28d3620a70a21497a692581ca352631a67f3f6659a7c47d9a0c6c2def79d3e39440918a66fef 0x4e52933358ae2f26863b8c1c71bf20f17489328820f8f2cd84a070069f10ceef784bc3693c3c51b93475a7b5dbf652ba6532d0580ecc1faf909f9fd53c5f6405000000000000000000 + ) ckb_transaction1 end def fake_dao_deposit_transaction(node_block) - block = create(:block, :with_block_hash) + block = create(:block, :with_block_hash, timestamp: 1557382351075) lock = create(:lock_script) - ckb_transaction1 = create(:ckb_transaction, tx_hash: "0x498315db9c7ba144cca74d2e9122ac9b3a3da1641b2975ae321d91ec34f1c0e3", block: block) - ckb_transaction2 = create(:ckb_transaction, tx_hash: "0x598315db9c7ba144cca74d2e9122ac9b3a3da1641b2975ae321d91ec34f1c0e3", block: block) - cell_output1 = create(:cell_output, ckb_transaction: ckb_transaction1, cell_index: 1, tx_hash: "0x498315db9c7ba144cca74d2e9122ac9b3a3da1641b2975ae321d91ec34f1c0e3", block: block, capacity: 10**8 * 1000, lock_script_id: lock.id) - cell_output2 = 
create(:cell_output, ckb_transaction: ckb_transaction2, cell_index: 2, tx_hash: "0x598315db9c7ba144cca74d2e9122ac9b3a3da1641b2975ae321d91ec34f1c0e3", block: block, capacity: 10**8 * 1000, lock_script_id: lock.id) + ckb_transaction1 = create(:ckb_transaction, + tx_hash: "0x498315db9c7ba144cca74d2e9122ac9b3a3da1641b2975ae321d91ec34f1c0e3", + block: block) + ckb_transaction2 = create(:ckb_transaction, + tx_hash: "0x598315db9c7ba144cca74d2e9122ac9b3a3da1641b2975ae321d91ec34f1c0e3", + block: block) + cell_output1 = create(:cell_output, ckb_transaction: ckb_transaction1, + cell_index: 1, + tx_hash: "0x498315db9c7ba144cca74d2e9122ac9b3a3da1641b2975ae321d91ec34f1c0e3", + block: block, + capacity: 10**8 * 1000, + lock_script_id: lock.id) + cell_output2 = create(:cell_output, ckb_transaction: ckb_transaction2, + cell_index: 2, + tx_hash: "0x598315db9c7ba144cca74d2e9122ac9b3a3da1641b2975ae321d91ec34f1c0e3", + block: block, + capacity: 10**8 * 1000, + lock_script_id: lock.id) cell_output1.address.update(balance: 10**8 * 1000) cell_output2.address.update(balance: 10**8 * 1000) tx = node_block.transactions.first output = tx.outputs.first - output.type = CKB::Types::Script.new(args: "0xb2e61ff569acf041b3c2c17724e2379c581eeac3", hash_type: "type", code_hash: Settings.dao_type_hash) + output.type = CKB::Types::Script.new(args: "0xb2e61ff569acf041b3c2c17724e2379c581eeac3", hash_type: "type", + code_hash: Settings.dao_type_hash) tx.outputs_data[0] = CKB::Utils.bin_to_hex("\x00" * 8) output.capacity = 10**8 * 1000 diff --git a/test/models/ckb_sync/node_data_processor_test.rb b/test/models/ckb_sync/node_data_processor_test.rb index 9c384f703..2ccd85315 100644 --- a/test/models/ckb_sync/node_data_processor_test.rb +++ b/test/models/ckb_sync/node_data_processor_test.rb @@ -144,7 +144,7 @@ class NodeDataProcessorTest < ActiveSupport::TestCase ) ) VCR.use_cassette("blocks/11") do - tx = create(:pool_transaction_entry) + tx = create(:pending_transaction) node_block = CkbSync::Api.instance.get_block_by_number(11) create(:block, :with_block_hash, number: node_block.header.number - 1) node_block.transactions.first.hash = tx.tx_hash @@ -164,7 +164,7 @@ class NodeDataProcessorTest < ActiveSupport::TestCase ) ) VCR.use_cassette("blocks/11") do - tx = create(:pool_transaction_entry) + tx = create(:pending_transaction) node_block = CkbSync::Api.instance.get_block_by_number(11) create(:block, :with_block_hash, number: node_block.header.number - 1) assert_no_changes -> { tx.reload.tx_status } do @@ -441,22 +441,27 @@ class NodeDataProcessorTest < ActiveSupport::TestCase create(:block, :with_block_hash, number: node_block.header.number - 1) node_transactions = node_block.transactions.map(&:to_h).map(&:deep_stringify_keys) node_block_cell_inputs = node_transactions.map { |commit_transaction| - commit_transaction["inputs"].each { |input| - input["previous_output"]["index"] = input["previous_output"]["index"].hex - input["since"] = input["since"].hex - input["previous_output"] = input["previous_output"].sort - }.map(&:sort) + commit_transaction["inputs"].map do |input| + { + "previous_tx_hash" => input["previous_output"]["tx_hash"] == CellOutput::SYSTEM_TX_HASH ? nil : input["previous_output"]["tx_hash"], + "index" => input["previous_output"]["tx_hash"] == CellOutput::SYSTEM_TX_HASH ? 
0 : input["previous_output"]["index"].hex, + "since" => input["since"].hex + } + end }.flatten local_block = node_data_processor.process_block(node_block) local_block_transactions = local_block.ckb_transactions local_block_cell_inputs = local_block_transactions.map { |commit_transaction| commit_transaction.cell_inputs.map do |cell_input| - cell_input.previous_output = cell_input.previous_output.sort - cell_input.attributes.select { |attribute| attribute.in?(%(previous_output since)) }.sort + { + "previous_tx_hash" => cell_input.previous_tx_hash, + "index" => cell_input.previous_index, + "since" => cell_input.since + } end }.flatten - assert_equal node_block_cell_inputs, local_block_cell_inputs end end @@ -737,7 +742,7 @@ class NodeDataProcessorTest < ActiveSupport::TestCase test "should create forked event when block is invalid " do node_block = fake_node_block - create(:block, :with_block_hash, number: node_block.header.number - 1) + create(:block, :with_block_hash, number: node_block.header.number - 1, timestamp: 1557282351075) VCR.use_cassette("blocks/#{DEFAULT_NODE_BLOCK_NUMBER}") do fake_dao_deposit_transaction(node_block) node_data_processor.process_block(node_block) @@ -755,7 +760,7 @@ class NodeDataProcessorTest < ActiveSupport::TestCase test "should revert dao contract total depositors count when block is invalid and there is dao cell" do CkbSync::Api.any_instance.stubs(:calculate_dao_maximum_withdraw).returns("0x2faf0be8") node_block = fake_node_block - create(:block, :with_block_hash, number: node_block.header.number - 1) + create(:block, :with_block_hash, number: node_block.header.number - 1, timestamp: 1557282351075) VCR.use_cassette("blocks/#{DEFAULT_NODE_BLOCK_NUMBER}") do fake_dao_deposit_transaction(node_block) node_data_processor.process_block(node_block) @@ -933,6 +938,10 @@ class NodeDataProcessorTest < ActiveSupport::TestCase new_local_block = node_data_processor.call assert_equal origin_live_cells_count + 1, new_local_block.contained_addresses.sum(:live_cells_count) + + address = new_local_block.contained_addresses.first + snapshot = AddressBlockSnapshot.find_by(block_id: new_local_block.id, address_id: address.id) + assert_equal snapshot.final_state["live_cells_count"], address.live_cells_count end end @@ -960,6 +969,10 @@ class NodeDataProcessorTest < ActiveSupport::TestCase assert_equal origin_balance + new_local_block.cell_outputs.sum(:capacity), new_local_block.contained_addresses.sum(:balance) + + address = new_local_block.contained_addresses.first + snapshot = AddressBlockSnapshot.find_by(block_id: new_local_block.id, address_id: address.id) + assert_equal snapshot.final_state["balance"], address.balance end end @@ -1947,7 +1960,7 @@ class NodeDataProcessorTest < ActiveSupport::TestCase CkbSync::Api.any_instance.stubs(:get_tip_block_number).returns(22) VCR.use_cassette("blocks/21") do node_block = CkbSync::Api.instance.get_block_by_number(21) - create(:block, :with_block_hash, number: node_block.header.number - 1) + create(:block, :with_block_hash, number: node_block.header.number - 1, timestamp: 1557282351075) node_output = node_block.transactions.first.outputs.first node_output.type = CKB::Types::Script.new(code_hash: Settings.sudt_cell_type_hash, @@ -1977,7 +1990,7 @@ class NodeDataProcessorTest < ActiveSupport::TestCase CkbSync::Api.any_instance.stubs(:get_tip_block_number).returns(22) VCR.use_cassette("blocks/21") do node_block = CkbSync::Api.instance.get_block_by_number(21) - create(:block, :with_block_hash, number: 
node_block.header.number - 1) + create(:block, :with_block_hash, number: node_block.header.number - 1, timestamp: 1557282351075) node_output = node_block.transactions.first.outputs.first node_output.type = CKB::Types::Script.new(code_hash: CkbSync::Api.instance.token_script_code_hash, @@ -2004,7 +2017,7 @@ class NodeDataProcessorTest < ActiveSupport::TestCase CkbSync::Api.any_instance.stubs(:get_tip_block_number).returns(22) VCR.use_cassette("blocks/21") do node_block = CkbSync::Api.instance.get_block_by_number(21) - create(:block, :with_block_hash, number: node_block.header.number - 1) + create(:block, :with_block_hash, number: node_block.header.number - 1, timestamp: 1557282351075) node_output = node_block.transactions.first.outputs.first new_node_output = node_output.dup @@ -2587,94 +2600,218 @@ class NodeDataProcessorTest < ActiveSupport::TestCase block1 = create(:block, :with_block_hash, number: DEFAULT_NODE_BLOCK_NUMBER - 2) tx1 = create(:ckb_transaction, block: block1) block2 = create(:block, :with_block_hash, number: DEFAULT_NODE_BLOCK_NUMBER - 1) - deposit_block = create(:block, :with_block_hash, number: DEFAULT_NODE_BLOCK_NUMBER - 5, - dao: "0x1c3a5eac4286070025e0edf5ca8823001c957f5b5000000000e3bad4847a0100") + deposit_block = create(:block, :with_block_hash, + number: DEFAULT_NODE_BLOCK_NUMBER - 5, + dao: "0x1c3a5eac4286070025e0edf5ca8823001c957f5b5000000000e3bad4847a0100") deposit_tx = create(:ckb_transaction, block: deposit_block) - deposit_block1 = create(:block, :with_block_hash, number: DEFAULT_NODE_BLOCK_NUMBER - 6, - dao: "0x185369bb078607007224be7987882300517774e04400000000e3bad4847a0100") + deposit_block1 = create(:block, :with_block_hash, + number: DEFAULT_NODE_BLOCK_NUMBER - 6, + dao: "0x185369bb078607007224be7987882300517774e04400000000e3bad4847a0100") deposit_tx1 = create(:ckb_transaction, block: deposit_block1) tx2 = create(:ckb_transaction, block: block2) tx3 = create(:ckb_transaction, block: block2) tx4 = create(:ckb_transaction, block: block2) tx5 = create(:ckb_transaction, block: block2) - lock1 = create(:lock_script, args: "0x#{SecureRandom.hex(20)}", code_hash: Settings.secp_cell_type_hash, + lock1 = create(:lock_script, args: "0x#{SecureRandom.hex(20)}", + code_hash: Settings.secp_cell_type_hash, hash_type: "type") - lock2 = create(:lock_script, args: "0x#{SecureRandom.hex(20)}", code_hash: Settings.secp_cell_type_hash, + lock2 = create(:lock_script, args: "0x#{SecureRandom.hex(20)}", + code_hash: Settings.secp_cell_type_hash, hash_type: "type") - lock3 = create(:lock_script, args: "0x#{SecureRandom.hex(20)}", code_hash: Settings.secp_cell_type_hash, + lock3 = create(:lock_script, args: "0x#{SecureRandom.hex(20)}", + code_hash: Settings.secp_cell_type_hash, hash_type: "type") - lock4 = create(:lock_script, args: "0x#{SecureRandom.hex(20)}", code_hash: Settings.secp_cell_type_hash, + lock4 = create(:lock_script, args: "0x#{SecureRandom.hex(20)}", + code_hash: Settings.secp_cell_type_hash, hash_type: "type") - lock5 = create(:lock_script, args: "0x#{SecureRandom.hex(20)}", code_hash: Settings.secp_cell_type_hash, + lock5 = create(:lock_script, args: "0x#{SecureRandom.hex(20)}", + code_hash: Settings.secp_cell_type_hash, hash_type: "type") input_address1 = create(:address, lock_script_id: lock1.id) input_address2 = create(:address, lock_script_id: lock2.id) input_address3 = create(:address, lock_script_id: lock3.id) input_address4 = create(:address, lock_script_id: lock4.id) input_address5 = create(:address, lock_script_id: lock5.id) - udt_script = 
CKB::Types::Script.new(code_hash: Settings.sudt_cell_type_hash, hash_type: "type", - args: "0x#{SecureRandom.hex(32)}") - udt_script1 = CKB::Types::Script.new(code_hash: Settings.sudt_cell_type_hash, hash_type: "type", - args: "0x#{SecureRandom.hex(32)}") - create(:udt, code_hash: Settings.sudt_cell_type_hash, type_hash: udt_script.compute_hash) - create(:udt, code_hash: Settings.sudt_cell_type_hash, type_hash: udt_script1.compute_hash) + udt_script = CKB::Types::Script.new( + code_hash: Settings.sudt_cell_type_hash, + hash_type: "type", + args: "0x#{SecureRandom.hex(32)}" + ) + udt_script1 = CKB::Types::Script.new( + code_hash: Settings.sudt_cell_type_hash, + hash_type: "type", + args: "0x#{SecureRandom.hex(32)}" + ) + create(:udt, code_hash: Settings.sudt_cell_type_hash, + type_hash: udt_script.compute_hash) + create(:udt, code_hash: Settings.sudt_cell_type_hash, + type_hash: udt_script1.compute_hash) # nervos_dao_deposit cells belongs to input_address1 - create(:cell_output, ckb_transaction: deposit_tx, block: deposit_block, - capacity: 50000 * 10**8, occupied_capacity: 61 * 10**8, tx_hash: deposit_tx.tx_hash, cell_index: 0, address: input_address1, cell_type: "nervos_dao_deposit", dao: "0x1c3a5eac4286070025e0edf5ca8823001c957f5b5000000000e3bad4847a0100", lock_script_id: lock1.id) - create(:cell_output, ckb_transaction: deposit_tx, block: deposit_block, - capacity: 40000 * 10**8, occupied_capacity: 61 * 10**8, tx_hash: deposit_tx.tx_hash, cell_index: 1, address: input_address1, cell_type: "nervos_dao_deposit", dao: "0x1c3a5eac4286070025e0edf5ca8823001c957f5b5000000000e3bad4847a0100", lock_script_id: lock1.id) - create(:cell_output, ckb_transaction: deposit_tx, block: deposit_block, - capacity: 40000 * 10**8, occupied_capacity: 61 * 10**8, tx_hash: deposit_tx.tx_hash, cell_index: 2, address: input_address1, cell_type: "nervos_dao_deposit", dao: "0x1c3a5eac4286070025e0edf5ca8823001c957f5b5000000000e3bad4847a0100", lock_script_id: lock1.id) + create(:cell_output, ckb_transaction: deposit_tx, + block: deposit_block, + capacity: 50000 * 10**8, + occupied_capacity: 61 * 10**8, + tx_hash: deposit_tx.tx_hash, + cell_index: 0, + address: input_address1, + cell_type: "nervos_dao_deposit", + dao: "0x1c3a5eac4286070025e0edf5ca8823001c957f5b5000000000e3bad4847a0100", lock_script_id: lock1.id) + create(:cell_output, ckb_transaction: deposit_tx, + block: deposit_block, + capacity: 40000 * 10**8, + occupied_capacity: 61 * 10**8, + tx_hash: deposit_tx.tx_hash, + cell_index: 1, + address: input_address1, + cell_type: "nervos_dao_deposit", + dao: "0x1c3a5eac4286070025e0edf5ca8823001c957f5b5000000000e3bad4847a0100", lock_script_id: lock1.id) + create(:cell_output, ckb_transaction: deposit_tx, + block: deposit_block, + capacity: 40000 * 10**8, + occupied_capacity: 61 * 10**8, + tx_hash: deposit_tx.tx_hash, + cell_index: 2, + address: input_address1, + cell_type: "nervos_dao_deposit", + dao: "0x1c3a5eac4286070025e0edf5ca8823001c957f5b5000000000e3bad4847a0100", lock_script_id: lock1.id) # nervos_dao_withdrawing inputs - create(:cell_input, ckb_transaction: tx1, block: block1, - previous_output: { tx_hash: deposit_tx.tx_hash, index: 0 }) - create(:cell_input, ckb_transaction: tx2, block: block2, - previous_output: { tx_hash: deposit_tx.tx_hash, index: 1 }) - create(:cell_input, ckb_transaction: tx2, block: block2, - previous_output: { tx_hash: deposit_tx.tx_hash, index: 2 }) + create(:cell_input, ckb_transaction: tx1, + block: block1, + previous_output: { + tx_hash: deposit_tx.tx_hash, + index: 0 + }) + 
create(:cell_input, ckb_transaction: tx2, + block: block2, + previous_output: { + tx_hash: deposit_tx.tx_hash, + index: 1 + }) + create(:cell_input, ckb_transaction: tx2, + block: block2, + previous_output: { + tx_hash: deposit_tx.tx_hash, + index: 2 + }) # nervos_dao_withdrawing cells - create(:cell_output, ckb_transaction: tx1, block: block1, capacity: 50000 * 10**8, - tx_hash: tx1.tx_hash, cell_index: 0, address: input_address1, cell_type: "nervos_dao_withdrawing", dao: "0x28ef3c7ff3860700d88b1a61958923008ae424cd7200000000e3bad4847a0100", lock_script_id: lock1.id, occupied_capacity: 61 * 10**8) - create(:cell_output, ckb_transaction: tx2, block: block2, capacity: 60000 * 10**8, - tx_hash: tx2.tx_hash, cell_index: 1, address: input_address2, cell_type: "nervos_dao_withdrawing", dao: "0x2cd631702e870700b3df08d7d889230036f787487e00000000e3bad4847a0100", lock_script_id: lock2.id, occupied_capacity: 61 * 10**8) + create(:cell_output, ckb_transaction: tx1, + block: block1, + capacity: 50000 * 10**8, + tx_hash: tx1.tx_hash, + cell_index: 0, + address: input_address1, + cell_type: "nervos_dao_withdrawing", + dao: "0x28ef3c7ff3860700d88b1a61958923008ae424cd7200000000e3bad4847a0100", lock_script_id: lock1.id, occupied_capacity: 61 * 10**8) + create(:cell_output, ckb_transaction: tx2, + block: block2, + capacity: 60000 * 10**8, + tx_hash: tx2.tx_hash, + cell_index: 1, + address: input_address2, + cell_type: "nervos_dao_withdrawing", + dao: "0x2cd631702e870700b3df08d7d889230036f787487e00000000e3bad4847a0100", lock_script_id: lock2.id, occupied_capacity: 61 * 10**8) # udt cell - create(:cell_output, ckb_transaction: tx3, block: block2, capacity: 70000 * 10**8, - tx_hash: tx3.tx_hash, cell_index: 2, address: input_address3, cell_type: "udt", lock_script_id: lock3.id, type_hash: udt_script.compute_hash) + create(:cell_output, ckb_transaction: tx3, + block: block2, + capacity: 70000 * 10**8, + tx_hash: tx3.tx_hash, + cell_index: 2, + address: input_address3, + cell_type: "udt", + lock_script_id: lock3.id, + type_hash: udt_script.compute_hash) # nervos_dao_deposit cells - create(:cell_output, ckb_transaction: deposit_tx1, block: deposit_block1, - capacity: 50000 * 10**8, occupied_capacity: 61 * 10**8, tx_hash: deposit_tx1.tx_hash, cell_index: 0, address: input_address4, cell_type: "nervos_dao_deposit", dao: "0x1c3a5eac4286070025e0edf5ca8823001c957f5b5000000000e3bad4847a0100", lock_script_id: lock4.id) - create(:cell_output, ckb_transaction: deposit_tx1, block: deposit_block1, - capacity: 40000 * 10**8, occupied_capacity: 61 * 10**8, tx_hash: deposit_tx1.tx_hash, cell_index: 1, address: input_address5, cell_type: "nervos_dao_deposit", dao: "0x1c3a5eac4286070025e0edf5ca8823001c957f5b5000000000e3bad4847a0100", lock_script_id: lock5.id) + create(:cell_output, ckb_transaction: deposit_tx1, + block: deposit_block1, + capacity: 50000 * 10**8, + occupied_capacity: 61 * 10**8, + tx_hash: deposit_tx1.tx_hash, + cell_index: 0, + address: input_address4, + cell_type: "nervos_dao_deposit", + dao: "0x1c3a5eac4286070025e0edf5ca8823001c957f5b5000000000e3bad4847a0100", lock_script_id: lock4.id) + create(:cell_output, ckb_transaction: deposit_tx1, + block: deposit_block1, + capacity: 40000 * 10**8, + occupied_capacity: 61 * 10**8, + tx_hash: deposit_tx1.tx_hash, + cell_index: 1, + address: input_address5, + cell_type: "nervos_dao_deposit", + dao: "0x1c3a5eac4286070025e0edf5ca8823001c957f5b5000000000e3bad4847a0100", lock_script_id: lock5.id) # nervos_dao_withdrawing inputs - create(:cell_input, ckb_transaction: tx4, 
block: block2, - previous_output: { tx_hash: deposit_tx1.tx_hash, index: 0 }) - create(:cell_input, ckb_transaction: tx5, block: block2, - previous_output: { tx_hash: deposit_tx1.tx_hash, index: 1 }) + create(:cell_input, ckb_transaction: tx4, + block: block2, + previous_output: { + tx_hash: deposit_tx1.tx_hash, + index: 0 + }) + create(:cell_input, ckb_transaction: tx5, + block: block2, + previous_output: { + tx_hash: deposit_tx1.tx_hash, + index: 1 + }) # nervos_dao_withdrawing cell - create(:cell_output, ckb_transaction: tx4, block: block1, capacity: 100000 * 10**8, - tx_hash: tx4.tx_hash, cell_index: 0, address: input_address4, cell_type: "nervos_dao_withdrawing", dao: "0x28ef3c7ff3860700d88b1a61958923008ae424cd7200000000e3bad4847a0100", lock_script_id: lock4.id, occupied_capacity: 61 * 10**8) + create(:cell_output, ckb_transaction: tx4, + block: block1, + capacity: 100000 * 10**8, + tx_hash: tx4.tx_hash, + cell_index: 0, + address: input_address4, + cell_type: "nervos_dao_withdrawing", + dao: "0x28ef3c7ff3860700d88b1a61958923008ae424cd7200000000e3bad4847a0100", lock_script_id: lock4.id, occupied_capacity: 61 * 10**8) # udt cell - create(:cell_output, ckb_transaction: tx5, block: block2, capacity: 60000 * 10**8, - tx_hash: tx5.tx_hash, cell_index: 0, address: input_address5, cell_type: "udt", lock_script_id: lock5.id, type_hash: udt_script.compute_hash) - - create(:type_script, args: udt_script.args, code_hash: Settings.sudt_cell_type_hash, hash_type: "data") - create(:type_script, args: udt_script.args, code_hash: Settings.sudt_cell_type_hash, hash_type: "data") - create(:type_script, args: udt_script.args, code_hash: Settings.sudt_cell_type_hash, hash_type: "data") - create(:type_script, args: udt_script1.args, code_hash: Settings.sudt_cell_type_hash, hash_type: "data") - create(:type_script, args: udt_script1.args, code_hash: Settings.sudt_cell_type_hash, hash_type: "data") + create(:cell_output, ckb_transaction: tx5, + block: block2, + capacity: 60000 * 10**8, + tx_hash: tx5.tx_hash, + cell_index: 0, + address: input_address5, + cell_type: "udt", + lock_script_id: lock5.id, + type_hash: udt_script.compute_hash) + + create(:type_script, args: udt_script.args, + code_hash: Settings.sudt_cell_type_hash, + hash_type: "data") + create(:type_script, args: udt_script.args, + code_hash: Settings.sudt_cell_type_hash, + hash_type: "data") + create(:type_script, args: udt_script.args, + code_hash: Settings.sudt_cell_type_hash, + hash_type: "data") + create(:type_script, args: udt_script1.args, + code_hash: Settings.sudt_cell_type_hash, + hash_type: "data") + create(:type_script, args: udt_script1.args, + code_hash: Settings.sudt_cell_type_hash, + hash_type: "data") Address.create(lock_hash: udt_script.args, address_hash: "0x#{SecureRandom.hex(32)}") Address.create(lock_hash: udt_script1.args, address_hash: "0x#{SecureRandom.hex(32)}") - header = CKB::Types::BlockHeader.new(compact_target: "0x1000", hash: "0x#{SecureRandom.hex(32)}", - number: DEFAULT_NODE_BLOCK_NUMBER, parent_hash: "0x#{SecureRandom.hex(32)}", nonce: 1757392074788233522, timestamp: CkbUtils.time_in_milliseconds(Time.current), transactions_root: "0x#{SecureRandom.hex(32)}", proposals_hash: "0x#{SecureRandom.hex(32)}", extra_hash: "0x#{SecureRandom.hex(32)}", version: 0, epoch: 1, dao: "0x01000000000000000000c16ff286230000a3a65e97fd03000057c138586f0000") + header = CKB::Types::BlockHeader.new( + compact_target: "0x1000", + hash: "0x#{SecureRandom.hex(32)}", + number: DEFAULT_NODE_BLOCK_NUMBER, + parent_hash: 
"0x#{SecureRandom.hex(32)}", + nonce: 1757392074788233522, + timestamp: CkbUtils.time_in_milliseconds(Time.current), + transactions_root: "0x#{SecureRandom.hex(32)}", + proposals_hash: "0x#{SecureRandom.hex(32)}", + extra_hash: "0x#{SecureRandom.hex(32)}", + version: 0, + epoch: 1, + dao: "0x01000000000000000000c16ff286230000a3a65e97fd03000057c138586f0000" + ) inputs = [ CKB::Types::Input.new(previous_output: CKB::Types::OutPoint.new(tx_hash: tx1.tx_hash, index: 0)), # nervos_dao_withdrawing cell CKB::Types::Input.new(previous_output: CKB::Types::OutPoint.new(tx_hash: tx2.tx_hash, index: 1)), # nervos_dao_withdrawing cell @@ -2684,12 +2821,21 @@ class NodeDataProcessorTest < ActiveSupport::TestCase CKB::Types::Input.new(previous_output: CKB::Types::OutPoint.new(tx_hash: tx4.tx_hash, index: 0)), # nervos_dao_withdrawing cell CKB::Types::Input.new(previous_output: CKB::Types::OutPoint.new(tx_hash: tx5.tx_hash, index: 0)) # udt cell ] - lock1 = CKB::Types::Script.new(code_hash: Settings.secp_cell_type_hash, hash_type: "type", - args: "0x#{SecureRandom.hex(20)}") - lock2 = CKB::Types::Script.new(code_hash: Settings.secp_cell_type_hash, hash_type: "type", - args: "0x#{SecureRandom.hex(20)}") - lock3 = CKB::Types::Script.new(code_hash: Settings.secp_cell_type_hash, hash_type: "type", - args: "0x#{SecureRandom.hex(20)}") + lock1 = CKB::Types::Script.new( + code_hash: Settings.secp_cell_type_hash, + hash_type: "type", + args: "0x#{SecureRandom.hex(20)}" + ) + lock2 = CKB::Types::Script.new( + code_hash: Settings.secp_cell_type_hash, + hash_type: "type", + args: "0x#{SecureRandom.hex(20)}" + ) + lock3 = CKB::Types::Script.new( + code_hash: Settings.secp_cell_type_hash, + hash_type: "type", + args: "0x#{SecureRandom.hex(20)}" + ) outputs = [ CKB::Types::Output.new(capacity: 50000 * 10**8, lock: lock1), @@ -2701,8 +2847,11 @@ class NodeDataProcessorTest < ActiveSupport::TestCase CKB::Types::Output.new(capacity: 60000 * 10**8, lock: lock2), CKB::Types::Output.new(capacity: 40000 * 10**8, lock: lock3) ] - miner_lock = CKB::Types::Script.new(code_hash: Settings.secp_cell_type_hash, hash_type: "type", - args: "0x#{SecureRandom.hex(20)}") + miner_lock = CKB::Types::Script.new( + code_hash: Settings.secp_cell_type_hash, + hash_type: "type", + args: "0x#{SecureRandom.hex(20)}" + ) cellbase_inputs = [ CKB::Types::Input.new( previous_output: CKB::Types::OutPoint.new( @@ -2714,14 +2863,47 @@ class NodeDataProcessorTest < ActiveSupport::TestCase CKB::Types::Output.new(capacity: 200986682127, lock: miner_lock) ] transactions = [ - CKB::Types::Transaction.new(hash: "0x#{SecureRandom.hex(32)}", cell_deps: [], header_deps: [], - inputs: cellbase_inputs, outputs: cellbase_outputs, outputs_data: %w[0x], witnesses: ["0x590000000c00000055000000490000001000000030000000310000009bd7e06f3ecf4be0f2fcd2188b23f1b9fcc88e5d4b65a8637b17723bbda3cce801140000003954acece65096bfa81258983ddb83915fc56bd800000000"]), - CKB::Types::Transaction.new(hash: "0x#{SecureRandom.hex(32)}", cell_deps: [], header_deps: [block1.block_hash], - inputs: inputs, outputs: outputs, outputs_data: %w[0x 0x 0x], witnesses: ["0x5d0000000c00000055000000490000001000000030000000310000009bd7e06f3ecf4be0f2fcd2188b23f1b9fcc88e5d4b65a8637b17723bbda3cce801140000003954acece65096bfa81258983ddb83915fc56bd804000000123456780000000000000000"]), - CKB::Types::Transaction.new(hash: "0x#{SecureRandom.hex(32)}", cell_deps: [], header_deps: [block1.block_hash], - inputs: inputs1, outputs: outputs1, outputs_data: %w[0x 0x 0x], witnesses: 
["0x5d0000000c00000055000000490000001000000030000000310000009bd7e06f3ecf4be0f2fcd2188b23f1b9fcc88e5d4b65a8637b17723bbda3cce801140000003954acece65096bfa81258983ddb83915fc56bd804000000123456780000000000000000"]) + CKB::Types::Transaction.new( + hash: "0x#{SecureRandom.hex(32)}", + cell_deps: [], + header_deps: [], + inputs: cellbase_inputs, + outputs: cellbase_outputs, + outputs_data: %w[0x], + witnesses: [ + "0x590000000c00000055000000490000001000000030000000310000009bd7e06f3ecf4be0f2fcd2188b23f1b9fcc88e5d4b65a8637b17723bbda3cce801140000003954acece65096bfa81258983ddb83915fc56bd800000000" + ] + ), + CKB::Types::Transaction.new( + hash: "0x#{SecureRandom.hex(32)}", + cell_deps: [], + header_deps: [block1.block_hash], + inputs: inputs, + outputs: outputs, + outputs_data: %w[0x 0x 0x], + witnesses: [ + "0x5d0000000c00000055000000490000001000000030000000310000009bd7e06f3ecf4be0f2fcd2188b23f1b9fcc88e5d4b65a8637b17723bbda3cce801140000003954acece65096bfa81258983ddb83915fc56bd804000000123456780000000000000000" + ] + ), + CKB::Types::Transaction.new( + hash: "0x#{SecureRandom.hex(32)}", + cell_deps: [], + header_deps: [block1.block_hash], + inputs: inputs1, + outputs: outputs1, + outputs_data: %w[0x 0x 0x], + witnesses: [ + "0x5d0000000c00000055000000490000001000000030000000310000009bd7e06f3ecf4be0f2fcd2188b23f1b9fcc88e5d4b65a8637b17723bbda3cce801140000003954acece65096bfa81258983ddb83915fc56bd804000000123456780000000000000000" + ] + ) ] - node_block = CKB::Types::Block.new(uncles: [], proposals: [], transactions: transactions, header: header) + node_block = CKB::Types::Block.new( + uncles: [], + proposals: [], + transactions: transactions, + header: header + ) + block = node_data_processor.process_block(node_block) tx = block.ckb_transactions.where(is_cellbase: false).first @@ -3267,10 +3449,20 @@ class NodeDataProcessorTest < ActiveSupport::TestCase ckb_transaction2 = create(:ckb_transaction, tx_hash: "0x598315db9c7ba144cca74d2e9122ac9b3a3da1641b2975ae321d91ec34f1c0e3", block: block) lock = create(:lock_script) - create(:cell_output, ckb_transaction: ckb_transaction1, cell_index: 1, - tx_hash: "0x498315db9c7ba144cca74d2e9122ac9b3a3da1641b2975ae321d91ec34f1c0e3", block: block, capacity: 10**8 * 1000, address: address, lock_script_id: lock.id) - create(:cell_output, ckb_transaction: ckb_transaction2, cell_index: 2, - tx_hash: "0x598315db9c7ba144cca74d2e9122ac9b3a3da1641b2975ae321d91ec34f1c0e3", block: block, capacity: 10**8 * 1000, address: address, lock_script_id: lock.id) + create(:cell_output, ckb_transaction: ckb_transaction1, + cell_index: 1, + tx_hash: "0x498315db9c7ba144cca74d2e9122ac9b3a3da1641b2975ae321d91ec34f1c0e3", + block: block, + capacity: 10**8 * 1000, + address: address, + lock_script_id: lock.id) + create(:cell_output, ckb_transaction: ckb_transaction2, + cell_index: 2, + tx_hash: "0x598315db9c7ba144cca74d2e9122ac9b3a3da1641b2975ae321d91ec34f1c0e3", + block: block, + capacity: 10**8 * 1000, + address: address, + lock_script_id: lock.id) tx1 = node_block.transactions.first output1 = tx1.outputs.first output1.type = CKB::Types::Script.new(args: "0xb2e61ff569acf041b3c2c17724e2379c581eeac3", hash_type: "type", @@ -3289,18 +3481,33 @@ class NodeDataProcessorTest < ActiveSupport::TestCase address = create(:address) block = create(:block, :with_block_hash) ckb_transaction1 = create(:ckb_transaction, - tx_hash: "0x498315db9c7ba144cca74d2e9122ac9b3a3da1641b2975ae321d91ec34f1c0e3", block: block) + tx_hash: "0x498315db9c7ba144cca74d2e9122ac9b3a3da1641b2975ae321d91ec34f1c0e3", + block: block) 
ckb_transaction2 = create(:ckb_transaction, - tx_hash: "0x598315db9c7ba144cca74d2e9122ac9b3a3da1641b2975ae321d91ec34f1c0e3", block: block) + tx_hash: "0x598315db9c7ba144cca74d2e9122ac9b3a3da1641b2975ae321d91ec34f1c0e3", + block: block) lock = create(:lock_script) - create(:cell_output, ckb_transaction: ckb_transaction1, cell_index: 1, - tx_hash: "0x498315db9c7ba144cca74d2e9122ac9b3a3da1641b2975ae321d91ec34f1c0e3", block: block, capacity: 10**8 * 1000, address: address, lock_script_id: lock.id) - create(:cell_output, ckb_transaction: ckb_transaction2, cell_index: 2, - tx_hash: "0x598315db9c7ba144cca74d2e9122ac9b3a3da1641b2975ae321d91ec34f1c0e3", block: block, capacity: 10**8 * 1000, address: address, lock_script_id: lock.id) + create(:cell_output, ckb_transaction: ckb_transaction1, + cell_index: 1, + tx_hash: "0x498315db9c7ba144cca74d2e9122ac9b3a3da1641b2975ae321d91ec34f1c0e3", + block: block, + capacity: 10**8 * 1000, + address: address, + lock_script_id: lock.id) + create(:cell_output, ckb_transaction: ckb_transaction2, + cell_index: 2, + tx_hash: "0x598315db9c7ba144cca74d2e9122ac9b3a3da1641b2975ae321d91ec34f1c0e3", + block: block, + capacity: 10**8 * 1000, + address: address, + lock_script_id: lock.id) tx1 = node_block.transactions.first output1 = tx1.outputs.first - output1.type = CKB::Types::Script.new(args: "0xb2e61ff569acf041b3c2c17724e2379c581eeac3", hash_type: "type", - code_hash: Settings.dao_type_hash) + output1.type = CKB::Types::Script.new( + args: "0xb2e61ff569acf041b3c2c17724e2379c581eeac3", + hash_type: "type", + code_hash: Settings.dao_type_hash + ) output1.capacity = 10**8 * 1000 tx1.outputs << output1 tx1.outputs_data << CKB::Utils.bin_to_hex("\x00" * 8) @@ -3308,31 +3515,61 @@ class NodeDataProcessorTest < ActiveSupport::TestCase test "should update nrc factory cell info" do old_factory_cell = create(:nrc_factory_cell, verified: true) - factory_cell_script = CKB::Types::Script.new(code_hash: old_factory_cell.code_hash, hash_type: "type", - args: old_factory_cell.args) - type_script1 = create(:type_script, args: factory_cell_script.args, code_hash: factory_cell_script.code_hash, + factory_cell_script = CKB::Types::Script.new( + code_hash: old_factory_cell.code_hash, + hash_type: "type", + args: old_factory_cell.args + ) + type_script1 = create(:type_script, args: factory_cell_script.args, + code_hash: factory_cell_script.code_hash, hash_type: "type") block1 = create(:block, :with_block_hash, number: DEFAULT_NODE_BLOCK_NUMBER - 1) tx1 = create(:ckb_transaction, block: block1) input_address1 = create(:address) - address1_lock = create(:lock_script, address_id: input_address1.id, args: "0x#{SecureRandom.hex(20)}", - code_hash: Settings.secp_cell_type_hash, hash_type: "type") - output1 = create(:cell_output, ckb_transaction: tx1, block: block1, capacity: 50000 * 10**8, - tx_hash: tx1.tx_hash, cell_index: 0, address: input_address1, cell_type: "nrc_721_factory", lock_script_id: address1_lock.id, type_script_id: type_script1.id) + address1_lock = create(:lock_script, address_id: input_address1.id, + args: "0x#{SecureRandom.hex(20)}", + code_hash: Settings.secp_cell_type_hash, + hash_type: "type") + output1 = create(:cell_output, ckb_transaction: tx1, + block: block1, capacity: 50000 * 10**8, + tx_hash: tx1.tx_hash, + cell_index: 0, + address: input_address1, + cell_type: "nrc_721_factory", + lock_script_id: address1_lock.id, + type_script_id: type_script1.id) output1.update(type_hash: CKB::Types::Script.new(**output1.type_script.to_node).compute_hash) - lock1 = 
CKB::Types::Script.new(code_hash: Settings.secp_cell_type_hash, hash_type: "type", - args: address1_lock.args) + lock1 = CKB::Types::Script.new( + code_hash: Settings.secp_cell_type_hash, + hash_type: "type", + args: address1_lock.args + ) - header = CKB::Types::BlockHeader.new(compact_target: "0x1000", hash: "0x#{SecureRandom.hex(32)}", - number: DEFAULT_NODE_BLOCK_NUMBER, parent_hash: "0x#{SecureRandom.hex(32)}", nonce: 1757392074788233522, timestamp: CkbUtils.time_in_milliseconds(Time.current), transactions_root: "0x#{SecureRandom.hex(32)}", proposals_hash: "0x#{SecureRandom.hex(32)}", extra_hash: "0x#{SecureRandom.hex(32)}", version: 0, epoch: 1, dao: "0x01000000000000000000c16ff286230000a3a65e97fd03000057c138586f0000") + header = CKB::Types::BlockHeader.new( + compact_target: "0x1000", + hash: "0x#{SecureRandom.hex(32)}", + number: DEFAULT_NODE_BLOCK_NUMBER, + parent_hash: "0x#{SecureRandom.hex(32)}", + nonce: 1757392074788233522, + timestamp: CkbUtils.time_in_milliseconds(Time.current), + transactions_root: "0x#{SecureRandom.hex(32)}", + proposals_hash: "0x#{SecureRandom.hex(32)}", + extra_hash: "0x#{SecureRandom.hex(32)}", + version: 0, + epoch: 1, + dao: "0x01000000000000000000c16ff286230000a3a65e97fd03000057c138586f0000" + ) inputs = [ CKB::Types::Input.new(previous_output: CKB::Types::OutPoint.new(tx_hash: tx1.tx_hash, index: 0)) ] outputs = [ CKB::Types::Output.new(capacity: 40000 * 10**8, lock: lock1, type: factory_cell_script) ] - miner_lock = CKB::Types::Script.new(code_hash: Settings.secp_cell_type_hash, hash_type: "type", - args: "0x#{SecureRandom.hex(20)}") + miner_lock = CKB::Types::Script.new( + code_hash: Settings.secp_cell_type_hash, + hash_type: "type", + args: "0x#{SecureRandom.hex(20)}" + ) cellbase_inputs = [ CKB::Types::Input.new( previous_output: CKB::Types::OutPoint.new( @@ -3361,7 +3598,7 @@ def node_data_processor end def fake_dao_withdraw_transaction(node_block) - block = create(:block, :with_block_hash) + block = create(:block, :with_block_hash, timestamp: 1557382351075) ckb_transaction1 = create(:ckb_transaction, tx_hash: "0x498315db9c7ba144cca74d2e9122ac9b3a3da1641b2975ae321d91ec34f1c0e3", block: block) ckb_transaction2 = create(:ckb_transaction, @@ -3395,7 +3632,7 @@ def fake_dao_withdraw_transaction(node_block) end def fake_dao_deposit_transaction(node_block) - block = create(:block, :with_block_hash) + block = create(:block, :with_block_hash, timestamp: 1557382351075) lock = create(:lock_script) ckb_transaction1 = create(:ckb_transaction, tx_hash: "0x498315db9c7ba144cca74d2e9122ac9b3a3da1641b2975ae321d91ec34f1c0e3", block: block) diff --git a/test/models/ckb_transaction_test.rb b/test/models/ckb_transaction_test.rb index a57b437f4..384fc95d0 100644 --- a/test/models/ckb_transaction_test.rb +++ b/test/models/ckb_transaction_test.rb @@ -16,8 +16,7 @@ class CkbTransactionTest < ActiveSupport::TestCase # should belong_to(:block, required: false) should have_many(:account_books) should have_many(:referring_cells) - should have_many(:addresses). - through(:account_books) + should have_many(:addresses).through(:account_books) should have_many(:cell_inputs) should have_many(:cell_outputs) should have_many(:script_transactions) @@ -131,7 +130,7 @@ class CkbTransactionTest < ActiveSupport::TestCase :with_multiple_inputs_and_outputs) expected_tx_hashes = ckb_transaction.cell_inputs.map(&:previous_cell_output).map(&:ckb_transaction).map(&:tx_hash).sort - assert_equal expected_tx_hashes, + assert_equal expected_tx_hashes.map { |t| t.nil? ? 
"" : t }, ckb_transaction.display_inputs.pluck(:generated_tx_hash).sort end @@ -249,10 +248,9 @@ class CkbTransactionTest < ActiveSupport::TestCase tx_hash: "0xf9aca16b49c7d037920ad9e5aecdac272412a5fbe0396f7d95b112bf790dd39f") create(:cell_input, block: phase1_transaction.block, ckb_transaction: phase1_transaction, - previous_output: { - index: 0, - tx_hash: "0xe8a116ec65f7d2d0d4748ba2bbcf8691cbd31202908ccfa3a975414fef801042" - }, + index: 0, + previous_tx_hash: "0xe8a116ec65f7d2d0d4748ba2bbcf8691cbd31202908ccfa3a975414fef801042", + previous_index: 0, from_cell_base: false) nervos_dao_withdrawing_cell = create(:cell_output, ckb_transaction: phase1_transaction, block: phase1_transaction.block, @@ -309,7 +307,7 @@ class CkbTransactionTest < ActiveSupport::TestCase expected_display_output = CkbUtils.hash_value_to_s(id: dao_output.id, capacity: dao_output.capacity, address_hash: dao_output.address_hash, status: dao_output.status, consumed_tx_hash: consumed_tx_hash, cell_type: dao_output.cell_type).sort display_outputs = ckb_transaction.display_outputs - assert_equal expected_attributes, display_outputs.first.keys.sort + assert_equal expected_attributes - display_outputs.first.keys, [] assert_equal expected_display_output, display_outputs.first.sort end @@ -331,11 +329,10 @@ class CkbTransactionTest < ActiveSupport::TestCase create(:udt, code_hash: type_script.code_hash) cell_input = ckb_transaction.cell_inputs.first - cell_input.update(previous_output: { - "tx_hash": udt_input_transaction.tx_hash, "index": "0" }) + cell_input.update(previous_tx_hash: udt_input_transaction.tx_hash, index: 0) expected_attributes = %i( id from_cellbase capacity address_hash - generated_tx_hash udt_info cell_index cell_type since extra_info + generated_tx_hash cell_index cell_type since extra_info ).sort expected_udt_attributes = %i( symbol amount decimal type_hash published @@ -349,15 +346,16 @@ class CkbTransactionTest < ActiveSupport::TestCase generated_tx_hash: udt_cell_output.ckb_transaction.tx_hash, cell_index: udt_cell_output.cell_index, cell_type: udt_cell_output.cell_type, - udt_info: udt_cell_output.udt_info, since: { raw: "0x0000000000000000", median_timestamp: "0" }, extra_info: udt_cell_output.udt_info ) display_inputs = ckb_transaction.display_inputs - assert_equal expected_attributes, display_inputs.first.keys.sort + o = display_inputs.first + assert_equal expected_attributes - o.keys, [] assert_equal expected_udt_attributes, - display_inputs.first[:udt_info].keys.sort - assert_equal expected_display_input, display_inputs.first + o[:extra_info].keys.sort + o.delete(:udt_info) + assert_equal expected_display_input, o end test "#display_outputs should contain udt attributes for udt transaction" do @@ -375,7 +373,7 @@ class CkbTransactionTest < ActiveSupport::TestCase expected_attributes = %i( id capacity address_hash status consumed_tx_hash - cell_type udt_info extra_info + cell_type extra_info ).sort expected_udt_attributes = %i( symbol amount decimal type_hash published @@ -388,16 +386,14 @@ class CkbTransactionTest < ActiveSupport::TestCase status: udt_cell_output.status, consumed_tx_hash: nil, cell_type: udt_cell_output.cell_type, - udt_info: udt_cell_output.udt_info, extra_info: udt_cell_output.udt_info ) - - assert_equal expected_attributes, - udt_output_transaction.display_outputs.first.keys.sort + o = udt_output_transaction.display_outputs.first + assert_equal expected_attributes - o.keys, [] assert_equal expected_udt_attributes, - 
udt_output_transaction.display_outputs.first[:udt_info].keys.sort - assert_equal expected_display_input, - udt_output_transaction.display_outputs.first + o[:extra_info].keys.sort + o.delete(:udt_info) + assert_equal expected_display_input, o end test "#display_inputs should contain m_nft_issuer info for m_nft_issuer transaction" do @@ -416,8 +412,7 @@ class CkbTransactionTest < ActiveSupport::TestCase type_hash: "0x") cell_input = ckb_transaction.cell_inputs.first - cell_input.update(previous_output: { - "tx_hash": m_nft_input_transaction.tx_hash, "index": "0" }) + cell_input.update(previous_tx_hash: m_nft_input_transaction.tx_hash, index: 0) expected_attributes = %i( id from_cellbase capacity address_hash generated_tx_hash m_nft_info cell_index cell_type since extra_info @@ -431,15 +426,16 @@ class CkbTransactionTest < ActiveSupport::TestCase generated_tx_hash: m_nft_cell_output.ckb_transaction.tx_hash, cell_index: m_nft_cell_output.cell_index, cell_type: m_nft_cell_output.cell_type, - m_nft_info: m_nft_cell_output.m_nft_info, since: { raw: "0x0000000000000000", median_timestamp: "0" }, extra_info: m_nft_cell_output.m_nft_info ) display_inputs = ckb_transaction.display_inputs - assert_equal expected_attributes, display_inputs.first.keys.sort + o = display_inputs.first + assert_equal expected_attributes - o.keys, [] assert_equal expected_m_nft_attributes, - display_inputs.first[:m_nft_info].keys.sort - assert_equal expected_display_input, display_inputs.first + o[:extra_info].keys.sort + o.delete(:m_nft_info) + assert_equal expected_display_input, o end test "#display_inputs should contain m_nft_class info for m_nft_class transaction" do @@ -458,11 +454,10 @@ class CkbTransactionTest < ActiveSupport::TestCase type_hash: "0x") cell_input = ckb_transaction.cell_inputs.first - cell_input.update(previous_output: { - "tx_hash": m_nft_input_transaction.tx_hash, "index": "0" }) + cell_input.update(previous_tx_hash: m_nft_input_transaction.tx_hash, index: 0) expected_attributes = %i( id from_cellbase capacity address_hash - generated_tx_hash m_nft_info cell_index cell_type since extra_info + generated_tx_hash cell_index cell_type since extra_info ).sort expected_m_nft_attributes = %i(class_name total).sort expected_display_input = CkbUtils.hash_value_to_s( @@ -473,15 +468,16 @@ class CkbTransactionTest < ActiveSupport::TestCase generated_tx_hash: m_nft_cell_output.ckb_transaction.tx_hash, cell_index: m_nft_cell_output.cell_index, cell_type: m_nft_cell_output.cell_type, - m_nft_info: m_nft_cell_output.m_nft_info, since: { raw: "0x0000000000000000", median_timestamp: "0" }, extra_info: m_nft_cell_output.m_nft_info ) display_inputs = ckb_transaction.display_inputs - assert_equal expected_attributes, display_inputs.first.keys.sort + o = display_inputs.first + assert_equal expected_attributes - o.keys, [] assert_equal expected_m_nft_attributes, - display_inputs.first[:m_nft_info].keys.sort - assert_equal expected_display_input, display_inputs.first + o[:extra_info].keys.sort + o.delete(:m_nft_info) + assert_equal expected_display_input, o end test "#display_inputs should contain m_nft_token info for m_nft_token transaction" do @@ -518,11 +514,10 @@ class CkbTransactionTest < ActiveSupport::TestCase type_hash: "0x", type_script_id: type_script1.id) cell_input = ckb_transaction.cell_inputs.first - cell_input.update(previous_output: { - "tx_hash": m_nft_input_transaction.tx_hash, "index": "0" }) + cell_input.update(previous_tx_hash: m_nft_input_transaction.tx_hash, previous_index: 0) expected_attributes = 
%i( id from_cellbase capacity address_hash - generated_tx_hash m_nft_info cell_index cell_type since extra_info + generated_tx_hash cell_index cell_type since extra_info ).sort expected_m_nft_attributes = %i(class_name token_id total).sort expected_display_input = CkbUtils.hash_value_to_s( @@ -533,7 +528,6 @@ class CkbTransactionTest < ActiveSupport::TestCase generated_tx_hash: m_nft_cell_output.ckb_transaction.tx_hash, cell_index: m_nft_cell_output.cell_index, cell_type: m_nft_cell_output.cell_type, - m_nft_info: m_nft_cell_output.m_nft_info, since: { raw: "0x0000000000000000", median_timestamp: "0" @@ -541,11 +535,11 @@ class CkbTransactionTest < ActiveSupport::TestCase extra_info: m_nft_cell_output.m_nft_info ) display_inputs = ckb_transaction.display_inputs - - assert_equal expected_attributes, display_inputs.first.keys.sort - assert_equal expected_m_nft_attributes, - display_inputs.first[:m_nft_info].keys.sort - assert_equal expected_display_input, display_inputs.first + o = display_inputs.first + assert_equal expected_attributes - o.keys, [] + assert_equal expected_m_nft_attributes, o[:extra_info].keys.sort + o.delete(:m_nft_info) + assert_equal expected_display_input, o end test "#display_outputs should contain m_nft_issuer info for m_nft_issuer transaction" do @@ -557,7 +551,7 @@ class CkbTransactionTest < ActiveSupport::TestCase expected_attributes = %i( id capacity address_hash status consumed_tx_hash - cell_type m_nft_info extra_info + cell_type extra_info ).sort expected_m_nft_attributes = %i(issuer_name).sort expected_display_output = CkbUtils.hash_value_to_s( @@ -567,16 +561,13 @@ class CkbTransactionTest < ActiveSupport::TestCase status: m_nft_cell_output.status, consumed_tx_hash: nil, cell_type: m_nft_cell_output.cell_type, - m_nft_info: m_nft_cell_output.m_nft_info, extra_info: m_nft_cell_output.m_nft_info ) - - assert_equal expected_attributes, - m_nft_output_transaction.display_outputs.first.keys.sort - assert_equal expected_m_nft_attributes, - m_nft_output_transaction.display_outputs.first[:m_nft_info].keys.sort - assert_equal expected_display_output, - m_nft_output_transaction.display_outputs.first + o = m_nft_output_transaction.display_outputs.first + assert_equal expected_attributes - o.keys, [] + assert_equal expected_m_nft_attributes, o[:extra_info].keys.sort + o.delete(:m_nft_info) + assert_equal expected_display_output, o end test "#display_outputs should contain m_nft_class info for m_nft_class transaction" do @@ -588,7 +579,7 @@ class CkbTransactionTest < ActiveSupport::TestCase expected_attributes = %i( id capacity address_hash status consumed_tx_hash - cell_type m_nft_info extra_info + cell_type extra_info ).sort expected_m_nft_attributes = %i(class_name total).sort expected_display_output = CkbUtils.hash_value_to_s( @@ -598,16 +589,13 @@ class CkbTransactionTest < ActiveSupport::TestCase status: m_nft_cell_output.status, consumed_tx_hash: nil, cell_type: m_nft_cell_output.cell_type, - m_nft_info: m_nft_cell_output.m_nft_info, extra_info: m_nft_cell_output.m_nft_info ) - - assert_equal expected_attributes, - m_nft_output_transaction.display_outputs.first.keys.sort - assert_equal expected_m_nft_attributes, - m_nft_output_transaction.display_outputs.first[:m_nft_info].keys.sort - assert_equal expected_display_output, - m_nft_output_transaction.display_outputs.first + o = m_nft_output_transaction.display_outputs.first + assert_equal expected_attributes - o.keys, [] + assert_equal expected_m_nft_attributes, o[:extra_info].keys.sort + o.delete(:m_nft_info) + 
assert_equal expected_display_output, o end test "#display_outputs should contain m_nft_token info for m_nft_token transaction" do @@ -615,11 +603,22 @@ class CkbTransactionTest < ActiveSupport::TestCase m_nft_output_transaction = create(:ckb_transaction, block: m_nft_output_block) type_script = create(:type_script, - code_hash: CkbSync::Api.instance.token_script_code_hash, hash_type: "type", args: "0x407c7ab0480a3ade9351e2107341dc99a1c111070000000500000004") - m_nft_cell_output = create(:cell_output, block: m_nft_output_block, - ckb_transaction: m_nft_output_transaction, cell_type: "m_nft_token", cell_index: 0, tx_hash: m_nft_output_transaction.tx_hash, data: "0x000000000000000000c000", type_hash: "0x", type_script_id: type_script.id) + code_hash: CkbSync::Api.instance.token_script_code_hash, + hash_type: "type", + args: "0x407c7ab0480a3ade9351e2107341dc99a1c111070000000500000004") + m_nft_cell_output = create(:cell_output, + block: m_nft_output_block, + ckb_transaction: m_nft_output_transaction, + cell_type: "m_nft_token", + cell_index: 0, + tx_hash: m_nft_output_transaction.tx_hash, + data: "0x000000000000000000c000", + type_hash: "0x", + type_script_id: type_script.id) type_script1 = create(:type_script, - code_hash: CkbSync::Api.instance.token_class_script_code_hash, hash_type: "type", args: "0x407c7ab0480a3ade9351e2107341dc99a1c1110700000005") + code_hash: CkbSync::Api.instance.token_class_script_code_hash, + hash_type: "type", + args: "0x407c7ab0480a3ade9351e2107341dc99a1c1110700000005") create(:cell_output, block: m_nft_output_block, ckb_transaction: m_nft_output_transaction, consumed_by: m_nft_output_transaction, @@ -631,8 +630,9 @@ class CkbTransactionTest < ActiveSupport::TestCase type_hash: "0x", type_script_id: type_script1.id) expected_attributes = %i( - id capacity address_hash status consumed_tx_hash - cell_type m_nft_info extra_info + id capacity address_hash + status consumed_tx_hash + cell_type extra_info ).sort expected_m_nft_attributes = %i(class_name token_id total).sort expected_display_output = CkbUtils.hash_value_to_s( @@ -642,14 +642,14 @@ class CkbTransactionTest < ActiveSupport::TestCase status: m_nft_cell_output.status, consumed_tx_hash: nil, cell_type: m_nft_cell_output.cell_type, - m_nft_info: m_nft_cell_output.m_nft_info, extra_info: m_nft_cell_output.m_nft_info ) display_outputs = m_nft_output_transaction.display_outputs - assert_equal expected_attributes, display_outputs.first.keys.sort - assert_equal expected_m_nft_attributes, - display_outputs.first[:m_nft_info].keys.sort - assert_equal expected_display_output, display_outputs.first + o = display_outputs.first + assert_equal expected_attributes - o.keys, [] + assert_equal expected_m_nft_attributes - o[:extra_info].keys, [] + o.delete(:m_nft_info) + assert_equal expected_display_output, o end test "#display_outputs should contain nrc_721_token info for nrc_721_token transaction" do @@ -658,24 +658,46 @@ class CkbTransactionTest < ActiveSupport::TestCase block: nrc_721_token_output_block) nrc_factory_cell = create(:nrc_factory_cell, - code_hash: "0x00000000000000000000000000000000000000000000000000545950455f4944013620e2ced53373c5b55c5cef79b7fd0a875c60a70382a9e9664fe28e0bb345ab22c70f8e24a90dcccc7eb1ea669ac6cfecab095a1886af01d71612fdb3c836c8", args: "0x3620e2ced53373c5b55c5cef79b7fd0a875c60a70382a9e9664fe28e0bb345ab", verified: true) + code_hash: 
"0x00000000000000000000000000000000000000000000000000545950455f4944013620e2ced53373c5b55c5cef79b7fd0a875c60a70382a9e9664fe28e0bb345ab22c70f8e24a90dcccc7eb1ea669ac6cfecab095a1886af01d71612fdb3c836c8", + args: "0x3620e2ced53373c5b55c5cef79b7fd0a875c60a70382a9e9664fe28e0bb345ab", + verified: true) nrc_721_factory_type_script = create(:type_script, - code_hash: nrc_factory_cell.code_hash, hash_type: "type", args: nrc_factory_cell.args) + code_hash: nrc_factory_cell.code_hash, + hash_type: "type", + args: nrc_factory_cell.args) nrc_721_factory_cell_output = create(:cell_output, - block: nrc_721_token_output_block, ckb_transaction: nrc_721_token_output_transaction, cell_type: "nrc_721_factory", cell_index: 1, tx_hash: nrc_721_token_output_transaction.tx_hash, data: "0x24ff5a9ab8c38d195ce2b4ea75ca8987000a47616d62697420317374000000156465762e6b6f6c6c6563742e6d652f746f6b656e73000000000000003c000000000000000000", type_hash: "0x", type_script_id: nrc_721_factory_type_script.id) + block: nrc_721_token_output_block, + ckb_transaction: nrc_721_token_output_transaction, + cell_type: "nrc_721_factory", + cell_index: 1, + tx_hash: nrc_721_token_output_transaction.tx_hash, + data: "0x24ff5a9ab8c38d195ce2b4ea75ca8987000a47616d62697420317374000000156465762e6b6f6c6c6563742e6d652f746f6b656e73000000000000003c000000000000000000", + type_hash: "0x", + type_script_id: nrc_721_factory_type_script.id) nrc_721_token_type_script = create(:type_script, - code_hash: "0x#{SecureRandom.hex(32)}", hash_type: "type", args: "0x00000000000000000000000000000000000000000000000000545950455f4944013620e2ced53373c5b55c5cef79b7fd0a875c60a70382a9e9664fe28e0bb345ab22c70f8e24a90dcccc7eb1ea669ac6cfecab095a1886af01d71612fdb3c836c8") + code_hash: "0x#{SecureRandom.hex(32)}", + hash_type: "type", + args: "0x00000000000000000000000000000000000000000000000000545950455f4944013620e2ced53373c5b55c5cef79b7fd0a875c60a70382a9e9664fe28e0bb345ab22c70f8e24a90dcccc7eb1ea669ac6cfecab095a1886af01d71612fdb3c836c8") nrc_721_token_cell_output = create(:cell_output, - block: nrc_721_token_output_block, ckb_transaction: nrc_721_token_output_transaction, cell_type: "nrc_721_token", cell_index: 0, tx_hash: nrc_721_token_output_transaction.tx_hash, data: "0x0ddeff3e8ee03cbf6a2c6920d05c381e", type_hash: "0x", type_script_id: nrc_721_token_type_script.id) + block: nrc_721_token_output_block, + ckb_transaction: nrc_721_token_output_transaction, + cell_type: "nrc_721_token", + cell_index: 0, + tx_hash: nrc_721_token_output_transaction.tx_hash, + data: "0x0ddeff3e8ee03cbf6a2c6920d05c381e", + type_hash: "0x", + type_script_id: nrc_721_token_type_script.id) udt = create(:udt, type_hash: nrc_721_token_cell_output.type_hash, - udt_type: "nrc_721_token", nrc_factory_cell_id: nrc_factory_cell.id) + udt_type: "nrc_721_token", + nrc_factory_cell_id: nrc_factory_cell.id) address = create(:address) - udt_account = create(:udt_account, udt: udt, address: address, + udt_account = create(:udt_account, udt: udt, + address: address, nft_token_id: "22c70f8e24a90dcccc7eb1ea669ac6cfecab095a1886af01d71612fdb3c836c8") - factory_info = { symbol: "TTF" } - token_info = { symbol: "TTF", amount: udt_account.nft_token_id } + factory_info = { symbol: "TTF", amount: "", decimal: "", type_hash: "0x", published: true, display_name: "Test token factory", nan: "" } + token_info = { symbol: "TTF", amount: udt_account.nft_token_id, decimal: "6", type_hash: "0x", published: true, display_name: "kingdom fat coin", uan: ""} display_outputs = nrc_721_token_output_transaction.display_outputs assert_equal 
factory_info.to_a, display_outputs.first[:nrc_721_token_info].to_a diff --git a/test/models/pending_transaction_test.rb b/test/models/pending_transaction_test.rb new file mode 100644 index 000000000..77ea68a22 --- /dev/null +++ b/test/models/pending_transaction_test.rb @@ -0,0 +1,30 @@ +require "test_helper" + +class PendingTransactionTest < ActiveSupport::TestCase + setup do + CkbSync::Api.any_instance.stubs(:generate_json_rpc_id).returns(1) + end + + test "is_cellbase should always be false" do + tx = create(:pending_transaction) + assert_equal false, tx.is_cellbase + end + + test "#to_raw should return raw tx json structure" do + tx = create(:pending_transaction) + json = tx.to_raw + assert_equal %w(hash header_deps cell_deps inputs outputs outputs_data version witnesses).sort, + json.keys.map(&:to_s).sort + end + + test "should update_detailed_message_for_rejected_transaction when detailed_message is nil" do + rejected_tx_id = "0xed2049c21ffccfcd26281d60f8f77ff117adb9df9d3f8cbe5fe86e893c66d359" + tx = create :pending_transaction, tx_status: :rejected, tx_hash: rejected_tx_id + + VCR.use_cassette("get_rejected_transaction") do + PoolTransactionUpdateRejectReasonWorker.new.perform(rejected_tx_id) + end + tx.reload + assert tx.detailed_message.include?("Resolve failed Dead") + end +end diff --git a/test/models/pool_transaction_entry_test.rb b/test/models/pool_transaction_entry_test.rb deleted file mode 100644 index db5358168..000000000 --- a/test/models/pool_transaction_entry_test.rb +++ /dev/null @@ -1,37 +0,0 @@ -require "test_helper" - -class PoolTransactionEntryTest < ActiveSupport::TestCase - - setup do - CkbSync::Api.any_instance.stubs(:generate_json_rpc_id).returns(1) - end - - test "is_cellbase should always be false" do - tx = create(:pool_transaction_entry) - assert_equal false, tx.is_cellbase - end - - test "income should always be nil" do - tx = create(:pool_transaction_entry) - assert_nil tx.income - end - test "#to_raw should return raw tx json structure" do - tx = create(:pool_transaction_entry) - json = tx.to_raw - assert_equal %w(hash header_deps cell_deps inputs outputs outputs_data version witnesses).sort, json.keys.map(&:to_s).sort - end - - test "should update_detailed_message_for_rejected_transaction when detailed_message is nil" do - - rejected_tx_id = '0xed2049c21ffccfcd26281d60f8f77ff117adb9df9d3f8cbe5fe86e893c66d359' - tx = create :pool_transaction_entry, tx_status: :rejected, tx_hash: rejected_tx_id - - - VCR.use_cassette('get_rejected_transaction') do - tx.update_detailed_message_for_rejected_transaction - end - - assert tx.detailed_message.include?("Resolve failed Dead") - end - -end diff --git a/test/jobs/process_transaction_job_test.rb b/test/models/reject_reason_test.rb similarity index 58% rename from test/jobs/process_transaction_job_test.rb rename to test/models/reject_reason_test.rb index 33d13caa5..712608708 100644 --- a/test/jobs/process_transaction_job_test.rb +++ b/test/models/reject_reason_test.rb @@ -1,6 +1,6 @@ require "test_helper" -class ProcessTransactionJobTest < ActiveJob::TestCase +class RejectReasonTest < ActiveSupport::TestCase # test "the truth" do # assert true # end diff --git a/test/models/suggest_query_test.rb b/test/models/suggest_query_test.rb index 80316f139..02bb782ba 100644 --- a/test/models/suggest_query_test.rb +++ b/test/models/suggest_query_test.rb @@ -31,7 +31,8 @@ class SuggestQueryTest < ActiveSupport::TestCase address = create(:address, :with_lock_script) address.query_address = address.address_hash - assert_equal 
AddressSerializer.new(address).serialized_json, SuggestQuery.new(address.address_hash).find!.serialized_json + assert_equal AddressSerializer.new(address).serialized_json, + SuggestQuery.new(address.address_hash).find!.serialized_json end test "should raise BlockNotFoundError when query key is a block number that doesn't exist" do @@ -46,7 +47,8 @@ class SuggestQueryTest < ActiveSupport::TestCase create(:address, :with_lock_script) address = NullAddress.new("ckt1qyqrdsefa43s6m882pcj53m4gdnj4k440axqswmu83") - assert_equal AddressSerializer.new(address).serialized_json, SuggestQuery.new("ckt1qyqrdsefa43s6m882pcj53m4gdnj4k440axqswmu83").find!.serialized_json + assert_equal AddressSerializer.new(address).serialized_json, + SuggestQuery.new("ckt1qyqrdsefa43s6m882pcj53m4gdnj4k440axqswmu83").find!.serialized_json ENV["CKB_NET_MODE"] = "mainnet" end @@ -61,10 +63,4 @@ class SuggestQueryTest < ActiveSupport::TestCase SuggestQuery.new(udt.type_hash).find! end end - - test "should return pool tx when tx is in the pool" do - tx = create(:pool_transaction_entry) - expected_response = CkbTransactionSerializer.new(tx).serialized_json - assert_equal expected_response, SuggestQuery.new(tx.tx_hash).find!.serialized_json - end end diff --git a/test/tasks/migrations/update_cell_type_test.rb b/test/tasks/migrations/update_cell_type_test.rb new file mode 100644 index 000000000..a99ae37a2 --- /dev/null +++ b/test/tasks/migrations/update_cell_type_test.rb @@ -0,0 +1,60 @@ +require "test_helper" +require "rake" + +class UpdateCotaCellTypeTest < ActiveSupport::TestCase + setup do + CkbSync::Api.any_instance.stubs(:cota_registry_code_hash).returns(Settings.testnet_cota_registry_code_hash) + CkbSync::Api.any_instance.stubs(:cota_regular_code_hash).returns(Settings.testnet_cota_regular_code_hash) + CkbSync::Api.any_instance.stubs(:get_blockchain_info).returns(OpenStruct.new(chain: "ckb")) + Server::Application.load_tasks if Rake::Task.tasks.empty? 
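# The rake task this test drives, migration:update_cell_type, is not reproduced in
# this excerpt. The sketch below is only an illustration of what such a backfill
# could look like, assuming the COTA code hashes come from CkbSync::Api exactly as
# stubbed above and that cell inputs can be matched to the output they spend via
# previous_tx_hash / previous_index (both conventions appear elsewhere in this
# patch series); it is not the task the patch actually ships.

namespace :migration do
  task update_cell_type: :environment do
    {
      "cota_registry" => CkbSync::Api.instance.cota_registry_code_hash,
      "cota_regular" => CkbSync::Api.instance.cota_regular_code_hash
    }.each do |cell_type, code_hash|
      type_script_ids = TypeScript.where(code_hash: code_hash).pluck(:id)
      CellOutput.where(type_script_id: type_script_ids).find_each do |output|
        # retag the output itself ...
        output.update!(cell_type: cell_type)
        # ... and any input that spends it, so both sides agree on the cell type
        CellInput.where(previous_tx_hash: output.tx_hash,
                        previous_index: output.cell_index).find_each do |input|
          input.update!(cell_type: cell_type)
        end
      end
    end
  end
end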
+ end + + test "update registry cota cell type" do + tx_hash = "0x498315db9c7ba144cca74d2e9122ac9b3a3da1641b2975ae321d91ec34f1c0e3" + block = create(:block, :with_block_hash) + ckb_transaction = create(:ckb_transaction, block: block, tx_hash: tx_hash) + input_address = create(:address) + lock_script = create(:lock_script) + type_script = create(:type_script, code_hash: Settings.testnet_cota_registry_code_hash) + cell_output = create(:cell_output, ckb_transaction: ckb_transaction, block: block, + tx_hash: ckb_transaction.tx_hash, address: input_address, + lock_script: lock_script, type_script: type_script, + cell_index: 1) + cell_input = create(:cell_input, + block: ckb_transaction.block, + ckb_transaction: ckb_transaction, + previous_output: { + "tx_hash": tx_hash, + "index": "1" + }) + + Rake::Task["migration:update_cell_type"].execute + + assert_equal "cota_registry", cell_output.reload.cell_type + assert_equal "cota_registry", cell_input.reload.cell_type + end + + test "update regular cota cell type" do + tx_hash = "0x398315db9c7ba144cca74d2e9122ac9b3a3da1641b2975ae321d91ec34f1c0e2" + block = create(:block, :with_block_hash) + ckb_transaction = create(:ckb_transaction, block: block, tx_hash: tx_hash) + input_address = create(:address) + lock_script = create(:lock_script) + type_script = create(:type_script, code_hash: Settings.testnet_cota_regular_code_hash) + cell_output = create(:cell_output, ckb_transaction: ckb_transaction, block: block, + tx_hash: ckb_transaction.tx_hash, address: input_address, + lock_script: lock_script, type_script: type_script, + cell_index: 1) + cell_input = create(:cell_input, + block: ckb_transaction.block, + ckb_transaction: ckb_transaction, + previous_output: { + "tx_hash": tx_hash, + "index": "1" + }) + Rake::Task["migration:update_cell_type"].execute + + assert_equal "cota_regular", cell_output.reload.cell_type + assert_equal "cota_regular", cell_input.reload.cell_type + end +end diff --git a/test/tasks/migrations/update_token_transfer_test.rb b/test/tasks/migrations/update_token_transfer_test.rb new file mode 100644 index 000000000..c452ec80e --- /dev/null +++ b/test/tasks/migrations/update_token_transfer_test.rb @@ -0,0 +1,38 @@ +require "test_helper" +require "rake" + +class UpdateTokenTransferTest < ActiveSupport::TestCase + setup do + CotaAggregator.any_instance.stubs(:get_transactions_by_block_number).returns({ + "block_number" => 9939607, + "transactions" => [ + { + "block_number" => 9939607, + "cota_id" => "0x1e23dc506c1b15f286c9db84a4d12a4532660975", + "from" => "ckt1qzda0cr08m85hc8jlnfp3zer7xulejywt49kt2rr0vthywaa50xwsqwuwrenm6r0muupkn79huyjhv3aqfm5sqg5xwwyx", + "to" => "ckt1qzda0cr08m85hc8jlnfp3zer7xulejywt49kt2rr0vthywaa50xwsqfrkrvjpk2e7p6e90t9sc65ahf7wjhwzqq26rfzt", + "token_index" => "0x00000000", + "tx_hash" => "0xc938c9acf95a351c2de70494b1fabc22d625fd1664741535e1058e60d454738f", + "tx_type" => "transfer" + } + ] + }) + CkbSync::Api.any_instance.stubs(:get_blockchain_info).returns(OpenStruct.new(chain: "ckb")) + Server::Application.load_tasks if Rake::Task.tasks.empty? 
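# The migration this test is intended to exercise is likewise not shown in this
# excerpt. Purely as an illustration, a cota token-transfer backfill could be
# shaped roughly as below; the task name, the CotaAggregator constructor, the
# join/association names and the "transfer" => "normal" mapping are assumptions
# here, with only get_transactions_by_block_number (stubbed above) and the
# TokenCollection/TokenItem/TokenTransfer models taken from this series.

namespace :migration do
  task update_token_transfer_action: :environment do
    aggregator = CotaAggregator.new # construction details omitted in this sketch
    TokenTransfer.joins(item: :collection).
      where(token_collections: { standard: "cota" }).find_each do |transfer|
        tx = transfer.ckb_transaction
        txs = aggregator.get_transactions_by_block_number(tx.block_number)["transactions"]
        matched = txs.find { |t| t["tx_hash"] == tx.tx_hash }
        # tag plain cota transfers as "normal", mirroring the assertion below
        transfer.update!(action: "normal") if matched && matched["tx_type"] == "transfer"
      end
  end
end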
+ end + + test "update token transfer action" do + collection = create(:token_collection, standard: "cota", sn: "0x1e23dc506c1b15f286c9db84a4d12a4532660975") + to = "ckt1qzda0cr08m85hc8jlnfp3zer7xulejywt49kt2rr0vthywaa50xwsqfrkrvjpk2e7p6e90t9sc65ahf7wjhwzqq26rfzt" + address = create(:address, address_hash: to) + item = create(:token_item, collection: collection, token_id: 0, owner: address) + block = create(:block, number: 9939607) + tx = create(:ckb_transaction, block: block, + tx_hash: "0xc938c9acf95a351c2de70494b1fabc22d625fd1664741535e1058e60d454738f") + transfer = create(:token_transfer, item: item, ckb_transaction: tx) + + Rake::Task["migration:update_cell_type"].invoke + + assert_equal "normal", transfer.reload.action + end +end diff --git a/test/workers/pool_transaction_check_worker_test.rb b/test/workers/pool_transaction_check_worker_test.rb index 920245f05..4d1ca20f4 100644 --- a/test/workers/pool_transaction_check_worker_test.rb +++ b/test/workers/pool_transaction_check_worker_test.rb @@ -7,65 +7,44 @@ class PoolTransactionCheckWorkerTest < ActiveSupport::TestCase end test "should detect and mark failed tx from pending tx, for inputs" do rejected_tx_id = "0xed2049c21ffccfcd26281d60f8f77ff117adb9df9d3f8cbe5fe86e893c66d359" - tx = create :pool_transaction_entry, tx_status: "pending", - tx_hash: rejected_tx_id, - inputs: [ - { - "since": 0, - "previous_output": { - "index": 0, - "tx_hash": rejected_tx_id - } - } - ], - cell_deps: [] + block = create(:block) - cell_output = create(:cell_output, :with_full_transaction, block: block, - ckb_transaction_id: rejected_tx_id) + + cell_output = create(:cell_output, + :with_full_transaction, + block: block, + ckb_transaction_id: rejected_tx_id) cell_output.update tx_hash: rejected_tx_id, cell_index: 0, status: "dead" + tx = create :pending_transaction, tx_hash: rejected_tx_id + tx.cell_inputs.create previous_tx_hash: rejected_tx_id, previous_index: 0 VCR.use_cassette("get_rejected_transaction") do PoolTransactionCheckWorker.perform_inline PoolTransactionUpdateRejectReasonWorker.perform_async rejected_tx_id - pool_transaction_entry = PoolTransactionEntry.last + pending_transaction = CkbTransaction.find_by(tx_hash: rejected_tx_id) - assert_equal "rejected", pool_transaction_entry.tx_status - assert pool_transaction_entry.detailed_message.include?("Resolve failed Dead") + assert_equal "rejected", pending_transaction.tx_status + assert pending_transaction.detailed_message.include?("Resolve failed Dead") end end test "should detect and mark failed tx from pending tx, for cell_deps" do rejected_tx_id = "0xed2049c21ffccfcd26281d60f8f77ff117adb9df9d3f8cbe5fe86e893c66d359" - tx = create :pool_transaction_entry, tx_status: "pending", - tx_hash: rejected_tx_id, - inputs: [], - cell_deps: [ - { - "dep_type": "dep_group", - "out_point": { - "index": 0, - "tx_hash": "0xd48ebd7c52ee3793ccaeef9ab40c29281c1fc4e901fb52b286fc1af74532f1cb" - } - }, - { - "dep_type": "dep_group", - "out_point": { - "index": 0, - "tx_hash": rejected_tx_id - } - } - ] + script = create :script block = create(:block) cell_output = create(:cell_output, :with_full_transaction, block: block, ckb_transaction_id: rejected_tx_id) cell_output.update tx_hash: rejected_tx_id, cell_index: 0, status: "dead" + tx = create :pending_transaction, tx_hash: rejected_tx_id + tx.cell_dependencies.create(dep_type: :code, cell_output: cell_output, script: script) + VCR.use_cassette("get_rejected_transaction") do PoolTransactionUpdateRejectReasonWorker.perform_inline rejected_tx_id - 
pool_transaction_entry = PoolTransactionEntry.find_by_tx_hash rejected_tx_id - assert_equal "rejected", pool_transaction_entry.tx_status - assert pool_transaction_entry.detailed_message.include?("Resolve failed Dead") + pending_transaction = CkbTransaction.find_by(tx_hash: rejected_tx_id) + assert_equal "rejected", pending_transaction.tx_status + assert pending_transaction.detailed_message.include?("Resolve failed Dead") end end end diff --git a/test/workers/pool_transaction_update_reject_reason_worker_test.rb b/test/workers/pool_transaction_update_reject_reason_worker_test.rb index 3445408a7..db39dae71 100644 --- a/test/workers/pool_transaction_update_reject_reason_worker_test.rb +++ b/test/workers/pool_transaction_update_reject_reason_worker_test.rb @@ -7,13 +7,13 @@ class PoolTransactionUpdateRejectReasonWorkerTest < ActiveSupport::TestCase test "should detect and mark failed tx from pending tx, for inputs" do Sidekiq::Testing.inline! rejected_tx_id = "0xed2049c21ffccfcd26281d60f8f77ff117adb9df9d3f8cbe5fe86e893c66d359" - create :pool_transaction_entry, tx_hash: rejected_tx_id + create :pending_transaction, tx_hash: rejected_tx_id VCR.use_cassette("get_rejected_transaction") do PoolTransactionUpdateRejectReasonWorker.perform_async rejected_tx_id - pool_transaction_entry = PoolTransactionEntry.find_by tx_hash: rejected_tx_id + pending_transaction = CkbTransaction.find_by tx_hash: rejected_tx_id - assert_equal "rejected", pool_transaction_entry.tx_status - assert pool_transaction_entry.detailed_message.include?("Resolve failed Dead") + assert_equal "rejected", pending_transaction.tx_status + assert pending_transaction.detailed_message.include?("Resolve failed Dead") end end end diff --git a/test/workers/update_h24_ckb_transactions_count_on_udts_worker_test.rb b/test/workers/update_h24_ckb_transactions_count_on_udts_worker_test.rb new file mode 100644 index 000000000..7004c1a30 --- /dev/null +++ b/test/workers/update_h24_ckb_transactions_count_on_udts_worker_test.rb @@ -0,0 +1,33 @@ +require 'test_helper' + +class UpdateH24CkbTransactionsCountOnUdtsWorkerTest < ActiveJob::TestCase + + test "update udt.h24_ckb_transactions_count when udt.ckb_transactions is blank" do + udt = create(:udt) + udt.update_h24_ckb_transactions_count + assert_equal 0, udt.h24_ckb_transactions_count + end + + test "update udt.h24_ckb_transactions_count when udt.ckb_transactions is present" do + udt_one = create(:udt) + udt_two = create(:udt) + udt_three = create(:udt) + block = create(:block, :with_block_hash) + ckb_transaction_one = create(:ckb_transaction, :with_multiple_inputs_and_outputs, block: block, block_timestamp: CkbUtils.time_in_milliseconds(4.hours.ago)) + ckb_transaction_two = create(:ckb_transaction, :with_multiple_inputs_and_outputs, block: block, block_timestamp: CkbUtils.time_in_milliseconds(3.hours.ago)) + ckb_transaction_three = create(:ckb_transaction, :with_multiple_inputs_and_outputs, block: block, block_timestamp: CkbUtils.time_in_milliseconds(2.hours.ago)) + create(:udt_transaction, udt_id: udt_one.id, ckb_transaction_id: ckb_transaction_one.id) + create(:udt_transaction, udt_id: udt_one.id, ckb_transaction_id: ckb_transaction_two.id) + create(:udt_transaction, udt_id: udt_one.id, ckb_transaction_id: ckb_transaction_three.id) + create(:udt_transaction, udt_id: udt_two.id, ckb_transaction_id: ckb_transaction_two.id) + create(:udt_transaction, udt_id: udt_two.id, ckb_transaction_id: ckb_transaction_one.id) + create(:udt_transaction, udt_id: udt_three.id, ckb_transaction_id: 
ckb_transaction_three.id) + udt_one.update_h24_ckb_transactions_count + udt_two.update_h24_ckb_transactions_count + udt_three.update_h24_ckb_transactions_count + assert_equal 3, udt_one.h24_ckb_transactions_count + assert_equal 2, udt_two.h24_ckb_transactions_count + assert_equal 1, udt_three.h24_ckb_transactions_count + end + +end From dd0f77da02707d3f4b3acb27b7b7feac74c3e16a Mon Sep 17 00:00:00 2001 From: ShiningRay Date: Fri, 9 Jun 2023 18:42:35 +0800 Subject: [PATCH 2/7] add tx_committed filter for transaction list --- .../api/v1/ckb_transactions_controller.rb | 27 ++++++++++++------- .../api/v2/pending_transactions_controller.rb | 23 +++++++++------- 2 files changed, 30 insertions(+), 20 deletions(-) diff --git a/app/controllers/api/v1/ckb_transactions_controller.rb b/app/controllers/api/v1/ckb_transactions_controller.rb index 55ec69cc9..91ef5dc62 100644 --- a/app/controllers/api/v1/ckb_transactions_controller.rb +++ b/app/controllers/api/v1/ckb_transactions_controller.rb @@ -17,23 +17,30 @@ def index end render json: json else - ckb_transactions = CkbTransaction.normal.select( + ckb_transactions = CkbTransaction.tx_committed.normal.select( :id, :tx_hash, :block_number, :block_timestamp, :live_cell_changes, :capacity_involved, :updated_at ) params[:sort] ||= "id.desc" - order_by, asc_or_desc = params[:sort].split('.', 2) - order_by = case order_by - when 'height' then 'block_number' - when 'capacity' then 'capacity_involved' - else order_by - end + order_by, asc_or_desc = params[:sort].split(".", 2) + order_by = + case order_by + when "height" + "block_number" + when "capacity" + "capacity_involved" + else + order_by + end - head :not_found and return unless order_by.in? %w[id block_number block_timestamp transaction_fee capacity_involved] + head :not_found and return unless order_by.in? %w[ + id block_number block_timestamp transaction_fee + capacity_involved + ] - ckb_transactions = ckb_transactions.order(order_by => asc_or_desc) - .page(@page).per(@page_size).fast_page + ckb_transactions = ckb_transactions.order(order_by => asc_or_desc). + page(@page).per(@page_size).fast_page json = Rails.cache.realize(ckb_transactions.cache_key, diff --git a/app/controllers/api/v2/pending_transactions_controller.rb b/app/controllers/api/v2/pending_transactions_controller.rb index 0ee3650d9..646c471b6 100644 --- a/app/controllers/api/v2/pending_transactions_controller.rb +++ b/app/controllers/api/v2/pending_transactions_controller.rb @@ -5,19 +5,22 @@ def index pending_transactions = CkbTransaction.tx_pending params[:sort] ||= "id.desc" - order_by, asc_or_desc = params[:sort].split('.', 2) - order_by = case order_by - when 'time' then 'created_at' - when 'fee' then 'transaction_fee' - # current we don't support this in DB - #when 'capacity' then 'capacity_involved' - else order_by - end + order_by, asc_or_desc = params[:sort].split(".", 2) + order_by = + case order_by + when "time" + "created_at" + when "fee" + "transaction_fee" + # current we don't support this in DB + # when 'capacity' then 'capacity_involved' + else order_by + end head :not_found and return unless order_by.in? %w[id created_at transaction_fee] - pending_transactions = pending_transactions.order(order_by => asc_or_desc) - .page(@page).per(@page_size).fast_page + pending_transactions = pending_transactions.order(order_by => asc_or_desc). 
+ page(@page).per(@page_size).fast_page render json: { data: pending_transactions.map do |tx| From e4dad7c724be4fce53990fa5fb1642a602608955 Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?=E4=BB=A3=E7=A0=81=E4=B9=8B=E5=8A=9B?= <9770+ShiningRay@users.noreply.github.com> Date: Sun, 11 Jun 2023 23:13:24 +0800 Subject: [PATCH 3/7] Optimize block query performance (#1307) --- app/controllers/api/v1/blocks_controller.rb | 80 +++++++++++++-------- app/models/block.rb | 2 +- lib/scheduler.rb | 8 ++- 3 files changed, 58 insertions(+), 32 deletions(-) diff --git a/app/controllers/api/v1/blocks_controller.rb b/app/controllers/api/v1/blocks_controller.rb index 0a42d0c4d..983d97736 100644 --- a/app/controllers/api/v1/blocks_controller.rb +++ b/app/controllers/api/v1/blocks_controller.rb @@ -1,4 +1,4 @@ -require 'csv' +require "csv" module Api module V1 class BlocksController < ApplicationController @@ -7,29 +7,37 @@ class BlocksController < ApplicationController def index if from_home_page? - blocks = Block.recent.select(:id, :miner_hash, :number, :timestamp, :reward, :ckb_transactions_count, :live_cell_changes, :updated_at).limit(ENV["HOMEPAGE_BLOCK_RECORDS_COUNT"].to_i) + blocks = Block.recent.select(:id, :miner_hash, :number, :timestamp, :reward, :ckb_transactions_count, + :live_cell_changes, :updated_at).limit(ENV["HOMEPAGE_BLOCK_RECORDS_COUNT"].to_i) json = Rails.cache.realize(blocks.cache_key, version: blocks.cache_version, race_condition_ttl: 3.seconds) do BlockListSerializer.new(blocks).serialized_json end else - blocks = Block.select(:id, :miner_hash, :number, :timestamp, :reward, :ckb_transactions_count, :live_cell_changes, :updated_at) - params[:sort] ||= "timestamp.desc" - - order_by, asc_or_desc = params[:sort].split('.', 2) - order_by = case order_by - when 'height' then 'number' - when 'transactions' then 'ckb_transactions_count' - else order_by - end + blocks = Block.select(:id, :miner_hash, :number, :timestamp, :reward, :ckb_transactions_count, + :live_cell_changes, :updated_at) + params[:sort] ||= "number.desc" + + order_by, asc_or_desc = params[:sort].split(".", 2) + order_by = + case order_by + when "height" + "number" + when "transactions" + "ckb_transactions_count" + else + order_by + end head :not_found and return unless order_by.in? %w[number reward timestamp ckb_transactions_count] - blocks = blocks.order(order_by => asc_or_desc).page(@page).per(@page_size).fast_page + + blocks = blocks.order(order_by => asc_or_desc).page(@page).per(@page_size) json = Rails.cache.realize(blocks.cache_key, version: blocks.cache_version, race_condition_ttl: 3.seconds) do records_counter = RecordCounters::Blocks.new - options = FastJsonapi::PaginationMetaGenerator.new(request: request, records: blocks, page: @page, page_size: @page_size, records_counter: records_counter).call + options = FastJsonapi::PaginationMetaGenerator.new(request: request, records: blocks, page: @page, + page_size: @page_size, records_counter: records_counter).call BlockListSerializer.new(blocks, options).serialized_json end end @@ -44,25 +52,39 @@ def show end def download_csv - blocks = Block.select(:id, :miner_hash, :number, :timestamp, :reward, :ckb_transactions_count, :live_cell_changes, :updated_at) - - blocks = blocks.where('timestamp >= ?', DateTime.strptime(params[:start_date], '%Y-%m-%d').to_time.to_i * 1000 ) if params[:start_date].present? - blocks = blocks.where('timestamp <= ?', DateTime.strptime(params[:end_date], '%Y-%m-%d').to_time.to_i * 1000 ) if params[:end_date].present? 
- blocks = blocks.where('number >= ?', params[:start_number]) if params[:start_number].present? - blocks = blocks.where('number <= ?', params[:end_number]) if params[:end_number].present? - - blocks = blocks.order('number desc').limit(5000) + blocks = Block.select(:id, :miner_hash, :number, :timestamp, :reward, :ckb_transactions_count, + :live_cell_changes, :updated_at) - file = CSV.generate do |csv| - csv << ["Blockno", "Transactions", "UnixTimestamp", "Reward(CKB)", "Miner", "date(UTC)"] - blocks.find_each.with_index do |block, index| - row = [block.number, block.ckb_transactions_count, (block.timestamp / 1000), block.reward, block.miner_hash, - Time.at((block.timestamp / 1000).to_i).in_time_zone('UTC').strftime('%Y-%m-%d %H:%M:%S') ] - csv << row - end + if params[:start_date].present? + blocks = blocks.where("timestamp >= ?", + DateTime.strptime(params[:start_date], + "%Y-%m-%d").to_time.to_i * 1000) + end + if params[:end_date].present? + blocks = blocks.where("timestamp <= ?", + DateTime.strptime(params[:end_date], + "%Y-%m-%d").to_time.to_i * 1000) end - send_data file, :type => 'text/csv; charset=utf-8; header=present', :disposition => "attachment;filename=blocks.csv" + blocks = blocks.where("number >= ?", params[:start_number]) if params[:start_number].present? + blocks = blocks.where("number <= ?", params[:end_number]) if params[:end_number].present? + + blocks = blocks.order("number desc").limit(5000) + + file = + CSV.generate do |csv| + csv << ["Blockno", "Transactions", "UnixTimestamp", "Reward(CKB)", "Miner", "date(UTC)"] + blocks.find_each.with_index do |block, _index| + row = [ + block.number, block.ckb_transactions_count, (block.timestamp / 1000), block.reward, block.miner_hash, + Time.at((block.timestamp / 1000).to_i).in_time_zone("UTC").strftime("%Y-%m-%d %H:%M:%S") + ] + csv << row + end + end + send_data file, type: "text/csv; charset=utf-8; header=present", + disposition: "attachment;filename=blocks.csv" end + private def from_home_page? diff --git a/app/models/block.rb b/app/models/block.rb index 079bcbc22..0beefadc4 100644 --- a/app/models/block.rb +++ b/app/models/block.rb @@ -51,7 +51,7 @@ class Block < ApplicationRecord attribute :uncle_block_hashes, :ckb_array_hash, hash_length: Settings.default_hash_length attribute :proposals, :ckb_array_hash, hash_length: Settings.default_short_hash_length - scope :recent, -> { order("timestamp desc nulls last") } + scope :recent, -> { order("number" => "desc") } scope :created_after, ->(timestamp) { where("timestamp >= ?", timestamp) } scope :created_before, ->(timestamp) { where("timestamp <= ?", timestamp) } scope :created_between, ->(from, to) { where(timestamp: from..to) } diff --git a/lib/scheduler.rb b/lib/scheduler.rb index 22bb52562..b9f765911 100644 --- a/lib/scheduler.rb +++ b/lib/scheduler.rb @@ -11,10 +11,14 @@ require "rufus-scheduler" s = Rufus::Scheduler.singleton -def s.around_trigger(job) +def s.around_trigger(job, &block) t = Time.now puts "Starting job #{job.id} at #{Time.now}" - yield + Rails.application.executor.wrap do + ActiveRecord::Base.connection_pool.with_connection do + ActiveRecord::Base.cache(&block) + end + end puts "job #{job.id} finished in #{Time.now - t} seconds." 
end From d0d160df46231899f15a5cd284fee50d45438067 Mon Sep 17 00:00:00 2001 From: zhangnan Date: Tue, 13 Jun 2023 14:10:51 +0800 Subject: [PATCH 4/7] fix: cell output cota info --- app/models/cell_output.rb | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/app/models/cell_output.rb b/app/models/cell_output.rb index 926f875ad..834f0b32a 100644 --- a/app/models/cell_output.rb +++ b/app/models/cell_output.rb @@ -323,7 +323,7 @@ def cota_registry_info code_hash = CkbSync::Api.instance.cota_registry_code_hash CkbUtils.hash_value_to_s( symbol: '', amount: self.udt_amount, decimal: '', type_hash: self.type_hash, - published: 'true', display_name: '', uan: '', code_hash: self.code_hash) + published: 'true', display_name: '', uan: '', code_hash: code_hash) end def cota_regular_info @@ -331,7 +331,7 @@ def cota_regular_info code_hash = CkbSync::Api.instance.cota_regular_code_hash CkbUtils.hash_value_to_s( symbol: '', amount: self.udt_amount, decimal: '', type_hash: self.type_hash, - published: 'true', display_name: '', uan: '', code_hash: self.code_hash) + published: 'true', display_name: '', uan: '', code_hash: code_hash) end end From 89cfe25e1fad1fcfd0fa9a7ad702c40afe12f624 Mon Sep 17 00:00:00 2001 From: ShiningRay Date: Sun, 18 Jun 2023 20:05:53 +0800 Subject: [PATCH 5/7] remove duplicated ckb transactions --- app/controllers/api/v1/ckb_transactions_controller.rb | 4 +++- app/models/ckb_sync/new_node_data_processor.rb | 8 +++++--- app/models/ckb_transaction.rb | 6 +++--- app/workers/pool_transaction_check_worker.rb | 4 ++++ 4 files changed, 15 insertions(+), 7 deletions(-) diff --git a/app/controllers/api/v1/ckb_transactions_controller.rb b/app/controllers/api/v1/ckb_transactions_controller.rb index 91ef5dc62..77f4d6ee4 100644 --- a/app/controllers/api/v1/ckb_transactions_controller.rb +++ b/app/controllers/api/v1/ckb_transactions_controller.rb @@ -103,7 +103,7 @@ def query end def show - ckb_transaction = CkbTransaction.cached_find(params[:id]) + ckb_transaction = CkbTransaction.where(tx_hash: params[:id]).order(tx_status: :desc).first raise Api::V1::Exceptions::CkbTransactionNotFoundError if ckb_transaction.blank? @@ -111,6 +111,8 @@ def show PoolTransactionUpdateRejectReasonWorker.perform_async(ckb_transaction.tx_hash) end + expires_in 10.seconds, public: true, must_revalidate: true + render json: CkbTransactionSerializer.new(ckb_transaction) end diff --git a/app/models/ckb_sync/new_node_data_processor.rb b/app/models/ckb_sync/new_node_data_processor.rb index 4bfa7d69b..3321e261f 100644 --- a/app/models/ckb_sync/new_node_data_processor.rb +++ b/app/models/ckb_sync/new_node_data_processor.rb @@ -778,12 +778,14 @@ def build_cells_and_locks!( prepare_script_ids(outputs) build_cell_outputs!(node_block, outputs, ckb_txs, local_block, cell_outputs_attributes, output_capacities, tags, udt_address_ids, dao_address_ids, contained_udt_ids, contained_addr_ids, addrs_changes) - - CellOutput.insert_all!(cell_outputs_attributes) if cell_outputs_attributes.present? + if cell_outputs_attributes.present? + CellOutput.create_or_find_by! 
cell_outputs_attributes + end # prev_outputs = prepare_previous_outputs(inputs) prev_outputs = nil build_cell_inputs(inputs, ckb_txs, local_block.id, cell_inputs_attributes, prev_cell_outputs_attributes, - input_capacities, tags, udt_address_ids, dao_address_ids, contained_udt_ids, contained_addr_ids, prev_outputs, addrs_changes) + input_capacities, tags, udt_address_ids, dao_address_ids, contained_udt_ids, contained_addr_ids, + prev_outputs, addrs_changes) CellInput.insert_all!(cell_inputs_attributes) CellOutput.upsert_all(prev_cell_outputs_attributes) if prev_cell_outputs_attributes.present? diff --git a/app/models/ckb_transaction.rb b/app/models/ckb_transaction.rb index 29386d8a5..8f616cb9f 100644 --- a/app/models/ckb_transaction.rb +++ b/app/models/ckb_transaction.rb @@ -21,11 +21,11 @@ class CkbTransaction < ApplicationRecord accepts_nested_attributes_for :cell_outputs has_many :inputs, class_name: "CellOutput", inverse_of: "consumed_by", foreign_key: "consumed_by_id" has_many :outputs, class_name: "CellOutput" - has_many :dao_events - has_many :script_transactions + has_many :dao_events, dependent: :delete_all + has_many :script_transactions, dependent: :delete_all has_many :scripts, through: :script_transactions - has_many :referring_cells + has_many :referring_cells, dependent: :delete_all has_many :token_transfers, foreign_key: :transaction_id, dependent: :delete_all, inverse_of: :ckb_transaction has_many :cell_dependencies, dependent: :delete_all has_many :header_dependencies, dependent: :delete_all diff --git a/app/workers/pool_transaction_check_worker.rb b/app/workers/pool_transaction_check_worker.rb index 71a668ac4..39171738e 100644 --- a/app/workers/pool_transaction_check_worker.rb +++ b/app/workers/pool_transaction_check_worker.rb @@ -14,6 +14,10 @@ def perform # Only request the CKB Node for reject reason after we find the transaction is rejeceted. CkbTransaction.tx_pending. where(block_timestamp: ..latest_block.timestamp).includes(:cell_dependencies, cell_inputs: :previous_cell_output).find_each do |tx| + if CkbTransaction.tx_committed.exists?(tx_hash: tx.tx_hash) + tx.destroy! + next + end is_rejected = false rejected_transaction = nil # check if any input is used by other transactions From e56a8e1e1ec1acc200f89e2a1abbd752df76d1dc Mon Sep 17 00:00:00 2001 From: zhangnan Date: Mon, 19 Jun 2023 19:24:31 +0800 Subject: [PATCH 6/7] fix: block transactions sort order by id asc --- .../api/v1/block_transactions_controller.rb | 2 +- .../api/v1/block_transactions_controller_test.rb | 10 +++++----- 2 files changed, 6 insertions(+), 6 deletions(-) diff --git a/app/controllers/api/v1/block_transactions_controller.rb b/app/controllers/api/v1/block_transactions_controller.rb index 6c1162a9b..2c3ffc2b3 100644 --- a/app/controllers/api/v1/block_transactions_controller.rb +++ b/app/controllers/api/v1/block_transactions_controller.rb @@ -9,8 +9,8 @@ def show temp_transactions = block.ckb_transactions .select(:id, :tx_hash, :block_id, :block_number, :block_timestamp, :is_cellbase, :updated_at) .where(block_timestamp: block.timestamp) + .order(:id) temp_transactions = temp_transactions.where(tx_hash: params[:tx_hash]) if params[:tx_hash].present? 
- temp_transactions = temp_transactions.order(id: :desc) @pagy, ckb_transactions = pagy( temp_transactions, diff --git a/test/controllers/api/v1/block_transactions_controller_test.rb b/test/controllers/api/v1/block_transactions_controller_test.rb index f06d92399..1e7c24fd9 100644 --- a/test/controllers/api/v1/block_transactions_controller_test.rb +++ b/test/controllers/api/v1/block_transactions_controller_test.rb @@ -80,7 +80,7 @@ class BlockTransactionsControllerTest < ActionDispatch::IntegrationTest valid_get api_v1_block_transaction_url(block.block_hash) - ckb_transactions = block.ckb_transactions.order('id desc').page(page).per(page_size) + ckb_transactions = block.ckb_transactions.order(:id).page(page).per(page_size) records_counter = RecordCounters::BlockTransactions.new(block) options = FastJsonapi::PaginationMetaGenerator.new(request: request, records: ckb_transactions, page: page, page_size: page_size, records_counter: records_counter).call @@ -157,7 +157,7 @@ class BlockTransactionsControllerTest < ActionDispatch::IntegrationTest page = 2 page_size = 10 block = create(:block, :with_ckb_transactions, transactions_count: 30) - block_ckb_transactions = block.ckb_transactions.order('id desc').page(page).per(page_size) + block_ckb_transactions = block.ckb_transactions.order(:id).page(page).per(page_size) valid_get api_v1_block_transaction_url(block.block_hash), params: { page: page } @@ -173,7 +173,7 @@ class BlockTransactionsControllerTest < ActionDispatch::IntegrationTest page = 1 page_size = 12 block = create(:block, :with_ckb_transactions, transactions_count: 15) - block_ckb_transactions = block.ckb_transactions.order('id desc').page(page).per(page_size) + block_ckb_transactions = block.ckb_transactions.order(:id).page(page).per(page_size) valid_get api_v1_block_transaction_url(block.block_hash), params: { page_size: page_size } @@ -190,7 +190,7 @@ class BlockTransactionsControllerTest < ActionDispatch::IntegrationTest page = 2 page_size = 5 block = create(:block, :with_ckb_transactions, transactions_count: 30) - block_ckb_transactions = block.ckb_transactions.order('id desc').page(page).per(page_size) + block_ckb_transactions = block.ckb_transactions.order(:id).page(page).per(page_size) valid_get api_v1_block_transaction_url(block.block_hash), params: { page: page, page_size: page_size } @@ -205,7 +205,7 @@ class BlockTransactionsControllerTest < ActionDispatch::IntegrationTest page = 2 page_size = 5 block = create(:block) - block_ckb_transactions = block.ckb_transactions.order('id desc').page(page).per(page_size) + block_ckb_transactions = block.ckb_transactions.order(:id).page(page).per(page_size) valid_get api_v1_block_transaction_url(block.block_hash), params: { page: page, page_size: page_size } From 0558b78aeef5db34e5cb23fad9db6365e0cdc61d Mon Sep 17 00:00:00 2001 From: Chen Yu Date: Mon, 3 Jul 2023 16:37:44 +0800 Subject: [PATCH 7/7] Update app/models/cell_output.rb --- app/models/cell_output.rb | 4 ---- 1 file changed, 4 deletions(-) diff --git a/app/models/cell_output.rb b/app/models/cell_output.rb index 728b5d397..8a3d79bf8 100644 --- a/app/models/cell_output.rb +++ b/app/models/cell_output.rb @@ -120,10 +120,6 @@ def dao self[:dao] || block.dao end - def dao - self[:dao] || block.dao - end - # find cell output according to the out point( tx_hash and output index ) # @param [String] tx_hash # @param [Integer] index
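Several hunks in this series lean on the CkbTransaction.tx_pending and
CkbTransaction.tx_committed scopes and on statuses such as tx_status: :rejected,
but the declaration that provides them is not part of this excerpt. A minimal
sketch of the assumed shape is below; the status list, its order, and the _prefix
option are illustrative assumptions, not the actual definition.

class CkbTransaction < ApplicationRecord
  # An enum with a :tx prefix would yield the tx_pending / tx_committed scopes
  # used by the controllers and workers above, plus matching predicates and
  # setters. The relative order of the values matters for the
  # order(tx_status: :desc) lookup in the show action, so treat this ordering
  # as illustrative only.
  enum tx_status: %i[pending proposed committed rejected], _prefix: :tx
end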