8000 Add indexes for groupless address columns by tdroxler · Pull Request #617 · alephium/explorer-backend · GitHub
[go: up one dir, main page]
More Web Proxy on the site http://driver.im/
Skip to content

Add indexes for groupless address columns #617

New issue

Have a question about this project? Sign up for a free GitHub account to open an issue and contact its maintainers and the community.

By clicking “Sign up for GitHub”, you agree to our terms of service and privacy statement. We’ll occasionally send you account related emails.

Already on GitHub? Sign in to your account

Merged
merged 6 commits into from
May 22, 2025
Merged
Show file tree
Hide file tree
Changes from all commits
Commits
File filter

Filter by extension

Filter by extension

Conversations
Failed to load comments.
Loading
Jump to
Jump to file
Failed to load files.
Loading
Diff view
Diff view
Original file line number Diff line number Diff line change
Expand Up @@ -135,8 +135,11 @@ object DBInitializer extends StrictLogging {
): Future[Unit] = {
logger.info("Create Background Indexes")
run(for {
_ <- OutputSchema.createNonSpentOutputCoveringIndex()
_ <- InputSchema.createOutupRefAddressMainChainTimestampIndex()
_ <- OutputSchema.createConcurrentIndexes()
_ <- InputSchema.createConcurrentIndexes()
_ <- TokenOutputSchema.createConcurrentIndexes()
_ <- TransactionPerAddressSchema.createConcurrentIndexes()
_ <- TokenPerAddressSchema.createConcurrentIndexes()
} yield {
logger.info("Background Indexes created")
})
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -39,7 +39,7 @@ import org.alephium.util.TimeStamp
@SuppressWarnings(Array("org.wartremover.warts.AnyVal"))
object Migrations extends StrictLogging {

// Latest schema migration version; bump this whenever a new migrationN is
// appended to the `migrations` sequence below. (The stale duplicate line for
// version 6 — diff-rendering residue — is removed; version 7 is current.)
val latestVersion: MigrationVersion = MigrationVersion(7)

def migration1(implicit ec: ExecutionContext): DBActionAll[Unit] = {
// We retrigger the download of fungible and non-fungible tokens' metadata that have sub-category
Expand Down Expand Up @@ -173,6 +173,21 @@ object Migrations extends StrictLogging {
}
}

/*
* These indexes are re-created concurrently, now including the `groupless_address` column
*/
/*
 * Migration to schema version 7: prepares groupless-address support.
 * The dropped index is re-created concurrently later (renamed to
 * `non_spent_output_groupless_covering_include_idx`) by the background
 * index creation — see the createConcurrentIndexes methods in the schemas.
 */
def migration7(implicit ec: ExecutionContext): DBActionAll[Unit] = {
for {
// Renamed and modified to `non_spent_output_groupless_covering_include_idx`
_ <- sqlu"DROP INDEX IF EXISTS non_spent_output_group_covering_include_idx"
// Add the nullable `groupless_address` column to the mempool tables.
_ <- addAddressLikeColumn("uoutputs", "groupless_address")
_ <- addAddressLikeColumn("uinputs", "groupless_address")
} yield {
()
}

}

private def migrations(implicit
explorerConfig: ExplorerConfig,
ec: ExecutionContext
Expand All @@ -182,7 +197,8 @@ object Migrations extends StrictLogging {
migration3,
migration4,
migration5,
migration6
migration6,
migration7
)

def backgroundCoinbaseMigration()(implicit
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -19,6 +19,7 @@ package org.alephium.explorer.persistence.model
import scala.collection.immutable.ArraySeq

import org.alephium.explorer.api.model.{AssetOutput, ContractOutput, MempoolTransaction}
import org.alephium.explorer.util.AddressUtil
import org.alephium.protocol.model.{GroupIndex, TransactionId}
import org.alephium.util.{TimeStamp, U256}

Expand Down Expand Up @@ -51,6 +52,7 @@ object MempoolTransactionEntity {
input.outputRef.key,
input.unlockScript,
input.address,
input.address.flatMap(AddressUtil.convertToGrouplessAddress),
order
)
},
Expand All @@ -69,6 +71,7 @@ object MempoolTransactionEntity {
output.key,
output.attoAlphAmount,
output.address,
AddressUtil.convertToGrouplessAddress(output.address),
output.tokens,
lockTime,
message,
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -20,7 +20,7 @@ import akka.util.ByteString

import org.alephium.explorer.api.model.{Input, OutputRef}
import org.alephium.protocol.Hash
import org.alephium.protocol.model.Address
import org.alephium.protocol.model.{Address, AddressLike}
import org.alephium.protocol.model.TransactionId

final case class UInputEntity(
Expand All @@ -29,6 +29,7 @@ final case class UInputEntity(
outputRefKey: Hash,
unlockScript: Option[ByteString],
address: Option[Address],
grouplessAddress: Option[AddressLike],
uinputOrder: Int
) {
val toApi: Input = Input(
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -22,7 +22,7 @@ import akka.util.ByteString

import org.alephium.explorer.api.model.{AssetOutput, Token}
import org.alephium.protocol.Hash
import org.alephium.protocol.model.{Address, TransactionId}
import org.alephium.protocol.model.{Address, AddressLike, TransactionId}
import org.alephium.util.{TimeStamp, U256}

final case class UOutputEntity(
Expand All @@ -31,6 +31,7 @@ final case class UOutputEntity(
key: Hash,
amount: U256,
address: Address,
grouplessAddress: Option[AddressLike],
tokens: Option[ArraySeq[Token]],
lockTime: Option[TimeStamp],
message: Option[ByteString],
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -111,6 +111,7 @@ object MempoolQueries {
key,
amount,
address,
groupless_address,
tokens,
lock_time,
message,
Expand Down Expand Up @@ -145,6 +146,7 @@ object MempoolQueries {
output_ref_key,
unlock_script,
address,
groupless_address,
uinput_order
FROM uinputs
WHERE tx_hash IN $params
Expand Down Expand Up @@ -185,6 +187,7 @@ object MempoolQueries {
key,
amount,
address,
groupless_address,
tokens,
lock_time,
message,
Expand All @@ -202,6 +205,7 @@ object MempoolQueries {
output_ref_key,
unlock_script,
address,
groupless_address,
uinput_order
FROM uinputs
WHERE tx_hash = $hash
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -171,8 +171,8 @@ object TransactionQueries extends StrictLogging {
FROM transaction_per_addresses
WHERE main_chain = true
AND #${addressColumn(address)} = $address
ORDER BY block_timestamp DESC, tx_order
"""
.concat(sql"""ORDER BY block_timestamp DESC, tx_order """)
.paginate(pagination)
.asAS[TxByAddressQR]
}
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -313,6 +313,7 @@ object CustomGetResult {
outputRefKey = result.<<,
unlockScript = result.<<?,
address = result.<<?,
grouplessAddress = result.<<?,
uinputOrder = result.<<
)

Expand All @@ -324,6 +325,7 @@ object CustomGetResult {
key = result.<<,
amount = result.<<,
address = result.<<,
grouplessAddress = result.<<?,
tokens = result.<<?,
lockTime = result.<<?,
message = result.<<?,
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -17,6 +17,7 @@
package org.alephium.explorer.persistence.schema

import scala.collection.immutable.ArraySeq
import scala.concurrent.ExecutionContext

import akka.util.ByteString
import slick.jdbc.PostgresProfile.api._
Expand Down Expand Up @@ -90,9 +91,23 @@ object InputSchema extends SchemaMainChain[InputEntity]("inputs") {
ON #${name} (output_ref_tx_hash, output_ref_address, output_ref_amount, output_ref_tokens)
WHERE output_ref_amount IS NULL"""

// Builds, in sequence, the concurrently-created indexes for the `inputs` table.
// Invoked from the background index creation at startup (DBInitializer).
def createConcurrentIndexes()(implicit ec: ExecutionContext): DBActionW[Unit] =
for {
_ <- createOutupRefAddressMainChainTimestampIndex()
_ <- createOutupRefGrouplessAddressMainChainTimestampIndex()
} yield ()

// Partial index speeding up input lookups by referenced output address on the
// main chain, ordered by timestamp.
// Fix: the diff rendering fused the removed single-line statement with its
// reformatted replacement, leaving two CREATE INDEX statements in one sqlu;
// only the reformatted statement is kept.
// NOTE(review): "Outup" in the method name is a typo for "Output"; kept as-is
// because callers (createConcurrentIndexes, DBInitializer) use this name.
def createOutupRefAddressMainChainTimestampIndex(): DBActionW[Int] =
  sqlu"""
  CREATE INDEX CONCURRENTLY IF NOT EXISTS inputs_ref_address_main_chain_timestamp_idx
  ON inputs (output_ref_address, main_chain, block_timestamp);
  """

// Partial index on (output_ref_groupless_address, main_chain, block_timestamp),
// restricted to rows where the groupless address is present.
// NOTE(review): "Outup" in the method name is a typo for "Output"; kept for
// consistency with the sibling method above.
def createOutupRefGrouplessAddressMainChainTimestampIndex(): DBActionW[Int] =
sqlu"""
CREATE INDEX CONCURRENTLY IF NOT EXISTS inputs_ref_groupless_address_main_chain_timestamp_idx
ON inputs (output_ref_groupless_address, main_chain, block_timestamp)
WHERE output_ref_groupless_address IS NOT NULL;
"""

val table: TableQuery[Inputs] = TableQuery[Inputs]
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -94,26 +94,36 @@ object OutputSchema extends SchemaMainChain[OutputEntity]("outputs") {
// Unique partial index over outputs not yet marked spent-finalized,
// keyed by (address, main_chain, key, block_hash).
def createNonSpentIndex(): DBActionW[Int] =
sqlu"create unique index if not exists non_spent_output_idx on #${name} (address, main_chain, key, block_hash) where spent_finalized IS NULL;"

// Builds, in sequence, the concurrently-created indexes for the `outputs` table.
// Invoked from the background index creation at startup (DBInitializer).
// Fix: the diff rendering left the removed signature
// (`createNonSpentOutputCoveringIndex`) and removed call lines fused onto the
// new definition; only the new definition and its three calls are kept.
def createConcurrentIndexes()(implicit ec: ExecutionContext): DBActionW[Unit] =
  for {
    _ <- createNonSpentGrouplessIndex()
    _ <- createSpentOutputCoveringIndex()
    _ <- createNonSpentOutputGrouplessCoveringIndex()
  } yield ()

// Unique partial index for non-spent outputs keyed by groupless address,
// restricted to rows where `groupless_address` is present.
// Fixes: (1) removed-signature residue (`createBaseNonSpentOutputCoveringIndex`)
// fused onto this definition by the diff rendering; (2) mis-encoded characters
// in the SQL ("groupless_ad CEB7 dress") restored to `groupless_address`.
def createNonSpentGrouplessIndex(): DBActionW[Int] =
  sqlu"""
  CREATE UNIQUE INDEX CONCURRENTLY IF NOT EXISTS non_spent_output_groupless_idx
  ON #${name} (groupless_address, main_chain, key, block_hash)
  WHERE spent_finalized IS NULL
  AND groupless_address IS NOT NULL;
  """

// Covering partial index for non-spent, main-chain outputs keyed by address;
// INCLUDE (amount, lock_time) allows index-only scans for balance queries.
// NOTE(review): the method name says "Spent" but the predicate is
// `spent_finalized IS NULL` (i.e. NON-spent) and the index is named
// non_spent_output_covering_include_idx — the name looks mistyped; confirm.
def createSpentOutputCoveringIndex(): DBActionW[Int] =
sqlu"""
CREATE INDEX CONCURRENTLY IF NOT EXISTS non_spent_output_covering_include_idx
ON #${name} (address, main_chain, spent_finalized, key)
INCLUDE (amount, lock_time)
WHERE spent_finalized IS NULL AND main_chain = true;
"""

// Covering partial index for non-spent, main-chain outputs keyed by
// groupless address; INCLUDE (amount, lock_time) enables index-only scans.
// Replaces the old `non_spent_output_group_covering_include_idx` dropped in
// migration7 (which also led (address, ...)).
// Fix: the diff rendering fused the removed definition (old name, old column
// list) with the added one; only the new definition is kept.
def createNonSpentOutputGrouplessCoveringIndex(): DBActionW[Int] =
  sqlu"""
  CREATE INDEX CONCURRENTLY IF NOT EXISTS non_spent_output_groupless_covering_include_idx
  ON outputs (groupless_address, main_chain, spent_finalized, key)
  INCLUDE (amount, lock_time)
  WHERE spent_finalized IS NULL AND groupless_address IS NOT NULL AND main_chain = true;
  """

val table: TableQuery[Outputs] = TableQuery[Outputs]
}
Original file line number Diff line number Diff line change
Expand Up @@ -16,6 +16,8 @@

package org.alephium.explorer.persistence.schema

import scala.concurrent.ExecutionContext

import akka.util.ByteString
import slick.jdbc.PostgresProfile.api._
import slick.lifted.{Index, PrimaryKey, ProvenShape}
Expand Down Expand Up @@ -84,8 +86,26 @@ object TokenOutputSchema extends SchemaMainChain[TokenOutputEntity]("token_outpu
.<>((TokenOutputEntity.apply _).tupled, TokenOutputEntity.unapply)
}

// Builds, in sequence, the non-spent indexes for the `token_outputs` table.
// Invoked from the background index creation at startup (DBInitializer).
// Fix: the diff rendering left the removed `val createNonSpentIndex` (old
// single-line form) fused above this definition, which would clash with the
// new `def createNonSpentIndex()` below; only the new definition is kept.
def createConcurrentIndexes()(implicit ec: ExecutionContext): DBActionW[Unit] =
  for {
    _ <- createNonSpentIndex()
    _ <- createNonSpentGrouplessIndex()
  } yield ()

// Unique partial index for non-spent token outputs, keyed by
// (token, address, main_chain, key, block_hash).
// NOTE(review): unlike the groupless sibling below, this statement does not use
// CONCURRENTLY even though it is invoked from createConcurrentIndexes —
// confirm whether that is intentional.
def createNonSpentIndex(): DBActionW[Int] =
sqlu"""
CREATE UNIQUE INDEX IF NOT EXISTS token_non_spent_output_idx
ON #${name} (token, address, main_chain, key, block_hash)
WHERE spent_finalized IS NULL;
"""

// Unique partial index for non-spent token outputs keyed by groupless address,
// restricted to rows where `groupless_address` is present.
def createNonSpentGrouplessIndex(): DBActionW[Int] =
sqlu"""
CREATE UNIQUE INDEX CONCURRENTLY IF NOT EXISTS token_non_spent_output_groupless_idx
ON #${name} (token, groupless_address, main_chain, key, block_hash)
WHERE spent_finalized IS NULL
AND groupless_address IS NOT NULL;
"""

val table: TableQuery[TokenOutputs] = TableQuery[TokenOutputs]
}
Original file line number Diff line number Diff line change
Expand Up @@ -16,9 +16,12 @@

package org.alephium.explorer.persistence.schema

import scala.concurrent.ExecutionContext

import slick.jdbc.PostgresProfile.api._
import slick.lifted.{Index, PrimaryKey, ProvenShape}

import org.alephium.explorer.persistence.DBActionW
import org.alephium.explorer.persistence.model.TokenTxPerAddressEntity
import org.alephium.explorer.persistence.schema.CustomJdbcTypes._
import org.alephium.protocol.model.{Address, AddressLike, BlockHash, TokenId, TransactionId}
Expand Down Expand Up @@ -57,5 +60,17 @@ object TokenPerAddressSchema
CommonIndex.timestampIndex(this)
)

// Builds the concurrently-created indexes for `token_tx_per_addresses`.
// Invoked from the background index creation at startup (DBInitializer).
def createConcurrentIndexes()(implicit ec: ExecutionContext): DBActionW[Unit] =
for {
_ <- createTokenGrouplessAddressIndex()
} yield ()

// Partial index on (token, groupless_address) for rows with a groupless address.
// Fix: added CONCURRENTLY and IF NOT EXISTS — every sibling index method in
// this PR uses them, this one is invoked from createConcurrentIndexes, and
// without IF NOT EXISTS a re-run of the background index creation would fail
// on the already-existing index.
def createTokenGrouplessAddressIndex(): DBActionW[Int] =
  sqlu"""
  CREATE INDEX CONCURRENTLY IF NOT EXISTS token_tx_per_address_token_groupless_address_idx
  ON token_tx_per_addresses (token, groupless_address)
  WHERE groupless_address IS NOT NULL;
  """

val table: TableQuery[TokenPerAddresses] = TableQuery[TokenPerAddresses]
}
Original file line number Diff line number Diff line change
Expand Up @@ -16,9 +16,12 @@

package org.alephium.explorer.persistence.schema

import scala.concurrent.ExecutionContext

import slick.jdbc.PostgresProfile.api._
import slick.lifted.{Index, PrimaryKey, ProvenShape}

import org.alephium.explorer.persistence.DBActionW
import org.alephium.explorer.persistence.model.TransactionPerAddressEntity
import org.alephium.explorer.persistence.schema.CustomJdbcTypes._
import org.alephium.protocol.model.{Address, AddressLike, BlockHash, TransactionId}
Expand Down Expand Up @@ -58,5 +61,17 @@ object TransactionPerAddressSchema
CommonIndex.timestampIndex(this)
)

// Builds the concurrently-created indexes for `transaction_per_addresses`.
// Invoked from the background index creation at startup (DBInitializer).
def createConcurrentIndexes()(implicit ec: ExecutionContext): DBActionW[Unit] =
for {
_ <- createGrouplessAddressTimestampIndex()
} yield ()

// Partial index on (groupless_address, block_timestamp), restricted to rows
// where the groupless address is present; supports per-address tx history
// queries ordered by time.
def createGrouplessAddressTimestampIndex(): DBActionW[Int] =
sqlu"""
CREATE INDEX CONCURRENTLY IF NOT EXISTS txs_per_address_groupless_address_timestamp_idx
ON transaction_per_addresses (groupless_address, block_timestamp)
WHERE groupless_address IS NOT NULL;
"""

val table: TableQuery[TransactionPerAddresses] = TableQuery[TransactionPerAddresses]
}
Original file line number Diff line number Diff line change
Expand Up @@ -23,7 +23,7 @@ import slick.lifted.{Index, PrimaryKey, ProvenShape}
import org.alephium.explorer.persistence.model.UInputEntity
import org.alephium.explorer.persistence.schema.CustomJdbcTypes._
import org.alephium.protocol.Hash
import org.alephium.protocol.model.{Address, TransactionId}
import org.alephium.protocol.model.{Address, AddressLike, TransactionId}

object UInputSchema extends Schema[UInputEntity]("uinputs") {

Expand All @@ -33,15 +33,17 @@ object UInputSchema extends Schema[UInputEntity]("uinputs") {
def outputRefKey: Rep[Hash] = column[Hash]("output_ref_key", O.SqlType("BYTEA"))
def unlockScript: Rep[Option[ByteString]] = column[Option[ByteString]]("unlock_script")
def address: Rep[Option[Address]] = column[Option[Address]]("address")
// Nullable column holding the groupless (group-agnostic) form of the address.
// Fix: the diff rendering left the removed pre-change `uinputOrder` line
// duplicated above these additions; only the post-change definitions are kept.
def grouplessAddress: Rep[Option[AddressLike]] =
  column[Option[AddressLike]]("groupless_address")
def uinputOrder: Rep[Int] = column[Int]("uinput_order")

def pk: PrimaryKey = primaryKey("uinputs_pk", (outputRefKey, txHash))

def uinputsTxHashIdx: Index = index("uinputs_tx_hash_idx", txHash)
def uinputsP2pkhAddressIdx: Index = index("uinputs_address_idx", address)

// Default projection mapping the row to UInputEntity; must list columns in the
// same order as the case-class fields (grouplessAddress before uinputOrder).
// Fix: the diff rendering left both the old tuple (without grouplessAddress)
// and the new one; only the new projection is kept.
def * : ProvenShape[UInputEntity] =
  (txHash, hint, outputRefKey, unlockScript, address, grouplessAddress, uinputOrder)
    .<>((UInputEntity.apply _).tupled, UInputEntity.unapply)
}

Expand Down
Loading
0