# Materialized Views
## list_login_log related MVs
### mv_d_list_login_log
```
DROP TABLE data_center.mv_d_list_login_log on cluster default
```
```
CREATE MATERIALIZED VIEW data_center.mv_d_list_login_log on cluster default
(
`date_time` DateTime,
`channel_id` String,
`open_channel_id` String,
`online_count` AggregateFunction(uniq, Nullable(UInt64)),
`new_reg_count` AggregateFunction(uniq, Nullable(UInt64))
)
ENGINE = ReplicatedAggregatingMergeTree('/clickhouse/tables/replicated/{shard}/mv_d_list_login_log', '{replica}')
PARTITION BY toYYYYMMDD(toDate(date_time))
ORDER BY (date_time, channel_id, open_channel_id)
SETTINGS index_granularity = 8192 AS
SELECT
toDate(formatDateTime(toDateTime(create_time, 'Asia/Shanghai'), '%Y-%m-%d')) AS date_time,
channel_id,
open_channel_id,
uniqState(if(is_reg = 0, user_id, NULL)) AS online_count,
uniqState(if(is_reg = 1, user_id, NULL)) AS new_reg_count
FROM data_center.list_login_log
GROUP BY
date_time,
channel_id,
open_channel_id
```
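The MV stores `uniq` aggregate states, so reads have to finalize them with the `-Merge` combinator; the same pattern applies to every MV below. A minimal read-side sketch (the date filter is only an illustrative value):
```
SELECT
    date_time,
    channel_id,
    open_channel_id,
    uniqMerge(online_count)  AS online_count,
    uniqMerge(new_reg_count) AS new_reg_count
FROM data_center.mv_d_list_login_log
WHERE date_time >= '2021-11-01'   -- example range, adjust as needed
GROUP BY
    date_time,
    channel_id,
    open_channel_id
ORDER BY date_time
```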
### mv_h_list_login_log
```
DROP TABLE data_center.mv_h_list_login_log on cluster default
```
```
CREATE MATERIALIZED VIEW data_center.mv_h_list_login_log on cluster default
(
`date_time` DateTime,
`channel_id` String,
`open_channel_id` String,
`online_count` AggregateFunction(uniq, Nullable(UInt64)),
`new_reg_count` AggregateFunction(uniq, Nullable(UInt64))
)
ENGINE = ReplicatedAggregatingMergeTree('/clickhouse/tables/replicated/{shard}/mv_h_list_login_log', '{replica}')
PARTITION BY toYYYYMMDD(toDate(date_time))
ORDER BY (date_time, channel_id, open_channel_id)
SETTINGS index_granularity = 8192 AS
SELECT
toDateTime(formatDateTime(toDateTime(create_time, 'Asia/Shanghai'), '%Y-%m-%d %H:00:00')) AS date_time,
channel_id,
open_channel_id,
uniqState(if(is_reg = 0, user_id, NULL)) AS online_count,
uniqState(if(is_reg = 1, user_id, NULL)) AS new_reg_count
FROM data_center.list_login_log
GROUP BY
date_time,
channel_id,
open_channel_id
```
## list_diamond_log related MVs
### mv_d_list_diamond_log
```
DROP TABLE data_center.mv_d_list_diamond_log on cluster default
```
```
CREATE MATERIALIZED VIEW data_center.mv_d_list_diamond_log on cluster default
(
`date_time` DateTime,
`channel_id` String,
`open_channel_id` String,
`total_diamond` AggregateFunction(sum, UInt64),
`total_diamond_use` AggregateFunction(sum, UInt64),
`user_use_count` AggregateFunction(uniq, Nullable(UInt64)),
`user_count` AggregateFunction(uniq, UInt64),
`total_gift_giving` AggregateFunction(sum, UInt64),
`total_item_buy` AggregateFunction(sum, UInt64),
`total_routine_consumption` AggregateFunction(sum, UInt64),
`total_by_live_consumption` AggregateFunction(sum, UInt64),
`total_barrage` AggregateFunction(sum, UInt64),
`total_proceed_vip` AggregateFunction(sum, UInt64),
`total_guardiance_enable` AggregateFunction(sum, UInt64),
`total_proceed_guardaiance` AggregateFunction(sum, UInt64),
`total_buy_vip` AggregateFunction(sum, UInt64)
)
ENGINE = ReplicatedAggregatingMergeTree('/clickhouse/tables/replicated/{shard}/mv_d_list_diamond_log_1029', '{replica}')
PARTITION BY toYYYYMMDD(toDate(date_time))
ORDER BY (date_time, channel_id, open_channel_id)
SETTINGS index_granularity = 8192 AS
SELECT
toDate(formatDateTime(toDateTime(create_time, 'Asia/Shanghai'), '%Y-%m-%d')) AS date_time,
channel_id,
open_channel_id,
sumState(abs(diamond)) AS total_diamond,
sumState(multiIf(diamond < 0, abs(diamond), 0)) AS total_diamond_use,
uniqState(if(diamond < 0, user_id, null)) AS user_use_count,
uniqState(user_id) AS user_count,
sumState(if((diamond < 0) AND (oper_type = 5), abs(diamond), 0)) AS total_gift_giving,
sumState(if((diamond < 0) AND (oper_type = 7), abs(diamond), 0)) AS total_item_buy,
sumState(if((diamond < 0) AND (oper_type = 12), abs(diamond), 0)) AS total_routine_consumption,
sumState(if((diamond < 0) AND (oper_type = 13), abs(diamond), 0)) AS total_by_live_consumption,
sumState(if((diamond < 0) AND (oper_type = 14), abs(diamond), 0)) AS total_barrage,
sumState(if((diamond < 0) AND (oper_type = 22), abs(diamond), 0)) AS total_proceed_vip,
sumState(if((diamond < 0) AND (oper_type = 25), abs(diamond), 0)) AS total_guardiance_enable,
sumState(if((diamond < 0) AND (oper_type = 21), abs(diamond), 0)) AS total_buy_vip
FROM data_center.list_diamond_log
GROUP BY
date_time,
id,
channel_id,
open_channel_id;
```
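Read-side sketch for the daily diamond MV, trimmed to a few of the measures. States can be merged at any grain coarser than the MV key, e.g. dropping open_channel_id here; the month filter is only an example value:
```
SELECT
    date_time,
    channel_id,
    sumMerge(total_diamond_use) AS total_diamond_use,
    sumMerge(total_gift_giving) AS total_gift_giving,
    sumMerge(total_barrage)     AS total_barrage,
    uniqMerge(user_use_count)   AS user_use_count,
    uniqMerge(user_count)       AS user_count
FROM data_center.mv_d_list_diamond_log
WHERE toYYYYMM(date_time) = 202111   -- example month
GROUP BY
    date_time,
    channel_id
ORDER BY date_time
```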
### mv_h_list_diamond_log
```
DROP TABLE data_center.mv_h_list_diamond_log on cluster default
```
```
CREATE MATERIALIZED VIEW data_center.mv_h_list_diamond_log on cluster default
(
`date_time` DateTime,
`channel_id` String,
`open_channel_id` String,
`total_diamond` AggregateFunction(sum, UInt64),
`total_diamond_use` AggregateFunction(sum, UInt64),
`user_use_count` AggregateFunction(uniq, Nullable(UInt64)),
`user_count` AggregateFunction(uniq, UInt64),
`total_gift_giving` AggregateFunction(sum, UInt64),
`total_item_buy` AggregateFunction(sum, UInt64),
`total_routine_consumption` AggregateFunction(sum, UInt64),
`total_by_live_consumption` AggregateFunction(sum, UInt64),
`total_barrage` AggregateFunction(sum, UInt64),
`total_proceed_vip` AggregateFunction(sum, UInt64),
`total_guardiance_enable` AggregateFunction(sum, UInt64),
`total_proceed_guardaiance` AggregateFunction(sum, UInt64),
`total_buy_vip` AggregateFunction(sum, UInt64)
)
ENGINE = ReplicatedAggregatingMergeTree('/clickhouse/tables/replicated/{shard}/mv_h_list_diamond_log_1029', '{replica}')
PARTITION BY toYYYYMMDD(toDate(date_time))
ORDER BY (date_time, channel_id, open_channel_id)
SETTINGS index_granularity = 8192 AS
SELECT
toDateTime(formatDateTime(toDateTime(create_time, 'Asia/Shanghai'), '%Y-%m-%d %H:00:00')) AS date_time,
channel_id,
open_channel_id,
sumState(abs(diamond)) AS total_diamond,
sumState(multiIf(diamond < 0, abs(diamond), 0)) AS total_diamond_use,
uniqState(if(diamond < 0, user_id, null)) AS user_use_count,
uniqState(user_id) AS user_count,
sumState(if((diamond < 0) AND (oper_type = 5), abs(diamond), 0)) AS total_gift_giving,
sumState(if((diamond < 0) AND (oper_type = 7), abs(diamond), 0)) AS total_item_buy,
sumState(if((diamond < 0) AND (oper_type = 12), abs(diamond), 0)) AS total_routine_consumption,
sumState(if((diamond < 0) AND (oper_type = 13), abs(diamond), 0)) AS total_by_live_consumption,
sumState(if((diamond < 0) AND (oper_type = 14), abs(diamond), 0)) AS total_barrage,
sumState(if((diamond < 0) AND (oper_type = 22), abs(diamond), 0)) AS total_proceed_vip,
sumState(if((diamond < 0) AND (oper_type = 25), abs(diamond), 0)) AS total_guardiance_enable,
sumState(if((diamond < 0) AND (oper_type = 21), abs(diamond), 0)) AS total_buy_vip
FROM data_center.list_diamond_log
GROUP BY
date_time,
id,
channel_id,
open_channel_id;
```
## recharge related MVs
### mv_d_recharge
```
DROP TABLE data_center.mv_d_recharge on cluster default
```
```
CREATE MATERIALIZED VIEW data_center.mv_d_recharge on cluster default
(
`date_time` DateTime,
`open_channel_id` String,
`channel_id` String,
`first_recharge_count` AggregateFunction(uniq, Nullable(UInt64)),
`first_count` AggregateFunction(uniq, Nullable(String)),
`first_money` AggregateFunction(sum, Decimal(38, 20)),
`second_recharge_count` AggregateFunction(uniq, Nullable(UInt64)),
`second_count` AggregateFunction(uniq, Nullable(String)),
`second_money` AggregateFunction(sum, Decimal(38, 20)),
`total_count` AggregateFunction(uniq, String)
)
ENGINE = ReplicatedAggregatingMergeTree('/clickhouse/tables/replicated/{shard}/mv_d_recharge_1029', '{replica}')
ORDER BY (date_time, channel_id, open_channel_id)
SETTINGS index_granularity = 8192 AS
SELECT
toDate(formatDateTime(toDateTime(create_time, 'Asia/Shanghai'), '%Y-%m-%d')) AS date_time,
channel_i AS channel_id,
channel_ii AS open_channel_id,
uniqState(if(is_first_recharge = 1, user_id, NULL)) AS first_recharge_count,
uniqState(if(is_first_recharge = 1, id, NULL)) AS first_count,
sumState(if(is_first_recharge = 1, money, 0)) AS first_money,
uniqState(if(is_second_recharge = 1, user_id, NULL)) AS second_recharge_count,
uniqState(if(is_second_recharge = 1, id, NULL)) AS second_count,
sumState(if(is_second_recharge = 1, money, 0)) AS second_money,
uniqState(id) AS total_count
FROM data_center.recharge
GROUP BY
date_time,
channel_i,
channel_ii
```
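Read-side sketch for the daily recharge MV: the money columns are Decimal sum states, the counts are uniq states over user_id / order id:
```
SELECT
    date_time,
    channel_id,
    open_channel_id,
    uniqMerge(first_recharge_count)  AS first_recharge_users,
    uniqMerge(first_count)           AS first_recharge_orders,
    sumMerge(first_money)            AS first_recharge_money,
    uniqMerge(second_recharge_count) AS second_recharge_users,
    sumMerge(second_money)           AS second_recharge_money,
    uniqMerge(total_count)           AS total_orders
FROM data_center.mv_d_recharge
GROUP BY
    date_time,
    channel_id,
    open_channel_id
```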
### mv_h_recharge
```
DROP TABLE data_center.mv_h_recharge on cluster default
```
```
CREATE MATERIALIZED VIEW data_center.mv_h_recharge on cluster default
(
`date_time` DateTime,
`open_channel_id` String,
`channel_id` String,
`first_recharge_count` AggregateFunction(uniq, Nullable(UInt64)),
`first_count` AggregateFunction(uniq, Nullable(String)),
`first_money` AggregateFunction(sum, Decimal(38, 20)),
`second_recharge_count` AggregateFunction(uniq, Nullable(UInt64)),
`second_count` AggregateFunction(uniq, Nullable(String)),
`second_money` AggregateFunction(sum, Decimal(38, 20)),
`total_count` AggregateFunction(uniq, String)
)
ENGINE = ReplicatedAggregatingMergeTree('/clickhouse/tables/replicated/{shard}/mv_h_recharge_1029', '{replica}')
PARTITION BY toYYYYMMDD(toDate(date_time))
ORDER BY (date_time, channel_id, open_channel_id)
SETTINGS index_granularity = 8192 AS
SELECT
toDateTime(formatDateTime(toDateTime(create_time, 'Asia/Shanghai'), '%Y-%m-%d %H:00:00')) AS date_time,
channel_i AS channel_id,
channel_ii AS open_channel_id,
uniqState(if(is_first_recharge = 1, user_id, NULL)) AS first_recharge_count,
uniqState(if(is_first_recharge = 1, id, NULL)) AS first_count,
sumState(if(is_first_recharge = 1, money, 0)) AS first_money,
uniqState(if(is_second_recharge = 1, user_id, NULL)) AS second_recharge_count,
uniqState(if(is_second_recharge = 1, id, NULL)) AS second_count,
sumState(if(is_second_recharge = 1, money, 0)) AS second_money,
uniqState(id) AS total_count
FROM data_center.recharge
GROUP BY
date_time,
channel_i,
channel_ii;
```
## list_exchange_log related MVs
### mv_d_list_exchange_log
```
DROP TABLE data_center.mv_d_list_exchange_log on cluster default
```
```
CREATE MATERIALIZED VIEW data_center.mv_d_list_exchange_log on cluster default
(
`date_time` DateTime,
`channel_id` String,
`open_channel_id` String,
`total_item_count` AggregateFunction(sum, UInt64),
`total_money` AggregateFunction(sum, Decimal(38, 20)),
`total_count` AggregateFunction(uniq, Nullable(UInt64)),
`user_count` AggregateFunction(uniq, Nullable(UInt64)),
`first_recharge_money` AggregateFunction(sum, Decimal(38, 20)),
`first_recharge_user` AggregateFunction(uniq, Nullable(UInt64)),
`first_recharge_item_count` AggregateFunction(sum, UInt64),
`first_recharge_user_count` AggregateFunction(uniq, Nullable(UInt64))
)
ENGINE = ReplicatedAggregatingMergeTree('/clickhouse/tables/replicated/{shard}/mv_d_list_exchange_log', '{replica}')
PARTITION BY toYYYYMMDD(toDate(date_time))
ORDER BY (date_time, channel_id, open_channel_id)
SETTINGS index_granularity = 8192 AS
SELECT
toDate(formatDateTime(toDateTime(create_time, 'Asia/Shanghai'), '%Y-%m-%d')) AS date_time,
channel_id,
open_channel_id,
sumState(if(status = 1, item_count, 0)) AS total_item_count,
sumState(if(status = 1, money, 0)) AS total_money,
uniqState(if(status = 1, id, NULL)) AS total_count,
uniqState(if(status = 1, user_id, NULL)) AS user_count,
sumState(if((is_first_recharge = 1) AND (status = 1), money, 0)) AS first_recharge_money,
uniqState(if((is_first_recharge = 1) AND (status = 1), user_id, NULL)) AS first_recharge_user,
sumState(if((is_first_recharge = 1) AND (status = 1), item_count, 0)) AS first_recharge_item_count,
uniqState(if((is_first_recharge = 1) AND (status = 1), user_id, NULL)) AS first_recharge_user_count
FROM data_center.list_exchange_log
GROUP BY
date_time,
channel_id,
open_channel_id;
```
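Read-side sketch for the daily exchange MV, reading the totals together with the first-recharge measures:
```
SELECT
    date_time,
    channel_id,
    open_channel_id,
    sumMerge(total_item_count)           AS total_item_count,
    sumMerge(total_money)                AS total_money,
    uniqMerge(total_count)               AS exchange_orders,
    uniqMerge(user_count)                AS exchange_users,
    sumMerge(first_recharge_money)       AS first_recharge_money,
    uniqMerge(first_recharge_user_count) AS first_recharge_users
FROM data_center.mv_d_list_exchange_log
GROUP BY
    date_time,
    channel_id,
    open_channel_id
```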
### mv_h_list_exchange_log
```
DROP TABLE data_center.mv_h_list_exchange_log on cluster default
```
```
CREATE MATERIALIZED VIEW data_center.mv_h_list_exchange_log on cluster default
(
`date_time` DateTime,
`channel_id` String,
`open_channel_id` String,
`total_item_count` AggregateFunction(sum, UInt64),
`total_money` AggregateFunction(sum, Decimal(38, 20)),
`total_count` AggregateFunction(uniq, Nullable(UInt64)),
`user_count` AggregateFunction(uniq, Nullable(UInt64)),
`first_recharge_money` AggregateFunction(sum, Decimal(38, 20)),
`first_recharge_user` AggregateFunction(uniq, Nullable(UInt64)),
`first_recharge_item_count` AggregateFunction(sum, UInt64),
`first_recharge_user_count` AggregateFunction(uniq, Nullable(UInt64))
)
ENGINE = ReplicatedAggregatingMergeTree('/clickhouse/tables/replicated/{shard}/mv_h_list_exchange_log', '{replica}')
PARTITION BY toYYYYMMDD(toDate(date_time))
ORDER BY (date_time, channel_id, open_channel_id)
SETTINGS index_granularity = 8192 AS
SELECT
toDateTime(formatDateTime(toDateTime(create_time, 'Asia/Shanghai'), '%Y-%m-%d %H:00:00')) AS date_time,
channel_id,
open_channel_id,
sumState(if(status = 1, item_count, 0)) AS total_item_count,
sumState(if(status = 1, money, 0)) AS total_money,
uniqState(if(status = 1, id, NULL)) AS total_count,
uniqState(if(status = 1, user_id, NULL)) AS user_count,
sumState(if((is_first_recharge = 1) AND (status = 1), money, 0)) AS first_recharge_money,
uniqState(if((is_first_recharge = 1) AND (status = 1), user_id, NULL)) AS first_recharge_user,
sumState(if((is_first_recharge = 1) AND (status = 1), item_count, 0)) AS first_recharge_item_count,
uniqState(if((is_first_recharge = 1) AND (status = 1), user_id, NULL)) AS first_recharge_user_count
FROM data_center.list_exchange_log
GROUP BY
date_time,
channel_id,
open_channel_id;
```
## caipiao_bet related MVs
### mv_d_caipiao_bet
```
DROP TABLE data_center.mv_d_caipiao_bet on cluster default
```
```
CREATE MATERIALIZED VIEW data_center.mv_d_caipiao_bet on cluster default
(
`date_time` DateTime,
`cp_type` Int64,
`state` Int64,
`voided` String,
`channel_id` String,
`open_channel_id` String,
`total_ticket` AggregateFunction(sum, Decimal(30, 10)),
`user_count` AggregateFunction(uniq, UInt64),
`bet_count` AggregateFunction(count, UInt64)
)
ENGINE = ReplicatedAggregatingMergeTree('/clickhouse/tables/replicated/{shard}/mv_d_caipiao_bet', '{replica}')
PARTITION BY toYYYYMMDD(toDate(date_time))
ORDER BY (date_time, cp_type, state, voided, channel_id, open_channel_id)
SETTINGS index_granularity = 8192 AS
SELECT
toDate(formatDateTime(toDateTime(create_time, 'Asia/Shanghai'), '%Y-%m-%d')) AS date_time,
cp_type,
state,
voided,
channel_id,
open_channel_id,
sumState(amount * multiple) AS total_ticket,
uniqState(distinct user_id) AS user_count,
countState(distinct id) AS bet_count
FROM data_center.caipiao_bet
GROUP BY
date_time,
cp_type,
state,
voided,
channel_id,
open_channel_id;
```
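In this MV `total_ticket` is a sum state, `user_count` a uniq state and `bet_count` a count state, so each column needs its matching `-Merge` function. Sketch (the voided filter value is an assumption; real values depend on the source table):
```
SELECT
    date_time,
    cp_type,
    state,
    sumMerge(total_ticket) AS total_ticket,
    uniqMerge(user_count)  AS user_count,
    countMerge(bet_count)  AS bet_count
FROM data_center.mv_d_caipiao_bet
WHERE voided = '0'   -- example filter value only
GROUP BY
    date_time,
    cp_type,
    state
```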
### mv_h_caipiao_bet
```
DROP TABLE data_center.mv_h_caipiao_bet on cluster default
```
```
CREATE MATERIALIZED VIEW data_center.mv_h_caipiao_bet on cluster default
(
`date_time` DateTime,
`cp_type` Int64,
`state` Int64,
`voided` String,
`channel_id` String,
`open_channel_id` String,
`total_ticket` AggregateFunction(sum, Decimal(30, 10)),
`user_count` AggregateFunction(uniq, UInt64),
`bet_count` AggregateFunction(count, UInt64)
)
ENGINE = ReplicatedAggregatingMergeTree('/clickhouse/tables/replicated/{shard}/mv_h_caipiao_bet', '{replica}')
PARTITION BY toYYYYMMDD(toDate(date_time))
ORDER BY (date_time, cp_type, state, voided, channel_id, open_channel_id)
SETTINGS index_granularity = 8192 AS
SELECT
toDateTime(formatDateTime(toDateTime(create_time, 'Asia/Shanghai'), '%Y-%m-%d %H:00:00')) AS date_time,
cp_type,
state,
voided,
channel_id,
open_channel_id,
sumState(amount * multiple) AS total_ticket,
uniqState(distinct user_id) AS user_count,
countState(distinct id) AS bet_count
FROM data_center.caipiao_bet
GROUP BY
date_time,
cp_type,
state,
voided,
channel_id,
open_channel_id;
```
## list_game_log related MVs
### mv_d_game_data
```
DROP TABLE data_center.mv_d_game_data on cluster default
```
```
CREATE MATERIALIZED VIEW data_center.mv_d_game_data on cluster default
(
`date_time` DateTime,
`type` String,
`pid` String,
`channel_id` String,
`open_channel_id` String,
`total_bet` AggregateFunction(sum, Decimal(30, 20)),
`total_available_bet` AggregateFunction(sum, Decimal(30, 20)),
`total_income` AggregateFunction(sum, Decimal(38, 20)),
`user_count` AggregateFunction(uniq, UInt64),
`ticket_count` AggregateFunction(count, String)
)
ENGINE = ReplicatedAggregatingMergeTree('/clickhouse/tables/replicated/{shard}/mv_d_game_data', '{replica}')
ORDER BY (date_time, pid, type, channel_id, open_channel_id)
SETTINGS index_granularity = 8192 AS
SELECT
toDate(formatDateTime(toDateTime(game_time, 'Asia/Shanghai'), '%Y-%m-%d')) AS date_time,
type,
pid,
channel_i AS channel_id,
channel_ii AS open_channel_id,
sumState(bet) AS total_bet,
sumState(abs(available_bet)) AS total_available_bet,
sumState(bet + win) AS total_income,
uniqState(user_id) AS user_count,
countState(transfer_id) AS ticket_count
FROM data_center.list_game_log
WHERE del_time = 0
GROUP BY
date_time,
type,
pid,
channel_i,
channel_ii;
```
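Read-side sketch for the per-pid/per-type daily game data; `total_income` already stores sumState(bet + win), and `ticket_count` here is a count state:
```
SELECT
    date_time,
    pid,
    type,
    sumMerge(total_bet)           AS total_bet,
    sumMerge(total_available_bet) AS total_available_bet,
    sumMerge(total_income)        AS total_income,
    uniqMerge(user_count)         AS user_count,
    countMerge(ticket_count)      AS ticket_count
FROM data_center.mv_d_game_data
GROUP BY
    date_time,
    pid,
    type
```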
### mv_h_game_data
```
DROP TABLE data_center.mv_h_game_data on cluster default
```
```
CREATE MATERIALIZED VIEW data_center.mv_h_game_data on cluster default
(
`date_time` DateTime,
`type` String,
`pid` String,
`channel_id` String,
`open_channel_id` String,
`total_bet` AggregateFunction(sum, Decimal(30, 20)),
`total_available_bet` AggregateFunction(sum, Decimal(30, 20)),
`total_income` AggregateFunction(sum, Decimal(38, 20)),
`user_count` AggregateFunction(uniq, UInt64),
`ticket_count` AggregateFunction(count, String)
)
ENGINE = ReplicatedAggregatingMergeTree('/clickhouse/tables/replicated/{shard}/mv_h_game_data', '{replica}')
ORDER BY (date_time, pid, type, channel_id, open_channel_id)
SETTINGS index_granularity = 8192 AS
SELECT
toDateTime(formatDateTime(toDateTime(game_time, 'Asia/Shanghai'), '%Y-%m-%d %H:00:00')) AS date_time,
type,
pid,
channel_i AS channel_id,
channel_ii AS open_channel_id,
sumState(bet) AS total_bet,
sumState(abs(available_bet)) AS total_available_bet,
sumState(bet + win) AS total_income,
uniqState(user_id) AS user_count,
countState(transfer_id) AS ticket_count
FROM data_center.list_game_log
WHERE del_time = 0
GROUP BY
date_time,
type,
pid,
channel_i,
channel_ii
```
### mv_d_game_data_type
```
DROP TABLE data_center.mv_d_game_data_type on cluster default
```
```
CREATE MATERIALIZED VIEW data_center.mv_d_game_data_type on cluster default
(
`date_time` DateTime,
`type` String,
`channel_id` String,
`open_channel_id` String,
`total_bet` AggregateFunction(sum, Decimal(30, 20)),
`total_available_bet` AggregateFunction(sum, Decimal(30, 20)),
`total_income` AggregateFunction(sum, Decimal(38, 20)),
`total_tips` AggregateFunction(sum, Decimal(30, 20)),
`user_count` AggregateFunction(uniq, UInt64),
`ticket_count` AggregateFunction(count, String)
)
ENGINE = ReplicatedAggregatingMergeTree('/clickhouse/tables/replicated/{shard}/mv_d_game_data_type', '{replica}')
ORDER BY (date_time, type, channel_id, open_channel_id)
SETTINGS index_granularity = 8192 AS
SELECT
toDate(formatDateTime(toDateTime(game_time, 'Asia/Shanghai'), '%Y-%m-%d')) AS date_time,
type,
channel_i AS channel_id,
channel_ii AS open_channel_id,
sumState(bet) AS total_bet,
sumState(abs(available_bet)) AS total_available_bet,
sumState(bet + win) AS total_income,
sumState(tip) AS total_tips,
uniqState(user_id) AS user_count,
countState(transfer_id) AS ticket_count
FROM data_center.list_game_log
WHERE del_time = 0
GROUP BY
date_time,
type,
channel_i,
channel_ii;
```
### mv_h_game_data_type
```
DROP TABLE data_center.mv_h_game_data_type on cluster default
```
```
CREATE MATERIALIZED VIEW data_center.mv_h_game_data_type on cluster default
(
`date_time` DateTime,
`type` String,
`channel_id` String,
`open_channel_id` String,
`total_bet` AggregateFunction(sum, Decimal(30, 20)),
`total_available_bet` AggregateFunction(sum, Decimal(30, 20)),
`total_income` AggregateFunction(sum, Decimal(38, 20)),
`total_tips` AggregateFunction(sum, Decimal(30, 20)),
`user_count` AggregateFunction(uniq, UInt64),
`ticket_count` AggregateFunction(count, String)
)
ENGINE = ReplicatedAggregatingMergeTree('/clickhouse/tables/replicated/{shard}/mv_h_game_data_type', '{replica}')
ORDER BY (date_time, type, channel_id, open_channel_id)
SETTINGS index_granularity = 8192 AS
SELECT
toDateTime(formatDateTime(toDateTime(game_time, 'Asia/Shanghai'), '%Y-%m-%d %H:00:00')) AS date_time,
type,
channel_i AS channel_id,
channel_ii AS open_channel_id,
sumState(bet) AS total_bet,
sumState(abs(available_bet)) AS total_available_bet,
sumState(bet + win) AS total_income,
sumState(tip) AS total_tips,
uniqState(user_id) AS user_count,
countState(transfer_id) AS ticket_count
FROM data_center.list_game_log
WHERE del_time = 0
GROUP BY
date_time,
type,
channel_i,
channel_ii;
```
### mv_d_game_data_date
*Materialized view grouped by time only (by day)*
```
DROP TABLE data_center.mv_d_game_data_date on cluster default
```
```
CREATE MATERIALIZED VIEW data_center.mv_d_game_data_date on cluster default
(
`date_time` DateTime,
`channel_id` String,
`open_channel_id` String,
`total_bet` AggregateFunction(sum, Decimal(30, 20)),
`total_available_bet` AggregateFunction(sum, Decimal(30, 20)),
`total_income` AggregateFunction(sum, Decimal(38, 20)),
`user_count` AggregateFunction(uniq, UInt64),
`ticket_count` AggregateFunction(count, String)
)
ENGINE = ReplicatedAggregatingMergeTree('/clickhouse/tables/replicated/{shard}/mv_d_game_data_date', '{replica}')
ORDER BY (date_time, channel_id, open_channel_id)
SETTINGS index_granularity = 8192 AS
SELECT
toDate(formatDateTime(toDateTime(game_time, 'Asia/Shanghai'), '%Y-%m-%d')) AS date_time,
channel_i AS channel_id,
channel_ii AS open_channel_id,
sumState(bet) AS total_bet,
sumState(abs(available_bet)) AS total_available_bet,
sumState(bet + win) AS total_income,
uniqState(user_id) AS user_count,
countState(transfer_id) AS ticket_count
FROM data_center.list_game_log
WHERE del_time = 0
GROUP BY
date_time,
channel_i,
channel_ii
```
### mv_h_game_data_date
*Materialized view grouped by time only (by hour)*
```
DROP TABLE data_center.mv_h_game_data_date on cluster default
```
```
CREATE MATERIALIZED VIEW data_center.mv_h_game_data_date on cluster default
(
`date_time` DateTime,
`channel_id` String,
`open_channel_id` String,
`total_bet` AggregateFunction(sum, Decimal(30, 20)),
`total_available_bet` AggregateFunction(sum, Decimal(30, 20)),
`total_income` AggregateFunction(sum, Decimal(38, 20)),
`user_count` AggregateFunction(uniq, UInt64),
`ticket_count` AggregateFunction(count, String)
)
ENGINE = ReplicatedAggregatingMergeTree('/clickhouse/tables/replicated/{shard}/mv_h_game_data_date', '{replica}')
ORDER BY (date_time, channel_id, open_channel_id)
SETTINGS index_granularity = 8192 AS
SELECT
toDateTime(formatDateTime(toDateTime(game_time, 'Asia/Shanghai'), '%Y-%m-%d %H:00:00')) AS date_time,
channel_i AS channel_id,
channel_ii AS open_channel_id,
sumState(bet) AS total_bet,
sumState(abs(available_bet)) AS total_available_bet,
sumState(bet + win) AS total_income,
uniqState(user_id) AS user_count,
countState(transfer_id) AS ticket_count
FROM data_center.list_game_log
WHERE del_time = 0
GROUP BY
date_time,
channel_i,
channel_ii
```
### mv_d_list_game_log
```
DROP TABLE data_center.mv_d_list_game_log on cluster default
```
```
CREATE MATERIALIZED VIEW data_center.mv_d_list_game_log on cluster default
(
`date_time` DateTime,
`open_channel_id` String,
`channel_id` String,
`activity_user_count` AggregateFunction(uniq, UInt64)
)
ENGINE = ReplicatedAggregatingMergeTree('/clickhouse/tables/replicated/{shard}/mv_d_list_game_log', '{replica}')
PARTITION BY toYYYYMMDD(toDate(date_time))
ORDER BY (date_time, channel_id, open_channel_id)
SETTINGS index_granularity = 8192 AS
SELECT
toDate(formatDateTime(toDateTime(game_time, 'Asia/Shanghai'), '%Y-%m-%d')) AS date_time,
channel_i AS channel_id,
channel_ii AS open_channel_id,
uniqState(user_id) AS activity_user_count
FROM data_center.list_game_log
WHERE del_time = 0
GROUP BY
date_time,
channel_i,
channel_ii;
```
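Read-side sketch for daily active users per channel:
```
SELECT
    date_time,
    channel_id,
    open_channel_id,
    uniqMerge(activity_user_count) AS active_users
FROM data_center.mv_d_list_game_log
GROUP BY
    date_time,
    channel_id,
    open_channel_id
ORDER BY date_time
```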
### mv_h_list_game_log
```
DROP TABLE data_center.mv_h_list_game_log on cluster default
```
```
CREATE MATERIALIZED VIEW data_center.mv_h_list_game_log on cluster default
(
`date_time` DateTime,
`open_channel_id` String,
`channel_id` String,
`activity_user_count` AggregateFunction(uniq, UInt64)
)
ENGINE = ReplicatedAggregatingMergeTree('/clickhouse/tables/replicated/{shard}/mv_h_list_game_log', '{replica}')
PARTITION BY toYYYYMMDD(toDate(date_time))
ORDER BY (date_time, channel_id, open_channel_id)
SETTINGS index_granularity = 8192 AS
SELECT
toDateTime(formatDateTime(toDateTime(game_time, 'Asia/Shanghai'), '%Y-%m-%d %H:00:00')) AS date_time,
channel_i AS channel_id,
channel_ii AS open_channel_id,
uniqState(user_id) AS activity_user_count
FROM data_center.list_game_log
WHERE del_time = 0
GROUP BY
date_time,
channel_i,
channel_ii
```
### mv_d_game_data_pid_type
```
DROP TABLE data_center.mv_d_game_data_pid_type on cluster default
```
```
CREATE MATERIALIZED VIEW data_center.mv_d_game_data_pid_type on cluster default
(
`date_time` DateTime,
`pid` String,
`game_type` String,
`channel_id` String,
`open_channel_id` String,
`total_bet` AggregateFunction(sum, Decimal(30, 20)),
`total_available_bet` AggregateFunction(sum, Decimal(30, 20)),
`total_income` AggregateFunction(sum, Decimal(38, 20)),
`total_tips` AggregateFunction(sum, Decimal(30, 20)),
`user_count` AggregateFunction(uniq, UInt64),
`ticket_count` AggregateFunction(uniq, UInt64)
)
ENGINE = ReplicatedAggregatingMergeTree('/clickhouse/tables/replicated/{shard}/mv_d_game_data_pid_type', '{replica}')
PARTITION BY toYYYYMMDD(toDate(date_time))
ORDER BY (date_time, pid, game_type, channel_id, open_channel_id)
SETTINGS index_granularity = 8192 AS
SELECT
toDate(formatDateTime(toDateTime(game_time, 'Asia/Shanghai'), '%Y-%m-%d 00:00:00')) AS date_time,
pid,
game_type,
channel_i AS channel_id,
channel_ii AS open_channel_id,
sumState(bet) AS total_bet,
sumState(abs(available_bet)) AS total_available_bet,
sumState(bet + win) AS total_income,
sumState(tip) AS total_tips,
uniqState(user_id) AS user_count,
uniqState(id) AS ticket_count
FROM data_center.list_game_log
WHERE (del_time = 0)
GROUP BY
date_time,
pid,
game_type,
channel_i,
channel_ii;
```
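Unlike mv_d_game_data, `ticket_count` here is a `uniq` state over `id`, so it is read with uniqMerge rather than countMerge. Sketch:
```
SELECT
    date_time,
    pid,
    game_type,
    sumMerge(total_bet)     AS total_bet,
    sumMerge(total_tips)    AS total_tips,
    uniqMerge(user_count)   AS user_count,
    uniqMerge(ticket_count) AS ticket_count
FROM data_center.mv_d_game_data_pid_type
GROUP BY
    date_time,
    pid,
    game_type
```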
### mv_h_game_data_pid_type
```
DROP TABLE data_center.mv_h_game_data_pid_type on cluster default
```
```
CREATE MATERIALIZED VIEW data_center.mv_h_game_data_pid_type on cluster default
(
`date_time` DateTime,
`pid` String,
`game_type` String,
`channel_id` String,
`open_channel_id` String,
`total_bet` AggregateFunction(sum, Decimal(30, 20)),
`total_available_bet` AggregateFunction(sum, Decimal(30, 20)),
`total_income` AggregateFunction(sum, Decimal(38, 20)),
`total_tips` AggregateFunction(sum, Decimal(30, 20)),
`user_count` AggregateFunction(uniq, UInt64),
`ticket_count` AggregateFunction(uniq, UInt64)
)
ENGINE = ReplicatedAggregatingMergeTree('/clickhouse/tables/replicated/{shard}/mv_h_game_data_pid_type', '{replica}')
PARTITION BY toYYYYMMDD(toDate(date_time))
ORDER BY (date_time, pid, game_type, channel_id, open_channel_id)
SETTINGS index_granularity = 8192 AS
SELECT
toDateTime(formatDateTime(toDateTime(game_time, 'Asia/Shanghai'), '%Y-%m-%d %H:00:00')) AS date_time,
pid,
game_type,
channel_i AS channel_id,
channel_ii AS open_channel_id,
sumState(bet) AS total_bet,
sumState(abs(available_bet)) AS total_available_bet,
sumState(bet + win) AS total_income,
sumState(tip) AS total_tips,
uniqState(user_id) AS user_count,
uniqState(id) AS ticket_count
FROM data_center.list_game_log
WHERE (del_time = 0)
GROUP BY
date_time,
pid,
game_type,
channel_i,
channel_ii;
```
### mv_d_jbcp_fenghong
```
DROP TABLE data_center.mv_d_jbcp_fenghong on cluster default
```
```
CREATE MATERIALIZED VIEW data_center.mv_d_jbcp_fenghong on cluster default
(
`date_time` DateTime,
`game_type` String,
`channel_id` String,
`open_channel_id` String,
`total_bet` AggregateFunction(sum, Decimal(30, 20)),
`total_available_bet` AggregateFunction(sum, Decimal(30, 20)),
`total_income` AggregateFunction(sum, Decimal(38, 20)),
`user_count` AggregateFunction(uniq, UInt64),
`ticket_count` AggregateFunction(uniq, UInt64)
)
ENGINE = ReplicatedAggregatingMergeTree('/clickhouse/tables/replicated/{shard}/mv_d_jbcp_fenghong', '{replica}')
PARTITION BY toYYYYMMDD(toDate(date_time))
ORDER BY (date_time, game_type, channel_id, open_channel_id)
SETTINGS index_granularity = 8192 AS
SELECT
toDate(formatDateTime(toDateTime(game_time, 'Asia/Shanghai'), '%Y-%m-%d 00:00:00')) AS date_time,
game_type,
channel_i AS channel_id,
channel_ii AS open_channel_id,
sumState(bet) AS total_bet,
sumState(abs(available_bet)) AS total_available_bet,
sumState(bet + win) AS total_income,
uniqState(user_id) AS user_count,
uniqState(id) AS ticket_count
FROM data_center.list_game_log
WHERE (del_time = 0) AND (pid = 'jbcp')
GROUP BY
date_time,
game_type,
channel_i,
channel_ii
```
### mv_h_jbcp_fenghong
```
DROP TABLE data_center.mv_h_jbcp_fenghong on cluster default
```
```
CREATE MATERIALIZED VIEW data_center.mv_h_jbcp_fenghong on cluster default
(
`date_time` DateTime,
`game_type` String,
`channel_id` String,
`open_channel_id` String,
`total_bet` AggregateFunction(sum, Decimal(30, 20)),
`total_available_bet` AggregateFunction(sum, Decimal(30, 20)),
`total_income` AggregateFunction(sum, Decimal(38, 20)),
`user_count` AggregateFunction(uniq, UInt64),
`ticket_count` AggregateFunction(uniq, UInt64)
)
ENGINE = ReplicatedAggregatingMergeTree('/clickhouse/tables/replicated/{shard}/mv_h_jbcp_fenghong', '{replica}')
PARTITION BY toYYYYMMDD(toDate(date_time))
ORDER BY (date_time, game_type, channel_id, open_channel_id)
SETTINGS index_granularity = 8192 AS
SELECT
toDateTime(formatDateTime(toDateTime(game_time, 'Asia/Shanghai'), '%Y-%m-%d %H:00:00')) AS date_time,
game_type,
channel_i AS channel_id,
channel_ii AS open_channel_id,
sumState(bet) AS total_bet,
sumState(abs(available_bet)) AS total_available_bet,
sumState(bet + win) AS total_income,
uniqState(user_id) AS user_count,
uniqState(id) AS ticket_count
FROM data_center.list_game_log
WHERE (del_time = 0) AND (pid = 'jbcp')
GROUP BY
date_time,
game_type,
channel_i,
channel_ii
```
## Related statements
```
TRUNCATE TABLE mv_d_game_data
TRUNCATE TABLE mv_d_game_data_date
TRUNCATE TABLE mv_d_game_data_pid_type
TRUNCATE TABLE mv_d_game_data_type
TRUNCATE TABLE mv_d_jbcp_fenghong
TRUNCATE TABLE mv_d_list_game_log
TRUNCATE TABLE mv_h_game_data
TRUNCATE TABLE mv_h_game_data_date
TRUNCATE TABLE mv_h_game_data_pid_type
TRUNCATE TABLE mv_h_game_data_type
TRUNCATE TABLE mv_h_jbcp_fenghong
TRUNCATE TABLE mv_h_list_game_log
TRUNCATE TABLE list_game_log
```
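After a TRUNCATE the MV only captures new inserts into the source table; history is not re-aggregated automatically. One possible backfill, sketched here for mv_d_game_data_date under the assumption that inserting directly into the MV is acceptable on this cluster, is to re-run the MV's own SELECT over data_center.list_game_log:
```
INSERT INTO data_center.mv_d_game_data_date
SELECT
    toDate(formatDateTime(toDateTime(game_time, 'Asia/Shanghai'), '%Y-%m-%d')) AS date_time,
    channel_i AS channel_id,
    channel_ii AS open_channel_id,
    sumState(bet) AS total_bet,
    sumState(abs(available_bet)) AS total_available_bet,
    sumState(bet + win) AS total_income,
    uniqState(user_id) AS user_count,
    countState(transfer_id) AS ticket_count
FROM data_center.list_game_log
WHERE del_time = 0
GROUP BY
    date_time,
    channel_i,
    channel_ii
```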
## Backup for the next release (fixes the slow list_game_log loading issue)
*Deployed to data_center_1101; QA has not yet signed off -> merged into v1.1*
```
https://docs.google.com/document/d/1dQdbf128PVYyxUBvEIJ9r8j4PT1n_egckPqm3riz5FE/edit#
```
## Backup for the release after next (payment, cash, recharge)
*Deployed to data_center_1101; QA has not yet signed off -> merged into v1.1*
```
https://docs.google.com/document/d/1hHA1C3u7UASLlvVOvOIXW84Jqg4vr9lgmZoy5GgODHU/edit
```
# Materialized Views v1.1 (deploy only after the data_center_1101 tests below are done; do not deploy yet)
## list_login_log related MVs
### mv_d_list_login_log
```
DROP TABLE data_center.mv_d_list_login_log on cluster default
```
```
CREATE MATERIALIZED VIEW data_center.mv_d_list_login_log on cluster default
(
`date_time` DateTime,
`channel_id` String,
`open_channel_id` String,
`online_count` AggregateFunction(count, UInt64),
`new_reg_count` AggregateFunction(count, UInt64)
)
ENGINE = ReplicatedAggregatingMergeTree('/clickhouse/tables/replicated/{shard}/mv_d_list_login_log', '{replica}')
PARTITION BY toYYYYMMDD(toDate(date_time))
ORDER BY (date_time, channel_id, open_channel_id)
SETTINGS index_granularity = 8192 AS
SELECT
toDate(formatDateTime(toDateTime(create_time, 'Asia/Shanghai'), '%Y-%m-%d')) AS date_time,
channel_id,
open_channel_id,
countStateDistinct(if(is_reg = 0, user_id, NULL)) AS online_count,
countStateDistinct(if(is_reg = 1, user_id, NULL)) AS new_reg_count
FROM data_center.list_login_log
GROUP BY
date_time,
channel_id,
open_channel_id
```
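In v1.1 these columns are declared as `count` states instead of `uniq` states, so the read side switches to countMerge (assuming the stored states really match the declared AggregateFunction(count, UInt64) type). Sketch:
```
SELECT
    date_time,
    channel_id,
    open_channel_id,
    countMerge(online_count)  AS online_count,
    countMerge(new_reg_count) AS new_reg_count
FROM data_center.mv_d_list_login_log
GROUP BY
    date_time,
    channel_id,
    open_channel_id
```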
### mv_h_list_login_log
```
DROP TABLE data_center.mv_h_list_login_log on cluster default
```
```
CREATE MATERIALIZED VIEW data_center.mv_h_list_login_log on cluster default
(
`date_time` DateTime,
`channel_id` String,
`open_channel_id` String,
`online_count` AggregateFunction(count, UInt64),
`new_reg_count` AggregateFunction(count, UInt64)
)
ENGINE = ReplicatedAggregatingMergeTree('/clickhouse/tables/replicated/{shard}/mv_h_list_login_log', '{replica}')
PARTITION BY toYYYYMMDD(toDate(date_time))
ORDER BY (date_time, channel_id, open_channel_id)
SETTINGS index_granularity = 8192 AS
SELECT
toDateTime(formatDateTime(toDateTime(create_time, 'Asia/Shanghai'), '%Y-%m-%d %H:00:00')) AS date_time,
channel_id,
open_channel_id,
countStateDistinct(if(is_reg = 0, user_id, NULL)) AS online_count,
countStateDistinct(if(is_reg = 1, user_id, NULL)) AS new_reg_count
FROM data_center.list_login_log
GROUP BY
date_time,
channel_id,
open_channel_id
```
## list_diamond_log related MVs
### mv_d_list_diamond_log
```
DROP TABLE data_center.mv_d_list_diamond_log on cluster default
```
```
CREATE MATERIALIZED VIEW data_center.mv_d_list_diamond_log on cluster default
(
`date_time` DateTime,
`channel_id` String,
`open_channel_id` String,
`total_diamond` AggregateFunction(sum, UInt64),
`total_diamond_use` AggregateFunction(sum, UInt64),
`user_use_count` AggregateFunction(uniq, Nullable(UInt64)),
`user_count` AggregateFunction(uniq, UInt64),
`total_gift_giving` AggregateFunction(sum, UInt64),
`total_item_buy` AggregateFunction(sum, UInt64),
`total_routine_consumption` AggregateFunction(sum, UInt64),
`total_by_live_consumption` AggregateFunction(sum, UInt64),
`total_barrage` AggregateFunction(sum, UInt64),
`total_proceed_vip` AggregateFunction(sum, UInt64),
`total_guardiance_enable` AggregateFunction(sum, UInt64),
`total_proceed_guardaiance` AggregateFunction(sum, UInt64),
`total_buy_vip` AggregateFunction(sum, UInt64)
)
ENGINE = ReplicatedAggregatingMergeTree('/clickhouse/tables/replicated/{shard}/mv_d_list_diamond_log_1029', '{replica}')
PARTITION BY toYYYYMMDD(toDate(date_time))
ORDER BY (date_time, channel_id, open_channel_id)
SETTINGS index_granularity = 8192 AS
SELECT
toDate(formatDateTime(toDateTime(create_time, 'Asia/Shanghai'), '%Y-%m-%d')) AS date_time,
channel_id,
open_channel_id,
sumState(abs(diamond)) AS total_diamond,
sumState(multiIf(diamond < 0, abs(diamond), 0)) AS total_diamond_use,
uniqState(if(diamond < 0, user_id, null)) AS user_use_count,
uniqState(user_id) AS user_count,
sumState(if((diamond < 0) AND (oper_type = 5), abs(diamond), 0)) AS total_gift_giving,
sumState(if((diamond < 0) AND (oper_type = 7), abs(diamond), 0)) AS total_item_buy,
sumState(if((diamond < 0) AND (oper_type = 12), abs(diamond), 0)) AS total_routine_consumption,
sumState(if((diamond < 0) AND (oper_type = 13), abs(diamond), 0)) AS total_by_live_consumption,
sumState(if((diamond < 0) AND (oper_type = 14), abs(diamond), 0)) AS total_barrage,
sumState(if((diamond < 0) AND (oper_type = 22), abs(diamond), 0)) AS total_proceed_vip,
sumState(if((diamond < 0) AND (oper_type = 25), abs(diamond), 0)) AS total_guardiance_enable,
sumState(if((diamond < 0) AND (oper_type = 21), abs(diamond), 0)) AS total_buy_vip
FROM data_center.list_diamond_log
GROUP BY
date_time,
id,
channel_id,
open_channel_id;
```
### mv_h_list_diamond_log
```
DROP TABLE data_center.mv_h_list_diamond_log on cluster default
```
```
CREATE MATERIALIZED VIEW data_center.mv_h_list_diamond_log on cluster default
(
`date_time` DateTime,
`channel_id` String,
`open_channel_id` String,
`total_diamond` AggregateFunction(sum, UInt64),
`total_diamond_use` AggregateFunction(sum, UInt64),
`user_use_count` AggregateFunction(uniq, Nullable(UInt64)),
`user_count` AggregateFunction(uniq, UInt64),
`total_gift_giving` AggregateFunction(sum, UInt64),
`total_item_buy` AggregateFunction(sum, UInt64),
`total_routine_consumption` AggregateFunction(sum, UInt64),
`total_by_live_consumption` AggregateFunction(sum, UInt64),
`total_barrage` AggregateFunction(sum, UInt64),
`total_proceed_vip` AggregateFunction(sum, UInt64),
`total_guardiance_enable` AggregateFunction(sum, UInt64),
`total_proceed_guardaiance` AggregateFunction(sum, UInt64),
`total_buy_vip` AggregateFunction(sum, UInt64)
)
ENGINE = ReplicatedAggregatingMergeTree('/clickhouse/tables/replicated/{shard}/mv_h_list_diamond_log_1029', '{replica}')
PARTITION BY toYYYYMMDD(toDate(date_time))
ORDER BY (date_time, channel_id, open_channel_id)
SETTINGS index_granularity = 8192 AS
SELECT
toDateTime(formatDateTime(toDateTime(create_time, 'Asia/Shanghai'), '%Y-%m-%d %H:00:00')) AS date_time,
channel_id,
open_channel_id,
sumState(abs(diamond)) AS total_diamond,
sumState(multiIf(diamond < 0, abs(diamond), 0)) AS total_diamond_use,
uniqState(if(diamond < 0, user_id, null)) AS user_use_count,
uniqState(user_id) AS user_count,
sumState(if((diamond < 0) AND (oper_type = 5), abs(diamond), 0)) AS total_gift_giving,
sumState(if((diamond < 0) AND (oper_type = 7), abs(diamond), 0)) AS total_item_buy,
sumState(if((diamond < 0) AND (oper_type = 12), abs(diamond), 0)) AS total_routine_consumption,
sumState(if((diamond < 0) AND (oper_type = 13), abs(diamond), 0)) AS total_by_live_consumption,
sumState(if((diamond < 0) AND (oper_type = 14), abs(diamond), 0)) AS total_barrage,
sumState(if((diamond < 0) AND (oper_type = 22), abs(diamond), 0)) AS total_proceed_vip,
sumState(if((diamond < 0) AND (oper_type = 25), abs(diamond), 0)) AS total_guardiance_enable,
sumState(if((diamond < 0) AND (oper_type = 21), abs(diamond), 0)) AS total_buy_vip
FROM data_center.list_diamond_log
GROUP BY
date_time,
id,
channel_id,
open_channel_id;
```
## recharge related MVs
### mv_d_recharge
```
DROP TABLE data_center.mv_d_recharge on cluster default
```
```
CREATE MATERIALIZED VIEW data_center.mv_d_recharge on cluster default
(
`date_time` DateTime,
`open_channel_id` String,
`channel_id` String,
`first_recharge_count` AggregateFunction(uniq, Nullable(UInt64)),
`first_count` AggregateFunction(uniq, Nullable(String)),
`first_money` AggregateFunction(sum, Decimal(38, 20)),
`second_recharge_count` AggregateFunction(uniq, Nullable(UInt64)),
`second_count` AggregateFunction(uniq, Nullable(String)),
`second_money` AggregateFunction(sum, Decimal(38, 20)),
`total_count` AggregateFunction(uniq, String)
)
ENGINE = ReplicatedAggregatingMergeTree('/clickhouse/tables/replicated/{shard}/mv_d_recharge_1029', '{replica}')
PARTITION BY toYYYYMMDD(toDate(date_time))
ORDER BY (date_time, channel_id, open_channel_id)
SETTINGS index_granularity = 8192 AS
SELECT
toDate(formatDateTime(toDateTime(create_time, 'Asia/Shanghai'), '%Y-%m-%d')) AS date_time,
channel_i AS channel_id,
channel_ii AS open_channel_id,
uniqState(if(is_first_recharge = 1, user_id, NULL)) AS first_recharge_count,
uniqState(if(is_first_recharge = 1, id, NULL)) AS first_count,
sumState(if(is_first_recharge = 1, money, 0)) AS first_money,
uniqState(if(is_second_recharge = 1, user_id, NULL)) AS second_recharge_count,
uniqState(if(is_second_recharge = 1, id, NULL)) AS second_count,
sumState(if(is_second_recharge = 1, money, 0)) AS second_money,
uniqState(id) AS total_count
FROM data_center.recharge
GROUP BY
date_time,
channel_i,
channel_ii
```
### mv_h_recharge
```
DROP TABLE data_center.mv_h_recharge on cluster default
```
```
CREATE MATERIALIZED VIEW data_center.mv_h_recharge on cluster default
(
`date_time` DateTime,
`open_channel_id` String,
`channel_id` String,
`first_recharge_count` AggregateFunction(uniq, Nullable(UInt64)),
`first_count` AggregateFunction(uniq, Nullable(String)),
`first_money` AggregateFunction(sum, Decimal(38, 20)),
`second_recharge_count` AggregateFunction(uniq, Nullable(UInt64)),
`second_count` AggregateFunction(uniq, Nullable(String)),
`second_money` AggregateFunction(sum, Decimal(38, 20)),
`total_count` AggregateFunction(uniq, String)
)
ENGINE = ReplicatedAggregatingMergeTree('/clickhouse/tables/replicated/{shard}/mv_h_recharge_1029', '{replica}')
PARTITION BY toYYYYMMDD(toDate(date_time))
ORDER BY (date_time, channel_id, open_channel_id)
SETTINGS index_granularity = 8192 AS
SELECT
toDateTime(formatDateTime(toDateTime(create_time, 'Asia/Shanghai'), '%Y-%m-%d %H:00:00')) AS date_time,
channel_i AS channel_id,
channel_ii AS open_channel_id,
uniqState(if(is_first_recharge = 1, user_id, NULL)) AS first_recharge_count,
uniqState(if(is_first_recharge = 1, id, NULL)) AS first_count,
sumState(if(is_first_recharge = 1, money, 0)) AS first_money,
uniqState(if(is_second_recharge = 1, user_id, NULL)) AS second_recharge_count,
uniqState(if(is_second_recharge = 1, id, NULL)) AS second_count,
sumState(if(is_second_recharge = 1, money, 0)) AS second_money,
uniqState(id) AS total_count
FROM data_center.recharge
GROUP BY
date_time,
channel_i,
channel_ii;
```
## payment related MVs
### mv_h_payment
```
DROP TABLE data_center.mv_h_payment on cluster default
```
```
CREATE MATERIALIZED VIEW data_center.mv_h_payment on cluster default
(
`date_time` DateTime,
`channel_id` String,
`open_channel_id` String,
`total_money` AggregateFunction(sum, Decimal(38, 20)),
`total_count` AggregateFunction(count, UInt8),
`user_count` AggregateFunction(count, UInt64),
`total_money_kedan` AggregateFunction(sum, Decimal(38, 20)),
`total_count_kedan` AggregateFunction(count, Nullable(UInt8)),
`user_count_kedan` AggregateFunction(uniq, Nullable(UInt64))
)
ENGINE = ReplicatedAggregatingMergeTree('/clickhouse/tables/replicated/{shard}/mv_h_payment', '{replica}')
PARTITION BY toYYYYMMDD(toDate(date_time))
ORDER BY (date_time, channel_id, open_channel_id)
SETTINGS index_granularity = 8192 AS
SELECT
toDateTime(formatDateTime(toDateTime(pay_time, 'Asia/Shanghai'), '%Y-%m-%d %H:00:00')) AS date_time,
channel_id,
open_channel_id,
sumState(if((oper_type = 1) OR (oper_type = 23) OR (oper_type = 25), money, 0)) AS total_money,
countState(1) AS total_count,
countState(distinct user_id) AS user_count, /* not used */
sumState(if((oper_type = 23), money, 0)) AS total_money_kedan,
countState(if((oper_type = 23), 1, NULL)) AS total_count_kedan,
uniqState(if((oper_type = 23), user_id, NULL)) AS user_count_kedan
FROM data_center.payment
WHERE is_pay = 1
GROUP BY
date_time,
channel_id,
open_channel_id;
```
### mv_d_payment
```
DROP TABLE data_center.mv_d_payment on cluster default
```
```
CREATE MATERIALIZED VIEW data_center.mv_d_payment on cluster default
(
`date_time` DateTime,
`channel_id` String,
`open_channel_id` String,
`total_money` AggregateFunction(sum, Decimal(38, 20)),
`total_count` AggregateFunction(count, UInt8),
`user_count` AggregateFunction(count, UInt64),
`total_money_kedan` AggregateFunction(sum, Decimal(38, 20)),
`total_count_kedan` AggregateFunction(count, Nullable(UInt8)),
`user_count_kedan` AggregateFunction(uniq, Nullable(UInt64))
)
ENGINE = ReplicatedAggregatingMergeTree('/clickhouse/tables/replicated/{shard}/mv_d_payment', '{replica}')
PARTITION BY toYYYYMMDD(toDate(date_time))
ORDER BY (date_time, channel_id, open_channel_id)
SETTINGS index_granularity = 8192 AS
SELECT
toDateTime(formatDateTime(toDateTime(pay_time, 'Asia/Shanghai'), '%Y-%m-%d 00:00:00')) AS date_time,
channel_id,
open_channel_id,
sumState(if((oper_type = 1) OR (oper_type = 23) OR (oper_type = 25), money, 0)) AS total_money,
countState(1) AS total_count,
countState(distinct user_id) AS user_count, /* not used */
sumState(if((oper_type = 23), money, 0)) AS total_money_kedan,
countState(if((oper_type = 23), 1, NULL)) AS total_count_kedan,
uniqState(if((oper_type = 23), user_id, NULL)) AS user_count_kedan
FROM data_center.payment
WHERE is_pay = 1
GROUP BY
date_time,
channel_id,
open_channel_id;
```
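Read-side sketch for the daily payment MV: total_money and the kedan columns are sum states, total_count / total_count_kedan are count states, and user_count_kedan is a uniq state (the unused user_count column is skipped here):
```
SELECT
    date_time,
    channel_id,
    open_channel_id,
    sumMerge(total_money)         AS total_money,
    countMerge(total_count)       AS total_count,
    sumMerge(total_money_kedan)   AS total_money_kedan,
    countMerge(total_count_kedan) AS total_count_kedan,
    uniqMerge(user_count_kedan)   AS user_count_kedan
FROM data_center.mv_d_payment
GROUP BY
    date_time,
    channel_id,
    open_channel_id
```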
### mv_h_cash
```
DROP TABLE data_center.mv_h_cash on cluster default
```
```
CREATE MATERIALIZED VIEW data_center.mv_h_cash on cluster default
(
`date_time` DateTime,
`channel_id` String,
`open_channel_id` String,
`total_money` AggregateFunction(sum, Decimal(30, 20)),
`total_count` AggregateFunction(count, UInt8),
`user_count` AggregateFunction(count, UInt64)
)
ENGINE = ReplicatedAggregatingMergeTree('/clickhouse/tables/replicated/{shard}/mv_h_cash', '{replica}')
PARTITION BY toYYYYMMDD(toDate(date_time))
ORDER BY (date_time, channel_id, open_channel_id)
SETTINGS index_granularity = 8192 AS
SELECT
toDateTime(formatDateTime(toDateTime(op_time, 'Asia/Shanghai'), '%Y-%m-%d %H:00:00')) AS date_time,
channel_i AS channel_id,
channel_ii AS open_channel_id,
sumState(money) AS total_money,
countState(1) AS total_count,
countStateDistinct(user_id) AS user_count
FROM data_center.cash
WHERE (status = 1) AND (send_status = 1)
GROUP BY
date_time,
channel_id,
open_channel_id;
```
### mv_d_cash
```
DROP TABLE data_center.mv_d_cash on cluster default
```
```
CREATE MATERIALIZED VIEW data_center.mv_d_cash on cluster default
(
`date_time` DateTime,
`channel_id` String,
`open_channel_id` String,
`total_money` AggregateFunction(sum, Decimal(30, 20)),
`total_count` AggregateFunction(count, UInt8),
`user_count` AggregateFunction(count, UInt64)
)
ENGINE = ReplicatedAggregatingMergeTree('/clickhouse/tables/replicated/{shard}/mv_d_cash', '{replica}')
PARTITION BY toYYYYMMDD(toDate(date_time))
ORDER BY (date_time, channel_id, open_channel_id)
SETTINGS index_granularity = 8192 AS
SELECT
toDateTime(formatDateTime(toDateTime(op_time, 'Asia/Shanghai'), '%Y-%m-%d 00:00:00')) AS date_time,
channel_i AS channel_id,
channel_ii AS open_channel_id,
sumState(money) AS total_money,
countState(1) AS total_count,
countStateDistinct(user_id) AS user_count
FROM data_center.cash
WHERE (status = 1) AND (send_status = 1)
GROUP BY
date_time,
channel_id,
open_channel_id;
```
## list_exchange_log related MVs
### mv_d_list_exchange_log
```
DROP TABLE data_center.mv_d_list_exchange_log on cluster default
```
```
CREATE MATERIALIZED VIEW data_center.mv_d_list_exchange_log on cluster default
(
`date_time` DateTime,
`channel_id` String,
`open_channel_id` String,
`total_item_count` AggregateFunction(sum, UInt64),
`total_money` AggregateFunction(sum, Decimal(38, 20)),
`total_count` AggregateFunction(uniq, Nullable(UInt64)),
`user_count` AggregateFunction(uniq, Nullable(UInt64)),
`first_recharge_money` AggregateFunction(sum, Decimal(38, 20)),
`first_recharge_user` AggregateFunction(uniq, Nullable(UInt64)),
`first_recharge_item_count` AggregateFunction(sum, UInt64),
`first_recharge_user_count` AggregateFunction(uniq, Nullable(UInt64))
)
ENGINE = ReplicatedAggregatingMergeTree('/clickhouse/tables/replicated/{shard}/mv_d_list_exchange_log', '{replica}')
PARTITION BY toYYYYMMDD(toDate(date_time))
ORDER BY (date_time, channel_id, open_channel_id)
SETTINGS index_granularity = 8192 AS
SELECT
toDate(formatDateTime(toDateTime(create_time, 'Asia/Shanghai'), '%Y-%m-%d')) AS date_time,
channel_id,
open_channel_id,
sumState(if(status = 1, item_count, 0)) AS total_item_count,
sumState(if(status = 1, money, 0)) AS total_money,
uniqState(if(status = 1, id, NULL)) AS total_count,
uniqState(if(status = 1, user_id, NULL)) AS user_count,
sumState(if((is_first_recharge = 1) AND (status = 1), money, 0)) AS first_recharge_money,
uniqState(if((is_first_recharge = 1) AND (status = 1), user_id, NULL)) AS first_recharge_user,
sumState(if((is_first_recharge = 1) AND (status = 1), item_count, 0)) AS first_recharge_item_count,
uniqState(if((is_first_recharge = 1) AND (status = 1), user_id, NULL)) AS first_recharge_user_count
FROM data_center.list_exchange_log
GROUP BY
date_time,
channel_id,
open_channel_id;
```
### mv_h_list_exchange_log
```
DROP TABLE data_center.mv_h_list_exchange_log on cluster default
```
```
CREATE MATERIALIZED VIEW data_center.mv_h_list_exchange_log on cluster default
(
`date_time` DateTime,
`channel_id` String,
`open_channel_id` String,
`total_item_count` AggregateFunction(sum, UInt64),
`total_money` AggregateFunction(sum, Decimal(38, 20)),
`total_count` AggregateFunction(uniq, Nullable(UInt64)),
`user_count` AggregateFunction(uniq, Nullable(UInt64)),
`first_recharge_money` AggregateFunction(sum, Decimal(38, 20)),
`first_recharge_user` AggregateFunction(uniq, Nullable(UInt64)),
`first_recharge_item_count` AggregateFunction(sum, UInt64),
`first_recharge_user_count` AggregateFunction(uniq, Nullable(UInt64))
)
ENGINE = ReplicatedAggregatingMergeTree('/clickhouse/tables/replicated/{shard}/mv_h_list_exchange_log', '{replica}')
PARTITION BY toYYYYMMDD(toDate(date_time))
ORDER BY (date_time, channel_id, open_channel_id)
SETTINGS index_granularity = 8192 AS
SELECT
toDateTime(formatDateTime(toDateTime(create_time, 'Asia/Shanghai'), '%Y-%m-%d %H:00:00')) AS date_time,
channel_id,
open_channel_id,
sumState(if(status = 1, item_count, 0)) AS total_item_count,
sumState(if(status = 1, money, 0)) AS total_money,
uniqState(if(status = 1, id, NULL)) AS total_count,
uniqState(if(status = 1, user_id, NULL)) AS user_count,
sumState(if((is_first_recharge = 1) AND (status = 1), money, 0)) AS first_recharge_money,
uniqState(if((is_first_recharge = 1) AND (status = 1), user_id, NULL)) AS first_recharge_user,
sumState(if((is_first_recharge = 1) AND (status = 1), item_count, 0)) AS first_recharge_item_count,
uniqState(if((is_first_recharge = 1) AND (status = 1), user_id, NULL)) AS first_recharge_user_count
FROM data_center.list_exchange_log
GROUP BY
date_time,
channel_id,
open_channel_id;
```
## caipiao_bet related MVs
### mv_d_caipiao_bet
```
DROP TABLE data_center.mv_d_caipiao_bet on cluster default
```
```
CREATE MATERIALIZED VIEW data_center.mv_d_caipiao_bet on cluster default
(
`date_time` DateTime,
`cp_type` Int64,
`state` Int64,
`voided` String,
`channel_id` String,
`open_channel_id` String,
`total_ticket` AggregateFunction(sum, Decimal(30, 10)),
`user_count` AggregateFunction(uniq, UInt64),
`bet_count` AggregateFunction(count, UInt64)
)
ENGINE = ReplicatedAggregatingMergeTree('/clickhouse/tables/replicated/{shard}/mv_d_caipiao_bet', '{replica}')
PARTITION BY toYYYYMMDD(toDate(date_time))
ORDER BY (date_time, cp_type, state, voided, channel_id, open_channel_id)
SETTINGS index_granularity = 8192 AS
SELECT
toDate(formatDateTime(toDateTime(create_time, 'Asia/Shanghai'), '%Y-%m-%d')) AS date_time,
cp_type,
state,
voided,
channel_id,
open_channel_id,
sumState(amount * multiple) AS total_ticket,
uniqState(distinct user_id) AS user_count,
countState(distinct id) AS bet_count
FROM data_center.caipiao_bet
GROUP BY
date_time,
cp_type,
state,
voided,
channel_id,
open_channel_id;
```
### mv_h_caipiao_bet
```
DROP TABLE data_center.mv_h_caipiao_bet on cluster default
```
```
CREATE MATERIALIZED VIEW data_center.mv_h_caipiao_bet on cluster default
(
`date_time` DateTime,
`cp_type` Int64,
`state` Int64,
`voided` String,
`channel_id` String,
`open_channel_id` String,
`total_ticket` AggregateFunction(sum, Decimal(30, 10)),
`user_count` AggregateFunction(uniq, UInt64),
`bet_count` AggregateFunction(count, UInt64)
)
ENGINE = ReplicatedAggregatingMergeTree('/clickhouse/tables/replicated/{shard}/mv_h_caipiao_bet', '{replica}')
PARTITION BY toYYYYMMDD(toDate(date_time))
ORDER BY (date_time, cp_type, state, voided, channel_id, open_channel_id)
SETTINGS index_granularity = 8192 AS
SELECT
toDateTime(formatDateTime(toDateTime(create_time, 'Asia/Shanghai'), '%Y-%m-%d %H:00:00')) AS date_time,
cp_type,
state,
voided,
channel_id,
open_channel_id,
sumState(amount * multiple) AS total_ticket,
uniqState(distinct user_id) AS user_count,
countState(distinct id) AS bet_count
FROM data_center.caipiao_bet
GROUP BY
date_time,
cp_type,
state,
voided,
channel_id,
open_channel_id;
```
## list_game_log related MVs
### mv_d_game_data
```
DROP TABLE data_center.mv_d_game_data on cluster default
```
```
CREATE MATERIALIZED VIEW data_center.mv_d_game_data on cluster default
(
`date_time` DateTime,
`type` String,
`pid` String,
`channel_id` String,
`open_channel_id` String,
`total_bet` AggregateFunction(sum, Decimal(30, 20)),
`total_available_bet` AggregateFunction(sum, Decimal(30, 20)),
`total_income` AggregateFunction(sum, Decimal(38, 20)),
`user_count` AggregateFunction(count, UInt64),
`ticket_count` AggregateFunction(count, String)
)
ENGINE = ReplicatedAggregatingMergeTree('/clickhouse/tables/replicated/{shard}/mv_d_game_data', '{replica}')
PARTITION BY toYYYYMMDD(toDate(date_time))
ORDER BY (date_time, pid, type, channel_id, open_channel_id)
SETTINGS index_granularity = 8192 AS
SELECT
toDate(formatDateTime(toDateTime(game_time, 'Asia/Shanghai'), '%Y-%m-%d')) AS date_time,
type,
pid,
channel_i AS channel_id,
channel_ii AS open_channel_id,
sumState(bet) AS total_bet,
sumState(abs(available_bet)) AS total_available_bet,
sumState(bet + win) AS total_income,
countStateDistinct(user_id) AS user_count,
countStateDistinct(transfer_id) AS ticket_count
FROM data_center.list_game_log
WHERE del_time = 0
GROUP BY
date_time,
type,
pid,
channel_i,
channel_ii
```
### mv_h_game_data
```
DROP TABLE data_center.mv_h_game_data on cluster default
```
```
CREATE MATERIALIZED VIEW data_center.mv_h_game_data on cluster default
(
`date_time` DateTime,
`type` String,
`pid` String,
`channel_id` String,
`open_channel_id` String,
`total_bet` AggregateFunction(sum, Decimal(30, 20)),
`total_available_bet` AggregateFunction(sum, Decimal(30, 20)),
`total_income` AggregateFunction(sum, Decimal(38, 20)),
`user_count` AggregateFunction(count, UInt64),
`ticket_count` AggregateFunction(count, String)
)
ENGINE = ReplicatedAggregatingMergeTree('/clickhouse/tables/replicated/{shard}/mv_h_game_data', '{replica}')
PARTITION BY toYYYYMMDD(toDate(date_time))
ORDER BY (date_time, pid, type, channel_id, open_channel_id)
SETTINGS index_granularity = 8192 AS
SELECT
toDateTime(formatDateTime(toDateTime(game_time, 'Asia/Shanghai'), '%Y-%m-%d %H:00:00')) AS date_time,
type,
pid,
channel_i AS channel_id,
channel_ii AS open_channel_id,
sumState(bet) AS total_bet,
sumState(abs(available_bet)) AS total_available_bet,
sumState(bet + win) AS total_income,
countStateDistinct(user_id) AS user_count,
countStateDistinct(transfer_id) AS ticket_count
FROM data_center.list_game_log
WHERE del_time = 0
GROUP BY
date_time,
type,
pid,
channel_i,
channel_ii
```
### mv_d_game_data_type
```
DROP TABLE data_center.mv_d_game_data_type on cluster default
```
```
CREATE MATERIALIZED VIEW data_center.mv_d_game_data_type on cluster default
(
`date_time` DateTime,
`type` String,
`channel_id` String,
`open_channel_id` String,
`total_bet` AggregateFunction(sum, Decimal(30, 20)),
`total_available_bet` AggregateFunction(sum, Decimal(30, 20)),
`total_income` AggregateFunction(sum, Decimal(38, 20)),
`total_tips` AggregateFunction(sum, Decimal(30, 20)),
`user_count` AggregateFunction(count, UInt64),
`ticket_count` AggregateFunction(count, String)
)
ENGINE = ReplicatedAggregatingMergeTree('/clickhouse/tables/replicated/{shard}/mv_d_game_data_type', '{replica}')
PARTITION BY toYYYYMMDD(toDate(date_time))
ORDER BY (date_time, type, channel_id, open_channel_id)
SETTINGS index_granularity = 8192 AS
SELECT
toDate(formatDateTime(toDateTime(game_time, 'Asia/Shanghai'), '%Y-%m-%d')) AS date_time,
type,
channel_i AS channel_id,
channel_ii AS open_channel_id,
sumState(bet) AS total_bet,
sumState(abs(available_bet)) AS total_available_bet,
sumState(bet + win) AS total_income,
sumState(tip) AS total_tips,
countStateDistinct(user_id) AS user_count,
countStateDistinct(transfer_id) AS ticket_count
FROM data_center.list_game_log
WHERE del_time = 0
GROUP BY
date_time,
type,
channel_i,
channel_ii
```
### mv_h_game_data_type
```
DROP TABLE data_center.mv_h_game_data_type on cluster default
```
```
CREATE MATERIALIZED VIEW data_center.mv_h_game_data_type on cluster default
(
`date_time` DateTime,
`type` String,
`channel_id` String,
`open_channel_id` String,
`total_bet` AggregateFunction(sum, Decimal(30, 20)),
`total_available_bet` AggregateFunction(sum, Decimal(30, 20)),
`total_income` AggregateFunction(sum, Decimal(38, 20)),
`total_tips` AggregateFunction(sum, Decimal(30, 20)),
`user_count` AggregateFunction(count, UInt64),
`ticket_count` AggregateFunction(count, String)
)
ENGINE = ReplicatedAggregatingMergeTree('/clickhouse/tables/replicated/{shard}/mv_h_game_data_type', '{replica}')
PARTITION BY toYYYYMMDD(toDate(date_time))
ORDER BY (date_time, type, channel_id, open_channel_id)
SETTINGS index_granularity = 8192 AS
SELECT
toDateTime(formatDateTime(toDateTime(game_time, 'Asia/Shanghai'), '%Y-%m-%d %H:00:00')) AS date_time,
type,
channel_i AS channel_id,
channel_ii AS open_channel_id,
sumState(bet) AS total_bet,
sumState(abs(available_bet)) AS total_available_bet,
sumState(bet + win) AS total_income,
sumState(tip) AS total_tips,
countStateDistinct(user_id) AS user_count,
countStateDistinct(transfer_id) AS ticket_count
FROM data_center.list_game_log
WHERE del_time = 0
GROUP BY
date_time,
type,
channel_i,
channel_ii;
```
### mv_d_game_data_date
*Materialized view grouped by time only (by day)*
```
DROP TABLE data_center.mv_d_game_data_date on cluster default
```
```
CREATE MATERIALIZED VIEW data_center.mv_d_game_data_date on cluster default
(
`date_time` DateTime,
`channel_id` String,
`open_channel_id` String,
`total_bet` AggregateFunction(sum, Decimal(30, 20)),
`total_available_bet` AggregateFunction(sum, Decimal(30, 20)),
`total_income` AggregateFunction(sum, Decimal(38, 20)),
`user_count` AggregateFunction(count, UInt64),
`ticket_count` AggregateFunction(count, String)
)
ENGINE = ReplicatedAggregatingMergeTree('/clickhouse/tables/replicated/{shard}/mv_d_game_data_date', '{replica}')
PARTITION BY toYYYYMMDD(toDate(date_time))
ORDER BY (date_time, channel_id, open_channel_id)
SETTINGS index_granularity = 8192 AS
SELECT
toDate(formatDateTime(toDateTime(game_time, 'Asia/Shanghai'), '%Y-%m-%d')) AS date_time,
channel_i AS channel_id,
channel_ii AS open_channel_id,
sumState(bet) AS total_bet,
sumState(abs(available_bet)) AS total_available_bet,
sumState(bet + win) AS total_income,
countStateDistinct(user_id) AS user_count,
countStateDistinct(transfer_id) AS ticket_count
FROM data_center.list_game_log
WHERE del_time = 0
GROUP BY
date_time,
channel_i,
channel_ii
```
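Every metric column above is stored as a partial aggregation state, so reads need the matching `-Merge` combinators rather than a plain `SELECT`. A minimal read-back sketch against `mv_d_game_data_date` (the date filter is only illustrative):
```
SELECT
    date_time,
    channel_id,
    open_channel_id,
    sumMerge(total_bet) AS bet_sum,
    sumMerge(total_available_bet) AS available_bet_sum,
    sumMerge(total_income) AS income_sum,
    -- the count columns are declared as AggregateFunction(count, ...),
    -- so countMerge finalizes them
    countMerge(user_count) AS users,
    countMerge(ticket_count) AS tickets
FROM data_center.mv_d_game_data_date
WHERE toDate(date_time) = '2023-01-01'  -- illustrative date
GROUP BY
    date_time,
    channel_id,
    open_channel_id
```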
### mv_h_game_data_date
*Materialized view broken down by time only (by hour); a daily roll-up sketch follows the DDL below.*
```
DROP TABLE data_center.mv_h_game_data_date on cluster default
```
```
CREATE MATERIALIZED VIEW data_center.mv_h_game_data_date on cluster default
(
`date_time` DateTime,
`channel_id` String,
`open_channel_id` String,
`total_bet` AggregateFunction(sum, Decimal(30, 20)),
`total_available_bet` AggregateFunction(sum, Decimal(30, 20)),
`total_income` AggregateFunction(sum, Decimal(38, 20)),
`user_count` AggregateFunction(count, UInt64),
`ticket_count` AggregateFunction(count, String)
)
ENGINE = ReplicatedAggregatingMergeTree('/clickhouse/tables/replicated/{shard}/mv_h_game_data_date', '{replica}')
PARTITION BY toYYYYMMDD(toDate(date_time))
ORDER BY (date_time, channel_id, open_channel_id)
SETTINGS index_granularity = 8192 AS
SELECT
toDateTime(formatDateTime(toDateTime(game_time, 'Asia/Shanghai'), '%Y-%m-%d %H:00:00')) AS date_time,
channel_i AS channel_id,
channel_ii AS open_channel_id,
sumState(bet) AS total_bet,
sumState(abs(available_bet)) AS total_available_bet,
sumState(bet + win) AS total_income,
countStateDistinct(user_id) AS user_count,
countStateDistinct(transfer_id) AS ticket_count
FROM data_center.list_game_log
WHERE del_time = 0
GROUP BY
date_time,
channel_i,
channel_ii
```
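Because the rows hold mergeable states rather than finalized numbers, the hourly view can also be re-rolled into daily figures by re-grouping on the calendar day. Only the additive `sum` states should be rolled up this way; the per-hour count states do not de-duplicate users across hours. A sketch:
```
SELECT
    toDate(date_time) AS day,
    channel_id,
    open_channel_id,
    sumMerge(total_bet) AS bet_sum,
    sumMerge(total_available_bet) AS available_bet_sum,
    sumMerge(total_income) AS income_sum
    -- user_count / ticket_count are per-hour states and are not safe to
    -- sum into daily distinct figures; use mv_d_game_data_date for those
FROM data_center.mv_h_game_data_date
GROUP BY
    day,
    channel_id,
    open_channel_id
ORDER BY day
```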
### mv_d_list_game_log
```
DROP TABLE data_center.mv_d_list_game_log on cluster default
```
```
CREATE MATERIALIZED VIEW data_center.mv_d_list_game_log on cluster default
(
`date_time` DateTime,
`open_channel_id` String,
`channel_id` String,
`activity_user_count` AggregateFunction(count, UInt64)
)
ENGINE = ReplicatedAggregatingMergeTree('/clickhouse/tables/replicated/{shard}/mv_d_list_game_log', '{replica}')
PARTITION BY toYYYYMMDD(toDate(date_time))
ORDER BY (date_time, channel_id, open_channel_id)
SETTINGS index_granularity = 8192 AS
SELECT
toDate(formatDateTime(toDateTime(game_time, 'Asia/Shanghai'), '%Y-%m-%d')) AS date_time,
channel_i AS channel_id,
channel_ii AS open_channel_id,
countStateDistinct(user_id) AS activity_user_count
FROM data_center.list_game_log
WHERE del_time = 0
GROUP BY
date_time,
channel_i,
channel_ii
```
### mv_h_list_game_log
```
DROP TABLE data_center.mv_h_list_game_log on cluster default
```
```
CREATE MATERIALIZED VIEW data_center.mv_h_list_game_log on cluster default
(
`date_time` DateTime,
`open_channel_id` String,
`channel_id` String,
`activity_user_count` AggregateFunction(count, UInt64)
)
ENGINE = ReplicatedAggregatingMergeTree('/clickhouse/tables/replicated/{shard}/mv_h_list_game_log', '{replica}')
PARTITION BY toYYYYMMDD(toDate(date_time))
ORDER BY (date_time, channel_id, open_channel_id)
SETTINGS index_granularity = 8192 AS
SELECT
toDateTime(formatDateTime(toDateTime(game_time, 'Asia/Shanghai'), '%Y-%m-%d %H:00:00')) AS date_time,
channel_i AS channel_id,
channel_ii AS open_channel_id,
countStateDistinct(user_id) AS activity_user_count
FROM data_center.list_game_log
WHERE del_time = 0
GROUP BY
date_time,
channel_i,
channel_ii
```
### mv_d_game_data_pid_type
```
DROP TABLE data_center.mv_d_game_data_pid_type on cluster default
```
```
CREATE MATERIALIZED VIEW data_center.mv_d_game_data_pid_type on cluster default
(
`date_time` DateTime,
`pid` String,
`game_type` String,
`channel_id` String,
`open_channel_id` String,
`total_bet` AggregateFunction(sum, Decimal(30, 20)),
`total_available_bet` AggregateFunction(sum, Decimal(30, 20)),
`total_income` AggregateFunction(sum, Decimal(38, 20)),
`total_tips` AggregateFunction(sum, Decimal(30, 20)),
`user_count` AggregateFunction(count, UInt64),
`ticket_count` AggregateFunction(count, String)
)
ENGINE = ReplicatedAggregatingMergeTree('/clickhouse/tables/replicated/{shard}/mv_d_game_data_pid_type', '{replica}')
PARTITION BY toYYYYMMDD(toDate(date_time))
ORDER BY (date_time, pid, game_type, channel_id, open_channel_id)
SETTINGS index_granularity = 8192 AS
SELECT
toDate(formatDateTime(toDateTime(game_time, 'Asia/Shanghai'), '%Y-%m-%d 00:00:00')) AS date_time,
pid,
game_type,
channel_i AS channel_id,
channel_ii AS open_channel_id,
sumState(bet) AS total_bet,
sumState(abs(available_bet)) AS total_available_bet,
sumState(bet + win) AS total_income,
sumState(tip) AS total_tips,
countStateDistinct(user_id) AS user_count,
countStateDistinct(transfer_id) AS ticket_count
FROM data_center.list_game_log
WHERE del_time = 0
GROUP BY
date_time,
pid,
game_type,
channel_i,
channel_ii
```
### mv_h_game_data_pid_type
```
DROP TABLE data_center.mv_h_game_data_pid_type on cluster default
```
```
CREATE MATERIALIZED VIEW data_center.mv_h_game_data_pid_type on cluster default
(
`date_time` DateTime,
`pid` String,
`game_type` String,
`channel_id` String,
`open_channel_id` String,
`total_bet` AggregateFunction(sum, Decimal(30, 20)),
`total_available_bet` AggregateFunction(sum, Decimal(30, 20)),
`total_income` AggregateFunction(sum, Decimal(38, 20)),
`total_tips` AggregateFunction(sum, Decimal(30, 20)),
`user_count` AggregateFunction(count, UInt64),
`ticket_count` AggregateFunction(count, String)
)
ENGINE = ReplicatedAggregatingMergeTree('/clickhouse/tables/replicated/{shard}/mv_h_game_data_pid_type', '{replica}')
PARTITION BY toYYYYMMDD(toDate(date_time))
ORDER BY (date_time, pid, game_type, channel_id, open_channel_id)
SETTINGS index_granularity = 8192 AS
SELECT
toDateTime(formatDateTime(toDateTime(game_time, 'Asia/Shanghai'), '%Y-%m-%d %H:00:00')) AS date_time,
pid,
game_type,
channel_i AS channel_id,
channel_ii AS open_channel_id,
sumState(bet) AS total_bet,
sumState(abs(available_bet)) AS total_available_bet,
sumState(bet + win) AS total_income,
sumState(tip) AS total_tips,
countStateDistinct(user_id) AS user_count,
countStateDistinct(transfer_id) AS ticket_count
FROM data_center.list_game_log
WHERE del_time = 0
GROUP BY
date_time,
pid,
game_type,
channel_i,
channel_ii
```
### mv_d_game_data_no_channel
```
DROP TABLE data_center.mv_d_game_data_no_channel on cluster default
```
```
CREATE MATERIALIZED VIEW data_center.mv_d_game_data_no_channel on cluster default
(
`date_time` DateTime,
`game_type` String,
`pid` String,
`total_bet` AggregateFunction(sum, Decimal(30, 20)),
`total_available_bet` AggregateFunction(sum, Decimal(30, 20)),
`total_income` AggregateFunction(sum, Decimal(38, 20)),
`total_tips` AggregateFunction(sum, Decimal(30, 20)),
`user_count` AggregateFunction(count, UInt64),
`ticket_count` AggregateFunction(count, String)
)
ENGINE = ReplicatedAggregatingMergeTree('/clickhouse/tables/replicated/{shard}/mv_d_game_data_no_channel', '{replica}')
PARTITION BY toYYYYMMDD(toDate(date_time))
ORDER BY (date_time, game_type, pid)
SETTINGS index_granularity = 8192 AS
SELECT
toDate(formatDateTime(toDateTime(game_time, 'Asia/Shanghai'), '%Y-%m-%d')) AS date_time,
game_type,
pid,
sumState(bet) AS total_bet,
sumState(abs(available_bet)) AS total_available_bet,
sumState(bet + win) AS total_income,
sumState(tip) AS total_tips,
countStateDistinct(user_id) AS user_count,
countStateDistinct(transfer_id) AS ticket_count
FROM data_center.list_game_log
WHERE del_time = 0
GROUP BY
date_time,
game_type,
pid
```
### mv_d_game_data_no_open_channel
```
DROP TABLE data_center.mv_d_game_data_no_open_channel on cluster default
```
```
CREATE MATERIALIZED VIEW data_center.mv_d_game_data_no_open_channel on cluster default
(
`date_time` DateTime,
`game_type` String,
`pid` String,
`channel_id` String,
`total_bet` AggregateFunction(sum, Decimal(30, 20)),
`total_available_bet` AggregateFunction(sum, Decimal(30, 20)),
`total_income` AggregateFunction(sum, Decimal(38, 20)),
`total_tips` AggregateFunction(sum, Decimal(30, 20)),
`user_count` AggregateFunction(count, UInt64),
`ticket_count` AggregateFunction(count, String)
)
ENGINE = ReplicatedAggregatingMergeTree('/clickhouse/tables/replicated/{shard}/mv_d_game_data_no_open_channel', '{replica}')
PARTITION BY toYYYYMMDD(toDate(date_time))
ORDER BY (date_time, game_type, pid, channel_id)
SETTINGS index_granularity = 8192 AS
SELECT
toDate(formatDateTime(toDateTime(game_time, 'Asia/Shanghai'), '%Y-%m-%d')) AS date_time,
game_type,
pid,
channel_i AS channel_id,
sumState(bet) AS total_bet,
sumState(abs(available_bet)) AS total_available_bet,
sumState(bet + win) AS total_income,
sumState(tip) AS total_tips,
countStateDistinct(user_id) AS user_count,
countStateDistinct(transfer_id) AS ticket_count
FROM data_center.list_game_log
WHERE del_time = 0
GROUP BY
date_time,
game_type,
pid,
channel_id
```
### mv_h_game_data_no_channel
```
DROP TABLE data_center.mv_h_game_data_no_channel on cluster default
```
```
CREATE MATERIALIZED VIEW data_center.mv_h_game_data_no_channel on cluster default
(
`date_time` DateTime,
`game_type` String,
`pid` String,
`total_bet` AggregateFunction(sum, Decimal(30, 20)),
`total_available_bet` AggregateFunction(sum, Decimal(30, 20)),
`total_income` AggregateFunction(sum, Decimal(38, 20)),
`total_tips` AggregateFunction(sum, Decimal(30, 20)),
`user_count` AggregateFunction(count, UInt64),
`ticket_count` AggregateFunction(count, String)
)
ENGINE = ReplicatedAggregatingMergeTree('/clickhouse/tables/replicated/{shard}/mv_h_game_data_no_channel', '{replica}')
PARTITION BY toYYYYMMDD(toDate(date_time))
ORDER BY (date_time, game_type, pid)
SETTINGS index_granularity = 8192 AS
SELECT
toDateTime(formatDateTime(toDateTime(game_time, 'Asia/Shanghai'), '%Y-%m-%d %H:00:00')) AS date_time,
game_type,
pid,
sumState(bet) AS total_bet,
sumState(abs(available_bet)) AS total_available_bet,
sumState(bet + win) AS total_income,
sumState(tip) AS total_tips,
countStateDistinct(user_id) AS user_count,
countStateDistinct(transfer_id) AS ticket_count
FROM data_center.list_game_log
WHERE del_time = 0
GROUP BY
date_time,
game_type,
pid
```
### mv_h_game_data_no_open_channel
```
DROP TABLE data_center.mv_h_game_data_no_open_channel on cluster default
```
```
CREATE MATERIALIZED VIEW data_center.mv_h_game_data_no_open_channel on cluster default
(
`date_time` DateTime,
`game_type` String,
`pid` String,
`channel_id` String,
`total_bet` AggregateFunction(sum, Decimal(30, 20)),
`total_available_bet` AggregateFunction(sum, Decimal(30, 20)),
`total_income` AggregateFunction(sum, Decimal(38, 20)),
`total_tips` AggregateFunction(sum, Decimal(30, 20)),
`user_count` AggregateFunction(count, UInt64),
`ticket_count` AggregateFunction(count, String)
)
ENGINE = ReplicatedAggregatingMergeTree('/clickhouse/tables/replicated/{shard}/mv_h_game_data_no_open_channel', '{replica}')
PARTITION BY toYYYYMMDD(toDate(date_time))
ORDER BY (date_time, game_type, pid, channel_id)
SETTINGS index_granularity = 8192 AS
SELECT
toDateTime(formatDateTime(toDateTime(game_time, 'Asia/Shanghai'), '%Y-%m-%d %H:00:00')) AS date_time,
game_type,
pid,
channel_i AS channel_id,
sumState(bet) AS total_bet,
sumState(abs(available_bet)) AS total_available_bet,
sumState(bet + win) AS total_income,
sumState(tip) AS total_tips,
countStateDistinct(user_id) AS user_count,
countStateDistinct(transfer_id) AS ticket_count
FROM data_center.list_game_log
WHERE del_time = 0
GROUP BY
date_time,
game_type,
pid,
channel_id
```
### mv_m_game_data_no_channel
```
DROP TABLE data_center.mv_m_game_data_no_channel on cluster default
```
```
CREATE MATERIALIZED VIEW data_center.mv_m_game_data_no_channel on cluster default
(
`date_time` DateTime,
`game_type` String,
`pid` String,
`total_bet` AggregateFunction(sum, Decimal(30, 20)),
`total_available_bet` AggregateFunction(sum, Decimal(30, 20)),
`total_income` AggregateFunction(sum, Decimal(38, 20)),
`total_tips` AggregateFunction(sum, Decimal(30, 20)),
`user_count` AggregateFunction(count, UInt64),
`ticket_count` AggregateFunction(count, String)
)
ENGINE = ReplicatedAggregatingMergeTree('/clickhouse/tables/replicated/{shard}/mv_m_game_data_no_channel', '{replica}')
PARTITION BY toYYYYMMDD(toDate(date_time))
ORDER BY (date_time, game_type, pid)
SETTINGS index_granularity = 8192 AS
SELECT
toDate(formatDateTime(toDateTime(game_time, 'Asia/Shanghai'), '%Y-%m-01')) AS date_time,
game_type,
pid,
sumState(bet) AS total_bet,
sumState(abs(available_bet)) AS total_available_bet,
sumState(bet + win) AS total_income,
sumState(tip) AS total_tips,
countStateDistinct(user_id) AS user_count,
countStateDistinct(transfer_id) AS ticket_count
FROM data_center.list_game_log
WHERE del_time = 0
GROUP BY
date_time,
game_type,
pid
```
### mv_m_game_data_no_open_channel
```
DROP TABLE data_center.mv_m_game_data_no_open_channel on cluster default
```
```
CREATE MATERIALIZED VIEW data_center.mv_m_game_data_no_open_channel on cluster default
(
`date_time` DateTime,
`game_type` String,
`pid` String,
`channel_id` String,
`total_bet` AggregateFunction(sum, Decimal(30, 20)),
`total_available_bet` AggregateFunction(sum, Decimal(30, 20)),
`total_income` AggregateFunction(sum, Decimal(38, 20)),
`total_tips` AggregateFunction(sum, Decimal(30, 20)),
`user_count` AggregateFunction(count, UInt64),
`ticket_count` AggregateFunction(count, String)
)
ENGINE = ReplicatedAggregatingMergeTree('/clickhouse/tables/replicated/{shard}/mv_m_game_data_no_open_channel', '{replica}')
PARTITION BY toYYYYMMDD(toDate(date_time))
ORDER BY (date_time, game_type, pid, channel_id)
SETTINGS index_granularity = 8192 AS
SELECT
toDate(formatDateTime(toDateTime(game_time, 'Asia/Shanghai'), '%Y-%m-01')) AS date_time,
game_type,
pid,
channel_i AS channel_id,
sumState(bet) AS total_bet,
sumState(abs(available_bet)) AS total_available_bet,
sumState(bet + win) AS total_income,
sumState(tip) AS total_tips,
countStateDistinct(user_id) AS user_count,
countStateDistinct(transfer_id) AS ticket_count
FROM data_center.list_game_log
WHERE del_time = 0
GROUP BY
date_time,
game_type,
pid,
channel_id
```
### mv_m_game_data_pid_type
```
DROP TABLE data_center.mv_m_game_data_pid_type on cluster default
```
```
CREATE MATERIALIZED VIEW data_center.mv_m_game_data_pid_type on cluster default
(
`date_time` DateTime,
`pid` String,
`game_type` String,
`channel_id` String,
`open_channel_id` String,
`total_bet` AggregateFunction(sum, Decimal(30, 20)),
`total_available_bet` AggregateFunction(sum, Decimal(30, 20)),
`total_income` AggregateFunction(sum, Decimal(38, 20)),
`total_tips` AggregateFunction(sum, Decimal(30, 20)),
`user_count` AggregateFunction(count, UInt64),
`ticket_count` AggregateFunction(count, String)
)
ENGINE = ReplicatedAggregatingMergeTree('/clickhouse/tables/replicated/{shard}/mv_m_game_data_pid_type', '{replica}')
PARTITION BY toYYYYMMDD(toDate(date_time))
ORDER BY (date_time, pid, game_type, channel_id, open_channel_id)
SETTINGS index_granularity = 8192 AS
SELECT
toDate(formatDateTime(toDateTime(game_time, 'Asia/Shanghai'), '%Y-%m-01')) AS date_time,
pid,
game_type,
channel_i AS channel_id,
channel_ii AS open_channel_id,
sumState(bet) AS total_bet,
sumState(abs(available_bet)) AS total_available_bet,
sumState(bet + win) AS total_income,
sumState(tip) AS total_tips,
countStateDistinct(user_id) AS user_count,
countStateDistinct(transfer_id) AS ticket_count
FROM data_center.list_game_log
WHERE del_time = 0
GROUP BY
date_time,
pid,
game_type,
channel_i,
channel_ii
```
### mv_d_jbcp_fenghong
```
DROP TABLE data_center.mv_d_jbcp_fenghong on cluster default
```
```
CREATE MATERIALIZED VIEW data_center.mv_d_jbcp_fenghong on cluster default
(
`date_time` DateTime,
`game_type` String,
`channel_id` String,
`open_channel_id` String,
`total_bet` AggregateFunction(sum, Decimal(30, 20)),
`total_available_bet` AggregateFunction(sum, Decimal(30, 20)),
`total_income` AggregateFunction(sum, Decimal(38, 20)),
`user_count` AggregateFunction(count, UInt64),
`ticket_count` AggregateFunction(count, String)
)
ENGINE = ReplicatedAggregatingMergeTree('/clickhouse/tables/replicated/{shard}/mv_d_jbcp_fenghong', '{replica}')
PARTITION BY toYYYYMMDD(toDate(date_time))
ORDER BY (date_time, game_type, channel_id, open_channel_id)
SETTINGS index_granularity = 8192 AS
SELECT
toDate(formatDateTime(toDateTime(game_time, 'Asia/Shanghai'), '%Y-%m-%d 00:00:00')) AS date_time,
game_type,
channel_i AS channel_id,
channel_ii AS open_channel_id,
sumState(bet) AS total_bet,
sumState(abs(available_bet)) AS total_available_bet,
sumState(bet + win) AS total_income,
countStateDistinct(user_id) AS user_count,
countStateDistinct(transfer_id) AS ticket_count
FROM data_center.list_game_log
WHERE (del_time = 0) AND (pid = 'jbcp')
GROUP BY
date_time,
game_type,
channel_i,
channel_ii
```
### mv_h_jbcp_fenghong
```
DROP TABLE data_center.mv_h_jbcp_fenghong on cluster default
```
```
CREATE MATERIALIZED VIEW data_center.mv_h_jbcp_fenghong on cluster default
(
`date_time` DateTime,
`game_type` String,
`channel_id` String,
`open_channel_id` String,
`total_bet` AggregateFunction(sum, Decimal(30, 20)),
`total_available_bet` AggregateFunction(sum, Decimal(30, 20)),
`total_income` AggregateFunction(sum, Decimal(38, 20)),
`user_count` AggregateFunction(count, UInt64),
`ticket_count` AggregateFunction(count, String)
)
ENGINE = ReplicatedAggregatingMergeTree('/clickhouse/tables/replicated/{shard}/mv_h_jbcp_fenghong', '{replica}')
PARTITION BY toYYYYMMDD(toDate(date_time))
ORDER BY (date_time, game_type, channel_id, open_channel_id)
SETTINGS index_granularity = 8192 AS
SELECT
toDateTime(formatDateTime(toDateTime(game_time, 'Asia/Shanghai'), '%Y-%m-%d %H:00:00')) AS date_time,
game_type,
channel_i AS channel_id,
channel_ii AS open_channel_id,
sumState(bet) AS total_bet,
sumState(abs(available_bet)) AS total_available_bet,
sumState(bet + win) AS total_income,
countStateDistinct(user_id) AS user_count,
countStateDistinct(transfer_id) AS ticket_count
FROM data_center.list_game_log
WHERE (del_time = 0) AND (pid = 'jbcp')
GROUP BY
date_time,
game_type,
channel_i,
channel_ii
```
# Materialized views dedicated to data_center_1101
*(This HackMD note ran out of room; continued at the link below.)*
```
https://hackmd.io/YzXuHLDSREKsN2mTwA_aMQ
```