diff --git a/en/06_pratical_case_lens_protocol_part1/image_01.png b/en/06_pratical_case_lens_protocol/lens_part1/image_01.png similarity index 100% rename from en/06_pratical_case_lens_protocol_part1/image_01.png rename to en/06_pratical_case_lens_protocol/lens_part1/image_01.png diff --git a/en/06_pratical_case_lens_protocol_part1/image_02.png b/en/06_pratical_case_lens_protocol/lens_part1/image_02.png similarity index 100% rename from en/06_pratical_case_lens_protocol_part1/image_02.png rename to en/06_pratical_case_lens_protocol/lens_part1/image_02.png diff --git a/en/06_pratical_case_lens_protocol_part1/image_03.png b/en/06_pratical_case_lens_protocol/lens_part1/image_03.png similarity index 100% rename from en/06_pratical_case_lens_protocol_part1/image_03.png rename to en/06_pratical_case_lens_protocol/lens_part1/image_03.png diff --git a/en/06_pratical_case_lens_protocol_part1/image_04.png b/en/06_pratical_case_lens_protocol/lens_part1/image_04.png similarity index 100% rename from en/06_pratical_case_lens_protocol_part1/image_04.png rename to en/06_pratical_case_lens_protocol/lens_part1/image_04.png diff --git a/en/06_pratical_case_lens_protocol_part1/image_05.png b/en/06_pratical_case_lens_protocol/lens_part1/image_05.png similarity index 100% rename from en/06_pratical_case_lens_protocol_part1/image_05.png rename to en/06_pratical_case_lens_protocol/lens_part1/image_05.png diff --git a/en/06_pratical_case_lens_protocol_part1/image_06.png b/en/06_pratical_case_lens_protocol/lens_part1/image_06.png similarity index 100% rename from en/06_pratical_case_lens_protocol_part1/image_06.png rename to en/06_pratical_case_lens_protocol/lens_part1/image_06.png diff --git a/en/06_pratical_case_lens_protocol_part1/image_07.png b/en/06_pratical_case_lens_protocol/lens_part1/image_07.png similarity index 100% rename from en/06_pratical_case_lens_protocol_part1/image_07.png rename to en/06_pratical_case_lens_protocol/lens_part1/image_07.png diff --git a/en/06_pratical_case_lens_protocol_part1/image_08.png b/en/06_pratical_case_lens_protocol/lens_part1/image_08.png similarity index 100% rename from en/06_pratical_case_lens_protocol_part1/image_08.png rename to en/06_pratical_case_lens_protocol/lens_part1/image_08.png diff --git a/en/06_pratical_case_lens_protocol_part1/lens_part1.md b/en/06_pratical_case_lens_protocol/lens_part1/lens_part1.md similarity index 100% rename from en/06_pratical_case_lens_protocol_part1/lens_part1.md rename to en/06_pratical_case_lens_protocol/lens_part1/lens_part1.md diff --git a/en/06_pratical_case_lens_protocol/lens_part2/image_09.png b/en/06_pratical_case_lens_protocol/lens_part2/image_09.png new file mode 100644 index 0000000..f9e0c26 Binary files /dev/null and b/en/06_pratical_case_lens_protocol/lens_part2/image_09.png differ diff --git a/en/06_pratical_case_lens_protocol/lens_part2/image_10.png b/en/06_pratical_case_lens_protocol/lens_part2/image_10.png new file mode 100644 index 0000000..83deeac Binary files /dev/null and b/en/06_pratical_case_lens_protocol/lens_part2/image_10.png differ diff --git a/en/06_pratical_case_lens_protocol/lens_part2/image_11.png b/en/06_pratical_case_lens_protocol/lens_part2/image_11.png new file mode 100644 index 0000000..8721e73 Binary files /dev/null and b/en/06_pratical_case_lens_protocol/lens_part2/image_11.png differ diff --git a/en/06_pratical_case_lens_protocol/lens_part2/image_12.png b/en/06_pratical_case_lens_protocol/lens_part2/image_12.png new file mode 100644 index 0000000..0e8a8a3 Binary files /dev/null and 
b/en/06_pratical_case_lens_protocol/lens_part2/image_12.png differ diff --git a/en/06_pratical_case_lens_protocol/lens_part2/image_13.png b/en/06_pratical_case_lens_protocol/lens_part2/image_13.png new file mode 100644 index 0000000..0505df4 Binary files /dev/null and b/en/06_pratical_case_lens_protocol/lens_part2/image_13.png differ diff --git a/en/06_pratical_case_lens_protocol/lens_part2/image_14.png b/en/06_pratical_case_lens_protocol/lens_part2/image_14.png new file mode 100644 index 0000000..7f0a5b2 Binary files /dev/null and b/en/06_pratical_case_lens_protocol/lens_part2/image_14.png differ diff --git a/en/06_pratical_case_lens_protocol/lens_part2/image_15.png b/en/06_pratical_case_lens_protocol/lens_part2/image_15.png new file mode 100644 index 0000000..fea551d Binary files /dev/null and b/en/06_pratical_case_lens_protocol/lens_part2/image_15.png differ diff --git a/en/06_pratical_case_lens_protocol/lens_part2/image_16.png b/en/06_pratical_case_lens_protocol/lens_part2/image_16.png new file mode 100644 index 0000000..8255a34 Binary files /dev/null and b/en/06_pratical_case_lens_protocol/lens_part2/image_16.png differ diff --git a/en/06_pratical_case_lens_protocol/lens_part2/image_17.png b/en/06_pratical_case_lens_protocol/lens_part2/image_17.png new file mode 100644 index 0000000..ba027f8 Binary files /dev/null and b/en/06_pratical_case_lens_protocol/lens_part2/image_17.png differ diff --git a/en/06_pratical_case_lens_protocol/lens_part2/image_18.png b/en/06_pratical_case_lens_protocol/lens_part2/image_18.png new file mode 100644 index 0000000..c0f9518 Binary files /dev/null and b/en/06_pratical_case_lens_protocol/lens_part2/image_18.png differ diff --git a/en/06_pratical_case_lens_protocol/lens_part2/image_19.png b/en/06_pratical_case_lens_protocol/lens_part2/image_19.png new file mode 100644 index 0000000..75211f5 Binary files /dev/null and b/en/06_pratical_case_lens_protocol/lens_part2/image_19.png differ diff --git a/en/06_pratical_case_lens_protocol/lens_part2/image_20.png b/en/06_pratical_case_lens_protocol/lens_part2/image_20.png new file mode 100644 index 0000000..f91bb60 Binary files /dev/null and b/en/06_pratical_case_lens_protocol/lens_part2/image_20.png differ diff --git a/en/06_pratical_case_lens_protocol/lens_part2/image_21.png b/en/06_pratical_case_lens_protocol/lens_part2/image_21.png new file mode 100644 index 0000000..49e563b Binary files /dev/null and b/en/06_pratical_case_lens_protocol/lens_part2/image_21.png differ diff --git a/en/06_pratical_case_lens_protocol/lens_part2/lens_part2.md b/en/06_pratical_case_lens_protocol/lens_part2/lens_part2.md new file mode 100644 index 0000000..fdba772 --- /dev/null +++ b/en/06_pratical_case_lens_protocol/lens_part2/lens_part2.md @@ -0,0 +1,506 @@ +# Practice case: Making Lens Protocol's data dashboard (II) + +In the first part of this tutorial, we introduced the Lens protocol and built a preliminary dashboard for it, covering the total number of transactions and users, daily transaction and unique user counts, creator Profile analysis, Lens domain name analysis, registered domain name lookup, and other related content. Let's continue by adding new queries and visualizations to this dashboard. We will analyze and add the following: addresses that create multiple Profiles, follow data, post data, comment data, collect data, mirror data, combined Profile operations, and combined operations of regular user addresses.
+
## Create multiple Profiles with the same address

The Lens protocol allows multiple Profiles to be created for a single address. We can write a query to analyze the distribution of addresses that have created multiple Profiles. In the following query, we first use the CTE `profile_created` to obtain the details of all created Profiles, then use `multiple_profiles_addresses` to count the number of Profiles created by each address. Finally, we use a CASE statement to bucket addresses by the number of Profiles they have created and return aggregated statistics per bucket.

```sql
with profile_created as (
    select json_value(vars, 'lax $.to') as user_address,
        json_value(vars, 'lax $.handle') as handle_name,
        replace(json_value(vars, 'lax $.handle'), '.lens', '') as short_name,
        call_block_time,
        output_0 as profile_id,
        call_tx_hash
    from lens_polygon.LensHub_call_createProfile
    where call_success = true
),

multiple_profiles_addresses as (
    select user_address,
        count(profile_id) as profile_count
    from profile_created
    group by 1
    order by 2 desc
)

select (case when profile_count >= 10 then '10+ Profiles'
            when profile_count >= 3 then '3+ Profiles'
            when profile_count = 2 then '2 Profiles'
            else '1 Profile'
        end) as profile_count_type,
    count(user_address) as user_address_count,
    sum(profile_count) as profile_count
from multiple_profiles_addresses
group by 1
```

For this kind of statistic, we usually also want some Counter values, such as the total number of addresses that have created multiple Profiles, how many Profiles those addresses have created, and the share of those Profiles among all created Profiles. The CTE subqueries above can be reused for these queries, so we only make minor changes and add two additional CTEs to compute the Counter values. Add visualization charts for these queries and place them on the data dashboard. The display effect is as follows:

![image_09.png](img/image_09.png)

Reference link for the above query on Dune:
- [https://dune.com/queries/1562662](https://dune.com/queries/1562662)
- [https://dune.com/queries/1553030](https://dune.com/queries/1553030)


## Profile Posts data analysis

### Top posting accounts analysis

Lens creators have two ways to publish a post (Post): posting directly from their own account, or posting via a signed message (delegated to another account or submitted through the API). Post data is accordingly stored in the `LensHub_call_post` and `LensHub_call_postWithSig` tables respectively. The content of each Post is stored in the field `vars` as a JSON string that includes the author's Profile ID, the URL of the post content, and other information. For JSON content in string form, we can use JSON functions such as `json_value()` to access individual values.
The following query returns some sample data:

```sql
select call_block_time,
    call_tx_hash,
    output_0 as post_id,
    json_value(vars, 'lax $.profileId') as profile_id, -- Access element in json string
    json_value(vars, 'lax $.contentURI') as content_url,
    json_value(vars, 'lax $.collectModule') as collection_module,
    json_value(vars, 'lax $.referenceModule') as reference_module,
    vars
from lens_polygon.LensHub_call_post
where call_success = true
limit 10
```

Given the large number of posting Profiles, we can classify Profiles by their post counts, as in the earlier analysis of addresses creating multiple Profiles, and also look at the top users, i.e. the accounts with the most posts. Here we analyze the accounts with the most posts, compare their post counts against the total number of posts, and output Counter charts. The complete SQL is as follows:

```sql
with post_data as (
    select call_block_time,
        call_tx_hash,
        output_0 as post_id,
        json_value(vars, 'lax $.profileId') as profile_id, -- Access element in json string
        json_value(vars, 'lax $.contentURI') as content_url,
        json_value(vars, 'lax $.collectModule') as collection_module,
        json_value(vars, 'lax $.referenceModule') as reference_module
    from lens_polygon.LensHub_call_post
    where call_success = true

    union all

    select call_block_time,
        call_tx_hash,
        output_0 as post_id,
        json_value(vars, 'lax $.profileId') as profile_id, -- Access element in json string
        json_value(vars, 'lax $.contentURI') as content_url,
        json_value(vars, 'lax $.collectModule') as collection_module,
        json_value(vars, 'lax $.referenceModule') as reference_module
    from lens_polygon.LensHub_call_postWithSig
    where call_success = true
),

posts_summary as (
    select count(*) as total_post_count,
        count(distinct profile_id) as posted_profile_count
    from post_data
),

top_post_profiles as (
    select profile_id,
        count(*) as post_count
    from post_data
    group by 1
    order by 2 desc
    limit 1000
)

select profile_id,
    post_count,
    sum(post_count) over () as top_profile_post_count,
    total_post_count,
    posted_profile_count,
    cast(sum(post_count) over () as double) / total_post_count * 100 as top_profile_posts_ratio
from top_post_profiles
inner join posts_summary on true
order by 2 desc
```

Interpretation of the above SQL: since Post data is stored in two separate tables, in the CTE `post_data` we use `union all` to merge the data taken from both tables. We use `posts_summary` to count the number of posting Profiles and their cumulative number of Posts. In `top_post_profiles`, we take the 1000 Profiles with the largest number of posts. Finally, we join `top_post_profiles` with `posts_summary` and output the accounts with the most posts alongside their share of the total post data. After the query results are visualized and added to the data dashboard, the display effect is as follows:

![image_10.png](img/image_10.png)

Reference link for the above query on Dune:
- [https://dune.com/queries/1554541](https://dune.com/queries/1554541)

### New posts count daily statistics

The daily number of new posts by Lens users is an important indicator for observing the trend of overall activity. We write a query to count the number of daily posts.
The `post_data` CTE in this query is exactly the same as before, so we omit its details in the code below. Because we also want to return a running total of posts, we define the `post_daily_summary` CTE as an intermediate step to keep the SQL easy to understand. The corresponding SQL is as follows:

```sql
with post_data as (
    -- Get post data from LensHub_call_post and LensHub_call_postWithSig tables
),

post_daily_summary as (
    select date_trunc('day', call_block_time) as block_date,
        count(*) as post_count,
        count(distinct profile_id) as profile_count
    from post_data
    group by 1
)

select block_date,
    post_count,
    profile_count,
    sum(post_count) over (order by block_date) as accumulate_post_count
from post_daily_summary
order by block_date
```

The display after visualizing the query results and adding them to the data dashboard is shown below:

![image_11.png](img/image_11.png)

Reference link for the above query on Dune:
- [https://dune.com/queries/1555124](https://dune.com/queries/1555124)


### Top active Profiles by post count (last 30 days)

Similarly, we may be interested in the Profiles that have posted most actively in the recent period. To do this, we only need to add a date filter to the aforementioned `post_data` CTE to keep the posts of the last 30 days, and then aggregate the post counts by Profile. The SQL is as follows:

```sql
with post_data as (
    select call_block_time,
        call_tx_hash,
        output_0 as post_id,
        json_value(vars, 'lax $.profileId') as profile_id, -- Access element in json string
        json_value(vars, 'lax $.contentURI') as content_url,
        json_value(vars, 'lax $.collectModule') as collection_module,
        json_value(vars, 'lax $.referenceModule') as reference_module
    from lens_polygon.LensHub_call_post
    where call_success = true
        and call_block_time >= now() - interval '30' day

    union all

    select call_block_time,
        call_tx_hash,
        output_0 as post_id,
        json_value(vars, 'lax $.profileId') as profile_id, -- Access element in json string
        json_value(vars, 'lax $.contentURI') as content_url,
        json_value(vars, 'lax $.collectModule') as collection_module,
        json_value(vars, 'lax $.referenceModule') as reference_module
    from lens_polygon.LensHub_call_postWithSig
    where call_success = true
        and call_block_time >= now() - interval '30' day
)

select profile_id,
    count(*) as post_count
from post_data
group by 1
order by 2 desc
limit 100
```

We can add a histogram showing the post counts of the 100 accounts with the most posts in the past 30 days, and a Table chart to output the details. The display effect after the charts are added to the data dashboard is as follows:

![image_12.png](img/image_12.png)

Reference link for the above query on Dune:
- [https://dune.com/queries/1559981](https://dune.com/queries/1559981)


## Profile Comments data analysis

### Top Profiles by comment count

Lens comment data is similar to post data and is stored in the `LensHub_call_comment` and `LensHub_call_commentWithSig` tables according to its source. Under the current Lens protocol, users must have created their own Profile before they can comment on other creators' Posts. In the comment tables, comments are tracked by the commenter's Profile ID. Meanwhile, each creator's publications are numbered incrementally starting from 1, which means publications from different creators can have the same number.
We therefore need to combine the creator's Profile ID with the Publication ID to obtain a globally unique ID. The SQL is as follows:

```sql
select call_block_time,
    call_tx_hash,
    output_0 as comment_id, -- Comment ID
    json_value(vars, 'lax $.profileId') as profile_id_from, -- Profile ID of the commenter
    json_value(vars, 'lax $.contentURI') as content_url, -- URL of the comment content
    json_value(vars, 'lax $.pubIdPointed') as publication_id_pointed, -- Publication ID being commented on
    json_value(vars, 'lax $.profileIdPointed') as profile_id_pointed, -- Profile ID of the creator being commented on
    json_value(vars, 'lax $.profileIdPointed') || '-' || json_value(vars, 'lax $.pubIdPointed') as unique_publication_id -- Concatenated to form a unique ID
from lens_polygon.LensHub_call_comment
where call_success = true
limit 10
```

We also compute the totals for all comment data in an additional CTE, so that Counter charts can be output from the same query, comparing the comment data of the 1000 most-commenting accounts against that of all accounts. After the query results are visualized and added to the data dashboard, the display effect is as follows:

![image_13.png](img/image_13.png)

Reference link for the above query on Dune:
- [https://dune.com/queries/1560028](https://dune.com/queries/1560028)

### Top Publication Comments statistics

Each comment targets a specific object (a Publication; the author believes this should be a Post — corrections are welcome if this is a misunderstanding). It is worthwhile to analyze the most-commented Publications. We write a query to count the top 500 most-commented Publications and compare them with all comment data. The SQL is as follows:

```sql
with comment_data as (
    -- get comment data from LensHub_call_comment and LensHub_call_commentWithSig tables
)

select profile_id_pointed,
    publication_id_pointed,
    unique_publication_id,
    count(*) as comment_count
from comment_data
group by 1, 2, 3
order by 4 desc
limit 500
```

In the same way, we add an additional CTE to obtain the totals for all comments and compare the top 500 most-commented Publications against the global data. Add the corresponding visualization charts to the data dashboard; the effect is as follows:

![image_14.png](img/image_14.png)

Reference link for the above query on Dune:
- [https://dune.com/queries/1560578](https://dune.com/queries/1560578)

## Profile Mirrors data analysis

Mirror data is very similar to comment data: users must first create their own Profile before mirroring other people's Publications. We write two queries to count the top 1000 accounts with the most mirror operations and the top 500 Publications with the most mirrors, comparing both with the overall mirror data. The effect after adding them to the data dashboard is shown in the following figure:

![image_15.png](img/image_15.png)

Reference link for the above query on Dune:
- [https://dune.com/queries/1561229](https://dune.com/queries/1561229)
- [https://dune.com/queries/1561558](https://dune.com/queries/1561558)


## Profiles Collections data analysis

Lens collect data is likewise stored in two tables, `LensHub_call_collect` and `LensHub_call_collectWithSig`. Unlike comments or mirrors, collecting a Publication does not require the collector to have their own Lens Profile; that is, any address (user) can collect Publications under other Profiles. So we need to track collect operations through the collector's address.
What's special is that the collector's address is not saved in the `LensHub_call_collect` table, while the `LensHub_call_collectWithSig` table does contain it. We need to join the `LensHub_call_collect` table to the transactions table to obtain the address of the user performing the collect operation. The SQL example is as follows:

```sql
select call_block_time,
    t."from" as collector,
    c.profileId as profile_id,
    c.pubId as publication_id,
    cast(c.profileId as varchar) || '-' || cast(c.pubId as varchar) as unique_publication_id,
    c.output_0 as collection_id
from lens_polygon.LensHub_call_collect c
inner join polygon.transactions t on c.call_tx_hash = t.hash -- Join the transactions table to get the user address
where call_block_time >= date('2022-05-18') -- Deployment date of the Lens contracts, used to improve query efficiency
    and block_time >= date('2022-05-18')
    and c.call_success = true
limit 10
```

Since the transactions table is very large, joining against it increases query time significantly. A rule of thumb is to avoid joins on the raw data tables (transactions, logs, traces) as much as possible.

The other parts of the collect data analysis SQL are basically the same as the previous examples, so we won't repeat them here. Similarly, we also run statistics on the most-collected Publications. The display effect after adding the relevant visualizations to the data dashboard is shown in the following figure:

![image_16.png](img/image_16.png)

Reference link for the above query on Dune:
- [https://dune.com/queries/1560847](https://dune.com/queries/1560847)
- [https://dune.com/queries/1561009](https://dune.com/queries/1561009)


## Profile Follows data analysis

### Top Profiles by follower count

Follow data in the Lens protocol is again stored in two tables, `LensHub_call_follow` and `LensHub_call_followWithSig`. Any address (user) can follow other Profiles. As with collects, the `LensHub_call_follow` table does not save the follower's address, so we again need to obtain it by joining the `transactions` table. In addition, follows have a special feature: multiple Profiles can be followed in batch within a single transaction. In the `LensHub_call_follow` table, the followed Profiles are stored in the array-typed field `profileIds`, which is relatively easy to handle. In the `LensHub_call_followWithSig` table, however, it is an array value inside a JSON string. An example of the field `vars` is as follows (some content omitted):

```json
{"follower":"0xdacc5a4f232406067da52662d62fc75165f21b23","profileIds":[21884,25271,39784],"datas":["0x","0x","0x"],"sig":"..."}
```

Using Dune SQL's JSON functions, we can read array values from JSON strings: first use `json_extract()` to extract the required element from the JSON string, then use `cast()` to convert it into an array of the desired type.
The sample code is as follows:

```sql
select json_query(vars, 'lax $.follower') AS follower, -- single value
    json_query(vars, 'lax $.profileIds') AS profileIds, -- still a string
    from_hex(cast(json_extract(vars,'$.follower') as varchar)) as follower2, -- cast to varbinary
    cast(json_extract(vars,'$.profileIds') as array(integer)) as profileIds2, -- cast to array
    vars
from lens_polygon.LensHub_call_followWithSig
where cardinality(output_0) > 1
limit 10
```

The complete SQL for reading follow details is as follows:

```sql
with follow_data as (
    select f.follower, p.profile_id
    from (
        select from_hex(cast(json_extract(vars,'$.follower') as varchar)) as follower, -- cast to varbinary
            cast(json_extract(vars,'$.profileIds') as array(integer)) as profile_ids -- cast to array
        from lens_polygon.LensHub_call_followWithSig

        union all

        select t."from" as follower,
            cast(f.profileIds as array(integer)) as profile_ids
        from lens_polygon.LensHub_call_follow f
        inner join polygon.transactions t on f.call_tx_hash = t.hash
        where call_block_time >= date('2022-05-18') -- Lens launch date
            and block_time >= date('2022-05-18')
            and call_success = true
    ) f
    cross join unnest(f.profile_ids) as p(profile_id)
)

select * from follow_data
limit 100
```

Note that we use the `cross join unnest(f.profile_ids) as p(profile_id)` clause to expand the array in the subquery and obtain the individual ID values. Also, since the element type of `profileIds` in the `lens_polygon.LensHub_call_follow` table is `uint256`, a Dune custom type that cannot be used when extracting values from JSON strings, we use `cast(f.profileIds as array(integer))` to convert the `uint256` elements to the `integer` type.

Similarly, on top of the above query we add a CTE that aggregates all follow data, so that when listing the most-followed Profiles we can compare them with the overall follow counts. After the query results are visualized and added to the data dashboard, the effect is as follows:

![image_17.png](img/image_17.png)

Reference link for the above query on Dune:
- [https://dune.com/queries/1554454](https://dune.com/queries/1554454)

### Profile distribution by follower count range

We can see that most Profiles have at least one follower. We can write a query to analyze the distribution of follower counts across Profiles. The SQL code is as follows:

```sql
with follow_data as (
    -- Get follow data from table LensHub_call_follow and LensHub_call_followWithSig
),

profile_follower as (
    select profile_id,
        count(follower) as follower_count
    from follow_data
    group by 1
)

select (case when follower_count >= 10000 then '10K+ Followers'
            when follower_count >= 1000 then '1K+ Followers'
            when follower_count >= 100 then '100+ Followers'
            when follower_count >= 50 then '50+ Followers'
            when follower_count >= 10 then '10+ Followers'
            when follower_count >= 5 then '5+ Followers'
            else '1 - 4 Followers'
        end) as follower_count_type,
    count(profile_id) as profile_count
from profile_follower
group by 1
```

Use a Pie chart to visualize the above query results.
After adding it to the data dashboard, the display effect is as shown in the figure below:

![image_18.png](img/image_18.png)

Reference link for the above query on Dune:
- [https://dune.com/queries/1554888](https://dune.com/queries/1554888)

### Daily new followers statistics

The number of new followers gained per day is also an important indicator for observing changes in overall activity. We write a query to count the number of new follows per day. The `follow_data` CTE in this query is exactly the same as before, and the processing is very similar to the daily posts statistics above, so we won't repeat the details here. Add a visualization chart to the query results and add it to the data dashboard; the display effect is as follows:

![image_19.png](img/image_19.png)

Reference link for the above query on Dune:
- [https://dune.com/queries/1555185](https://dune.com/queries/1555185)

## Comprehensive analysis of profile operations

Combining the previous sections, we can see that creators (users with a Profile) can post, comment on, and mirror other creators' publications, while ordinary users (without a Profile) can follow creators and collect publications. So we can combine the operations available to creators for a comprehensive analysis.

We define an `action_data` CTE and use nested CTE definitions to gather the related data together. Inside it, `post_data`, `comment_data` and `mirror_data` are exactly the same as in the previous queries. We use `union all` to merge the data, labeling each source with a new `action_type` field for classification. Then we simply aggregate by this field to calculate the number of transactions and the number of distinct Profiles per operation type. The SQL example is as follows:

```sql
with action_data as (
    with post_data as (
        -- get post data from relevant tables
    ),

    comment_data as (
        -- get comment data from relevant tables
    ),

    mirror_data as (
        -- get mirror data from relevant tables
    )

    select 'Post' as action_type, * from post_data
    union all
    select 'Mirror' as action_type, * from mirror_data
    union all
    select 'Comment' as action_type, * from comment_data
)

select action_type,
    count(*) as transaction_count,
    count(distinct profile_id) as profile_count
from action_data
group by 1
```

In a similar way, we can create a query that summarizes the daily counts of the various operations by date. The sample code is as follows:

```sql
with action_data as (
    -- same as above query
)

select date_trunc('day', call_block_time) as block_date,
    action_type,
    count(*) as transaction_count
from action_data
group by 1, 2
order by 1, 2
```

Visualize the above query results and add them to the data dashboard; the display effect is as follows:

![image_20.png](img/image_20.png)

Reference link for the above query on Dune:
- [https://dune.com/queries/1561822](https://dune.com/queries/1561822)
- [https://dune.com/queries/1561898](https://dune.com/queries/1561898)

## Comprehensive analysis of regular user operations

Similar to creators, we can combine the follow and collect operations available to ordinary users for analysis. We again write two queries to count the overall distribution of operations and the number of operations by date.
The `action_data` in these queries likewise comes from the collect query and follow query introduced earlier. The SQL example is as follows:

```sql
with action_data as (
    with follow_data as (
        -- get follow data from relevant tables
    ),

    collect_data as (
        -- get collect data from relevant tables
    )

    select 'Follow' as action_type, * from follow_data
    union all
    select 'Collect' as action_type, * from collect_data
)

-- Aggregate by operation type, as in the creator analysis
select action_type,
    count(*) as transaction_count
from action_data
group by 1
```

Except for the different data sources, these two queries are basically the same as the comprehensive analysis of creator operations. Visualize the query results and add them to the data dashboard; the display effect is as follows:

![image_21.png](img/image_21.png)

Reference link for the above query on Dune:
- [https://dune.com/queries/1562000](https://dune.com/queries/1562000)
- [https://dune.com/queries/1562178](https://dune.com/queries/1562178)


## Summary and Homework

Very good! We have completed an overall analysis of the Lens protocol. However, due to space constraints, there are still many indicators worth analyzing that we have not covered, including but not limited to: analysis of the protocol's three NFT types, analysis of creator income, analysis of Profile account transfers, and so on. This part is left for you to explore further.

Please continue to improve your own Lens protocol data dashboard based on this tutorial. You can fork the queries in this tutorial, modify them, and extend them in any way according to your own understanding. Please practice actively, create data dashboards, and share them with the community. We will record the completion and quality of the homework and retroactively provide rewards, including but not limited to Dune community roles, merchandise, free API quotas, POAPs, memberships of partner data products, blockchain data analyst job referrals, priority registration for offline community events, and other SixDegree community incentives.

## Introduction of SixDegreeLab

SixDegreeLab ([@SixdegreeLab](https://twitter.com/sixdegreelab)) is a professional on-chain data team. Our mission is to provide users with accurate on-chain data charts, analyses, and insights, and we are committed to popularizing on-chain data analysis. By building a community and writing tutorials, we train on-chain data analysts, produce valuable analysis content, promote the community's construction of the blockchain data layer, and cultivate talent for the broad future of blockchain data applications.

Welcome to [SixDegreeLab's Dune homepage](https://dune.com/sixdegree).

Given the limitations of our knowledge, shortcomings are inevitable. If you find any errors, please point them out. \ No newline at end of file diff --git a/en/07_common_query_samples/11_common_queries_part3.md b/en/07_common_query_samples/11_common_queries_part3.md new file mode 100644 index 0000000..3e0b4c5 --- /dev/null +++ b/en/07_common_query_samples/11_common_queries_part3.md @@ -0,0 +1,311 @@ +# Common query part3: custom data, number sequence, array, JSON, etc + +In the first two parts of common queries, we introduced common query techniques such as ERC20 token price queries, token holders, and holding balances. In this section, we'll look at some other common queries. + +## Custom data table using CTE + +Dune V2 does not currently support user-defined tables or views.
For data from external sources or small amounts of manually curated data, we can use a CTE to define a custom data list inside the query. Custom CTE tables with thousands of rows and a few fields work fine; they will execute successfully as long as the query does not exceed the maximum size of a Dune query request. There are two syntaxes for defining a custom CTE table:

Example of the first syntax:
```sql
with raydium_lp_pairs(account_key, pair_name) as (
    values
    ('58oQChx4yWmvKdwLLZzBi4ChoCc2fqCUWBkwMihLYQo2', 'SOL/USDC'),
    ('7XawhbbxtsRcQA8KTkHT9f9nc6d69UwqCDh6U5EEbEmX', 'SOL/USDT'),
    ('AVs9TA4nWDzfPJE9gGVNJMVhcQy3V9PGazuz33BfG2RA', 'RAY/SOL'),
    ('6UmmUiYoBjSrhakAobJw8BvkmJtDVxaeBtbt7rxWo1mg', 'RAY/USDC'),
    ('DVa7Qmb5ct9RCpaU7UTpSaf3GVMYz17vNVU67XpdCRut', 'RAY/USDT'),
    ('GaqgfieVmnmY4ZsZHHA6L5RSVzCGL3sKx4UgHBaYNy8m', 'RAY/SRMSOL'),
    ('6a1CsrpeZubDjEJE9s1CMVheB6HWM5d7m1cj2jkhyXhj', 'STSOL/USDC'),
    ('43UHp4TuwQ7BYsaULN1qfpktmg7GWs9GpR8TDb8ovu9c', 'APEX4/USDC')
)

select * from raydium_lp_pairs
```

Example of the second syntax:

```sql
with token_plan as (
    select token_name, hook_amount from (
        values
        ('Token Type','BEP-20 on BNB Chain'),
        ('Total Token Supply','500,000,000 HOOK'),
        ('Private Sale Allocation','100,000,000 HOOK'),
        ('Private Sale Token Price','0.06 USD to 0.12 USD / HOOK'),
        ('Private Sale Amount Raised','~ 6,000,000 USD'),
        ('Binance Launchpad Sale Allocation','25,000,000 HOOK'),
        ('Binance Launchpad Sale Price','0.10 USD / HOOK'),
        ('Binance Launchpad Amount to be Raised','2,500,000 USD'),
        ('Initial Circ. Supply When Listed on Binance','50,000,000 HOOK (10.00%)')
    ) as tbl(token_name, hook_amount)
)

select * from token_plan
```

Of course, with the second syntax you can omit the CTE definition and use the SELECT query directly if you only need to return this custom data.

Example link to the above query:
- [https://dune.com/queries/781862](https://dune.com/queries/781862)
- [https://dune.com/queries/1650640](https://dune.com/queries/1650640)

Due to the size limitation mentioned above, execution may fail when there are too many rows, and you have to duplicate the same CTE code in every query that needs it, which is inconvenient. For large datasets or data that is used repeatedly over the long term, you should consider creating a Spells table by submitting a Spellbook PR instead.

## Decode data from the logs

Earlier, when calculating ERC20 token prices, we saw an example of computing prices from logs. Let's look at another example where we need to decode data directly from the logs. When a smart contract has not been decoded by Dune, or the decoded table for an event was not generated because the ABI used during decoding was incomplete, we may need to extract data directly from the logs. Taking the Lens protocol as an example, we found that in the Lens smart contract source code ([Lens Core](https://github.com/lens-protocol/core)) almost every operation emits an event log. However, only a few event-related tables exist in Dune's decoded data. Further investigation revealed that the ABI used during decoding was missing the definitions of these events. Although we could regenerate the full ABI (or obtain it from the Lens team) and submit it to Dune for re-decoding, the main point here is how to extract data from the undecoded logs.
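Before decoding anything, it helps to look at what the raw log records contain. As a minimal sketch (reusing the LensHub contract address that appears in the decoding query below): `topic0` holds the keccak256 hash of the event signature, `topic1` through `topic3` hold the indexed parameters, and `data` holds the ABI-encoded non-indexed parameters:

```sql
-- Peek at raw, undecoded event logs emitted by the LensHub contract.
-- topic0 identifies the event type; topic1..topic3 carry the indexed
-- parameters; data carries the ABI-encoded non-indexed parameters.
select block_time,
    tx_hash,
    topic0,
    topic1,
    topic2,
    topic3,
    data
from polygon.logs
where contract_address = 0xdb46d1dc155634fbc732f92e853b10b288ad5a1d -- LensHub contract
    and block_time >= date('2022-05-01')
limit 10
```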
+
In the Lens smart contract source code, we find the `FollowNFTTransferred` event definition ([code link](https://github.com/lens-protocol/core/blob/main/contracts/libraries/Events.sol#L347)). There is also a `Followed` event in the code, but its array argument complicates decoding, so we use the `FollowNFTTransferred` event as our example. From the event name, we can infer that when a user follows a Lens Profile, a Follow NFT is generated and transferred to the follower's address. So we can find a transaction of interest and inspect its logs. Example transaction: [https://polygonscan.com/tx/0x30311c3eb32300c8e7e173c20a6d9c279c99d19334be8684038757e92545f8cf](https://polygonscan.com/tx/0x30311c3eb32300c8e7e173c20a6d9c279c99d19334be8684038757e92545f8cf). Opening this transaction in the browser and switching to the "Logs" tab, we can see there are four event logs in total. For some events, the blockchain explorer can display the original event name; the Lens transaction we're looking at doesn't show one, so how do we know which log corresponds to the `FollowNFTTransferred` event? We can compare against the keccak256 hash of the event signature, generated with a third-party tool. The [Keccak-256](https://emn178.github.io/online-tools/keccak_256.html) page generates Keccak-256 hashes online. Reducing the `FollowNFTTransferred` definition from the source code to its canonical signature (remove parameter names and spaces) gives `FollowNFTTransferred(uint256,uint256,address,address,uint256)`; pasting it into the Keccak-256 tool page produces the hash `4996ad2257e7db44908136c43128cc10ca988096f67dc6bb0bcee11d151368fb`. (The latest Dune decoded tables already include the full set of event tables for the Lens project; this is just for illustration.)

![image_08.png](img/image_08.png)

Using this hash, we can search Polygonscan's transaction log list for a match. We can see that the first log entry is exactly the one we're looking for.

![image_09.png](img/image_09.png)

Once we have found the corresponding log record, armed with the event definition we can easily decode the data:

```sql
select block_time,
    tx_hash,
    bytearray_to_uint256(topic1) as profile_id, -- the followed Profile ID
    bytearray_to_uint256(topic2) as follower_token_id, -- follower's NFT Token ID
    bytearray_ltrim(bytearray_substring(data, 1, 32)) as from_address2, -- address (out)
    bytearray_ltrim(bytearray_substring(data, 1 + 32, 32)) as to_address2 -- address (in) (address of the follower)
from polygon.logs
where contract_address = 0xdb46d1dc155634fbc732f92e853b10b288ad5a1d -- Lens contract address
    and block_time >= date('2022-05-01') -- The Lens contract was deployed after this date; this condition improves query speed
    and topic0 = 0x4996ad2257e7db44908136c43128cc10ca988096f67dc6bb0bcee11d151368fb -- Event topic FollowNFTTransferred
limit 10
```

Example link to the above query:
- [https://dune.com/queries/1652759](https://dune.com/queries/1652759)
- [Keccak-256 Tool](https://emn178.github.io/online-tools/keccak_256.html)

## Use sequences of numbers to simplify queries

When studying NFT projects, we may want to analyze the distribution of transaction prices for a given NFT project during a certain time period, i.e., how many transactions were recorded in each price range.
We typically set the minimum and maximum transaction prices (either via input parameters or by querying the transaction data and handling outliers), divide the span into N price bands, and count the number of transactions in each band. Here is an example query whose logic is simple but whose code is cumbersome:

```sql
-- nft Position cost distribution
-- 0x306b1ea3ecdf94ab739f1910bbda052ed4a9f949 beanz
-- 0xED5AF388653567Af2F388E6224dC7C4b3241C544 azuki
with contract_transfer as (
    select *
    from nft.trades
    where nft_contract_address = 0xe361f10965542ee57D39043C9c3972B77841F581
        and tx_to != 0x0000000000000000000000000000000000000000
        and amount_original is not null
),

transfer_rn as (
    select row_number() over (partition by token_id order by block_time desc) as rn, *
    from contract_transfer
),

latest_transfer as (
    select * from transfer_rn
    where rn = 1
),

min_max as (
    select (cast({{max_price}} as double) - cast({{min_price}} as double))/20.0 as bin
),

bucket_trade as (select *,
    case
        when amount_original between {{min_price}}+0*bin and {{min_price}}+1*bin then 1*bin
        when amount_original between {{min_price}}+1*bin and {{min_price}}+2*bin then 2*bin
        when amount_original between {{min_price}}+2*bin and {{min_price}}+3*bin then 3*bin
        when amount_original between {{min_price}}+3*bin and {{min_price}}+4*bin then 4*bin
        when amount_original between {{min_price}}+4*bin and {{min_price}}+5*bin then 5*bin
        when amount_original between {{min_price}}+5*bin and {{min_price}}+6*bin then 6*bin
        when amount_original between {{min_price}}+6*bin and {{min_price}}+7*bin then 7*bin
        when amount_original between {{min_price}}+7*bin and {{min_price}}+8*bin then 8*bin
        when amount_original between {{min_price}}+8*bin and {{min_price}}+9*bin then 9*bin
        when amount_original between {{min_price}}+9*bin and {{min_price}}+10*bin then 10*bin
        when amount_original between {{min_price}}+10*bin and {{min_price}}+11*bin then 11*bin
        when amount_original between {{min_price}}+11*bin and {{min_price}}+12*bin then 12*bin
        when amount_original between {{min_price}}+12*bin and {{min_price}}+13*bin then 13*bin
        when amount_original between {{min_price}}+13*bin and {{min_price}}+14*bin then 14*bin
        when amount_original between {{min_price}}+14*bin and {{min_price}}+15*bin then 15*bin
        when amount_original between {{min_price}}+15*bin and {{min_price}}+16*bin then 16*bin
        when amount_original between {{min_price}}+16*bin and {{min_price}}+17*bin then 17*bin
        when amount_original between {{min_price}}+17*bin and {{min_price}}+18*bin then 18*bin
        when amount_original between {{min_price}}+18*bin and {{min_price}}+19*bin then 19*bin
        when amount_original between {{min_price}}+19*bin and {{min_price}}+20*bin then 20*bin
        ELSE 21*bin
    end as gap
    from latest_transfer,min_max
    )

select gap, count(*) as num
from bucket_trade
group by gap
order by gap
```

In this example, we define two parameters `min_price` and `max_price`, divide their difference equally into 20 price bands, and then use a lengthy CASE statement to count the transactions falling into each band. Imagine having to break it into 50 groups instead. Is there an easier way? The answer is yes.
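The trick relies on two built-ins of Dune SQL (Trino): `sequence()`, which generates an array of consecutive numbers, and `unnest()`, which expands an array into rows. A minimal sketch of just this building block (the alias names are arbitrary):

```sql
-- sequence(1, 5) builds the array [1, 2, 3, 4, 5];
-- unnest() then expands it into one row per element,
-- yielding a five-row, single-column table tbl(num).
select num
from unnest(sequence(1, 5)) as tbl(num)
```

Joining such a generated series against the computed bin width replaces the twenty hand-written CASE branches.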
Now look at the full query:

```sql
with contract_transfer as (
    select *
    from nft.trades
    where nft_contract_address = 0xe361f10965542ee57D39043C9c3972B77841F581
        and tx_to != 0x0000000000000000000000000000000000000000
        and amount_original is not null
),

transfer_rn as (
    select row_number() over (partition by token_id order by block_time desc) as rn, *
    from contract_transfer
),

latest_transfer as (
    select *
    from transfer_rn
    where rn = 1
),

min_max as (
    select (cast({{max_price}} as double) - cast({{min_price}} as double))/20.0 as bin
),

-- Generates a single column table with numbers from 1 to 20
num_series as (
    select num from unnest(sequence(1, 20)) as tbl(num)
),

-- Generates the start and end prices of the group price range
bin_gap as (
    select (num - 1) * bin as gap,
        (num - 1) * bin as price_lower,
        num * bin as price_upper
    from num_series
    join min_max on true

    union all

    -- Add an additional interval to cover other data
    select num * bin as gap,
        num * bin as price_lower,
        num * 1e4 * bin as price_upper
    from num_series
    join min_max on true
    where num = 20
),

bucket_trade as (
    select t.*,
        b.gap
    from latest_transfer t
    join bin_gap b on t.amount_original >= b.price_lower and t.amount_original < b.price_upper
)

select gap, count(*) as num
from bucket_trade
group by gap
order by gap
```

In the `num_series` CTE, we use `unnest(sequence(1, 20)) as tbl(num)` to generate the numbers 1 through 20 and expand them into 20 rows, one number per row. Then in `bin_gap`, we derive the lower and upper price of each band by joining the two CTEs; the `union all` adds one extra band whose upper bound is large enough to cover all remaining transactions. `bucket_trade` is then reduced to a simple join against `bin_gap`, matching each price to its band. The overall logic is much clearer and easier to understand. Also note that since the element type of the arrays involved elsewhere may be `uint256`, a Dune custom type, casting to standard types is sometimes needed.

Example link to the above query:
- [https://dune.com/queries/1054461](https://dune.com/queries/1054461)
- [https://dune.com/queries/1654001](https://dune.com/queries/1654001)

## Read data from Array and Struct fields

Some smart contracts emit event logs with array parameters, and the decoded tables Dune generates store these parameters as arrays as well. The Solana blockchain's raw transaction tables also make heavy use of arrays to store data. Some data is stored in structs, or we need structs when extracting the data (see the example below). Let's look at how to access data stored in array and struct fields.

```sql
select tokens, deltas, evt_tx_hash
from balancer_v2_arbitrum.Vault_evt_PoolBalanceChanged
where evt_tx_hash = 0x65a4f35d81fd789d93d79f351dc3f8c7ed220ab66cb928d2860329322ffff32c
```

The first two fields returned by the preceding query are arrays (shown in the following image):

![image_10.png](img/image_10.png)

We can use `cross join unnest(tokens) as tbl1(token)` to split the `tokens` array field into multiple rows:
```sql
select evt_tx_hash, deltas, token -- Returns the split field
from balancer_v2_arbitrum.Vault_evt_PoolBalanceChanged
cross join unnest(tokens) as tbl1(token) -- Split into multiple rows, naming the new field token
where evt_tx_hash = 0x65a4f35d81fd789d93d79f351dc3f8c7ed220ab66cb928d2860329322ffff32c
```

We can also split the `deltas` field.
But because each `cross join` multiplies the split values into the query's existing result set, unnesting the two fields separately produces an incorrect, Cartesian-product-like result. The following query and the screenshot of its output illustrate this:

```sql
select evt_tx_hash, token, delta
from balancer_v2_arbitrum.Vault_evt_PoolBalanceChanged
cross join unnest(tokens) as tbl1(token) -- Split into multiple rows, naming the new field token
cross join unnest(deltas) as tbl2(delta) -- Split into multiple rows, naming the new field delta
where evt_tx_hash = 0x65a4f35d81fd789d93d79f351dc3f8c7ed220ab66cb928d2860329322ffff32c
```

![image_11.png](img/image_11.png)

To avoid such duplication, split multiple fields simultaneously within a single `unnest()` call, which returns a temporary table with the corresponding new fields:

```sql
select evt_tx_hash, token, delta
from balancer_v2_arbitrum.Vault_evt_PoolBalanceChanged
cross join unnest(tokens, deltas) as tbl(token, delta) -- Split into multiple rows, naming the new fields token and delta
where evt_tx_hash = 0x65a4f35d81fd789d93d79f351dc3f8c7ed220ab66cb928d2860329322ffff32c
```

The result is shown in the following figure:

![image_12.png](img/image_12.png)

Example link to the above query:
- [https://dune.com/queries/1654079](https://dune.com/queries/1654079)


## Read JSON string data

In some smart contracts, objects containing multiple values are serialized as JSON strings in the decoded tables, such as the Lens Profile-creation event we saw earlier. We can use the `json_value()` function to read individual values directly from a JSON string. For example:

```sql
select json_value(vars, 'lax $.to') as user_address, -- Read the user address from the json string
    json_value(vars, 'lax $.handle') as handle_name, -- Read the user handle from the json string
    call_block_time,
    output_0 as profile_id,
    call_tx_hash
from lens_polygon.LensHub_call_createProfile
where call_success = true
limit 100
```

Alternatively, use the `json_query()` or `json_extract()` function to extract the corresponding data. When you need to extract array values from a JSON string, combine `json_extract()` with `cast()` for type conversion. Here are some examples:
```sql
select json_query(vars, 'lax $.follower') AS follower, -- single value
    json_query(vars, 'lax $.profileIds') AS profileIds, -- still a string
    from_hex(cast(json_extract(vars,'$.follower') as varchar)) as follower2, -- cast to varbinary
    cast(json_extract(vars,'$.profileIds') as array(integer)) as profileIds2, -- cast to array
    vars
from lens_polygon.LensHub_call_followWithSig
where cardinality(output_0) > 1
limit 10
```

Example link to the above query:
- [https://dune.com/queries/1562662](https://dune.com/queries/1562662)
- [https://dune.com/queries/941978](https://dune.com/queries/941978)
- [https://dune.com/queries/1554454](https://dune.com/queries/1554454)

For detailed help on Dune SQL (Trino) JSON functions, see: https://trino.io/docs/current/functions/json.html

## SixDegreeLab introduction

SixDegreeLab ([@SixdegreeLab](https://twitter.com/sixdegreelab)) is a professional on-chain data team dedicated to providing accurate on-chain data charts, analysis, and insights to users. Our mission is to popularize on-chain data analysis and foster a community of on-chain data analysts.
Through community building, tutorial writing, and other initiatives, we cultivate talents who can contribute valuable analytical content, drive the construction of the blockchain's data layer, and prepare for the broad future of blockchain data applications.

Feel free to visit [SixDegreeLab's Dune homepage](https://dune.com/sixdegree).

Due to our limitations, mistakes may occur. If you come across any errors, kindly point them out; we appreciate your feedback. diff --git a/en/08_nft_analysis/12_nft_analysis.md b/en/08_nft_analysis/12_nft_analysis.md new file mode 100644 index 0000000..897120a --- /dev/null +++ b/en/08_nft_analysis/12_nft_analysis.md @@ -0,0 +1,338 @@ +# NFT data analysis + +## Background Information + +An [NFT (Non-Fungible Token)](https://ethereum.org/zh/nft/) is a standard token that follows [ERC721](https://eips.ethereum.org/EIPS/eip-721). Compared with fungible tokens that follow the ERC20 standard, the most typical characteristics of an NFT are that each token is indivisible, irreplaceable, and unique. Common uses of NFTs include:


- Digital art/collections
- In-game items
- Domain names
- Tickets or coupons that allow you to attend an event
- Digital identity
- Articles

For example, in digital art, each NFT has its own style; with ENS domain names, each name is unique and cannot be duplicated; and with tickets, each ticket corresponds to a fixed seat, each seat having its own number.

As NFTs have developed, other NFT standards have been derived:

- ERC-1155: Each token ID is distinct, but tokens can be transferred in quantities
- SBT: Non-transferable token
- ERC-3525: Semi-fungible token; each token is distinct and supports value settlement

![](assets/ERC-Standard.jpeg)

## Contract description

The contracts related to NFTs usually fall into two categories: the project's own contract, and the third-party marketplace contracts used to trade NFTs.

### ERC721 contract

We use an ERC721 NFT as an example to show the characteristics of the contract; the other standards can be studied as needed. Taking the [azuki](https://opensea.io/collection/azuki) NFT on the OpenSea marketplace as an example, let's see what events the contract contains:

```solidity
interface ERC721 {
    /// @dev This event is fired when the ownership of any NFT changes (either way).
    ///  Both at creation time (`from` == 0) and destruction time (`to` == 0), except when the contract is created.
    event Transfer(address indexed _from, address indexed _to, uint256 indexed _tokenId);

    /// @dev Triggered when the approved address of an NFT is changed or confirmed.
    ///  The zero address indicates no approved address.
    ///  When a `Transfer` event occurs, it also means that the approved address (if any) for that NFT is reset to "none" (the zero address).
    event Approval(address indexed _owner, address indexed _approved, uint256 indexed _tokenId);

    /// @dev Triggered when the owner enables or disables an operator. (The operator can manage the NFTs held by the owner.)
    event ApprovalForAll(address indexed _owner, address indexed _operator, bool _approved);

    /// @notice Transfer ownership of an NFT from one address to another
    /// @dev Throws an exception if `msg.sender` is not the current owner (or an approved address).
    ///  Throws an exception if `_from` is not the owner, `_to` is the zero address, or `_tokenId` is not a valid id.
+    ///  When the transfer completes, this function checks whether `_to` is a contract; if so, it calls `onERC721Received` on `_to` and checks that the return value is `0x150b7a02` (that is, `bytes4(keccak256("onERC721Received(address,address,uint256,bytes)"))`); if not, it throws an exception.
    /// @param _from : current owner
    /// @param _to : new owner
    /// @param _tokenId : The token id to transfer.
    /// @param data : Additional data (with no specified format) passed to the receiver.
    function safeTransferFrom(address _from, address _to, uint256 _tokenId, bytes data) external payable;

    /// @notice Transfer ownership - the caller is responsible for confirming that `_to` is capable of receiving the NFT, otherwise it may be permanently lost.
    /// @dev Throws an exception if `msg.sender` is not the current owner (or an approved address or operator).
    ///  Throws an exception if `_from` is not the owner, `_to` is the zero address, or `_tokenId` is not a valid id.
    function transferFrom(address _from, address _to, uint256 _tokenId) external payable;

    /// @notice Change or confirm the approved address of an NFT
    /// @dev The zero address indicates no approved address.
    ///  Throws an exception if `msg.sender` is not the current owner or an operator.
    /// @param _approved The newly approved controller
    /// @param _tokenId : token id
    function approve(address _approved, uint256 _tokenId) external payable;

    /// @notice Enable or disable a third party (operator) to manage all of `msg.sender`'s assets
    /// @dev Triggers the `ApprovalForAll` event. The contract must allow each owner to have multiple operators.
    /// @param _operator The address to add to the list of approved operators
    /// @param _approved `True` indicates approval and `false` indicates revocation
    function setApprovalForAll(address _operator, bool _approved) external;

    ...
}
```

For data analysis, the most important element is the `Transfer` event, which is emitted on every transfer and recorded on chain. Minting, which creates a new NFT (usually at the time of sale), is recorded through the same event. Dune's Spells provide ERC721 and ERC1155 Transfer tables such as `erc721_ethereum.evt_Transfer` and `erc1155_ethereum.evt_Transfer` (names differ across blockchains), which we can use to query the NFT transfer events of a contract or an EOA address.

The `Transfer` event has three main parameters: the sender address `from`, the receiver address `to`, and the NFT's `tokenId`. In a normal trade, both `from` and `to` are regular addresses. For a mint, `from` is the zero address; for a burn, `to` is the zero address. Dune's `nft.mint` and `nft.burn` tables are also decoded from this event to obtain the final transaction information.
![](assets/nft-transfer-etherscan.png)

### marketplace contracts

Common marketplace contracts include OpenSea, X2Y2, Blur, etc. Let's take OpenSea's Seaport 1.1 contract as an example. All trade-related functions emit the `OrderFulfilled` event to record the data on chain. Dune's `nft.trades` parses this event to obtain the final trades.
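As a quick illustration of the aggregated table — a minimal sketch, assuming the `project` label `'opensea'` and reusing the Azuki contract address that appears later in this section — recent trades can be read from `nft.trades` like this:

```sql
-- A minimal sketch: the 10 most recent OpenSea trades of the
-- Azuki collection, read from the aggregated nft.trades table.
select block_time,
    token_id,
    amount_original,
    currency_symbol,
    buyer,
    seller,
    tx_hash
from nft.trades
where blockchain = 'ethereum'
    and project = 'opensea'
    and nft_contract_address = 0xed5af388653567af2f388e6224dc7c4b3241c544 -- azuki
order by block_time desc
limit 10
```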
### Marketplace contracts

Common marketplace contracts include OpenSea, X2Y2, Blur, etc. Let's take OpenSea's Seaport 1.1 contract as an example. All transaction-related functions emit the `OrderFulfilled` event to record the data on chain, and Dune's `nft.trades` table parses this event to obtain the final trades. The writable functions of the Seaport contract are as follows:

![](assets/seaport1.1.png)

```solidity
uint256 constant receivedItemsHash_ptr = 0x60;

/*
 * Memory layout in _prepareBasicFulfillmentFromCalldata of
 * data for OrderFulfilled
 *
 * event OrderFulfilled(
 *   bytes32 orderHash,
 *   address indexed offerer,
 *   address indexed zone,
 *   address fulfiller,
 *   SpentItem[] offer,
 *     > (itemType, token, id, amount)
 *   ReceivedItem[] consideration
 *     > (itemType, token, id, amount, recipient)
 * )
 */
```

For example, suppose Alice places an order for the Azuki NFT numbered [3638](https://opensea.io/assets/ethereum/0xed5af388653567af2f388e6224dc7c4b3241c544/3638) at 10 ETH. This calls the `fulfillBasicOrder` function, and when the transaction succeeds it emits the `OrderFulfilled` event, which is logged on chain. See the [Etherscan link](https://etherscan.io/tx/0x9beb69ec6505e27f845f508169dae4229e851a8d7c7b580abef110bf831dc338) and the [Dune link](https://dune.com/queries/1660679).

## Common table description

- Raw base tables: located under `Raw` --> the `transactions` and `logs` tables;
- Project-specific tables: located under `Decoded Projects` --> search for the name of the specific project or trading platform;
- Aggregated tables:
  - Spells --> erc721: records all ERC721 transfer events;
  - Spells --> nft: contains information about trades, mints, transfers, fees, and burns; the most important of these is the `trades` table, which aggregates the trade data from the major marketplaces.

![](assets/dune-nft-related.png)

The important fields of the `nft.trades` table are as follows:

| Field | Description |
| ------------------------ | ----------------------------------- |
| blockchain | Blockchain on which the trade took place |
| project | Trading platform name |
| version | Trading platform version |
| block_time | Block time |
| token_id | NFT token ID |
| collection | NFT collection name |
| amount_usd | USD value of the trade at trade time |
| token_standard | Standard of the token |
| trade_type | Whether the trade involves a single NFT or multiple NFTs |
| number_of_items | Number of NFTs traded |
| trade_category | Trade category (direct buy, auction, etc.) |
| evt_type | Event type (Trade, Mint, Burn) |
| seller | Seller wallet address |
| buyer | Buyer wallet address |
| amount_original | Trade amount in the original units of the payment token |
| amount_raw | Raw trade amount without decimal adjustment |
| currency_symbol | Symbol of the payment token (what token is used as the unit of payment) |
| currency_contract | Contract address of the payment token; for native ETH, the WETH contract address is used |
| nft_contract_address | NFT contract address |
| project_contract_address | Trading platform contract address |
| aggregator_name | Aggregator platform name (e.g. Gem), if the trade was initiated from an aggregator |
| aggregator_address | Aggregator platform contract address |
| tx_hash | Transaction hash |
| block_number | Block number of the transaction |
| tx_from | Address that initiated the transaction, usually the buyer |
| tx_to | Address that received the transaction, usually the trading platform |
| unique_trade_id | Unique trade ID |
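To see these fields in practice, here is a small sketch (assuming Dune SQL) that pulls the `nft.trades` record of the Azuki #3638 sale referenced above, filtering by its transaction hash:

```sql
-- Inspect how nft.trades records a single sale (the Azuki #3638 example above)
select block_time, project, trade_category, seller, buyer,
       token_id, number_of_items, amount_original, currency_symbol, amount_usd
from nft.trades
where tx_hash = 0x9beb69ec6505e27f845f508169dae4229e851a8d7c7b580abef110bf831dc338
```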
## Key metrics

In general, an NFT project focuses on the following basic metrics:

**Transaction price movement**

Query the trade amounts across all marketplaces and plot each trade on a scatter chart. Different time ranges can be selected, such as the last 24 hours, the last 7 days, or the last month. Note that a few trades have abnormally high prices; these need to be filtered out, otherwise the chart cannot clearly show the price movement of the majority of trades.

![](assets/history-price.png)

Reference link: https://dune.com/queries/1660237

**Floor price**

Because we can only obtain completed trades on chain and cannot see the pending listings in the marketplaces, we generally use the minimum amount among the last 10 trades as the floor price. This is usually close to the actual listing floor, unless the project is particularly illiquid.

```sql
-- Find the 10 most recent trades for this contract, sorted by time
with latest_trades as (
    select *
    from nft.trades
    where nft_contract_address = 0xed5af388653567af2f388e6224dc7c4b3241c544 -- contract address of the Azuki NFT
        -- and block_time > now() - interval '24' hour -- a time filter can also be applied
    order by block_time desc
    limit 10
)

select min(amount_original) as floor_price -- take the minimum directly
    -- percentile_cont(.05) within group (order by amount_original) as floor_price -- alternatively, take the 5% percentile between the lowest and highest prices to reduce the effect of abnormally low-priced trades
from latest_trades
where currency_symbol in ('ETH', 'WETH') -- filter by chain or payment token as needed
    and cast(number_of_items as integer) = 1
```

Reference link: https://dune.com/queries/1660139

**Trading volume: total volume, total number of trades, and 24-hour / 7-day / 1-month volume**

```sql
with total_volume as (
    select
        sum(amount_original) as "Total Trade Volume(ETH)", -- total volume in ETH
        sum(amount_usd) as "Total Trade Volume(USD)", -- total volume in USD
        count(amount_original) as "Total Trade Tx" -- total number of trades
    from nft.trades
    where nft_contract_address = 0xed5af388653567af2f388e6224dc7c4b3241c544
        -- and currency_symbol in ('ETH', 'WETH')
),

total_fee as (
    select
        sum(royalty_fee_amount) as "Total Royalty Fee(ETH)", -- total royalty fees in ETH
        sum(royalty_fee_amount_usd) as "Total Royalty Fee(USD)", -- total royalty fees in USD
        sum(platform_fee_amount) as "Total Platform Fee(ETH)", -- total platform fees in ETH
        sum(platform_fee_amount_usd) as "Total Platform Fee(USD)" -- total platform fees in USD
    from nft.fees
    where nft_contract_address = 0xed5af388653567af2f388e6224dc7c4b3241c544
        -- and royalty_fee_currency_symbol in ('ETH', 'WETH')
)

select * from total_volume, total_fee
```

Reference link: https://dune.com/queries/1660292
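The query above returns all-time totals. For the 24-hour / 7-day / 1-month figures mentioned in the heading, one option (a sketch under the same assumptions, using only the `nft.trades` fields shown earlier) is conditional aggregation over `block_time`:

```sql
-- Rolling-window volumes via conditional aggregation
select
    sum(case when block_time > now() - interval '24' hour then amount_original end) as "Volume 24h(ETH)",
    sum(case when block_time > now() - interval '7' day then amount_original end) as "Volume 7d(ETH)",
    sum(case when block_time > now() - interval '30' day then amount_original end) as "Volume 30d(ETH)"
from nft.trades
where nft_contract_address = 0xed5af388653567af2f388e6224dc7c4b3241c544
    and currency_symbol in ('ETH', 'WETH')
```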
**Daily/weekly/monthly volume**

```sql
with daily_trade_summary as (
    select date_trunc('day', block_time) as block_date,
        sum(number_of_items) as items_traded,
        sum(amount_raw) / 1e18 as amount_raw_traded,
        sum(amount_usd) as amount_usd_traded
    from opensea.trades
    where nft_contract_address = 0xed5af388653567af2f388e6224dc7c4b3241c544
        -- and block_time > now() - interval '90' day
    group by 1
    order by 1
)

select block_date,
    items_traded,
    amount_raw_traded,
    amount_usd_traded,
    sum(items_traded) over (order by block_date asc) as accumulate_items_traded,
    sum(amount_raw_traded) over (order by block_date asc) as accumulate_amount_raw_traded,
    sum(amount_usd_traded) over (order by block_date asc) as accumulate_amount_usd_traded
from daily_trade_summary
order by block_date
```

![](./assets/daily-trade-volune.png)

Reference link: https://dune.com/queries/1664420

**Current number of holders, total token count, holder distribution, etc.**

```sql
with nft_trade_details as ( -- buy/sell details per trade: the sell side is negative, the buy side is positive
    select seller as trader,
        -1 * cast(number_of_items as integer) as hold_item_count
    from nft.trades
    where nft_contract_address = 0xed5af388653567af2f388e6224dc7c4b3241c544

    union all

    select buyer as trader,
        cast(number_of_items as integer) as hold_item_count
    from nft.trades
    where nft_contract_address = 0xed5af388653567af2f388e6224dc7c4b3241c544
),

nft_traders as (
    select trader,
        sum(hold_item_count) as hold_item_count
    from nft_trade_details
    group by trader
    having sum(hold_item_count) > 0
    order by 2 desc
),

nft_traders_summary as (
    select (case when hold_item_count >= 100 then 'Hold >= 100 NFT'
                 when hold_item_count >= 20 and hold_item_count < 100 then 'Hold 20 - 100'
                 when hold_item_count >= 10 and hold_item_count < 20 then 'Hold 10 - 20'
                 when hold_item_count >= 3 and hold_item_count < 10 then 'Hold 3 - 10'
                 else 'Hold 1 or 2 NFT'
            end) as hold_count_type,
        count(*) as holders_count
    from nft_traders
    group by 1
    order by 2 desc
),

total_traders_count as (
    select count(*) as total_holders_count,
        max(hold_item_count) as max_hold_item_count
    from nft_traders
),

total_summary as (
    select
        0 as total_nft_count,
        count(*) as transaction_count,
        sum(number_of_items) as number_of_items_traded,
        sum(amount_raw) / 1e18 as eth_amount_traded,
        sum(amount_usd) as usd_amount_traded
    from opensea.trades
    where nft_contract_address = 0xed5af388653567af2f388e6224dc7c4b3241c544
)

select *
from nft_traders_summary
join total_traders_count on true
join total_summary on true
```

Reference link: https://dune.com/queries/1300500/2228120
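Note that `nft.trades` only captures marketplace sales, so holdings acquired through mints or plain wallet-to-wallet transfers are invisible to the query above. A more complete holder count can be derived from the raw `Transfer` events. Below is a sketch (an assumption-based example, not the dashboard's actual query) using the `erc721_ethereum.evt_Transfer` table introduced earlier; since each ERC721 `Transfer` moves exactly one token, counting events gives the token quantities:

```sql
-- Current holders derived from ERC721 Transfer events (mints and plain transfers included)
with balance_changes as (
    select "to" as holder, count(*) as change_amount -- tokens received
    from erc721_ethereum.evt_Transfer
    where contract_address = 0xed5af388653567af2f388e6224dc7c4b3241c544
    group by 1

    union all

    select "from" as holder, -1 * count(*) as change_amount -- tokens sent
    from erc721_ethereum.evt_Transfer
    where contract_address = 0xed5af388653567af2f388e6224dc7c4b3241c544
    group by 1
)

select count(*) as current_holders
from (
    select holder
    from balance_changes
    where holder <> 0x0000000000000000000000000000000000000000 -- exclude the zero address used for mint/burn
    group by holder
    having sum(change_amount) > 0
) as holders
```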
## NFT Comprehensive dashboard example

We built a dashboard where you can enter the address of any NFT contract and view all kinds of information about the project. You can learn more by studying the queries used in the dashboard:

https://dune.com/sixdegree/nft-collections-metrics-custom-dashboard

![](./assets/nft-all-in-one.png)

## Reference

- https://mirror.xyz/0x07599B7E947A4F6240F826F41768F76149F490D5/CHcwsp_d0AINEalFq_0FcqkLeEyeeGpYDDtw82TyMes
- https://github.com/cryptochou/seaport-analysis
- https://dune.com/sixdegree/soulda-nft-soulda16club
- https://dune.com/sixdegree/digidaigaku-nft-by-limit-break

## SixDegreeLab introduction

SixDegreeLab ([@SixdegreeLab](https://twitter.com/sixdegreelab)) is a professional on-chain data team dedicated to providing accurate on-chain data charts, analysis, and insights to users. Our mission is to popularize on-chain data analysis and foster a community of on-chain data analysts. Through community building, tutorial writing, and other initiatives, we aim to cultivate analysts who can contribute valuable analytical content and drive the construction of a data layer for the blockchain community, nurturing talent for the future of blockchain data applications.

Feel free to visit [SixDegreeLab's Dune homepage](https://dune.com/sixdegree).

Due to our limitations, mistakes may occur. If you come across any errors, kindly point them out, and we appreciate your feedback.