diff --git a/assets/jsconfig.json b/assets/jsconfig.json
index eb498c9a..d0d7a6ec 100644
--- a/assets/jsconfig.json
+++ b/assets/jsconfig.json
@@ -3,7 +3,7 @@
"baseUrl": ".",
"paths": {
"*": [
- "..\\themes\\hugo-theme-stack\\assets\\*"
+ "../themes/hugo-theme-stack/assets/*"
]
}
}
diff --git a/config.yaml b/config.yaml
index 5c455f59..3fccdd95 100644
--- a/config.yaml
+++ b/config.yaml
@@ -7,10 +7,10 @@ paginate: 5
title: Xianfei's Blog
languages:
- # en:
- # languageName: English
- # title: Xianfei's Blog
- # weight: 1
+ en:
+ languageName: English
+ title: Xianfei's Blog
+ weight: 2
zh-cn:
languageName: 中文
title: 衔飞的博客
diff --git a/content/.DS_Store b/content/.DS_Store
index c51c6098..cdcd1f05 100644
Binary files a/content/.DS_Store and b/content/.DS_Store differ
diff --git a/content/_index.md b/content/_index.en.md
similarity index 100%
rename from content/_index.md
rename to content/_index.en.md
diff --git a/content/page/about/index.en.md b/content/page/about/index.en.md
new file mode 100644
index 00000000..828ee2b5
--- /dev/null
+++ b/content/page/about/index.en.md
@@ -0,0 +1,45 @@
+---
+title: About Me
+slug: about
+description: I'm xianfei (衔飞)
+date: '2022-06-28'
+license: CC BY-NC-ND
+lastmod: '2020-10-09'
+menu:
+ main:
+ weight: -90
+ params:
+ icon: user
+---
+
+## Hi there, I'm xianfei (衔飞) 👋
+
+> The only limit is your imagination.
+
+
+
+
+- 🔭 I'm an undergraduate student majoring in software engineering in China.
+- 🏡 I live in Beijing and am looking for a job here.
+- 🌱 I'm currently learning Computer Graphics, Machine Learning, and Web Technologies.
+- 😍 I like eating delicious food 🍔, traveling 🏝, photography 📸, coding 🧑💻 and sleeping 🛌.
+- 😡 I hate working overtime.
+- 💬 I like chatting. Feel free to ask me about anything [here](https://github.com/xianfei/xianfei/issues).
+
+### Skills and Interests
+
+- Programming Languages: C++/JavaScript/C/Java/Python
+
+- Other Languages: HTML/CSS/MarkDown/JSON/XML
+
+- Web Front-end Frameworks: Vue.js/Electron.js
+
+- Web Back-end Frameworks & Database: Express.js(Node.js)/MongoDB
+
+- Tools: Linux Shell/SSH/Git/VSCode/CMake/Wireshark, etc.
+
+- Other: Arduino/Raspberry Pi
+
+### What's more
+- 🥰 You can follow me on [Bilibili: xianfei](https://space.bilibili.com/9872607) and my WeChat official account: Xianfei
+- 📬 Email: xianfei@bupt.cn
diff --git a/content/page/gallery/index.en.md b/content/page/gallery/index.en.md
new file mode 100644
index 00000000..cb653295
--- /dev/null
+++ b/content/page/gallery/index.en.md
@@ -0,0 +1,38 @@
+---
+title: "Photography Portfolio"
+date: 2019-05-28
+slug: "gallery"
+menu:
+ main:
+ weight: -70
+ params:
+ icon: archives
+---
+
+## Entrance
+
+[Click Here to Visit](https://photography.xianfei.win/)
+
+https://photography.xianfei.win/
+
+## Preface
+
+Photography is one of my hobbies. I usually focus on portraits, daily life, and events.
+
+Experience:
+
+- 2020-2021 President of BUPT Photography Club
+- 2018-2021 BUPT Student Union Etiquette Team, Photographer
+- 2020-2021 BUPT Hongyan New Media Center, Photographer
+- 2019-2020 BUPT Campus Youth League Propaganda Department, Photographer
+- 2018-2019 BUPT Youth New Media Center, Photographer & Videographer
+- 2017 Official Photographer for the 23rd IDO Anime Expo
+- 2015-2018 Hui Long Guan Yu Xin School YDTV Station, Videographer & Editor
+
+## Photography Portfolio Build Tools
+
+Build Tool: https://github.com/xianfei/zing-gallery-next
+
+Hosting: Github Pages
+
+Build Output/Webpage Source Code: https://github.com/xianfei/xianfei-photography-portfolio
\ No newline at end of file
diff --git a/content/page/links/index.en.md b/content/page/links/index.en.md
new file mode 100644
index 00000000..9ef21976
--- /dev/null
+++ b/content/page/links/index.en.md
@@ -0,0 +1,30 @@
+---
+title: Links
+slug: links
+links:
+ - title: 北京信息科技大学
+ description: Beijing Information Science & Technology University (BISTU)
+ website: https://www.bistu.edu.cn/
+ image: bistu.webp
+ - title: 北京邮电大学
+ description: Beijing University of Posts and Telecommunications
+ website: https://www.bupt.edu.cn/
+ image: bupt.png
+ - title: GitHub
+ description: GitHub is the world's largest software development platform.
+ website: https://github.com
+ image: https://github.githubassets.com/images/modules/logos_page/GitHub-Mark.png
+ - title: TypeScript
+ description: TypeScript is a typed superset of JavaScript that compiles to plain JavaScript.
+ website: https://www.typescriptlang.org
+ image: ts-logo-128.jpg
+menu:
+ main:
+ weight: -50
+ params:
+ icon: link
+
+comments: false
+---
+
+Here are some links.
\ No newline at end of file
diff --git a/content/page/links/index.md b/content/page/links/index.md
index 95af8dfc..2570c0ae 100644
--- a/content/page/links/index.md
+++ b/content/page/links/index.md
@@ -2,12 +2,6 @@
title: 友情链接
slug: links
links:
- - title: 旧版本博客主页
- description: Based on WordPress.
- website: https://xianfei.ml:8001/
- - title: Xianfei's 摄影作品展
- description: 汇总了之前拍过的一些照片.
- website: https://xianfei.ml:8001/gallery/
- title: 北京信息科技大学
description: Beijing Information Science & Technology University (BISTU)
website: https://www.bistu.edu.cn/
diff --git a/content/page/search/index.en.md b/content/page/search/index.en.md
new file mode 100644
index 00000000..b2a5943d
--- /dev/null
+++ b/content/page/search/index.en.md
@@ -0,0 +1,13 @@
+---
+title: "Search"
+slug: "search"
+layout: "search"
+outputs:
+ - html
+ - json
+menu:
+ main:
+ weight: -60
+ params:
+ icon: search
+---
\ No newline at end of file
diff --git a/content/post/.DS_Store b/content/post/.DS_Store
index a27fdc65..0f24589f 100644
Binary files a/content/post/.DS_Store and b/content/post/.DS_Store differ
diff --git a/content/post/2022to2023/index.en.md b/content/post/2022to2023/index.en.md
new file mode 100644
index 00000000..a51c2f60
--- /dev/null
+++ b/content/post/2022to2023/index.en.md
@@ -0,0 +1,340 @@
+---
+author: Xianfei
+title: Goodbye 2022, Hello 2023 — Annual Summary, Travel Recap & Miscellaneous Thoughts
+date: 2023-02-01
+slug: 2022to2023
+image: 2023banner.webp
+color: "#f2745c"
+categories:
+ - Life
+ - 年度总结
+---
+
+
+
+## Preface
+
+Most of 2022 was spent under pandemic prevention measures based on a zero-case policy. As a result, I missed the last half-semester on campus (spent the entire second half of my senior year at home). I missed graduation ceremonies, graduation photos, and graduation trips...
+
+In the last month, for some well-known reasons, there was a sudden relaxation of restrictions, which exposed many people who had only heard of the virus in the previous three years to its "warmth" (physically, fever), abruptly putting an end to the pandemic era. As expected, I also tested positive.
+
+To make up for the regrets of 2022, I planned a trip after recovering. Therefore, this year's annual review is slightly different; it starts with a trip at the beginning of 2023. In the following text, I will try to avoid the topic of "pandemic" and focus on reflections triggered by certain events, rather than merely creating a "WeChat Moments compilation".
+
+Here's a background track: "A Bottle of Magic Potion for You" by Gao Wu Ren. It's a song I've recently grown fond of. Feel free to listen while reading.
+
+
+
+## Travel
+
+This trip was actually planned at the end of 2022. After sorting out many details and finalizing the schedule, we decided to start the journey on January 3rd, with the return date to be determined. The tentative plan was Beijing -> Changsha -> Shenzhen -> Guangzhou -> Guilin -> Beijing. We would take flights to and from Beijing, and high-speed trains for the rest of the journey.
+
+### Changsha—The Essence of Human Life
+
+> Premier Li Keqiang praised the street-stall economy and small-shop economy: It's the essence of human life, the vitality of China.
+
+When I arrived in Changsha, my first impression was: People in Changsha have a unique way of speaking Mandarin! Especially when I first went to get some Chayan Yuezhe tea, it reminded me of a friend—Gu Naicao (you can check [her Bilibili channel](https://space.bilibili.com/140075416)). She is from Hunan, and people in Changsha talk just like her!
+
+
+
+Changsha is renowned for its snacks and nightlife, as well as its famous tea beverages:
+
+{{< tianzige 茶颜悦色 >}}
+
+
+
+Chayan Yuezhe is quite delicious among milk teas. Unlike Coco, which uses black tea + hydrogenated vegetable oil + syrup, Chayan Yuezhe uses milk and tea and often adds milk froth, the kind you usually find on coffee. Some varieties also come with a dollop of whipped cream on top, making them both good-looking and tasty—definitely a must-try in Changsha.
+
+
+![](1.webp) ![](2.webp) ![](3.webp)
+
+You can find Chayan Yuezhe everywhere in Changsha; just walk a few steps and you'll find one. They even have fairs where you can buy some related merchandise.
+
+
+![](DSC00082.webp) ![](DSC00734.webp)
+
+![](DSC00729.webp) ![](DSC00732.webp)
+
+Additionally, my first time in Hunan meant I had to try Hunanese cuisine:
+
+{{< tianzige 湘菜 >}}
+
+> Hunan Cuisine, also known as Xiang cuisine, is a type of Chinese cooking known for its rich flavors, meticulous preparation, and emphasis on the original taste of the ingredients. It's one of the eight major cuisines of China and is considered a representative of Chinese cuisine overseas.
+
+For me, the main impression is that Hunan cuisine is super spicy 🌶! I thought I could handle spice in Beijing, but when eating Xiang cuisine I had to ask for "less spicy." Many places, however, will tell you they simply can't do that: after all, how do you make chili-fried pork with less chili?
+
+
+![](IMG_20230103_211959.webp)
+
+{{< tianzige 解放西路 >}}
+
+The scene in Changsha at two or three in the morning is something I've never seen in the North.
+
+
+![](DSC00325.webp) ![](DSC00349.webp) ![](DSC00312.webp)
+
+This is the legendary Jiefang West Road. The famous Bilibili documentary "Guarding Jiefang West" is about this place. This street is packed with restaurants, snack stalls, and bars, making it probably one of the most bustling places in Changsha at night.
+
+{{< tianzige 长沙文和友 >}}
+
+Wenheyou has indoor structures designed to look like outdoor street scenes, reminiscent of old Changsha. It's worth a visit; you can look it up on Xiaohongshu for more details.
+
+I invited a friend (Ya Shi) to have dinner together. We enjoyed the environment and food, and it was a good way to socialize and catch up with friends you haven't seen in a while.
+
+I also visited Changsha's famous Orange Island. There's no time to describe it in detail here, but the scenery is beautiful. I guess the meaning of travel is to find different beautiful things in different places.
+
+
+---
+
+**More to come...**
+
+### Shenzhen: Modern Prosperity of Reform and Opening-Up
+
+> "It was the spring of 1979, when an old man drew a circle on the shore of the South China Sea..."
+
+
+![](DSC04779.webp)
+
+![](DSC04763.webp) ![](DSC04628.webp) ![](DSC04756.webp)
+
+{{< tianzige 弘法寺 >}}
+
+> Hongfa Temple is a Han Buddhist temple located inside Xianhu Botanical Garden in Luohu District, Shenzhen, Guangdong, China, at the foot of Wutong Mountain.
+
+It happened to be drizzling 🌧 the day I went, which gave the place a rather fairyland-like feel, and the photos turned out great! Also, since the temple sits inside Xianhu Botanical Garden, I got to see many plants I can't see in the North.
+
+
+
+![](DSC05215.webp) ![](mmexport1673724522273.webp)
+
+![](mmexport1673724496089.webp) ![](mmexport1673724515886.webp) ![](mmexport1673725149852.webp)
+
+Oh, and near Nanyou I found a fairly small shop whose rice noodle rolls (changfen) are really delicious! Here's a video of the changfen being made.
+
+
+
+Their changfen is both cheap and tasty, far better than many famous chain brands.
+
+
+
+
+![](IMG_20230110_134152.webp) ![](IMG_20230110_133836.webp)
+
+
+### Guangzhou: Old Cantonese Character
+
+In Shenzhen I didn't feel a particularly strong Cantonese character: people around me spoke Mandarin, and the restaurants weren't much different from those in Beijing. Arriving in Guangzhou, however, felt completely different.
+
+On my first night in Guangzhou I headed to the old town, Yuexiu District, and walked from Shangxiajiu Pedestrian Street all the way to Yongqingfang. It was drizzling a little that day too; the wet pavement reflected the street lights, giving the place the kind of liveliness only an old town has.
+
+{{< tianzige 永庆坊 >}}
+
+
+![](DSC05633.webp) ![](DSC05713.webp) ![](DSC05577.webp)
+
+A junior schoolmate from Guangzhou happened to show me around, and locals there really do order food in Cantonese. We went to the nearby Donghu Restaurant (Yongqingfang branch), a time-honored Cantonese place. The roast squab was simply amazing: I was blown away the first time I tried it, and of the three or four squabs I ate on this trip, theirs was still the best!
+
+
+![](mmexport1673448176472.webp) ![](WechatIMG43.webp)
+
+![](DSC05604.webp) ![](DSC05600.webp) ![](mmexport1673448171068.webp) ![](DSC05609.webp)
+
+I also took some photos of her along the way, and they turned out really nice!
+
+
+![](mmexport1674398552773.webp) ![](mmexport1674398565231.webp)
+
+![](DSC05489.webp) ![](mmexport1674398522962.webp)
+
+The next day I went to Panyu to meet my friend Qiqi.
+
+`TODO: to be completed`
+Beijing Road; Canton Tower & the Pearl River; Yuexiu Park
+
+
+### Guilin: Famous Natural Scenery
+
+
+![](DSC06027.webp)
+
+`TODO: to be completed`
+
+## Daily
+
+For reasons everyone knows, daily life in 2022 wasn't all that interesting and mostly revolved around eating (I won't post the food photos here; if you're curious, go look at my WeChat Moments instead). After some thought, I'll just pick a few things to talk about.
+
+### Spring Outing
+
+{{< tianzige 玉渊潭 >}}
+
+Every March or April I go to Yuyuantan Park to see the cherry blossoms. This year was no exception, and neither, as usual, were the crowds.
+
+> The willows of the capital have just turned green, while the cherry blossoms at Yuyuantan are already in bloom.
+
+
+![](mmexport1649253343034.webp) ![](mmexport1649253346343.webp)
+
+![](2022-04-09-22-44-12-738.webp) ![](2022-04-09-22-45-00-724.webp) ![](mmexport1649253344680.webp)
+
+![](DSC05108.webp) ![](7dbf746cb517bc0ab5579afd2ea566f.webp) ![](DSC05123.webp)
+
+### Picnic
+
+Since dine-in was banned in Beijing for a stretch in May and June, my friends and I decided to go for a picnic! We hauled out a table and chairs, brought a portable gas stove, a divided hot pot, and all the ingredients, found a quiet little patch of woods, and got started. After the hot pot we pulled out a deck of cards and played for a while. It was great fun.
+
+
+![](DSC05858.webp) ![](DSC05866.webp)
+
+![](C0127.MP4_snapshot_00.04.565.webp) ![](0d002a94375bfcd889952f4c91ff0d9.webp) ![](C0128.MP4_snapshot_00.19.587.webp)
+
+### Volunteering
+
+Beijing hosted the 2022 Winter Olympics this year. During the winter break someone posted a supplementary sign-up for Winter Paralympics city volunteers in a group chat, apparently because some posts were still unfilled, so I signed up on a whim. Volunteering turned out to be a lot of fun!
+
+
+![](DSC04062.webp)
+
+The city volunteers' duties were fairly simple: guiding spectators, greeting them, and so on; we were essentially the Paralympics "atmosphere squad."
+
+
+![](DSC03954.webp) ![](DSC03991.webp) ![](DSC03736.webp)
+
+![](DSC04432.webp) ![](DSC03241.webp) ![](DSC03102.webp)
+
+
+## Thinking
+
+For a year-end summary I wanted to include something a bit more considered, rather than just recounting daily life. I really enjoy talking with friends, and I like digging into topics in depth.
+
+(I always forget what to put in this part, so I'll just write whatever comes to mind.)
+
+### Technology and Life
+
+As someone who works in tech, how technology can make our lives better is a question worth thinking about. Here are two small stories that make me all the more willing to go further down the road of computational photography and computer vision research.
+
+1. Computational photography and shooting the stars
+
+> Computational photography refers to digital image capture and processing techniques that rely on digital computation rather than optical processes. It can extend a camera's capabilities, introduce features that would be impossible with film photography, or reduce the cost or size of camera components.
+
+One autumn night in 2022, feeling a bit down, I went for a walk along the small river near my apartment. I happened to look up and saw the sky full of stars. It was beautiful, and I remembered that Google Camera has a Night Sight mode, so I took out my phone and tried to capture it. I didn't expect much, but the result was surprisingly close to what I had pictured (my photography experience has taught me that with the right settings, a camera at night can actually capture something even prettier than what the naked eye sees). Perhaps these are the small, unexpected delights that better algorithms bring into our lives.
+
+
+![](AGC_20221116_001728331.NIGHT.webp) ![](LieAll_20221105_005115034.NIGHT.webp)
+
+It reminded me of 2017, when I visited Australia in high school. On the flight from Sydney back to Beijing, somewhere roughly above the equator, I saw the most beautiful starry sky I have ever seen. But my phone captured nothing but black, and even with a camera the required exposure was too long (three seconds at ISO 6400 with the aperture wide open) to get a sharp frame; the results looked roughly like this. I remember thinking that with today's computational photography, it would have been an absolutely stunning photo.
+
+
+![](DSC02525.webp) ![](DSC02530.webp)
+
+Just like shooting stars: a photo that once required a professional camera locked onto a tripod can now be taken handheld with a phone. I believe that as the technology develops, anyone with a phone, with no professional knowledge of photography, will be able to casually take the photo they want, and the latitude that computational photography provides will leave plenty of room for adjustments in post.
+
+2. Computer vision and photo management
+
+I love taking photos; sometimes they are simply a record, a keepsake, like keeping a diary. Vae Xu Song's song "The Art of Photography" has a line that goes roughly, "As time flows on, who still keeps a diary? The negatives bear witness to the past." Negatives have been replaced by efficient digital formats like HEIF/HEIC, but that only helps me store these "diaries" more efficiently and compactly. I have more than one hundred thousand photos backed up on Google Photos, which I think has the best algorithms of any commercial product: years ago it already supported natural-language album search, face and object recognition, AI-based image editing, and more.
+
+A few nights ago, while pondering life, I picked up a stuffed animal that I keep on my bed. It suddenly struck me that it has been with me for many years, and I wanted to look back, so I opened Google Photos on my phone, searched for it, and scrolled through all the little moments we have shared.
+
+Back in 2017 I was out shopping with a good friend when we spotted this white bear. I thought it was adorable, so she bought it and gave it to me.
+
+
+![](WX20230205-140246@2x.webp)
+
+Later, in my final year of high school, I brought it to class with me; when I didn't feel like listening, I could use it as a pillow and nap.
+
+
+![](WX20230205-140119@2x.webp)
+
+Then in university I had to live in a dorm. Since the Shahe campus is so remote, I brought quite a few things from home to keep me company, including the white bear.
+
+
+![](WX20230205-140045@2x.webp)
+
+Together with the big dog plushie I had just bought from IKEA, it kept me company while I wrote code and ran experiments 🥺.
+
+
+![](WX20230205-140143@2x.webp)
+
+Compared with digital text records, photos used to seem harder to search and manage. With today's computer-vision-based technology, that is no longer much of a concern; if anything, photos record a scene more effortlessly and more vividly.
+
+### About MBTI and ENFP
+
+These are some thoughts from chatting with a few fellow ENFP friends; the key theme here is probably "extroversion" versus "introversion."
+
+1. Ideal Match
+
+Some people may have noticed that MBTI test results come with a compatibility chart describing which personality types tend to get along, and it suggests that introverts and extroverts together are an ideal match. But a question came up while chatting with a friend: if I am very outgoing and love socializing, can I really give an introverted partner enough sense of security?
+
+
+![](mbti.webp) ![](WechatIMG35.webp)
+
+2. What is the happy planet?
+
+This came from thinking about a question on Zhihu: [Why do I find so few ENFPs in China?](https://www.zhihu.com/question/528276616/answer/2637804333). Chatting with friends about it mostly produced a feeling of "I know exactly what you mean," but as for how to change things, bring outgoing people together, or make socializing more fun in general, we didn't really have any ideas.
+
+### Goals and Paths
+
+One question I have discussed with many friends is why they chose their major, or whether they even chose it themselves. It seems most people around me had no particularly clear idea of what they wanted to study before university. Many friends answered things like "my family picked it for me," "I heard this major has good job prospects," or "I listed every major and just took whatever my score landed on."
+
+I used to assume that the top students at university would be the ones most curious about and eager to explore their field, like the study rep in my class back then. At the end of the year I was chatting with a friend and discovered by accident that she ranked first or second in her major, yet her attitude toward it was mostly grumbling. So I asked why she didn't consider switching majors, or how she managed such good grades. She said she didn't know what she should study either; whatever she was told to learn, she could learn well.
+
+I often wonder why each of us ended up in our major. I have always felt that computing, or programming, is not for everyone, though I am not sure how defensible that claim is. It is true, though, that even within a computing major, I found when I started university that some classmates knew nothing about computers at all. Perhaps because I already knew the field fairly well before university, I was often surprised that things I considered common knowledge were unknown to some classmates.
+
+My own goals and direction after entering university were not especially clear either, but I do love writing code and exploring new things. During university I feel I dabbled in, tried out, and lightly studied a lot of technologies: from microcontrollers to operating systems, front-end, back-end, and databases, desktop and mobile GUI development, machine learning and image processing; I experimented with all of them and wrote a lot of code. I don't think either path, focusing on one technology with a clear career direction, or sampling everything like me and ending up without one, is inherently better; after all, plenty of people end up working in fields unrelated to their undergraduate degree.
+
+## Graduation, the Postgraduate Entrance Exam, and Graduate School
+
+`TODO: to be completed`
\ No newline at end of file
diff --git a/content/post/2023spring/index.en.md b/content/post/2023spring/index.en.md
new file mode 100644
index 00000000..779ada4f
--- /dev/null
+++ b/content/post/2023spring/index.en.md
@@ -0,0 +1,49 @@
+---
+author: Xianfei
+title: "[Photography-Landscape] Spring in Beijing 2023"
+date: 2023-04-02
+slug: 2023spring
+image: DSC00507.webp
+color: "#f2745c"
+categories:
+ - Life
+ - Photography
+---
+
+
+
+## Introduction
+
+Spring in Beijing is beautiful but brief. The wait stretches through a long, cold winter until spring finally arrives. Spring in Beijing seems to come somewhat abruptly; the wind that was harsh yesterday becomes much gentler today.
+
+Shooting Locations: Haitang Flower Creek, Olympic Park (outside Xinao Shopping Center), Bajia Countryside Park
+
+Equipment: Sony A7m2
+
+## Photos
+
+
+
+![](DSC00507.webp)
+
+![](DSC00712.webp) ![](DSC00432.webp)
+
+![](DSC00481.webp) ![](DSC01348_1.webp)
+
+![](DSC00851_1.webp) ![](DSC00850.webp)
+
+![](DSC01012.webp) ![](DSC01267_1.webp)
\ No newline at end of file
diff --git a/content/post/ar2022/index.en.md b/content/post/ar2022/index.en.md
new file mode 100644
index 00000000..1451d3fc
--- /dev/null
+++ b/content/post/ar2022/index.en.md
@@ -0,0 +1,97 @@
+---
+author: Xianfei
+title: AR Bing Dwen Dwen and Snow Rong Rong! Now you can take a selfie with Bing Dwen Dwen at home!
+date: 2022-02-09
+slug: ar2022
+image: ar2022.png
+color: "#5c9dd2"
+categories:
+ - AR/VR
+ - Web
+---
+
+
+Recently, Bing Dwen Dwen has become a sensation!
+With the opening of the Beijing Winter Olympics,
+The mascot "Bing Dwen Dwen" has instantly become a top-tier phenomenon.
+People can't help but exclaim,
+"Who wouldn't want a cute Bing Dwen Dwen!"
+Merchandise featuring Bing Dwen Dwen at the official Olympic flagship store keeps selling out,
+Both online and offline,
+Truly exemplifying the sentiment that "A Dwen is hard to buy."
+
+## Technical Background
+
+Augmented Reality (AR), also referred to as "Simulated Reality" or "Extended Reality" in relation to Virtual Reality (VR), involves calculating the position and angle of a camera feed and integrating image analysis technology. This allows the virtual world on the screen to interact with the real-world scene. This technology was proposed in 1990. With the increasing computational power of portable electronic devices, the applications of AR are becoming more extensive.
+
+## Demo Effect
+
+![](1.jpg)
+
+## How to Use
+
+### Android: Requires a phone that supports ARCore and has ARCore installed
+
+#### Check ARCore Support
+
+For information on ARCore support, you can check Google's official list of ARCore-supported devices (https://developers.google.com/ar/devices). If your device is not supported but has the Magisk framework installed, you can install the ARCore/Playground Patcher module in Magisk to force support.
+
+
+
+For Chinese phones running MIUI and similar systems, you need to enable Google services in the settings and update ARCore through the Xiaomi app store. Phones that ship with Google services by default should already have ARCore installed.
+
+#### Use Google App or Latest Version of Chrome to Open Web Page
+
+Website (Recommended to use VPN):
+https://xianfei.github.io/ar2022/
+
+
+Click to show website QR code
+
+
+
+Copy this address into the Google app or Chrome to access. (iOS users can directly visit this link)
+The effect is better when opened in the Google app. The demonstration in the video is done through the default browser (which does not support WebXR) and calls the Google APP for 3D AR interaction. Please download the Google app from the Play Store, otherwise, you will see the following prompt:
+
+
+
+Alternatively, you can use Chrome or other browsers that support WebXR technology. However, the effect is not as good as the Google app. WebXR is a very cutting-edge web frontend technology that few browsers support.
+
+![](xr.webp)
+
+### iOS: Simply visit the website in Safari
+
+https://xianfei.github.io/ar2022/
+
+
+Click to show website QR code
+
+
+
+### Click the Cube Icon to Enter AR Mode
+
+Click on the icon at the bottom right of the image to enter AR mode.
+
+
+
+If all goes well, you should be able to see Bing Dwen Dwen and Snow Rong Rong.
+
+## Coolapk Headlines
+
+I posted it on Coolapk last night before going to sleep. When I woke up, I found that it had made the headlines, and many netizens said they succeeded. Some also offered solutions. Those who didn’t succeed can look for useful information in the comments section of Coolapk.
+
+
+
+Here are some success screenshots shared by Coolapk users in the comments:
+
+
+
+## Technical Solutions
+
+Modeling: Blender. Display: model-viewer.
+
+![](p6.png)
+
+For AR display technology, you can refer to Google ARCore WebXR documentation (https://developers.google.com/ar/develop/webxr/model-viewer). It is easy to understand, and those with a slight understanding of frontend development will get it instantly.
+
+![](640.png)
+
+Here, all we need to do is finish the 3D model in Blender and export a glb file (plus a usdz file if you also want the iOS preview). If you don't know how to model, search on Bilibili, where many creators have shared model files. Note that materials involving reflection and refraction, such as glass, may not render correctly in AR, so it is recommended to use a semi-transparent solid-color material for Bing Dwen Dwen's shell.
+
+![](640-1.png)
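+
+For reference, a minimal sketch of how the exported files can be wired up (this is an illustrative example rather than the exact markup of the demo page; the file names are placeholders, and it assumes model-viewer is pulled in from npm instead of a script tag):
+
+```ts
+// Hedged sketch: register the <model-viewer> web component and point it at the exported assets.
+// "bingdwendwen.glb" / "bingdwendwen.usdz" are placeholder file names.
+import '@google/model-viewer';
+
+const viewer = document.createElement('model-viewer');
+viewer.setAttribute('src', 'bingdwendwen.glb');       // Android / WebXR / Scene Viewer path
+viewer.setAttribute('ios-src', 'bingdwendwen.usdz');  // iOS AR Quick Look path
+viewer.setAttribute('ar', '');                        // show the AR button
+viewer.setAttribute('ar-modes', 'webxr scene-viewer quick-look');
+viewer.setAttribute('camera-controls', '');
+document.body.appendChild(viewer);
+```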
+
+Project source code:
+
+https://github.com/xianfei/xianfei.github.io/tree/master/static/ar2022
\ No newline at end of file
diff --git a/content/post/bishe/index.en.md b/content/post/bishe/index.en.md
new file mode 100644
index 00000000..90d7a9e5
--- /dev/null
+++ b/content/post/bishe/index.en.md
@@ -0,0 +1,85 @@
+---
+author: Xianfei
+title: Let’s talk about my graduation design project -- SysMocap!
+date: 2022-08-29
+slug: bishe
+image: sysmocap.png
+color: "#034188"
+categories:
+ - Tech
+---
+
+The complete version of the thesis can be viewed on the WeChat public account "Xianfei".
+
+The English version of this project has been accepted into IEEE ISMAR 2022-Adjunct.
+
+As of the time of writing, this project has received 1.5k Stars on GitHub, and the introductory video has received 27k likes and 11k coins on Bilibili.
+
+[GitHub Repo](https://github.com/xianfei/SysMocap/)
+
+## Introduction
+
+Four years of university life have flown by, during which I've learned a lot. The graduation project is the last opportunity to independently implement a project I'm interested in during my undergraduate studies. After discussing with my supervisor, I decided to build a video-driven virtual avatar system, or virtual anchor system. This allows users to choose their virtual avatars and control them.
+
+![](Picture1.jpg)
+
+Of course, developing this system was a considerable challenge for me. It involved computer vision, computer graphics, desktop GUI development, network communication, and other areas. Before this, I had little experience with advanced techniques in pose estimation and motion capture. My supervisor provided me with a Kinect (a hardware device specifically used for motion capture) and introduced me to some open-source projects in this field. The goal was to drive the skeletal movements of a virtual avatar using open-source algorithms.
+
+I also learned about 3D graphic engines like Unreal Engine and Unity.
+
+## Technical Approach
+
+During development, I researched and compared multiple existing algorithms. Many algorithms required specialized computer setups to run, which was a limitation. I wanted my system to be accessible for everyone, bringing the joy of technology to all.
+
+While browsing through open-source projects, I stumbled upon an article on Google's TensorFlow Blog: [3D Pose Detection with MediaPipe BlazePose GHUM and TensorFlow.js](https://blog.tensorflow.org/2021/08/3d-pose-detection-with-mediapipe-blazepose-ghum-tfjs.html). It discussed a web-based 3D human joint detection method. This aligned perfectly with my vision for a highly portable and universal system. Using the Electron framework, I was able to develop a first-class cross-platform desktop GUI application.
+
+![](p2.png)
+
+Even better, this project, known as [kalidokit](https://github.com/yeemachine/kalidokit), provided source code, including its pipeline as illustrated below:
+
+![](kalidokit-pipeline.png)
+
+I then proceeded to study Mediapipe and kalidokit.
+
+## Innovation
+
+While Mediapipe and kalidokit took me a huge step forward, they only supported VRM type virtual avatars. More common formats like fbx, glb/gltf were not supported.
+
+After implementing the tasks outlined in the project description, I wanted to add some practical and cutting-edge features. With virtual anchors gaining popularity, the virtual avatars generated by this software needed to be applied in real-time streaming. I designed an HTTP and WebSocket-based system for virtual avatar and action forwarding, and I set up a dedicated interface for OBS streaming software to stream to mainstream platforms.
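+
+As a rough illustration of the forwarding idea (a sketch under assumptions, not SysMocap's actual code; the port number and message shape are made up), the capture side can broadcast each solved pose frame over WebSocket, and an OBS browser source or a remote page applies it to its own copy of the avatar:
+
+```ts
+// Illustrative sketch, not SysMocap's actual implementation: broadcast solved pose frames
+// so that an OBS browser source or a WebXR page can animate its own copy of the avatar.
+import { WebSocketServer, WebSocket } from 'ws';
+
+const wss = new WebSocketServer({ port: 8080 }); // arbitrary example port
+
+// Called by the capture window once per solved frame (e.g. with kalidokit output).
+export function broadcastPose(pose: Record<string, { x: number; y: number; z: number; w: number }>): void {
+  const msg = JSON.stringify({ type: 'pose', pose, t: Date.now() });
+  for (const client of wss.clients) {
+    if (client.readyState === WebSocket.OPEN) client.send(msg);
+  }
+}
+```
+
+Sending only bone rotations keeps the stream tiny compared with forwarding rendered video, which is what makes the AR/VR viewing described below practical.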
+
+As we move closer to a metaverse era, AR/VR/MR (Augmented Reality, Virtual Reality, and Mixed Reality) will become mainstream. I designed a WebXR-based virtual avatar streaming solution. Users only need to access the system on WebXR-compatible devices to view virtual avatars and their actions in their surroundings in real-time. Imagine a science fiction scenario: you open an AR-enabled phone or put on VR/MR glasses, and the virtual avatar appears right in front of you.
+
+Main tasks include:
+
+1. Support for driving different types of virtual avatars.
+2. Develop a visually pleasing and easy-to-use GUI.
+3. OBS streaming support.
+4. AR/VR display support.
+
+## Challenges
+
+### Driving Different Types of Virtual Avatars
+
+Different virtual avatars often have different skeletal structures and naming conventions. One of the major challenges was learning about the skeletal systems of 3D models.
+
+![](p3.png)
+
+This may be the first virtual anchor software that supports a variety of different virtual avatar formats and skeletal structures.
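+
+To make the problem concrete, here is a hedged sketch of the kind of per-format bone-name lookup this implies (the format names and bone names below are illustrative, not SysMocap's actual tables):
+
+```ts
+// Illustrative sketch: the same logical bone is named differently in different avatar formats,
+// so applying a solved rotation means translating the logical name first.
+type Quaternion = { x: number; y: number; z: number; w: number };
+
+const boneNameMap: Record<string, Record<string, string>> = {
+  vrm:    { hips: 'hips', leftUpperArm: 'leftUpperArm' },              // VRM humanoid bone names
+  mixamo: { hips: 'mixamorigHips', leftUpperArm: 'mixamorigLeftArm' }, // typical FBX rig names
+};
+
+function applyRotation(
+  format: 'vrm' | 'mixamo',
+  bones: Map<string, { quaternion: Quaternion }>, // e.g. gathered from a three.js scene graph
+  logicalBone: string,
+  q: Quaternion,
+): void {
+  const bone = bones.get(boneNameMap[format][logicalBone]);
+  if (bone) bone.quaternion = q; // a real implementation would slerp toward q for smoothness
+}
+```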
+
+### AR Display
+
+As in my earlier article on the AR Bing Dwen Dwen demo, I wanted to bring AR display technology into this project as well.
+
+![](p4.png)
+
+This may be the first virtual anchor software that can be used for AR/VR streaming.
+
+## Conclusion
+
+Through this project, I gained valuable knowledge in computer graphics—such as Euler angles, rotation matrices, quaternions, and spherical interpolation.
+
+A friend mentioned that she saw an article about my project on a WeChat public account she follows. Their review of my project was "mind-blowing" 😂
+
+The idea was to do something fun and unique. I wanted my project to be something that not everyone in my class or even my field could easily replicate.
+
+![](p5.png)
\ No newline at end of file
diff --git a/content/post/dice/index.en.md b/content/post/dice/index.en.md
new file mode 100644
index 00000000..4fa16dad
--- /dev/null
+++ b/content/post/dice/index.en.md
@@ -0,0 +1,41 @@
+---
+author: Xianfei
+title: Probability Calculations When Playing Dice While Drinking
+date: 2022-10-02
+slug: dice
+color: '#bb3718'
+image: banner.png
+categories:
+ - Life
+---
+
+## Introduction
+
+During the National Holiday, a junior friend invited me to a party, which was essentially just drinking, playing games, singing, and eating. The final activity was drinking, and we played a dice game to guess the points. You can look up the specific rules online; [this is the general idea](https://zhinan.sogou.com/guide/d1610025575.htm).
+
+For this game, we only had 30 dice, and 7 of us participated in the activity, which meant that each person could only use 4 dice.
+
+## Preliminary Probability Calculations
+
+In the gaming segment, we thought about calculating probabilities. However, it wasn't the right time and place to think about this kind of problem, so we simply concluded that it's "not easy to calculate." But to answer the question, "When is the best time to call 'open' to maximize the winning rate?" we still have to use probability. Indeed, the next day I started pondering this question. Since my math skills are not that great, I used computer code to simulate the whole process, and the results are as follows:
+
+![Calculation Result](1.webp)
+
+That is to say, in this scenario, for a call of "five 1s," the winning rate for calling "open" is about 49%; for calls like "ten of x" (with 1 < x ≤ 6), the corresponding winning rates can be read off the chart above.
+
+Source code: [https://github.com/xianfei/xianfei.github.io/blob/master/content/post/dice/dice.html](https://github.com/xianfei/xianfei.github.io/blob/master/content/post/dice/dice.html)
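+
+For readers who only want the idea, here is a minimal Monte-Carlo sketch of the same kind of estimate (the full script is the dice.html linked above; this sketch assumes the common rule that 1s count as wild for any non-1 call, which may differ slightly from the exact rules we played by):
+
+```ts
+// Minimal Monte-Carlo sketch (not the dice.html script): estimate the probability that a call of
+// "count dice showing face" is actually satisfied across all dice on the table.
+function callHolds(count: number, face: number, totalDice = 28, trials = 1_000_000): number {
+  let met = 0;
+  for (let t = 0; t < trials; t++) {
+    let hits = 0;
+    for (let i = 0; i < totalDice; i++) {
+      const d = 1 + Math.floor(Math.random() * 6);
+      if (d === face || (face !== 1 && d === 1)) hits++; // assumed rule: 1s are wild unless 1s are called
+    }
+    if (hits >= count) met++;
+  }
+  return met / trials;
+}
+
+console.log(callHolds(5, 1).toFixed(3));  // e.g. a call of "five 1s" (7 players x 4 dice = 28 dice)
+console.log(callHolds(10, 3).toFixed(3)); // e.g. a call of "ten 3s"
+```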
+
+## Conclusion
+
+In truth, playing dice while drinking is just for fun. If you find someone you're interested in, it's fine to have a few more drinks yourself.
+
+Cover photo: https://unsplash.com/photos/pQyTChJwEDI
\ No newline at end of file
diff --git a/content/post/gpt4/index.en.md b/content/post/gpt4/index.en.md
new file mode 100644
index 00000000..a30c99d0
--- /dev/null
+++ b/content/post/gpt4/index.en.md
@@ -0,0 +1,250 @@
+---
+author: Xianfei
+title: "[Translated] GPT-4 Architecture, Infrastructure, Training Dataset, Costs, Vision, MoE"
+description: Translated from English to Chinese
+date: 2023-07-11
+slug: gpt4
+image: banner.webp
+categories:
+ - Tech
+---
+
+Original: https://www.semianalysis.com/p/gpt-4-architecture-infrastructure
+
+Title: GPT-4 Architecture, Infrastructure, Training Dataset, Costs, Vision, MoE
+
+Subtitle: Demystifying GPT-4: The engineering tradeoffs that led OpenAI to their architecture.
+
+
+## 前言 & 综述
+
+OpenAI 保持 GPT-4 架构的神秘感及闭源并不是因为怕人们担心对人类存在一些生存风险,而是因为他们构建的东西是可复制的。 事实上,我们预计 Google、Meta、Anthropic、Inflection、Character、腾讯、字节跳动、百度等在短期内都将拥有与 GPT-4 一样强大的模型。
+
+不要误会我们的意思,OpenAI 拥有令人惊叹的工程,他们构建的东西令人难以置信,但他们得出的解决方案并不神奇。 这是一个优雅的解决方案,具有许多复杂的权衡。 做大只是战斗的一部分。 OpenAI 最持久的护城河是他们在全球范围内被广泛使用(拥有大量的数据)、领先的工程人才,并且可以通过未来的模型继续领先于其他人。
+
+我们从许多来源收集了大量有关 GPT-4 的信息,今天我们想分享一下。 这包括模型架构、训练基础设施、推理基础设施、参数计数、训练数据集组成、token数、层数、并行策略、多模态视觉适配、不同工程权衡背后的思维过程、独特的实施技术以及它们如何缓解矩形模型在推理上的巨大瓶颈。
+
+研究 GPT-4 最有趣的方面是,理解他们为什么做出某些架构上的决定。
+
+此外,我们将大致猜测 A100 上 GPT-4 的训练和推理成本,以及如何在下一代模型架构中与 H100 计算卡的情况下进行进一步的扩展。
+
+首先,从 GPT-3 到 GPT-4,OpenAI 希望扩展 100 倍,但成本是很大的问题。 Dense Transformers 模型很难进一步扩大参数量1。 Dense Transformers 是 OpenAI GPT-3、Google PaLM、Meta LLAMA、TII Falcon、MosaicML MPT 等使用的模型架构。 我们可以轻松说出 50 家使用相同架构来训练LLMs(大语言模型)的公司。 这是一个很好的方法,但它在**扩展方面存在缺陷**。
+
+从训练成本的角度来看,可以参阅我们在 GPT-4 之前关于即将推出的密集(同前文中的Dense)模型**The AI Brick Wall**的训练成本讨论1。 在那里,我们揭示了 OpenAI 在 GPT-4 架构方面所做的 high-level 工作以及各种现有模型的训练成本。
+
+但是在过去的 6 个月里,我们意识到**训练成本并不是一个决定性因素**(irrelevant)。
+
+当然,从表面上看,花费数千万甚至数亿美元的计算时间来训练模型似乎很疯狂,但这对于这些公司来说是微不足道的。 它实际上是一个资本支出(Capex line)项目,规模扩大可以持续带来更好的结果。 唯一的限制因素是将计算扩展到一个时间尺度,以便人类可以得到反馈并修改架构。
+
+未来几年,谷歌、Meta、OpenAI/微软等多家公司将在价值超过千亿美元的超级计算机上训练模型。 Meta 每年在“元宇宙(Metaverse)”上烧了超过 160 亿美元,Google 每年在各种永远不会实现成果的项目上浪费 100 亿美元。 亚马逊在 Alexa 上损失了超过 50 亿美元。 加密货币在毫无价值的情况下浪费了超过 1000 亿美元
+
+这些公司和其他组织可以而且将会花费超过一千亿美元来创建可以训练单个大规模模型的超级计算机。 然后可以通过多种方式将这些大型模型产品化。这项工作将在多个国家和公司重复进行。 这是新的军备竞赛。 以前的浪费与现在的区别在于,人工智能可以在取代部分人类的工作上短期内带来有形的价值。
+
+扩展人工智能(真正的人工智能砖墙)的更重要问题是**推理**。 目标是将训练计算与推理计算分离。 这就是为什么训练 Chinchilla 对于任何将要部署的模型来说都是最佳的。 这就是为什么要进行稀疏模型架构; 并不是每个参数在推理过程中都会被使用到。
+
+真正的棘手的问题这些模型的用户**使用的成本太高**。推理成本是训练成本的数倍。这就是OpenAI在模型架构和基础设施方面的创新目标。
+
+大型模型的推理是一个多变量问题,在这个问题中,密集模型的模型大小会成为致命的问题。我们已经在这里2详细讨论了关于边缘计算的问题,但对于数据中心来说,问题是非常相似的。简要概述就是,设备永远无法拥有足够的内存带宽来让大型语言模型达到一定程度的吞吐量。即使它们拥有足够的带宽,边缘计算上硬件计算资源的利用率也会非常低。
+
+在数据中心和云端,利用率是至关重要的。Nvidia因软件优越性而受到赞誉的一半原因是,在GPU的几代生命周期中,Nvidia不断更新底层软件,通过更智能地在芯片内、芯片之间以及内存之间移动数据,从而提高FLOPS利用率。
+
+在大多数当前的应用场景中,LLM推理的作用是作为实时助手,这意味着它必须实现足够高的吞吐量,以便用户实际使用。人类平均每分钟阅读速度约为250个单词,但有些人的阅读速度高达每分钟1000个单词。这意味着你需要每秒输出至少8.33个 token (tokens),但每秒输出33.33个 token 才能覆盖所有极端情况。
+
+由于内存带宽要求,即使在最新的Nvidia H100 GPU服务器上,一个拥有万亿参数的密集型模型也无法实现这种吞吐量。每生成一个 token ,都需要将每个参数从内存加载到芯片上。然后将生成的 token 输入到提示中,并生成下一个 token 。此外,还需要额外的带宽来为注意力机制中的KV缓存进行流式传输。
+
+![](1.webp)
+
+> 图注:该图表假设由于无法融合每个操作、注意力机制所需的内存带宽和硬件开销等低效问题,等同于参数读取。实际上,即使使用像英伟达的FasterTransformer库这样的“优化”库,总开销甚至更大。
+
+上图展示了为了以足够高的吞吐量为单个用户提供LLM推断服务所需的内存带宽。它表明,即使是8个 H100 也无法在每秒33.33个 token 的速度下为1万亿参数的密集模型提供服务。此外,在每秒20个 token 的速度下,8xH100的FLOPS利用率仍然低于5%,导致推断成本极高。实际上,对于目前的8路张量并行H100系统,推断约束在约3000亿前馈参数左右。
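+
+(译注 / translator's note: a rough back-of-envelope check of the claim above, using approximate public H100 specs; the numbers and code below are mine, not the original article's.)
+
+```ts
+// Rough sanity check of "8x H100 cannot reach 33.3 tokens/s for a 1T-parameter dense model".
+const params = 1.0e12;      // 1T dense parameters
+const bytesPerParam = 2;    // FP16 weights
+const hbmPerGpu = 3.35e12;  // ~3.35 TB/s HBM bandwidth per H100 SXM (approximate public spec)
+const gpus = 8;
+
+// Each generated token must stream every weight from HBM once (ignoring KV cache and other overheads).
+const secondsPerToken = (params * bytesPerParam) / (hbmPerGpu * gpus);
+console.log((1 / secondsPerToken).toFixed(1)); // about 13 tokens/s, well short of 33.3
+```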
+
+然而,OpenAI使用A100实现了人类阅读速度,并使用了超过1万亿参数的模型,而且他们以每1000个 token 仅0.06美元的低价广泛提供。这是因为它是稀疏的,即并非每个参数都被使用。
+
+关于GPT-4模型架构、训练基础设施、推断基础设施、参数数量、训练数据集组成、 token 数量、层数、并行策略、多模态视觉编码器、不同工程权衡背后的思考过程、独特实施技术以及如何减轻与巨型模型推断相关的一些最大瓶颈的问题,我们不再拖泥带水,直接进入主题。
+
+## 模型架构 Model Architecture
+
+GPT-4的大小是GPT-3的10倍以上。我们认为它在120个层次上拥有约1.8万亿个参数,而GPT-3的参数约为1750亿个。
+
+OpenAI通过使用专家混合(MoE, mixture of experts)模型来保持成本合理。如果您不熟悉MoE,请阅读我们6个月前关于广义GPT-4架构和训练成本的文章。
+
+此外,OpenAI在其模型中使用了16个专家模型,每个专家模型的MLP参数约为1110亿个。其中有2个专家模型被路由到每个前向传递中。
+
+虽然文献中谈到了很多高级路由算法,用于选择将每个 token 路由到哪个专家,但OpenAI的路由算法据称对于当前的GPT-4模型来说相当简单。此外,注意力(此处指Transformer的Attention)约有550亿个共享的参数。
+
+每个前向传递推断(生成1个 token )仅使用约2800亿个参数和约560 TFLOP。这与纯密集模型每个前向传递所需的约1.8万亿个参数和约3700 TFLOP形成鲜明对比。
+
+## 数据集构成 Dataset Composition
+
+OpenAI 在大约 13 万亿个 tokens 的数据集上训练了 GPT-4。其中 CommonCrawl RefinedWeb 包含大约 5 万亿个高质量 tokens。作为参考,Deepmind 的 Chinchilla 和 Google 的 PaLM 模型分别在大约 1.4 万亿 tokens 和大约 0.78 万亿 tokens 上进行了训练。甚至据称 PaLM 2 也在大约 5 万亿 tokens 上进行了训练。
+
+> CommonCrawl 是一个非营利组织,它定期从互联网上抓取和存储网页数据。它提供一个包含数十亿个网页的大型公共数据集,这些网页来自于全球各地的多种语言。这个数据集对于研究人员、开发者和企业来说非常有价值,因为它可以用于训练机器学习模型,例如自然语言处理(NLP)任务,以及进行其他大数据分析。在本文中,CommonCrawl 用于提供大量高质量 tokens,以便在训练 GPT-4 时使用。
+
+这个数据集并非 13 万亿个独特的 tokens。相反,由于缺乏高质量 tokens,数据集包含了多个时期。文本数据有 2 个时期,代码数据有 4 个时期。有趣的是,这远远低于 Chinchilla 的最优值,表明需要在双倍的 token 数量上训练模型。这表明网络上缺乏易于获取的 tokens。实际上存在着 1000 倍以上的高质量文本 tokens,甚至还有更多的音频和视觉 tokens,但获取它们并不像网络抓取那样简单。
+
+还有来自 ScaleAI 和内部的数百万行指令微调数据。不幸的是,我们没有找到关于他们 RLHF 数据的更多信息。预训练阶段有 8k 的上下文长度(seqlen)。GPT-4 的 32k seqlen 版本是基于预训练后的 8k 进行微调的。
+
+在集群上,批量大小在数天内逐渐增加,但最后,OpenAI 使用了一个批量大小为 6000 万!当然,由于并非每个专家都能看到所有 tokens,这只是每位专家的 750 万 tokens 的“仅仅”一个批次大小。
+
+## 并行策略 Parallelism Strategies
+
+能够利用所有A100 GPU的并行策略至关重要。他们利用了8路张量并行,因为这是NVLink的极限。除此之外,我们听说他们正在使用15路流水线并行。从理论上讲,考虑到数据通信与计算时间,这是过多的流水线,但如果他们受到内存容量的限制,那么这是有道理的。
+
+在纯粹的流水线+张量并行的情况下,每个GPU在FP16下仅参数就需要约30GB。一旦加上KV缓存和开销,如果OpenAI的大部分GPU都是40GB A100,那么从理论上讲这是有道理的。他们可能使用了ZeRo阶段1。他们可能使用了块级FSDP(全分布式状态并行)或混合共享数据并行。
+
+> ZeRo(Zero Redundancy Optimizer,零冗余优化器)是一种用于降低深度学习训练中内存需求的技术。ZeRo Stage 1 是这个优化策略的第一阶段。在这个阶段,优化器状态和梯度被分布式存储在多个 GPU 或设备上,从而减少了每个设备的内存占用。这使得在有限的硬件资源下训练更大的模型成为可能。
+
+> FSDP(Fully Sharded Data Parallel,全分布式状态并行)和混合共享数据平行(Hybrid Shared Data Parallel)都是用于加速深度学习训练的并行计算策略,但它们的实现和关注点有所不同。
+>
+>FSDP 是一种将模型参数、优化器状态和梯度在多个设备(如 GPU)之间分片的技术。通过将这些状态分布在不同的设备上,FSDP 可以降低每个设备的内存需求,从而使得在有限的硬件资源下训练更大的模型成为可能。FSDP 还可以与其他并行策略(如模型并行和流水线并行)结合使用,以进一步提高训练速度和扩展性。
+>
+>混合共享数据平行(Hybrid Shared Data Parallel)是一种结合了数据并行和模型并行的策略。在数据并行中,每个设备都有一个完整的模型副本,并在不同的数据子集上进行训练。在模型并行中,模型被划分为多个部分,每个部分分布在不同的设备上。混合共享数据平行旨在充分利用这两种策略的优点,通过将模型参数和计算在多个设备上共享,来提高训练速度和扩展性。
+>
+>总之,FSDP 和混合共享数据平行都是为了加速深度学习训练而设计的并行计算策略,但它们关注的优化方向和实现方式有所不同。FSDP 主要关注降低内存需求,而混合共享数据平行则关注在多个设备上共享模型参数和计算。
+
+至于为什么他们没有使用完整模型的FSDP,可能是因为较高的通信开销。虽然OpenAI在大多数节点之间具有高速网络连接,但可能并非所有节点之间都具有这种连接。我们认为至少有一些集群的连接带宽远低于其他集群。
+
+我们不明白他们如何避免在如此高的流水线并行中的每个批次都有巨大的泡沫。他们可能只是承担了成本。
+
+![](2.webp)
+
+## 训练成本 Training Cost
+
+OpenAI的GPT-4训练浮点运算次数约为2.15e25,在大约25,000个A100上进行90到100天,其MFU(机器利用率)约为32%至36%。这种极低的利用率部分原因是由于大量的故障,需要从检查点重新启动。上述提到的泡沫成本非常高。
+
+另一个原因是在如此多的GPU之间进行all-reduce操作非常耗费资源。这尤其是在我们怀疑集群实际上是由一堆较小的集群组成,它们之间的网络连接较弱的情况下。例如,在集群的各个部分之间有800G/1.6T的非阻塞连接,但这些部分之间只有200G/400G的连接。
+
+> 文中提到的all-reduce指的是一种并行计算中常用的通信操作,它在分布式系统中的多个节点之间进行全局归约操作。在深度学习训练过程中,all-reduce操作通常用于在多个GPU或计算节点之间同步参数更新,以便在训练大型模型时确保各个节点的模型参数保持一致。这种操作涉及在所有参与节点之间传输和聚合数据,因此在涉及大量GPU的情况下可能变得非常耗费资源和时间。
+
+如果他们在云中的成本约为每小时1美元的A100,那么仅此次运行的训练成本就约为6300万美元。这还不包括所有的实验、失败的训练运行以及其他成本,如数据收集、RLHF、员工等。由于这些因素,真正的成本要高得多。此外,这意味着您需要有人购买芯片/网络/数据中心,承担资本支出,并将其租给您。
+
+如今,预训练可以在约8,192个H100上进行约55天,每小时2美元的H100成本为2150万美元。我们相信到今年底将有9家公司拥有更多的H1003。并非所有这些公司都会将所有设备用于单次训练运行,但那些这样做的公司将拥有更大的模型。Meta到今年底将拥有超过100,000个H100,但相当数量的H100将分布在他们的数据中心进行推理。他们最大的单个集群仍将远超25k个H100。
+
+到今年底,许多公司将拥有足够的计算资源来训练一个GPT-4大小的模型。
+
+## 权衡 之 混合专家模型 Mixture of Expert Tradeoffs
+
+MoE是在推理过程中减少参数数量的一个很好的方法,同时仍然可以增加参数数量,这是在每个训练token中编码更多信息所必需的。这是非常必要的,因为获取足够多的高质量token非常困难。如果OpenAI真的要尝试达到Chinchilla最优,他们本来需要在2倍的token上进行训练。
+
+话虽如此,OpenAI做出了多种权衡。例如,MoE在推理过程中非常难以处理,因为在每个token生成过程中,并非模型的每个部分都会被利用。这意味着当其他部分被使用时,某些部分可能处于休眠状态。在为用户提供服务时,这会严重影响利用率。
+
+研究人员已经证明,使用64到128个专家比使用16个专家能获得更低的损失,但这仅仅是研究。选择较少专家的原因有很多。OpenAI选择16个专家的一个原因是,更多的专家很难在许多任务中泛化。更多的专家也可能更难以实现收敛。在如此大规模的训练过程中,OpenAI选择在专家数量上更为保守。
+
+此外,使用较少的专家还有助于降低他们的推理基础设施的需求。在转向专家混合推理架构时,存在各种困难的权衡。在讨论OpenAI面临的问题以及他们做出的选择之前,让我们先从LLMs的推理基本权衡开始。
+
+## 权衡 之 推理 Inference Tradeoffs
+
+在开始之前,我们想顺便指出,我们与之交谈过的每一家LLM公司都认为Nvidia的FasterTransformer推理库相当糟糕,而TensorRT更是糟糕透顶。无法对Nvidia的模板进行修改意味着人们需要从头开始创建自己的解决方案。对于正在阅读本文的Nvidia的人员来说,你们需要尽快解决这个问题,以便在LLM推理方面取得优势,否则实际上将成为一个开放的工具,这样可以更容易地添加第三方硬件支持。一波巨型模型即将来临。如果在推理方面没有软件优势,而且还需要手写内核,那么AMD的MI3004和其他硬件的市场将会更大。
+
+在批量大小(同时服务的用户数量)维度和所使用芯片数量方面,大型语言模型推理存在3个主要权衡。
+
+1. **延迟** - 模型必须在合理的延迟内做出响应。人们在聊天应用中等待输出开始流式传输之前,不希望等待太长时间。预填充(输入 tokens )和解码(输出 tokens )需要不同的时间来处理。
+
+2. **吞吐量** - 模型必须每秒输出一定数量的 tokens 。大约每秒30个 tokens 是人类所需的。对于各种其他用例,较低和较高的吞吐量也可以接受。
+
+3. **利用率** - 运行模型的硬件必须实现高利用率,否则成本会太高。虽然可以通过使用更高的延迟和更低的吞吐量将更多的用户请求组合在一起,从而实现更高的利用率,但这会增加难度
+
+LLM推理主要是关于平衡两个主要方面:内存带宽和计算能力。用最简化的术语来说,每个参数都需要被读取,并且与之相关的有2个FLOPs。因此,大多数芯片的比例(H100 SXM只有3TB/s的内存带宽,但有2000 TFLOP/s的FP8计算能力)对于批量大小为1的推理来说是完全不平衡的。如果只有一个用户在使用,批量大小为1,那么将每个参数流式传输到每个 token 生成所需的内存带宽将主导推理时间。计算时间几乎为零。
+
+要将大型语言模型有效地扩展到许多用户,批量大小必须大于1。多个用户分摊参数读取成本。例如,在批量大小为256或512的情况下,每读入一个字节的内存,就有512 FLOP/s或1024 FLOP/s。这个比例更接近H100的内存带宽与FLOPS之间的关系。这有助于实现更高的利用率,但同时也带来了更高的延迟。
+
+许多人认为内存容量是LLM推理的主要瓶颈,因为模型的大小可以适应多个芯片,但这是错误的。虽然大型模型需要多个芯片进行推理,较高的内存容量导致它们适应更少的芯片,但实际上,最好使用比所需容量更多的芯片,以便降低延迟,提高吞吐量,并使用更大的批量大小以实现更高的利用率。
+
+谷歌在他们的PaLM推理论文中展示了这些权衡。然而,值得注意的是,这是针对像PaLM这样的密集模型,而不是像GPT-4这样的稀疏模型。
+
+![](3.webp)
+
+如果一个应用程序要求尽可能低的延迟,我们需要应用更多的芯片并以尽可能多的方式对模型进行划分。较低的延迟通常可以通过较小的批量大小来实现,但较小的批量大小也会导致较差的MFU(利用率),从而导致每个 token 的总成本(以芯片秒或美元计)更高
+
+如果一个应用程序需要离线推理并且延迟不是一个问题,主要目标是最大化每个芯片的吞吐量(即,最小化每个 token 的总成本)。提高批量大小是最有效的方法,因为较大的批量通常会导致更好的MFU(利用率),但是随着批量大小的增加,某些对于小批量大小不高效的划分策略变得高效。
+
+> MFU指的是Memory Functional Unit(内存功能单元),它是计算设备(如GPU或ASIC芯片)中负责处理数据存储和访问的部分。MFU的利用率是指这些内存功能单元在处理任务时的效率。提高MFU利用率意味着计算设备能更有效地处理数据,从而提高性能。
+
+更多的芯片和更高的批量大小是最便宜的,因为它们提高了利用率,但这也引入了第三个变量,网络时间。将模型分割到不同芯片上的某些方法在延迟方面更有效,但会在利用率方面产生权衡。
+
+权重加载部分的内存时间和非注意力计算时间与模型大小成正比,与芯片数量成反比。然而,对于给定的划分布局,芯片间通信所需的时间减少得较慢(或者根本不减少),因此随着芯片数量的增加,它变得越来越重要。
+
+虽然我们今天只会简要讨论这个问题,但应该指出的是,随着批量大小和序列长度的增加,KV缓存的内存需求呈爆炸式增长。
+
+如果一个应用程序需要生成具有长注意力上下文的文本,它会显著增加推理时间。对于一个具有多头注意力的500B+模型,注意力KV缓存变得很大:对于批量大小为512且上下文长度为2048的情况,KV缓存总计为3TB,这是模型参数大小的3倍。芯片的计算核心在此期间基本上是空闲的,因为它需要从片外内存加载这个KV缓存。
+
+较长的序列长度对内存带宽和内存容量的影响尤为恶劣。OpenAI的16k序列长度的GPT 3.5 Turbo和32k序列长度的GPT 4由于内存限制无法使用较大的批量大小,因此价格更高。较低的批量大小导致较低的硬件利用率。此外,随着序列长度的增加,KV缓存膨胀。KV缓存无法在用户之间共享,因此需要单独读取内存,进一步限制内存带宽。稍后会有更多关于MQA(Multi-Query Attention)的内容。
+
+## GPT-4 Inference Tradeoffs And Infrastructure
+
+以上所说到的困难在 GPT-4 推理中都会遇到,但是专家混合(MoE)模型架构的引入会带来新的困难。每个 token 生成的前向传播可以路由到不同的专家集合。这在吞吐量、延迟和更高批量大小的利用率之间达到的权衡中产生了问题。
+
+OpenAI 的 GPT-4 有 16 个专家,每个前向传播有 2 个。这意味着,如果批量大小为 8,每个专家的参数读取可能仅为批量大小 1。更糟糕的是,这可能意味着 1 个专家的批量大小可能为 8,而其他专家的批量大小可能为 4、1 或 0。每一次 token 生成,路由算法都会将前向传播发送到不同的方向,导致 token 到 token 延迟以及专家批量大小的显著变化。
+
+推理基础设施是 OpenAI 选择更少专家数量的主要原因。如果他们选择更多的专家,内存带宽将进一步限制推理。OpenAI 在他们的推理集群上经常达到 4k+ 的批量大小,这意味着即使在专家之间进行最佳负载平衡,专家们的批量大小也只有约 500。实现这一点需要非常大量的使用。
+
+我们了解到,OpenAI 在 128 个 GPU 的集群上运行推理。他们在多个数据中心和地理位置拥有多个这样的集群。推理是在 8 路张量并行和 16 路流水线并行下完成的。每个包含 8 个 GPU 的节点只有约 130B 参数,即每个 GPU 在 FP16 下不到 30GB,在 FP8/int8 下不到 15GB。这使得推理可以在 40GB A100 上运行,只要 KV 缓存大小在所有批次中不会过大。
+
+包含各种专家的单独层不会在不同节点之间拆分,因为这会使网络流量过于不规律,而在每个 token 生成之间重新计算 KV 缓存的成本会非常高。任何未来 MoE 模型扩展和条件路由的最大困难是如何处理绕过 KV 缓存的路由。
+
+层的数量是 120,所以在 15 个不同的节点之间进行划分是简单的,但是因为第一个节点需要进行数据加载和嵌入,所以在推理集群的头节点上放置较少的层是有意义的。此外,还有一些关于投机解码的传言,我们稍后会讨论,但我们不确定是否相信它们。这也解释了为什么头节点需要包含更少的层。
+
+## GPT-4 推理费用 GPT-4 Inference Cost
+
+GPT-4的成本是175B参数Davinci模型的3倍,尽管其前馈参数仅为1.6倍。
+
+我们认为,对于128个A100s进行GPT-4 8k seqlen推理,每1k tokens的成本为0.0049美分;而对于128个H100s进行GPT-4 8k seqlen推理,每1k tokens的成本为0.0021美分。需要注意的是,我们假设较高的利用率,并保持批量大小较高。
+
+这可能是一个错误的假设,因为很明显OpenAI有时利用率非常低。我们认为OpenAI在低谷时段关闭集群,并将这些节点重新用于从检查点恢复训练较小的测试模型,尝试各种新技术。这有助于降低推理成本。如果OpenAI不这样做,他们的利用率会更低,我们的成本估计会翻一番以上。
+
+## Multi-Query Attention
+
+MQA(Multi-Query Attention,多查询注意力)是其他所有人都在做的事情,但我们想指出OpenAI也在做。长话短说,只需要1个头,KV缓存的内存容量可以显著减少。即便如此,32k seqlen的GPT-4肯定无法在40GB的A100s上运行,而8k的最大批量大小受到限制。如果没有它,8k的最大批量大小将受到很大限制,以至于变得不经济。
+
+## 连续批量处理 Continuous batching
+
+OpenAI 实现了可变批量大小和连续批量处理。 这是为了允许一定程度的极大地优化了延迟并优化推理成本。 如果您不熟悉这个概念,AnyScale 上面的这篇文章5值得一读。
+
+![](4.webp)
+![](5.webp)
+
+## 推测性解码 Speculative Decoding
+
+我们从一些可靠的人那里听说,OpenAI在GPT-4推理中使用了推测性解码。我们不确定是否相信这一点。在进行简单检索任务和更复杂任务时,token to token 延迟在不同时间段下会产生变化和差异6似乎表明这是可能的,但有太多的变量无法知道。以防万一,我们将在这里通过使用“加速LLM推理与分阶段推测性解码”的一些文本并进行一些修改/添加一些内容来解释它。
+
+通常将使用LLM分为两个阶段。首先是预填充,将提示通过模型生成KV缓存和第一个输出概率分布(可能的 token 输出)。这通常很快,因为整个提示可以并行处理。
+
+第二阶段是解码。从输出的概率分布中选择一个 token 并将其反馈到模型中,该模型为后续 token 生成概率分布。这一过程重复进行,直到生成所需数量的 token 。由于解码必须按顺序通过计算单元流式传输权重以生成单个 token ,因此这第二阶段的算术强度(arithmetic intensity,计算FLOP /内存带宽字节)在小批量运行时极低。因此,解码通常是自回归生成中最昂贵的部分。
+
+这就是为什么在OpenAI的API调用中,输入 token 比输出 token 便宜得多。推测性解码的基本思想是使用一个更小、更快的草稿模型提前解码几个 token ,然后将它们作为单个批次输入到oracle模型中。如果草稿模型对其预测是正确的——更大的模型同意——可以用单个批次解码几个 token ,从而节省大量的内存带宽,因此每个 token 的时间也节省了。
+
+然而,如果较大的模型拒绝了草稿模型预测的 token ,那么将丢弃批次的其余部分,并且算法自然地恢复到标准的逐 token 解码。推测性解码还可以伴随着拒绝抽样方案,从原始分布中抽样。请注意,这仅在带宽是瓶颈的小批量设置中有用。
+
+推测性解码用计算换带宽。推测性解码是一个有吸引力的性能工程目标的两个关键原因是:首先,它根本不会降低模型质量。其次,它提供的收益通常与其他方法正交,因为其性能来自将顺序执行转换为并行执行。
+
+当前的推测方法为批次预测单个序列。然而,这种方法不能很好地扩展到大批量大小或低草稿模型对齐。直观地说,两个模型对于长连续 token 序列达成一致的概率呈指数级下降,这意味着随着算术强度的增加,推测性解码的收益迅速减小。
+
+我们认为,如果OpenAI使用推测性解码,他们可能只使用它来处理大约4个 token 的序列。另外,关于降低GPT-4质量的整个阴谋可能仅仅是因为他们让oracle模型接受来自推测性解码模型的较低概率序列。另一个插曲是,有些人们猜测bard使用推测性解码,因为Google在将整个序列发送给用户之前等待序列生成,但我们不认为这种猜测是正确的。
+
+总之,推测性解码是一种在不降低模型质量的情况下提高性能的方法,它通过降低内存带宽需求来实现。然而,这种方法在某些情况下可能会受到限制,例如大批量大小或低草稿模型对齐。OpenAI可能在GPT-4中使用了推测性解码,但我们不能确定它们是否确实采用了这种方法。此外,关于GPT-4质量降低的阴谋论可能与推测性解码模型接受较低概率序列有关。尽管有关bard使用推测性解码的猜测存在,但我们不相信这种猜测是正确的。
+
+## 视觉多模态 Vision Multi-Modal
+
+GPT-4 的视觉多模态功能是相对不太令人印象深刻的部分,至少与领先的研究相比。当然,目前还没有人将这些研究商业化为多模态LLM。
+
+视觉编码器与文本编码器是分开的,但存在交叉注意力。我们了解到,其架构类似于Flamingo。这在GPT-4的1.8T参数之上增加了更多参数。在仅文本预训练之后,它会使用另外大约2万亿个 token 进行微调。
+
+在视觉模型方面,OpenAI希望从头开始训练,但成熟度不够,因此他们希望通过从文本开始来降低风险。
+
+他们将训练的下一个模型,GPT-5,据称将从头开始进行视觉训练,并能够自己生成图像。此外,它还将能够处理音频。
+
+这种视觉功能的主要目的之一是用于能够阅读网页并转录图像和视频内容的自主代理。他们训练的一些数据是联合数据(渲染的LaTeX/文本)、网页截图、YouTube 视频中的部分帧及运行 Whisper 以获取语音转文本。
+
+关于LLM过度优化的一个有趣之处是,视觉模型的IO成本与文本模型不同。在文本模型中,正如我们在《亚马逊云危机》7一文中所描述的,它非常便宜。在视觉上,数据加载的IO约为150倍。每个图像 token 600字节,而文本则为4字节。目前正在进行大量的图像压缩工作。
+
+这对于那些针对2-3年后的LLMs优化硬件的硬件供应商非常相关。他们可能会发现自己生活在一个每个模型都具有强大视觉和音频功能的世界里。他们可能会发现自己的架构适应性较差。总的来说,架构肯定会超越我们今天所看到的基于当前简化文本的密集型和/或MoE模型。
+
+## 引用 Reference
+
+- [1] [The AI Brick Wall – A Practical Limit For Scaling Dense Transformer Models, and How GPT 4 Will Break Past It](https://www.semianalysis.com/p/the-ai-brick-wall-a-practical-limit)
+- [2] [On Device AI – Double-Edged Sword](https://www.semianalysis.com/p/on-device-ai-double-edged-sword)
+- [3] [AI Capacity Constraints - CoWoS and HBM Supply Chain](https://www.semianalysis.com/p/ai-capacity-constraints-cowos-and)
+- [4] [AMD MI300 – Taming The Hype – AI Performance, Volume Ramp, Customers, Cost, IO, Networking, Software](https://www.semianalysis.com/p/amd-mi300-taming-the-hype-ai-performance)
+- [5] [How continuous batching enables 23x throughput in LLM inference while reducing p50 latency](https://www.anyscale.com/blog/continuous-batching-llm-inference)
+- [6] [Clearly OpenAI does variable batch size based on usage volume……](https://twitter.com/dylan522p/status/1639791815778873346?s=20)
+- [7] [Amazon’s Cloud Crisis: How AWS Will Lose The Future Of Computing](https://www.semianalysis.com/p/amazons-cloud-crisis-how-aws-will)
\ No newline at end of file
diff --git a/content/post/markdown-syntax/index.en.md b/content/post/markdown-syntax/index.en.md
new file mode 100644
index 00000000..13db6a4b
--- /dev/null
+++ b/content/post/markdown-syntax/index.en.md
@@ -0,0 +1,168 @@
++++
+author = "Hugo Authors"
+title = "Markdown Syntax Guide"
+date = "2000-03-11"
+description = "Sample article showcasing basic Markdown syntax and formatting for HTML elements."
+tags = [
+ "markdown",
+ "css",
+ "html",
+ "themes",
+]
+categories = [
+ "themes",
+ "syntax",
+]
+series = ["Themes Guide"]
+aliases = ["migrate-from-jekyl"]
+image = "pawel-czerwinski-8uZPynIu-rQ-unsplash.jpg"
++++
+
+This article offers a sample of basic Markdown syntax that can be used in Hugo content files, also it shows whether basic HTML elements are decorated with CSS in a Hugo theme.
+
+
+## Headings
+
+The following HTML `<h1>`—`<h6>` elements represent six levels of section headings. `<h1>` is the highest section level while `<h6>` is the lowest.
+
+# H1
+## H2
+### H3
+#### H4
+##### H5
+###### H6
+
+## Paragraph
+
+Xerum, quo qui aut unt expliquam qui dolut labo. Aque venitatiusda cum, voluptionse latur sitiae dolessi aut parist aut dollo enim qui voluptate ma dolestendit peritin re plis aut quas inctum laceat est volestemque commosa as cus endigna tectur, offic to cor sequas etum rerum idem sintibus eiur? Quianimin porecus evelectur, cum que nis nust voloribus ratem aut omnimi, sitatur? Quiatem. Nam, omnis sum am facea corem alique molestrunt et eos evelece arcillit ut aut eos eos nus, sin conecerem erum fuga. Ri oditatquam, ad quibus unda veliamenimin cusam et facea ipsamus es exerum sitate dolores editium rerore eost, temped molorro ratiae volorro te reribus dolorer sperchicium faceata tiustia prat.
+
+Itatur? Quiatae cullecum rem ent aut odis in re eossequodi nonsequ idebis ne sapicia is sinveli squiatum, core et que aut hariosam ex eat.
+
+## Blockquotes
+
+The blockquote element represents content that is quoted from another source, optionally with a citation which must be within a `footer` or `cite` element, and optionally with in-line changes such as annotations and abbreviations.
+
+#### Blockquote without attribution
+
+> Tiam, ad mint andaepu dandae nostion secatur sequo quae.
+> **Note** that you can use *Markdown syntax* within a blockquote.
+
+#### Blockquote with attribution
+
+> Don't communicate by sharing memory, share memory by communicating.
+> — Rob Pike[^1]
+
+[^1]: The above quote is excerpted from Rob Pike's [talk](https://www.youtube.com/watch?v=PAAkCSZUG1c) during Gopherfest, November 18, 2015.
+
+## Tables
+
+Tables aren't part of the core Markdown spec, but Hugo supports them out-of-the-box.
+
+ Name | Age
+--------|------
+ Bob | 27
+ Alice | 23
+
+#### Inline Markdown within tables
+
+| Italics | Bold | Code |
+| -------- | -------- | ------ |
+| *italics* | **bold** | `code` |
+
+| A | B | C | D | E | F |
+|----------------------------------------------------------|---------------------------------------------------------------------------------------------------------------|--------------------------------------------------------------------------------------------------------------------------------------|---------------------------------------------------|------------------------------------------------------------|----------------------------------------------------------------------|
+| Lorem ipsum dolor sit amet, consectetur adipiscing elit. | Phasellus ultricies, sapien non euismod aliquam, dui ligula tincidunt odio, at accumsan nulla sapien eget ex. | Proin eleifend dictum ipsum, non euismod ipsum pulvinar et. Vivamus sollicitudin, quam in pulvinar aliquam, metus elit pretium purus | Proin sit amet velit nec enim imperdiet vehicula. | Ut bibendum vestibulum quam, eu egestas turpis gravida nec | Sed scelerisque nec turpis vel viverra. Vivamus vitae pretium sapien |
+
+## Code Blocks
+
+#### Code block with backticks
+
+```html
+<!doctype html>
+<html lang="en">
+<head>
+  <meta charset="utf-8">
+  <title>Example HTML5 Document</title>
+</head>
+<body>
+  <p>Test</p>
+</body>
+</html>
+```
+
+#### Code block indented with four spaces
+
+    <!doctype html>
+    <html lang="en">
+    <head>
+        <meta charset="utf-8">
+        <title>Example HTML5 Document</title>
+    </head>
+    <body>
+        <p>Test</p>
+    </body>
+    </html>
+
+#### Code block with Hugo's internal highlight shortcode
+{{< highlight html >}}
+<!doctype html>
+<html lang="en">
+<head>
+  <meta charset="utf-8">
+  <title>Example HTML5 Document</title>
+</head>
+<body>
+  <p>Test</p>
+</body>
+</html>
+{{< /highlight >}}
+
+#### Diff code block
+
+```diff
+[dependencies.bevy]
+git = "https://github.com/bevyengine/bevy"
+rev = "11f52b8c72fc3a568e8bb4a4cd1f3eb025ac2e13"
+- features = ["dynamic"]
++ features = ["jpeg", "dynamic"]
+```
+
+## List Types
+
+#### Ordered List
+
+1. First item
+2. Second item
+3. Third item
+
+#### Unordered List
+
+* List item
+* Another item
+* And another item
+
+#### Nested list
+
+* Fruit
+ * Apple
+ * Orange
+ * Banana
+* Dairy
+ * Milk
+ * Cheese
+
+## Other Elements — abbr, sub, sup, kbd, mark
+
+<abbr title="Graphics Interchange Format">GIF</abbr> is a bitmap image format.
+
+H<sub>2</sub>O
+
+X<sup>n</sup> + Y<sup>n</sup> = Z<sup>n</sup>
+
+Press <kbd><kbd>CTRL</kbd>+<kbd>ALT</kbd>+<kbd>Delete</kbd></kbd> to end the session.
+
+Most <mark>salamanders</mark> are nocturnal, and hunt for insects, worms, and other small creatures.
+
+## Hyperlinked image
+
+[![Google](https://www.google.com/images/branding/googlelogo/1x/googlelogo_light_color_272x92dp.png)](https://google.com)
\ No newline at end of file
diff --git a/content/post/markdown-syntax/index.md b/content/post/markdown-syntax/index.md
index 13db6a4b..c88a5841 100644
--- a/content/post/markdown-syntax/index.md
+++ b/content/post/markdown-syntax/index.md
@@ -1,6 +1,6 @@
+++
author = "Hugo Authors"
-title = "Markdown Syntax Guide"
+title = "Markdown 语法指南"
date = "2000-03-11"
description = "Sample article showcasing basic Markdown syntax and formatting for HTML elements."
tags = [
diff --git a/content/post/minesweeper-cli/index.en.md b/content/post/minesweeper-cli/index.en.md
new file mode 100644
index 00000000..d2ce67bc
--- /dev/null
+++ b/content/post/minesweeper-cli/index.en.md
@@ -0,0 +1,278 @@
++++
+author = "Xianfei"
+title = "Minesweeper Command Line Version with C"
+date = "2019-01-11"
+description = "Freshman C Language/Linux Development Practical Course Homework"
+categories = [
+ "CLI","Coding","Coursework"
+]
+image = "cta.png"
+slug = "minesweeper-cli"
++++
+
+# Introduction
+
+A multi-platform console minesweeper software based on the C language, capable of implementing all the functionalities of the Windows system's built-in Minesweeper game. It includes features such as custom game parameters, no mines on the first click, digging, marking, automatic handling of mine-free areas, and quickly opening surrounding areas under certain conditions. The console version is implemented using operation commands combined with row and column coordinates.
+
+# Flowchart
+
+![](pic_en.webp)
+
+# Source Code
+
+```c
+#include <stdio.h>
+#include <stdlib.h>
+#include <string.h>
+#include <time.h>
+
+// Characters for undug and marked
+#define undigged "▣"
+#define marked "∅"
+
+// Number of rows, columns, and mines
+unsigned int row = 16;
+unsigned int col = 30;
+unsigned int minenum = 9;
+
+// Function to randomly generate mines
+int makemine(int mine[32][32]) {
+ int a[minenum]; // To store mine positions
+ int i, j;
+ for (i = 1; i < row + 1; i++) {
+ for (j = 1; j < col + 1; j++) {
+ mine[i][j] = 0; // Clear for re-mining in case the first click hits a mine
+ }
+ }
+ srand((int) time(0));
+ a[0] = rand() % (row * col);
+ for (i = 1; i < minenum; i++) {
+ a[i] = rand() % (row * col);
+ for (j = 0; j < i; j++) {
+ if (a[i] == a[j]) i--;
+ }
+ }
+ for (i = 0; i < minenum; i++) {
+ // Recording the generated mine positions into the array
+ int x = a[i] / col + 1;
+ int y = a[i] % col + 1;
+ mine[x][y] = 1;
+ }
+ return 0;
+}
+
+// Function to print the minefield
+int output(char show[31][31][4]) {
+ int i, j;
+ for (i = 1; i < row + 1; i++) {
+ printf("%3d ", i);
+ for (j = 1; j < col + 1; j++) {
+ printf("%s ", show[i][j]);
+ }
+ putchar('\n');
+ }
+ return 0;
+}
+
+// Calculate the number of mines around (x, y)
+int scanmine(int mine[32][32], int x, int y) {
+ int n = 0, i, j;
+ for (i = 1; i > -2; i--) {
+ for (j = 1; j > -2; j--) {
+ if (mine[x + i][y + j] == 1) n++;
+ }
+ }
+ return n;
+}
+
+// Function to automatically open areas without mines
+void chuli0(char show[31][31][4], int mine[32][32]) {
+ int x, y, i, j;
+ for (x = 1; x < row + 1; x++) {
+ for (y = 1; y < col + 1; y++) {
+ if (show[x][y][0] == ' ') {
+ for (i = 1; i > -2; i--) {
+ for (j = 1; j > -2; j--) {
+ if (scanmine(mine, x + i, y + j)) show[x + i][y + j][0] = '0' + scanmine(mine, x + i, y + j);
+ else show[x + i][y + j][0] = ' ';
+ show[x + i][y + j][1] = 0;
+ }
+ }
+ }
+ }
+ }
+}
+
+// Calculate the number of remaining undug areas
+int least(char show[31][31][4]) {
+ int i, j, n = 0;
+ for (i = 1; i < row + 1; i++) {
+ for (j = 1; j < col + 1; j++) {
+ if (!(strcmp(show[i][j], undigged))) n++;
+ if (!(strcmp(show[i][j], marked))) n++;
+ }
+ }
+ return n;
+}
+
+// Execute the game
+int game(char show[31][31][4], int mine[32][32], int flag[31][31]) {
+ while(getchar() != '\n'); // Clear keyboard buffer
+ int i = 0, j = 0, k = 0, x, y;
+ static int flagnum = 0;
+ printf("Undug: %d ", least(show));
+ printf("Remaining mines: %d ", minenum - flagnum);
+ printf("Marked: %d\n", flagnum);
+ printf(" ");
+ for (i = 0; i < col; i++) printf("%c ", i + 'a');
+ putchar('\n');
+ output(show);
+ puts("\nEnter operation and row-column coordinates: (Operations: d - dig, f - flag/unflag, o - try to open surroundings)");
+ int opt = getchar();
+ y = getchar() - 'a' + 1;
+ scanf("%d", &x);
+ if (x < 1 || x > row || y < 1 || y > col) {
+ puts("error");
+ puts("\n\n\n");
+ return 1; // Error checking
+ }
+ switch (opt) {
+ case 'd':
+ if (flag[x][y]) {
+ puts("\n\n");
+ printf("This location is marked. Please unmark it before retrying!!!");
+ puts("\n");
+ break;
+ }
+ else {
+ if (mine[x][y] && (least(show) != col * row)) {
+ printf("You exploded!!!");
+ return 0;
+ }
+ if (mine[x][y] && (least(show) == col * row)) {
+ do { makemine(mine); } // Preventing a mine on the first click
+ while (mine[x][y]);
+ }
+ if (!mine[x][y]) {
+ if (scanmine(mine, x, y)) show[x][y][0] = '0' + scanmine(mine, x, y);
+ else show[x][y][0] = ' ';
+ show[x][y][1] = 0;
+ if (!(scanmine(mine, x, y))) {
+ for (k = 0; k < (col > row ? col : row); k++) chuli0(show, mine);
+ }
+ puts("\n\n\n");
+ break;
+ }
+ }
+ case 'f':
+ flag[x][y] = !flag[x][y];
+ if (flag[x][y]) {
+ strcpy(show[x][y], marked);
+ flagnum++;
+ } else {
+ strcpy(show[x][y], undigged);
+ flagnum--;
+ }
+ puts("\n\n\n");
+ break;
+ case 'o':
+ k = 0;
+ for (i = 1; i > -2; i--) {
+ for (j = 1; j > -2; j--) {
+ if (!(strcmp(show[x + i][y + j], marked))) k++;
+ }
+ }
+ if (k != scanmine(mine, x, y)) {
+ puts("\n\n");
+ puts("Cannot quickly open. Not all surroundings are marked");
+ puts("\n");
+ break;
+ }
+ for (i = 1; i > -2; i--) {
+ for (j = 1; j > -2; j--) {
+ if (!(strcmp(show[x + i][y + j], undigged))) {
+ if (scanmine(mine, x + i, y + j)) show[x + i][y + j][0] = '0' + scanmine(mine, x + i, y + j);
+ else show[x + i][y + j][0] = ' ';
+ show[x + i][y + j][1] = 0;
+ if (!(scanmine(mine, x + i, y + j))) {
+ for (k = 0; k < (col > row ? col : row); k++) chuli0(show, mine);
+ }
+ }
+ }
+ }
+ puts("\n\n\n");
+ break;
+ default:
+ puts("error");
+ puts("\n\n\n");
+ }
+ if (least(show) == minenum) {
+ printf("You win!!!");
+ return 0;
+ }
+ return 1;
+}
+
+// Choose difficulty
+int choose() {
+ puts("Choose difficulty:");
+ puts("1. Easy 9x9 10 mines");
+ puts("2. Medium 16x16 40 mines");
+ puts("3. Advanced 16x30 99 mines");
+ puts("4. Customized");
+ printf("Please enter the option number:");
+ switch (getchar()) {
+ case '1':
+ minenum = 10;
+ row = 9;
+ col = 9;
+ break;
+ case '2':
+ minenum = 40;
+ row = 16;
+ col = 16;
+ break;
+ case '3':
+ minenum = 99;
+ row = 16;
+ col = 30;
+ break;
+ case '4':
+ puts("Please enter the number of lines:");
+ scanf("%d", &row);
+ puts("Please enter the number of columns:");
+ scanf("%d", &col);
+ puts("Please enter the number of mines:");
+ scanf("%d", &minenum);
+ if (row > 24 || col > 30 || minenum > 0.9 * col * row) {
+ puts("The number of rows cannot be greater than 24, and the number of columns cannot be greater than 30");
+ puts("The number of mines cannot be greater than 90% of the number of squares");
+ while(getchar() != '\n');//Clear the keyboard buffer
+ return 1;
+ }
+ break;
+ default:
+ puts("error");
+ while(getchar() != '\n');//Clear the keyboard buffer
+ return 1;
+ }
+ return 0;
+}
+
+int main() {
+ while(choose());//Select the game difficulty. If the correct choice is made, the return value should be 0
+ int mine[32][32] = {0}, flag[31][31] = {0},i,j;
+ makemine(mine);//Make mine
+ putchar('\n');
+ char show[30 + 1][30 + 1][4] = {0}; //Used to store the string displayed by the thunder disk. Special symbols occupy 3 bytes on macos/linux.
+ for (i = 1; i < row + 1; i++) {
+ for (j = 1; j < col + 1; j++) {
+ strcpy(show[i][j], undigged);//Initialize thunder disk
+ }
+ }
+ while (game(show,mine,flag));//Execute the game. The return value should be 0 when the game ends.
+ return 0;
+}
+```
\ No newline at end of file
diff --git a/content/post/minesweeper-cli/pic_en.webp b/content/post/minesweeper-cli/pic_en.webp
new file mode 100644
index 00000000..ef0f99cd
Binary files /dev/null and b/content/post/minesweeper-cli/pic_en.webp differ
diff --git a/content/post/shell-file-trans/index.en.md b/content/post/shell-file-trans/index.en.md
new file mode 100644
index 00000000..74baf19a
--- /dev/null
+++ b/content/post/shell-file-trans/index.en.md
@@ -0,0 +1,159 @@
++++
+author = "Xianfei"
+title = "Shell File Transfer — Local Area Network File Transfer Program for Linux/macOS"
+date = "2019-03-05"
+description = "Linux development practice course project"
+categories = [
+ "CLI","Shell","Linux","Coding"
+]
+image = "1.webp"
+slug = "shell-file-trans"
++++
+
+## Feature Introduction
+
+This tool transfers files across a local area network over TCP/IP; the two machines connect by IP address and a user-chosen port number.
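+
+Under the hood this is the same pattern you would use with netcat by hand; a minimal sketch, where the IP address, port 5000, and file names are placeholders:
+
+```shell
+# On the receiving machine: listen on the chosen port and write the incoming stream to a file
+nc -l 5000 > received.file
+
+# On the sending machine: connect to the receiver's IP and port and stream the file out
+nc 192.168.1.10 5000 < some.file
+```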
+
+## Running Environment
+
+Tested running environment: macOS Mojave 10.14 & Fedora Linux Workstation 29
+
+Required software packages:
+
+- dialog: a text-mode dialog/UI library, needs to be installed separately
+- netcat (nc): reads and writes TCP/UDP streams, included in these systems
+- net-tools (ifconfig): configures and displays network interfaces in the Linux kernel, included in these systems
+
+Dialog installation methods:
+
+### macOS: Install using the brew package manager (grant superuser permission if prompted)
+
+Enter the following commands in the terminal:
+
+1. Brew installation command: (Skip this step if already installed)
+
+`/usr/bin/ruby -e "$(curl -fsSL https://raw.githubusercontent.com/Homebrew/install/master/install)"`
+
+2. Install using the brew package manager:
+
+`brew install dialog`
+
+### Fedora: Install using the yum package manager.
+
+Enter the following command in the terminal:
+
+`sudo yum install dialog`
+
+## Screenshots
+
+
+
+
+
+## Source Code
+
+```shell
+#!/bin/bash
+send()
+{
+dialog --inputbox "Please drag the file here or enter the absolute path" 20 50 2> temp
+if [ $? != 0 ]
+then
+return 6
+fi
+fileAddr=`cat temp`
+fileName=`echo ${fileAddr##*/}`
+dialog --inputbox "Please enter the port number you wish to use" 20 50 2> temp
+if [ $? != 0 ]
+then
+return 6
+fi
+sendPort=`cat temp`
+dialog --inputbox "Please enter the recipient's IP address" 20 50 2> temp
+if [ $? != 0 ]
+then
+return 6
+fi
+recvIP=`cat temp`
+dialog --title Sending --infobox "Sending file $fileName " 20 50
+echo $fileName | nc $recvIP $sendPort
+if [ $? != 0 ]
+then
+dialog --colors --msgbox "\Z1 Send error! Please check the port number and IP address!" 0 0
+return 6
+fi
+sleep 1
+nc $recvIP $sendPort < $fileAddr
+if [ $? != 0 ]
+then
+dialog --colors --msgbox "\Z1 Send error!" 0 0
+return 6
+fi
+fileSize=`ls -lh $fileAddr | awk '{print $5}'`
+dialog --msgbox "Send complete.\nFile name: $fileName \nSize: ${fileSize}" 20 50
+}
+recv()
+{
+dialog --inputbox "Please enter the port number you wish to use" 20 50 2> temp
+if [ $? != 0 ]
+then
+return 6
+fi
+recvPort=`cat temp`
+dialog --title "Select a reception path" --fselect "${HOME}/" 8 50 2> temp
+if [ $? != 0 ]
+then
+return 6
+fi
+recvAddr=`cat temp`
+localIP=`ifconfig | grep "inet" | awk '{ print $2}' | grep -v "127.0.0.1" | grep -v ":"`
+dialog --title "Waiting to Receive" --infobox "Local IP: $localIP \nPort number: $recvPort" 20 50
+nc -l $recvPort > temp
+if [ $? != 0 ]
+then
+dialog --colors --msgbox "\Z1 Reception error!" 0 0
+return 6
+fi
+recvFile=`cat temp`
+dialog --infobox "Receiving file $recvFile ……" 20 50
+nc -l $recvPort > filetemp
+if [ $? != 0 ]
+then
+dialog --colors --msgbox "\Z1 Reception error!" 0 0
+return 6
+fi
+mv filetemp ${recvAddr}/${recvFile}
+fileSize=`ls -lh ${recvAddr}/${recvFile} | awk '{print $5}'`
+dialog --msgbox "Reception complete.\nFile name: $recvFile \nSize: ${fileSize}" 20 50
+}
+mainUI()
+{
+dialog --cancel-label Exit --title "Shell File Transfer Assistant" --menu " \n " 20 50 8 1 "Send file" 2 "Receive file" 3 "About" 4 "Exit" 2> temp
+a1=`cat temp`
+case $a1 in
+1)
+send
+if [ $? = 6 ]
+then
+mainUI
+fi
+;;
+2)
+recv
+if [ $? = 6 ]
+then
+mainUI
+fi
+;;
+3)
+dialog --colors --msgbox "Shell File Transfer Assistant \n\nBeta Version 2019.3.5\n\n\Z6Developers:\nXianfei, Li Sihan, Hu Jiahui, Ding Ling (王衔飞 李思涵 胡嘉慧 丁玲)" 20 50
+mainUI
+;;
+4)
+return
+;;
+esac
+}
+mainUI
+clear
+rm -f temp
+rm -f filetemp
+```
\ No newline at end of file
diff --git a/content/post/wasm/index.en.md b/content/post/wasm/index.en.md
new file mode 100644
index 00000000..63943c11
--- /dev/null
+++ b/content/post/wasm/index.en.md
@@ -0,0 +1,202 @@
+---
+author: Xianfei
+title: Get to Know WebAssembly!
+date: 2022-06-01
+slug: wasm
+image: wasm_en.png
+color: "#5b66e5"
+categories:
+ - Web
+ - C++
+---
+
+
+## What is WebAssembly?
+
+{{< quote source="webassembly.org" url="https://webassembly.org/" >}}
+WebAssembly (abbreviated Wasm) is a binary instruction format for a stack-based virtual machine. Wasm is designed as a portable compilation target for programming languages, enabling deployment on the web for client and server applications.
+{{< /quote >}}
+
+WebAssembly is a novel coding approach that operates in modern web browsers - it's a low-level, assembly-like language with a compact binary format that runs at near-native performance. It provides a compilation target for languages like C/C++, enabling them to run on the Web. It's also designed to coexist with JavaScript, allowing both to work together.
+
+For the web platform, WebAssembly is of significant importance - it offers a way for code written in various languages to run at near-native speeds on the Web. In this scenario, client-side software that previously couldn't run in this way can now operate on the Web.
+
+WebAssembly is designed to work in tandem with JavaScript - using WebAssembly's JavaScript API, you can load WebAssembly modules into a JavaScript application and share functionality between them. This allows you to leverage the performance and power of WebAssembly along with the expressiveness and flexibility of JavaScript in the same application, even if you may not know how to write WebAssembly code.
+
+The three paragraphs above are quoted from MDN: https://developer.mozilla.org/zh-CN/docs/WebAssembly
+
+## Installing the Compiler
+
+First, we need to install a WebAssembly compiler, **emscripten**. For macOS, you can use brew for installation.
+
+```bash
+brew install emscripten
+```
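+
+If you are not on macOS (or prefer not to use brew), the official emsdk installer is an alternative; a minimal sketch of the usual steps (not covered in the original post):
+
+```bash
+# Fetch the Emscripten SDK, install and activate the latest toolchain,
+# then load it into the current shell session
+git clone https://github.com/emscripten-core/emsdk.git
+cd emsdk
+./emsdk install latest
+./emsdk activate latest
+source ./emsdk_env.sh
+```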
+
+### Testing if emscripten is Successfully Installed
+
+Here, we can run `emcc` and `emcc --version` to test if it's installed successfully.
+
+
+
+## Running C Code in the Web
+
+Because C++ supports function overloading, object-oriented programming, and namespaces, its symbol names are mangled and thus less intuitive than C's, so this article starts by demonstrating with C.
+
+### Writing C Code
+
+Below, we write a very simple C language example that will return the sum of two numbers. In your working directory, create a file named test.c.
+
+```c
+#include <stdio.h>
+#include <emscripten/emscripten.h>
+
+// The code in main() will execute once the WASM module is loaded
+int main(int argc, char ** argv) {
+ printf("WebAssembly module loaded\n");
+}
+
+// Returns the sum of two numbers
+int EMSCRIPTEN_KEEPALIVE get_sum(int a,int b) {
+ return a + b;
+}
+```
+
+### Compiling C to WebAssembly
+
+Now that we have the C code, the next step is to compile it into wasm. Not only that, but we also need to generate the corresponding JavaScript glue code to get it running.
+
+```bash
+emcc test.c -s WASM=1 -O2 -o index.js
+```
+
+The meanings of each parameter are as follows:
+
+- `emcc` — represents the Emscripten compiler;
+- `test.c` — the file containing C code;
+- `-s WASM=1` — specifies the use of WebAssembly;
+- `-O2` — code optimization level;
+- `-o index.js` — specifies the generation of a JS file containing all the glue code needed for the wasm module;
+
+After compilation, index.js and index.wasm files will be generated.
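+
+One caveat: depending on your Emscripten version, the `ccall` helper used in the next section may not be exported into the glue code by default. If the browser console complains that `ccall` is not available, recompiling with it explicitly exported should help:
+
+```bash
+emcc test.c -s WASM=1 -O2 -s EXPORTED_RUNTIME_METHODS=ccall -o index.js
+```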
+
+### Writing HTML Code
+
+There's a powerful API available in browsers for handling WebAssembly. We won't delve deep into it here, as it goes beyond the scope of a beginner's tutorial. We only need the Module interface and its ccall method, which lets us call a compiled C function by name from JavaScript as if it were a regular JS function.
+
+```javascript
+var result = Module.ccall(
+ "funcName", // function name
+ "number", // return type
+ ["number"], // parameter types
+ [42] // arguments
+);
+```
+
+After the call, `result` holds the return value of the corresponding C function. All arguments except the function name are optional.
+
+We can also use a shorthand version (using the symbol name):
+
+```javascript
+var result = _funcName(args...);
+```
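+
+If you call the same function repeatedly, `Module.cwrap` is a convenient companion to `ccall`: it takes the same name and type arguments but returns a reusable JavaScript function. A short sketch using the `get_sum` function from this article:
+
+```javascript
+// Wrap the exported C function once...
+var getSum = Module.cwrap("get_sum", "number", ["number", "number"]);
+// ...then call it like any ordinary JS function
+console.log(getSum(1, 2)); // 3
+```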
+
+Next, we need to write an HTML file, named here as index.html, containing a button and a div block to display the result.
+
+Then we add a script tag to write the JavaScript code that calls the wasm module. Thanks to the glue code (index.js), this task becomes very simple, since it has already handled all the wiring for us.
+
+```html
+<!-- The original markup was lost in conversion; this is a minimal reconstruction:
+     a button calls the exported get_sum and the result is shown in a div -->
+<!DOCTYPE html>
+<html>
+<head>
+    <meta charset="utf-8">
+    <title>WebAssembly Example</title>
+</head>
+<body>
+    <button id="btn">Calculate 1 + 2</button>
+    <div id="result"></div>
+    <!-- The glue code defines the _get_sum shorthand used below -->
+    <script src="index.js"></script>
+    <script>
+        document.getElementById("btn").onclick = function () {
+            document.getElementById("result").innerHTML = "get_sum(1, 2) = " + _get_sum(1, 2);
+        };
+    </script>
+</body>
+</html>
+```
+
+Then open the page over HTTP. Here I use VS Code's Live Server extension (any HTTP file server works); **do not open the html file directly from disk**, because browsers block loading the wasm file from `file://` URLs (CORS restrictions on local files).
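+
+If you do not use Live Server, any static file server will do; for example, assuming Python 3 is installed:
+
+```bash
+# Serve the current directory over HTTP, then open http://localhost:8000/index.html
+python3 -m http.server 8000
+```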
+
+
+
+## What About C++?
+
+First, convert the above code to C++ and save it as a test.cpp file.
+
+```c++
+#include <iostream>
+#include <emscripten/emscripten.h>
+
+// The code in main() will execute once the WASM module is loaded
+int main(int argc, char ** argv) {
+ std::cout << "WebAssembly module loaded" << std::endl;
+}
+
+// Returns the sum of two numbers
+int EMSCRIPTEN_KEEPALIVE get_sum(int a,int b) {
+ return a + b;
+}
+```
+
+Then compile in the same way.
+```bash
+emcc test.cpp -s WASM=1 -O2 -o index.js
+```
+
+C++ has complex [Symbol Mangling](http://web.mit.edu/tibbetts/Public/inside-c/www/mangling.html) rules. Let's start by analyzing a standard C++ build: compile the file with g++, then inspect the symbols with the nm command, as shown in the image below.
+
+
+
+We see that the symbol name for our written function is `__Z7get_sumii`, so we can call this function using this symbol name.
+
+```html
+<!-- The original markup was lost in conversion; this is a minimal reconstruction
+     that calls the C++ function through its mangled symbol name -->
+<!DOCTYPE html>
+<html>
+<head>
+    <meta charset="utf-8">
+    <title>WebAssembly Example</title>
+</head>
+<body>
+    <button id="btn">Calculate 1 + 2</button>
+    <div id="result"></div>
+    <script src="index.js"></script>
+    <script>
+        document.getElementById("btn").onclick = function () {
+            document.getElementById("result").innerHTML = "get_sum(1, 2) = " + __Z7get_sumii(1, 2);
+        };
+    </script>
+</body>
+</html>
+```
+
+We can also confirm this by looking at the WebAssembly disassembly code through DevTools.
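+
+As a side note, if you would rather not depend on mangled names at all, a common alternative (not used in the example above) is to declare the exported function with `extern "C"`, which disables C++ name mangling so the JS side can keep calling `_get_sum`:
+
+```c++
+#include <iostream>
+#include <emscripten/emscripten.h>
+
+int main(int argc, char **argv) {
+    std::cout << "WebAssembly module loaded" << std::endl;
+}
+
+// extern "C" keeps the C-style symbol, so no mangled name is needed on the JS side
+extern "C" int EMSCRIPTEN_KEEPALIVE get_sum(int a, int b) {
+    return a + b;
+}
+```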
+
+
\ No newline at end of file
diff --git a/content/post/wasm/wasm_en.png b/content/post/wasm/wasm_en.png
new file mode 100644
index 00000000..155f6a20
Binary files /dev/null and b/content/post/wasm/wasm_en.png differ
diff --git a/content/post/yolo/index.en.md b/content/post/yolo/index.en.md
new file mode 100644
index 00000000..0024e784
--- /dev/null
+++ b/content/post/yolo/index.en.md
@@ -0,0 +1,190 @@
+---
+author: Xianfei
+title: Building a Simple Python Backend for Web-Based Object Recognition
+date: 2022-05-08
+slug: yolo
+image: banner.png
+color: "#c35a25"
+categories:
+ - Computer Vision
+ - Machine Learning
+ - Web
+---
+
+## Introduction
+
+After completing my postgraduate exams last year, I decided to learn how to use Python to invoke machine learning libraries, having previously only known some basic Flask operations.
+
+This is a web-based image object recognition project built on the xyolo library ~~(just for fun)~~. The frontend is written in plain native HTML/CSS/JavaScript (I hadn't learned Vue back then), and the backend API is implemented with Flask. The code is extremely simple.
+
+## Preview
+
+Here are a few images to give you an idea of what the final result looks like. The colored boxes are drawn with frontend divs, the recognition labels are likewise rendered in HTML, and a table below lists the recognized objects together with their confidence scores.
+
+![](0.jpg) ![](1.jpg) ![](2.jpg)
+
+## Implementation
+
+### Backend Code
+
+```python
+import numpy
+from xyolo import YOLO, DefaultYolo3Config
+from xyolo import init_yolo_v3
+from flask import Flask, jsonify, request
+from flask_cors import CORS
+from PIL import Image
+
+app = Flask(__name__)
+CORS(app, resources=r'/*')  # allow cross-origin requests from the frontend page
+
+# Initialize xyolo and load the model once at startup (the first run may download pretrained weights)
+config = DefaultYolo3Config()
+init_yolo_v3(config)
+yolo = YOLO(config)
+
+@app.route('/postimg', methods=['POST'])
+def hello_world():
+    # Read the uploaded image from the multipart form field named 'file'
+    img = Image.open(request.files['file'].stream)
+    result = yolo.detect_image(img)
+    print(result)
+    # Convert the detections to a plain Python list so jsonify can serialize them
+    return jsonify({'result': numpy.array(result).tolist()})
+
+
+if __name__ == '__main__':
+    app.run(host='0.0.0.0', port=8808, debug=True)
+
+```
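+
+To sanity-check the backend on its own, you can post an image to the endpoint from the command line. The field name `file` and port 8808 come from the code above; `test.jpg` is just a placeholder file name:
+
+```bash
+curl -F "file=@test.jpg" http://127.0.0.1:8808/postimg
+```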
+
+### Frontend Code
+
+I took a shortcut here: everything lives in a single HTML file.
+
+```html
+<!-- The original markup was lost in conversion; this is a minimal reconstruction.
+     It uploads the dropped image and fills the result table; the original page also
+     drew colored bounding boxes over the image using absolutely positioned divs. -->
+<!DOCTYPE html>
+<html>
+<head>
+    <meta charset="utf-8">
+    <title>Let's See What's Here</title>
+</head>
+<body>
+    <h1>Let's See What's In Here!</h1>
+    <p>Supports dragging and dropping images below this line of text</p>
+    <div id="drop" style="border: 2px dashed #999; padding: 40px; text-align: center;">Drop an image here</div>
+    <img id="preview" style="max-width: 100%;">
+    <div id="status">Ready</div>
+    <table border="1">
+        <thead><tr><th>Object</th><th>Confidence</th></tr></thead>
+        <tbody id="results"></tbody>
+    </table>
+    <p>by xianfei 2021.12</p>
+    <script>
+        var drop = document.getElementById("drop");
+        drop.ondragover = function (e) { e.preventDefault(); };
+        drop.ondrop = function (e) {
+            e.preventDefault();
+            var file = e.dataTransfer.files[0];
+            if (!file) return;
+            document.getElementById("preview").src = URL.createObjectURL(file);
+            document.getElementById("status").innerText = "Recognizing...";
+            var form = new FormData();
+            // The field name must match request.files['file'] on the Flask backend
+            form.append("file", file);
+            fetch("http://127.0.0.1:8808/postimg", { method: "POST", body: form })
+                .then(function (resp) { return resp.json(); })
+                .then(function (data) {
+                    // Assumption: each row of data.result begins with the label and its confidence score
+                    var tbody = document.getElementById("results");
+                    tbody.innerHTML = "";
+                    data.result.forEach(function (row) {
+                        var tr = document.createElement("tr");
+                        tr.innerHTML = "<td>" + row[0] + "</td><td>" + row[1] + "</td>";
+                        tbody.appendChild(tr);
+                    });
+                    document.getElementById("status").innerText = "Done";
+                })
+                .catch(function () {
+                    document.getElementById("status").innerText = "Request failed";
+                });
+        };
+    </script>
+</body>
+</html>
+```
diff --git a/resources/_gen/assets/scss/scss/style.scss_511aa33e99371f93fbf403479ebfd32e.json b/resources/_gen/assets/scss/scss/style.scss_511aa33e99371f93fbf403479ebfd32e.json
index f99ef200..47b43c85 100644
--- a/resources/_gen/assets/scss/scss/style.scss_511aa33e99371f93fbf403479ebfd32e.json
+++ b/resources/_gen/assets/scss/scss/style.scss_511aa33e99371f93fbf403479ebfd32e.json
@@ -1 +1 @@
-{"Target":"scss/style.min.fc9f65ac54356383b07eb825d8f44c14c4e6dd0230ee115ea53c959cfd402796.css","MediaType":"text/css","Data":{"Integrity":"sha256-/J9lrFQ1Y4Owfrgl2PRMFMTm3QIw7hFepTyVnP1AJ5Y="}}
\ No newline at end of file
+{"Target":"scss/style.min.7419d3a38aca85366e24e744cccb945224e7388571ebfda4f22c9fb4f04f6540.css","MediaType":"text/css","Data":{"Integrity":"sha256-dBnTo4rKhTZuJOdEzMuUUiTnOIVx6/2k8iyftPBPZUA="}}
\ No newline at end of file
diff --git a/resources/_gen/images/categories/Test/hutomo-abrianto-l2jk-uxb1BY-unsplash_hu8102ac0a5989befdf52fa5096a373f78_52927_120x120_fill_q75_box_smart1.jpg b/resources/_gen/images/categories/Test/hutomo-abrianto-l2jk-uxb1BY-unsplash_hu8102ac0a5989befdf52fa5096a373f78_52927_120x120_fill_q75_box_smart1.jpg
new file mode 100644
index 00000000..1c73a8ac
Binary files /dev/null and b/resources/_gen/images/categories/Test/hutomo-abrianto-l2jk-uxb1BY-unsplash_hu8102ac0a5989befdf52fa5096a373f78_52927_120x120_fill_q75_box_smart1.jpg differ
diff --git a/resources/_gen/images/post/2022to2023/2023banner.c83dc84236eb71a9b680818eae3193bd_huefcdc23473a1d6462c14827255a34185_108826_250x150_fill_q75_h2_box_smart1_2.webp b/resources/_gen/images/post/2022to2023/2023banner.c83dc84236eb71a9b680818eae3193bd_huefcdc23473a1d6462c14827255a34185_108826_250x150_fill_q75_h2_box_smart1_2.webp
new file mode 100644
index 00000000..bf2c8d91
Binary files /dev/null and b/resources/_gen/images/post/2022to2023/2023banner.c83dc84236eb71a9b680818eae3193bd_huefcdc23473a1d6462c14827255a34185_108826_250x150_fill_q75_h2_box_smart1_2.webp differ
diff --git a/resources/_gen/images/post/2022to2023/2023banner_huefcdc23473a1d6462c14827255a34185_108826_120x120_fill_q75_h2_box_smart1_2.webp b/resources/_gen/images/post/2022to2023/2023banner_huefcdc23473a1d6462c14827255a34185_108826_120x120_fill_q75_h2_box_smart1_2.webp
new file mode 100644
index 00000000..df80fdcb
Binary files /dev/null and b/resources/_gen/images/post/2022to2023/2023banner_huefcdc23473a1d6462c14827255a34185_108826_120x120_fill_q75_h2_box_smart1_2.webp differ
diff --git a/resources/_gen/images/post/2022to2023/2023banner_huefcdc23473a1d6462c14827255a34185_108826_1600x0_resize_q75_h2_box_2.webp b/resources/_gen/images/post/2022to2023/2023banner_huefcdc23473a1d6462c14827255a34185_108826_1600x0_resize_q75_h2_box_2.webp
new file mode 100644
index 00000000..c20e5ab7
Binary files /dev/null and b/resources/_gen/images/post/2022to2023/2023banner_huefcdc23473a1d6462c14827255a34185_108826_1600x0_resize_q75_h2_box_2.webp differ
diff --git a/resources/_gen/images/post/2022to2023/2023banner_huefcdc23473a1d6462c14827255a34185_108826_800x0_resize_q75_h2_box_2.webp b/resources/_gen/images/post/2022to2023/2023banner_huefcdc23473a1d6462c14827255a34185_108826_800x0_resize_q75_h2_box_2.webp
new file mode 100644
index 00000000..2a526699
Binary files /dev/null and b/resources/_gen/images/post/2022to2023/2023banner_huefcdc23473a1d6462c14827255a34185_108826_800x0_resize_q75_h2_box_2.webp differ
diff --git a/resources/_gen/images/post/2023spring/DSC00507.f3ee7438aad3b4d659c1b39b96163a16_hu7ea319dd03bf41013c65479097a0d5a6_1164302_250x150_fill_q75_h2_box_smart1_2.webp b/resources/_gen/images/post/2023spring/DSC00507.f3ee7438aad3b4d659c1b39b96163a16_hu7ea319dd03bf41013c65479097a0d5a6_1164302_250x150_fill_q75_h2_box_smart1_2.webp
new file mode 100644
index 00000000..f2a60169
Binary files /dev/null and b/resources/_gen/images/post/2023spring/DSC00507.f3ee7438aad3b4d659c1b39b96163a16_hu7ea319dd03bf41013c65479097a0d5a6_1164302_250x150_fill_q75_h2_box_smart1_2.webp differ
diff --git a/resources/_gen/images/post/2023spring/DSC00507_hu7ea319dd03bf41013c65479097a0d5a6_1164302_120x120_fill_q75_h2_box_smart1_2.webp b/resources/_gen/images/post/2023spring/DSC00507_hu7ea319dd03bf41013c65479097a0d5a6_1164302_120x120_fill_q75_h2_box_smart1_2.webp
new file mode 100644
index 00000000..1e953c97
Binary files /dev/null and b/resources/_gen/images/post/2023spring/DSC00507_hu7ea319dd03bf41013c65479097a0d5a6_1164302_120x120_fill_q75_h2_box_smart1_2.webp differ
diff --git a/resources/_gen/images/post/2023spring/DSC00507_hu7ea319dd03bf41013c65479097a0d5a6_1164302_1600x0_resize_q75_h2_box_2.webp b/resources/_gen/images/post/2023spring/DSC00507_hu7ea319dd03bf41013c65479097a0d5a6_1164302_1600x0_resize_q75_h2_box_2.webp
new file mode 100644
index 00000000..ffeb774a
Binary files /dev/null and b/resources/_gen/images/post/2023spring/DSC00507_hu7ea319dd03bf41013c65479097a0d5a6_1164302_1600x0_resize_q75_h2_box_2.webp differ
diff --git a/resources/_gen/images/post/2023spring/DSC00507_hu7ea319dd03bf41013c65479097a0d5a6_1164302_800x0_resize_q75_h2_box_2.webp b/resources/_gen/images/post/2023spring/DSC00507_hu7ea319dd03bf41013c65479097a0d5a6_1164302_800x0_resize_q75_h2_box_2.webp
new file mode 100644
index 00000000..5de92e16
Binary files /dev/null and b/resources/_gen/images/post/2023spring/DSC00507_hu7ea319dd03bf41013c65479097a0d5a6_1164302_800x0_resize_q75_h2_box_2.webp differ
diff --git a/resources/_gen/images/post/bishe/p5_hue35d2e5eda17a96103b7a7da494aa79b_559606_1536x0_resize_q60_box_3.png b/resources/_gen/images/post/bishe/p5_hue35d2e5eda17a96103b7a7da494aa79b_559606_1536x0_resize_q60_box_3.png
new file mode 100644
index 00000000..0929bf33
Binary files /dev/null and b/resources/_gen/images/post/bishe/p5_hue35d2e5eda17a96103b7a7da494aa79b_559606_1536x0_resize_q60_box_3.png differ
diff --git a/resources/_gen/images/post/bishe/p5_hue35d2e5eda17a96103b7a7da494aa79b_559606_480x0_resize_q60_box_3.png b/resources/_gen/images/post/bishe/p5_hue35d2e5eda17a96103b7a7da494aa79b_559606_480x0_resize_q60_box_3.png
new file mode 100644
index 00000000..4ffdfc1b
Binary files /dev/null and b/resources/_gen/images/post/bishe/p5_hue35d2e5eda17a96103b7a7da494aa79b_559606_480x0_resize_q60_box_3.png differ
diff --git a/resources/_gen/images/post/bishe/sysmocap.d8ad22afd3a5f5264218defc4545496d_hu556f2aae40812163569276577bc9cdb6_1336970_250x150_fill_box_smart1_3.png b/resources/_gen/images/post/bishe/sysmocap.d8ad22afd3a5f5264218defc4545496d_hu556f2aae40812163569276577bc9cdb6_1336970_250x150_fill_box_smart1_3.png
new file mode 100644
index 00000000..f29243ba
Binary files /dev/null and b/resources/_gen/images/post/bishe/sysmocap.d8ad22afd3a5f5264218defc4545496d_hu556f2aae40812163569276577bc9cdb6_1336970_250x150_fill_box_smart1_3.png differ
diff --git a/resources/_gen/images/post/dice/1_hu4dc332bfa548ad6bb992f9085d9ead4f_57166_1536x0_resize_q60_h2_box_2.webp b/resources/_gen/images/post/dice/1_hu4dc332bfa548ad6bb992f9085d9ead4f_57166_1536x0_resize_q60_h2_box_2.webp
new file mode 100644
index 00000000..216fbf31
Binary files /dev/null and b/resources/_gen/images/post/dice/1_hu4dc332bfa548ad6bb992f9085d9ead4f_57166_1536x0_resize_q60_h2_box_2.webp differ
diff --git a/resources/_gen/images/post/dice/1_hu4dc332bfa548ad6bb992f9085d9ead4f_57166_480x0_resize_q60_h2_box_2.webp b/resources/_gen/images/post/dice/1_hu4dc332bfa548ad6bb992f9085d9ead4f_57166_480x0_resize_q60_h2_box_2.webp
new file mode 100644
index 00000000..5037d3af
Binary files /dev/null and b/resources/_gen/images/post/dice/1_hu4dc332bfa548ad6bb992f9085d9ead4f_57166_480x0_resize_q60_h2_box_2.webp differ
diff --git a/resources/_gen/images/post/gpt4/banner.cd0172768892d15be651d4cc36e24c61_hu7c14224a355d073e1e2d02b641289c29_175300_250x150_fill_q75_h2_box_smart1_2.webp b/resources/_gen/images/post/gpt4/banner.cd0172768892d15be651d4cc36e24c61_hu7c14224a355d073e1e2d02b641289c29_175300_250x150_fill_q75_h2_box_smart1_2.webp
new file mode 100644
index 00000000..4e155e2c
Binary files /dev/null and b/resources/_gen/images/post/gpt4/banner.cd0172768892d15be651d4cc36e24c61_hu7c14224a355d073e1e2d02b641289c29_175300_250x150_fill_q75_h2_box_smart1_2.webp differ
diff --git a/resources/_gen/images/post/gpt4/banner_hu7c14224a355d073e1e2d02b641289c29_175300_120x120_fill_q75_h2_box_smart1_2.webp b/resources/_gen/images/post/gpt4/banner_hu7c14224a355d073e1e2d02b641289c29_175300_120x120_fill_q75_h2_box_smart1_2.webp
new file mode 100644
index 00000000..37e208c2
Binary files /dev/null and b/resources/_gen/images/post/gpt4/banner_hu7c14224a355d073e1e2d02b641289c29_175300_120x120_fill_q75_h2_box_smart1_2.webp differ
diff --git a/resources/_gen/images/post/gpt4/banner_hu7c14224a355d073e1e2d02b641289c29_175300_1600x0_resize_q75_h2_box_2.webp b/resources/_gen/images/post/gpt4/banner_hu7c14224a355d073e1e2d02b641289c29_175300_1600x0_resize_q75_h2_box_2.webp
new file mode 100644
index 00000000..d4e576ba
Binary files /dev/null and b/resources/_gen/images/post/gpt4/banner_hu7c14224a355d073e1e2d02b641289c29_175300_1600x0_resize_q75_h2_box_2.webp differ
diff --git a/resources/_gen/images/post/gpt4/banner_hu7c14224a355d073e1e2d02b641289c29_175300_800x0_resize_q75_h2_box_2.webp b/resources/_gen/images/post/gpt4/banner_hu7c14224a355d073e1e2d02b641289c29_175300_800x0_resize_q75_h2_box_2.webp
new file mode 100644
index 00000000..481b4859
Binary files /dev/null and b/resources/_gen/images/post/gpt4/banner_hu7c14224a355d073e1e2d02b641289c29_175300_800x0_resize_q75_h2_box_2.webp differ
diff --git a/resources/_gen/images/post/markdown-syntax/pawel-czerwinski-8uZPynIu-rQ-unsplash_hud7e36f7e20e71be184458283bdae4646_55974_120x120_fill_q75_box_smart1.jpg b/resources/_gen/images/post/markdown-syntax/pawel-czerwinski-8uZPynIu-rQ-unsplash_hud7e36f7e20e71be184458283bdae4646_55974_120x120_fill_q75_box_smart1.jpg
new file mode 100644
index 00000000..0707d566
Binary files /dev/null and b/resources/_gen/images/post/markdown-syntax/pawel-czerwinski-8uZPynIu-rQ-unsplash_hud7e36f7e20e71be184458283bdae4646_55974_120x120_fill_q75_box_smart1.jpg differ
diff --git a/resources/_gen/images/post/markdown-syntax/pawel-czerwinski-8uZPynIu-rQ-unsplash_hud7e36f7e20e71be184458283bdae4646_55974_1600x0_resize_q75_box.jpg b/resources/_gen/images/post/markdown-syntax/pawel-czerwinski-8uZPynIu-rQ-unsplash_hud7e36f7e20e71be184458283bdae4646_55974_1600x0_resize_q75_box.jpg
new file mode 100644
index 00000000..7c6ce44e
Binary files /dev/null and b/resources/_gen/images/post/markdown-syntax/pawel-czerwinski-8uZPynIu-rQ-unsplash_hud7e36f7e20e71be184458283bdae4646_55974_1600x0_resize_q75_box.jpg differ
diff --git a/resources/_gen/images/post/markdown-syntax/pawel-czerwinski-8uZPynIu-rQ-unsplash_hud7e36f7e20e71be184458283bdae4646_55974_800x0_resize_q75_box.jpg b/resources/_gen/images/post/markdown-syntax/pawel-czerwinski-8uZPynIu-rQ-unsplash_hud7e36f7e20e71be184458283bdae4646_55974_800x0_resize_q75_box.jpg
new file mode 100644
index 00000000..2bc41625
Binary files /dev/null and b/resources/_gen/images/post/markdown-syntax/pawel-czerwinski-8uZPynIu-rQ-unsplash_hud7e36f7e20e71be184458283bdae4646_55974_800x0_resize_q75_box.jpg differ
diff --git a/resources/_gen/images/post/my2021/_hub4793d9bcd0c763f6c3c447eae921687_1203292_be80c689503e627267ba55e1dbd6c98e.jpg b/resources/_gen/images/post/my2021/_hub4793d9bcd0c763f6c3c447eae921687_1203292_be80c689503e627267ba55e1dbd6c98e.jpg
new file mode 100644
index 00000000..bd86bef2
Binary files /dev/null and b/resources/_gen/images/post/my2021/_hub4793d9bcd0c763f6c3c447eae921687_1203292_be80c689503e627267ba55e1dbd6c98e.jpg differ
diff --git a/resources/_gen/images/post/my2021/clay-banks-HyczMwZbdLg-unsplash_hub4793d9bcd0c763f6c3c447eae921687_1203292_120x120_fill_q75_box_smart1.jpg b/resources/_gen/images/post/my2021/clay-banks-HyczMwZbdLg-unsplash_hub4793d9bcd0c763f6c3c447eae921687_1203292_120x120_fill_q75_box_smart1.jpg
new file mode 100644
index 00000000..8f4552bc
Binary files /dev/null and b/resources/_gen/images/post/my2021/clay-banks-HyczMwZbdLg-unsplash_hub4793d9bcd0c763f6c3c447eae921687_1203292_120x120_fill_q75_box_smart1.jpg differ
diff --git a/resources/_gen/images/post/my2021/clay-banks-HyczMwZbdLg-unsplash_hub4793d9bcd0c763f6c3c447eae921687_1203292_1600x0_resize_q75_box.jpg b/resources/_gen/images/post/my2021/clay-banks-HyczMwZbdLg-unsplash_hub4793d9bcd0c763f6c3c447eae921687_1203292_1600x0_resize_q75_box.jpg
new file mode 100644
index 00000000..88a1563b
Binary files /dev/null and b/resources/_gen/images/post/my2021/clay-banks-HyczMwZbdLg-unsplash_hub4793d9bcd0c763f6c3c447eae921687_1203292_1600x0_resize_q75_box.jpg differ
diff --git a/resources/_gen/images/post/my2021/clay-banks-HyczMwZbdLg-unsplash_hub4793d9bcd0c763f6c3c447eae921687_1203292_800x0_resize_q75_box.jpg b/resources/_gen/images/post/my2021/clay-banks-HyczMwZbdLg-unsplash_hub4793d9bcd0c763f6c3c447eae921687_1203292_800x0_resize_q75_box.jpg
new file mode 100644
index 00000000..b6ec81d0
Binary files /dev/null and b/resources/_gen/images/post/my2021/clay-banks-HyczMwZbdLg-unsplash_hub4793d9bcd0c763f6c3c447eae921687_1203292_800x0_resize_q75_box.jpg differ
diff --git a/resources/_gen/images/post/shell-file-trans/1.1eff5ae1917545b948be0d8d8a6dec77_huf95364caf60a3b9d7580a3cfd70ebe86_78402_250x150_fill_q75_h2_box_smart1_2.webp b/resources/_gen/images/post/shell-file-trans/1.1eff5ae1917545b948be0d8d8a6dec77_huf95364caf60a3b9d7580a3cfd70ebe86_78402_250x150_fill_q75_h2_box_smart1_2.webp
new file mode 100644
index 00000000..8bd43b45
Binary files /dev/null and b/resources/_gen/images/post/shell-file-trans/1.1eff5ae1917545b948be0d8d8a6dec77_huf95364caf60a3b9d7580a3cfd70ebe86_78402_250x150_fill_q75_h2_box_smart1_2.webp differ
diff --git a/resources/_gen/images/post/shell-file-trans/1_huf95364caf60a3b9d7580a3cfd70ebe86_78402_120x120_fill_q75_h2_box_smart1_2.webp b/resources/_gen/images/post/shell-file-trans/1_huf95364caf60a3b9d7580a3cfd70ebe86_78402_120x120_fill_q75_h2_box_smart1_2.webp
new file mode 100644
index 00000000..d81d15a7
Binary files /dev/null and b/resources/_gen/images/post/shell-file-trans/1_huf95364caf60a3b9d7580a3cfd70ebe86_78402_120x120_fill_q75_h2_box_smart1_2.webp differ
diff --git a/resources/_gen/images/post/shell-file-trans/1_huf95364caf60a3b9d7580a3cfd70ebe86_78402_1600x0_resize_q75_h2_box_2.webp b/resources/_gen/images/post/shell-file-trans/1_huf95364caf60a3b9d7580a3cfd70ebe86_78402_1600x0_resize_q75_h2_box_2.webp
new file mode 100644
index 00000000..ca48be0e
Binary files /dev/null and b/resources/_gen/images/post/shell-file-trans/1_huf95364caf60a3b9d7580a3cfd70ebe86_78402_1600x0_resize_q75_h2_box_2.webp differ
diff --git a/resources/_gen/images/post/shell-file-trans/1_huf95364caf60a3b9d7580a3cfd70ebe86_78402_800x0_resize_q75_h2_box_2.webp b/resources/_gen/images/post/shell-file-trans/1_huf95364caf60a3b9d7580a3cfd70ebe86_78402_800x0_resize_q75_h2_box_2.webp
new file mode 100644
index 00000000..f9021686
Binary files /dev/null and b/resources/_gen/images/post/shell-file-trans/1_huf95364caf60a3b9d7580a3cfd70ebe86_78402_800x0_resize_q75_h2_box_2.webp differ
diff --git a/resources/_gen/images/post/taper/banner_hu35ba7308684e9631a26113d051d46675_35878_120x120_fill_q75_h2_box_smart1_2.webp b/resources/_gen/images/post/taper/banner_hu35ba7308684e9631a26113d051d46675_35878_120x120_fill_q75_h2_box_smart1_2.webp
new file mode 100644
index 00000000..537bbb4d
Binary files /dev/null and b/resources/_gen/images/post/taper/banner_hu35ba7308684e9631a26113d051d46675_35878_120x120_fill_q75_h2_box_smart1_2.webp differ
diff --git a/resources/_gen/images/post/taper/banner_hu35ba7308684e9631a26113d051d46675_35878_1600x0_resize_q75_h2_box_2.webp b/resources/_gen/images/post/taper/banner_hu35ba7308684e9631a26113d051d46675_35878_1600x0_resize_q75_h2_box_2.webp
new file mode 100644
index 00000000..da2fc3ed
Binary files /dev/null and b/resources/_gen/images/post/taper/banner_hu35ba7308684e9631a26113d051d46675_35878_1600x0_resize_q75_h2_box_2.webp differ
diff --git a/resources/_gen/images/post/taper/banner_hu35ba7308684e9631a26113d051d46675_35878_800x0_resize_q75_h2_box_2.webp b/resources/_gen/images/post/taper/banner_hu35ba7308684e9631a26113d051d46675_35878_800x0_resize_q75_h2_box_2.webp
new file mode 100644
index 00000000..d470ae77
Binary files /dev/null and b/resources/_gen/images/post/taper/banner_hu35ba7308684e9631a26113d051d46675_35878_800x0_resize_q75_h2_box_2.webp differ
diff --git a/resources/_gen/images/post/thinkaboutai/banner_huc8a375384e5b63ad140c58eca88663ff_140028_120x120_fill_q75_h2_box_smart1_2.webp b/resources/_gen/images/post/thinkaboutai/banner_huc8a375384e5b63ad140c58eca88663ff_140028_120x120_fill_q75_h2_box_smart1_2.webp
new file mode 100644
index 00000000..f22aac60
Binary files /dev/null and b/resources/_gen/images/post/thinkaboutai/banner_huc8a375384e5b63ad140c58eca88663ff_140028_120x120_fill_q75_h2_box_smart1_2.webp differ
diff --git a/resources/_gen/images/post/thinkaboutai/banner_huc8a375384e5b63ad140c58eca88663ff_140028_1600x0_resize_q75_h2_box_2.webp b/resources/_gen/images/post/thinkaboutai/banner_huc8a375384e5b63ad140c58eca88663ff_140028_1600x0_resize_q75_h2_box_2.webp
new file mode 100644
index 00000000..1a7e4257
Binary files /dev/null and b/resources/_gen/images/post/thinkaboutai/banner_huc8a375384e5b63ad140c58eca88663ff_140028_1600x0_resize_q75_h2_box_2.webp differ
diff --git a/resources/_gen/images/post/thinkaboutai/banner_huc8a375384e5b63ad140c58eca88663ff_140028_800x0_resize_q75_h2_box_2.webp b/resources/_gen/images/post/thinkaboutai/banner_huc8a375384e5b63ad140c58eca88663ff_140028_800x0_resize_q75_h2_box_2.webp
new file mode 100644
index 00000000..c869142c
Binary files /dev/null and b/resources/_gen/images/post/thinkaboutai/banner_huc8a375384e5b63ad140c58eca88663ff_140028_800x0_resize_q75_h2_box_2.webp differ