Browse Source

Switch logging from the log crate to tracing; fix

skyffire, 9 months ago
parent
commit
653ba847df
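
The commit swaps the `log` facade for `tracing` at every call site. A minimal sketch of the pattern applied throughout the diff below; the function and variable names are illustrative, not taken from the repository:

    use tracing::{error, info, warn};

    // Before: log::warn!("Integrity check failed: missing {} klines", n);
    // After:  only the import and the macro path change; the format syntax stays the same.
    fn report(missing: usize) {
        warn!("Integrity check failed: missing {} klines", missing);
        info!("a stream connected");
        error!("Failed to parse float: {}", "abc");
    }

    fn main() {
        report(3);
    }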

+ 152 - 4
Cargo.lock

@@ -860,6 +860,15 @@ dependencies = [
  "cfg-if",
 ]
 
+[[package]]
+name = "crossbeam-channel"
+version = "0.5.14"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "06ba6d68e24814cb8de6bb986db8222d3a027d15872cabc0d18817bc3c0e4471"
+dependencies = [
+ "crossbeam-utils",
+]
+
 [[package]]
 name = "crossbeam-deque"
 version = "0.8.5"
@@ -1332,7 +1341,6 @@ dependencies = [
  "hyper-util",
  "iced",
  "iced_futures 0.13.2",
- "log",
  "native-tls",
  "ordered-float",
  "regex",
@@ -1344,10 +1352,14 @@ dependencies = [
  "sha2",
  "sonic-rs",
  "thiserror 1.0.69",
+ "time",
  "tokio",
  "tokio-native-tls",
  "tokio-rustls 0.24.1",
  "tokio-tungstenite",
+ "tracing",
+ "tracing-appender-timezone",
+ "tracing-subscriber",
  "tungstenite",
  "url",
  "uuid",
@@ -2423,6 +2435,12 @@ dependencies = [
  "smallvec",
 ]
 
+[[package]]
+name = "lazy_static"
+version = "1.5.0"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "bbd2bcb4c963f2ddae06a2efc7e9f3591312473c50c6685e1f298068316e66fe"
+
 [[package]]
 name = "lebe"
 version = "0.5.2"
@@ -2595,6 +2613,15 @@ dependencies = [
  "libc",
 ]
 
+[[package]]
+name = "matchers"
+version = "0.1.0"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "8263075bb86c5a1b1427b5ae862e8889656f126e9f77c484496e8b47cf5c5558"
+dependencies = [
+ "regex-automata 0.1.10",
+]
+
 [[package]]
 name = "memchr"
 version = "2.7.4"
@@ -2767,6 +2794,16 @@ dependencies = [
  "memoffset",
 ]
 
+[[package]]
+name = "nu-ansi-term"
+version = "0.46.0"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "77a8165726e8236064dbb45459242600304b42a5ea24ee2948e18e023bf7ba84"
+dependencies = [
+ "overload",
+ "winapi",
+]
+
 [[package]]
 name = "num-conv"
 version = "0.1.0"
@@ -2814,6 +2851,15 @@ dependencies = [
  "syn 2.0.90",
 ]
 
+[[package]]
+name = "num_threads"
+version = "0.1.7"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "5c7398b9c8b70908f6371f47ed36737907c87c52af34c268fed0bf0ceb92ead9"
+dependencies = [
+ "libc",
+]
+
 [[package]]
 name = "objc"
 version = "0.2.7"
@@ -3168,6 +3214,12 @@ dependencies = [
  "syn 2.0.90",
 ]
 
+[[package]]
+name = "overload"
+version = "0.1.1"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "b15813163c1d831bf4a13c3610c05c0d03b39feb07f7e09fa234dac9b15aaf39"
+
 [[package]]
 name = "owned_ttf_parser"
 version = "0.24.0"
@@ -3651,8 +3703,17 @@ checksum = "b544ef1b4eac5dc2db33ea63606ae9ffcfac26c1416a2806ae0bf5f56b201191"
 dependencies = [
  "aho-corasick",
  "memchr",
- "regex-automata",
- "regex-syntax",
+ "regex-automata 0.4.9",
+ "regex-syntax 0.8.5",
+]
+
+[[package]]
+name = "regex-automata"
+version = "0.1.10"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "6c230d73fb8d8c1b9c0b3135c5142a8acee3a0558fb8db5cf1cb65f8d7862132"
+dependencies = [
+ "regex-syntax 0.6.29",
 ]
 
 [[package]]
@@ -3663,9 +3724,15 @@ checksum = "809e8dc61f6de73b46c85f4c96486310fe304c434cfa43669d7b40f711150908"
 dependencies = [
  "aho-corasick",
  "memchr",
- "regex-syntax",
+ "regex-syntax 0.8.5",
 ]
 
+[[package]]
+name = "regex-syntax"
+version = "0.6.29"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "f162c6dd7b008981e4d40210aca20b4bd0f9b60ca9271061b07f78537722f2e1"
+
 [[package]]
 name = "regex-syntax"
 version = "0.8.5"
@@ -4126,6 +4193,15 @@ dependencies = [
  "digest",
 ]
 
+[[package]]
+name = "sharded-slab"
+version = "0.1.7"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "f40ca3c46823713e0d4209592e8d6e826aa57e928f09752619fc696c499637f6"
+dependencies = [
+ "lazy_static",
+]
+
 [[package]]
 name = "signal-hook-registry"
 version = "1.4.2"
@@ -4539,6 +4615,16 @@ dependencies = [
  "syn 2.0.90",
 ]
 
+[[package]]
+name = "thread_local"
+version = "1.1.8"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "8b9ef9bad013ada3808854ceac7b46812a6465ba368859a37e2100283d2d719c"
+dependencies = [
+ "cfg-if",
+ "once_cell",
+]
+
 [[package]]
 name = "tiff"
 version = "0.9.1"
@@ -4557,10 +4643,14 @@ source = "registry+https://github.com/rust-lang/crates.io-index"
 checksum = "35e7868883861bd0e56d9ac6efcaaca0d6d5d82a2a7ec8209ff492c07cf37b21"
 dependencies = [
  "deranged",
+ "itoa",
+ "libc",
  "num-conv",
+ "num_threads",
  "powerfmt",
  "serde",
  "time-core",
+ "time-macros",
 ]
 
 [[package]]
@@ -4569,6 +4659,16 @@ version = "0.1.2"
 source = "registry+https://github.com/rust-lang/crates.io-index"
 checksum = "ef927ca75afb808a4d64dd374f00a2adf8d0fcff8e7b184af886c3c87ec4a3f3"
 
+[[package]]
+name = "time-macros"
+version = "0.2.19"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "2834e6017e3e5e4b9834939793b282bc03b37a3336245fa820e35e233e2a85de"
+dependencies = [
+ "num-conv",
+ "time-core",
+]
+
 [[package]]
 name = "tiny-skia"
 version = "0.11.4"
@@ -4763,6 +4863,17 @@ dependencies = [
  "tracing-core",
 ]
 
+[[package]]
+name = "tracing-appender-timezone"
+version = "0.1.0"
+source = "git+https://github.com/skyfffire/tracing-appender-timezone.git#3e154006db8fcefb873b23f43b099e9cc6a88d56"
+dependencies = [
+ "crossbeam-channel",
+ "thiserror 1.0.69",
+ "time",
+ "tracing-subscriber",
+]
+
 [[package]]
 name = "tracing-attributes"
 version = "0.1.27"
@@ -4781,6 +4892,37 @@ source = "registry+https://github.com/rust-lang/crates.io-index"
 checksum = "c06d3da6113f116aaee68e4d601191614c9053067f9ab7f6edbcb161237daa54"
 dependencies = [
  "once_cell",
+ "valuable",
+]
+
+[[package]]
+name = "tracing-log"
+version = "0.2.0"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "ee855f1f400bd0e5c02d150ae5de3840039a3f54b025156404e34c23c03f47c3"
+dependencies = [
+ "log",
+ "once_cell",
+ "tracing-core",
+]
+
+[[package]]
+name = "tracing-subscriber"
+version = "0.3.18"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "ad0f048c97dbd9faa9b7df56362b8ebcaa52adb06b498c050d2f4e32f90a7a8b"
+dependencies = [
+ "matchers",
+ "nu-ansi-term",
+ "once_cell",
+ "regex",
+ "sharded-slab",
+ "smallvec",
+ "thread_local",
+ "time",
+ "tracing",
+ "tracing-core",
+ "tracing-log",
 ]
 
 [[package]]
@@ -4955,6 +5097,12 @@ dependencies = [
  "getrandom",
 ]
 
+[[package]]
+name = "valuable"
+version = "0.1.1"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "ba73ea9cf16a25df0c8caa16c51acb937d5712a8429db78a3ee29d5dcacd3a65"
+
 [[package]]
 name = "vcpkg"
 version = "0.2.15"

+ 8 - 1
Cargo.toml

@@ -32,7 +32,14 @@ tokio-rustls = "0.24.1"
 webpki-roots = "0.23.1"
 rustc-hash = "2.0.0"
 fern = "0.6.2"
-log = "0.4.22"
+tracing = "0.1"
+tracing-subscriber = { version = "0.3.17", features = [
+    "env-filter",
+    "time",
+    "local-time"
+] }
+time = { version = "0.3.7", features = ["macros"] }
+tracing-appender-timezone = { git = "https://github.com/skyfffire/tracing-appender-timezone.git" }
 thiserror = "1.0.68"
 ordered-float = "4.5.0"
 regex = "1.11.1"
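
Note: the `env-filter` feature enabled on `tracing-subscriber` provides directive-based level filtering, which the new `logger::final_init` feeds with its `level` string. A minimal, self-contained sketch of that filtering (the per-crate names in the directive are placeholders, not dependencies of this project):

    use tracing_subscriber::EnvFilter;

    fn main() {
        // "info" enables INFO and above globally; comma-separated overrides
        // adjust individual crates, e.g. silence hyper below WARN.
        let filter = EnvFilter::new("info,hyper=warn");
        let subscriber = tracing_subscriber::fmt()
            .with_env_filter(filter)
            .finish();
        tracing::subscriber::set_global_default(subscriber).expect("set subscriber");
        tracing::info!("filter installed");
    }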

+ 6 - 4
src/charts.rs

@@ -15,6 +15,8 @@ use crate::{
     tooltip::{self, tooltip}, widget::hsplit::HSplit
 };
 
+use tracing::{warn};
+
 mod scales;
 pub mod candlestick;
 pub mod footprint;
@@ -623,7 +625,7 @@ impl CommonChartData {
                 time += interval;
             }
             
-            log::warn!("Integrity check failed: missing {} klines", missing_keys.len());
+            warn!("Integrity check failed: missing {} klines", missing_keys.len());
             return Some(missing_keys);
         }
 
@@ -653,9 +655,9 @@ fn request_fetch(handler: &mut RequestHandler, range: FetchRange) -> Option<Task
         Ok(req_id) => Some(Task::done(Message::NewDataRange(req_id, range))),
         Err(e) => {
             match e {
-                ReqError::Overlaps => log::warn!("Request overlaps with existing request"),
-                ReqError::Failed(msg) => log::warn!("Request already failed: {}", msg),
-                ReqError::Completed => log::warn!("Request already completed"),
+                ReqError::Overlaps => warn!("Request overlaps with existing request"),
+                ReqError::Failed(msg) => warn!("Request already failed: {}", msg),
+                ReqError::Completed => warn!("Request already completed"),
             }
             None
         }

+ 5 - 4
src/data_providers.rs

@@ -11,6 +11,7 @@ use rust_decimal::{
 };
 use serde::{Deserialize, Deserializer, Serialize};
 use serde_json::Value;
+use tracing::{error};
 
 pub mod binance;
 pub mod china_futures;
@@ -228,14 +229,14 @@ impl TickMultiplier {
         let multiplier = if let Some(m) = Decimal::from_f32(f32::from(self.0)) {
             m
         } else {
-            log::error!("Failed to convert multiplier: {}", self.0);
+            error!("Failed to convert multiplier: {}", self.0);
             return f32::from(self.0) * min_tick_size;
         };
 
         let decimal_min_tick_size = if let Some(d) = Decimal::from_f32(min_tick_size) {
             d
         } else {
-            log::error!("Failed to convert min_tick_size: {}", min_tick_size);
+            error!("Failed to convert min_tick_size: {}", min_tick_size);
             return f32::from(self.0) * min_tick_size;
         };
 
@@ -244,7 +245,7 @@ impl TickMultiplier {
             let decimal_places = calculate_decimal_places(min_tick_size);
             round_to_decimal_places(tick_size, decimal_places)
         } else {
-            log::error!("Failed to calculate tick size for multiplier: {}", self.0);
+            error!("Failed to calculate tick size for multiplier: {}", self.0);
             f32::from(self.0) * min_tick_size
         }
     }
@@ -608,7 +609,7 @@ async fn setup_websocket_connection(
 
 fn str_f32_parse(s: &str) -> f32 {
     s.parse::<f32>().unwrap_or_else(|e| {
-        log::error!("Failed to parse float: {}, error: {}", s, e);
+        error!("Failed to parse float: {}, error: {}", s, e);
         0.0
     })
 }

+ 20 - 19
src/data_providers/binance.rs

@@ -8,6 +8,7 @@ use iced_futures::stream;
 use regex::Regex;
 use serde::{Deserialize, Serialize};
 use sonic_rs::{to_object_iter_unchecked, FastStr};
+use tracing::{error, info, warn};
 
 use super::{
     deserialize_string_to_f32, setup_tcp_connection, setup_tls_connection, setup_websocket_connection, str_f32_parse, 
@@ -205,11 +206,11 @@ fn feed_de(slice: &[u8], market: MarketType) -> Result<StreamData, StreamError>
                     ));
                 }
                 _ => {
-                    log::error!("Unknown stream type");
+                    error!("Unknown stream type");
                 }
             }
         } else {
-            log::error!("Unknown data: {:?}", k);
+            error!("Unknown data: {:?}", k);
         }
     }
 
@@ -344,7 +345,7 @@ pub fn connect_market_stream(ticker: Ticker) -> impl Stream<Item = Event> {
                                         }
                                         StreamData::Depth(depth_type) => {
                                             if already_fetching {
-                                                log::warn!("Already fetching...\n");
+                                                warn!("Already fetching...\n");
                                                 continue;
                                             }
 
@@ -362,7 +363,7 @@ pub fn connect_market_stream(ticker: Ticker) -> impl Stream<Item = Event> {
                                                         && (de_depth.first_id > last_update_id + 1)
                                                         || (last_update_id + 1 > de_depth.final_id)
                                                     {
-                                                        log::warn!("Out of sync at first event. Trying to resync...\n");
+                                                        warn!("Out of sync at first event. Trying to resync...\n");
 
                                                         try_resync(
                                                             ticker, 
@@ -411,7 +412,7 @@ pub fn connect_market_stream(ticker: Ticker) -> impl Stream<Item = Event> {
                                                         && (de_depth.first_id > last_update_id + 1)
                                                         || (last_update_id + 1 > de_depth.final_id)
                                                     {
-                                                        log::warn!("Out of sync at first event. Trying to resync...\n");
+                                                        warn!("Out of sync at first event. Trying to resync...\n");
 
                                                         try_resync(
                                                             ticker, 
@@ -745,7 +746,7 @@ pub async fn fetch_ticksize(market_type: MarketType) -> Result<HashMap<Ticker, O
         .and_then(|x| x["limit"].as_i64())
         .ok_or_else(|| StreamError::ParseError("Missing request weight limit".to_string()))?;
 
-    log::info!(
+    info!(
         "Binance req. weight limit per minute {}: {:?}", 
         match market_type {
             MarketType::Spot => "Spot",
@@ -872,15 +873,15 @@ async fn handle_rate_limit(headers: &hyper::HeaderMap, max_limit: f32) -> Result
 
     match usage_percentage {
         p if p >= 95.0 => {
-            log::warn!("Rate limit critical ({:.1}%), sleeping for 10s", p);
+            warn!("Rate limit critical ({:.1}%), sleeping for 10s", p);
             tokio::time::sleep(tokio::time::Duration::from_secs(10)).await;
         }
         p if p >= 90.0 => {
-            log::warn!("Rate limit high ({:.1}%), sleeping for 5s", p);
+            warn!("Rate limit high ({:.1}%), sleeping for 5s", p);
             tokio::time::sleep(tokio::time::Duration::from_secs(5)).await;
         }
         p if p >= 80.0 => {
-            log::warn!("Rate limit warning ({:.1}%), sleeping for 3s", p);
+            warn!("Rate limit warning ({:.1}%), sleeping for 3s", p);
             tokio::time::sleep(tokio::time::Duration::from_secs(3)).await;
         }
         _ => (),
@@ -910,7 +911,7 @@ pub async fn fetch_trades(
     match get_hist_trades(ticker, from_date).await {
         Ok(trades) => Ok(trades),
         Err(e) => {
-            log::warn!("Historical trades fetch failed: {}, falling back to intraday fetch", e);
+            warn!("Historical trades fetch failed: {}, falling back to intraday fetch", e);
             fetch_intraday_trades(ticker, from_time).await
         }
     }
@@ -981,11 +982,11 @@ pub async fn get_hist_trades(
     );
     
     if std::fs::metadata(&zip_path).is_ok() {
-        log::info!("Using cached {}", zip_path);
+        info!("Using cached {}", zip_path);
     } else {
         let url = format!("https://data.binance.vision/{zip_path}");
 
-        log::info!("Downloading from {}", url);
+        info!("Downloading from {}", url);
         
         let resp = reqwest::get(&url).await.map_err(StreamError::FetchError)?;
         
@@ -1046,7 +1047,7 @@ pub async fn get_hist_trades(
                         trades.extend(intraday_trades);
                     }
                     Err(e) => {
-                        log::error!("Failed to fetch intraday trades: {}", e);
+                        error!("Failed to fetch intraday trades: {}", e);
                     }
                 }
             }
@@ -1085,7 +1086,7 @@ pub async fn fetch_historical_oi(
         Timeframe::H4 => "4h",
         _ => {
             let err_msg = format!("Unsupported timeframe for open interest: {}", period);
-            log::error!("{}", err_msg);
+            error!("{}", err_msg);
             return Err(StreamError::UnknownError(err_msg));
         }
     };
@@ -1106,12 +1107,12 @@ pub async fn fetch_historical_oi(
             let err_msg = format!(
                 "Requested end time {} is before available data (30 days is the API limit)", end
             );
-            log::error!("{}", err_msg);
+            error!("{}", err_msg);
             return Err(StreamError::UnknownError(err_msg));
         }
 
         let adjusted_start = if start < thirty_days_ago {
-            log::warn!("Adjusting start time from {} to {} (30 days limit)", start, thirty_days_ago);
+            warn!("Adjusting start time from {} to {} (30 days limit)", start, thirty_days_ago);
             thirty_days_ago
         } else {
             start
@@ -1134,20 +1135,20 @@ pub async fn fetch_historical_oi(
     let response = reqwest::get(&url)
         .await
         .map_err(|e| {
-            log::error!("Failed to fetch from {}: {}", url, e);
+            error!("Failed to fetch from {}: {}", url, e);
             StreamError::FetchError(e)
         })?;
         
     let text = response.text()
         .await
         .map_err(|e| {
-            log::error!("Failed to get response text from {}: {}", url, e);
+            error!("Failed to get response text from {}: {}", url, e);
             StreamError::FetchError(e)
         })?;
 
     let binance_oi: Vec<DeOpenInterest> = serde_json::from_str(&text)
         .map_err(|e| {
-            log::error!("Failed to parse response from {}: {}\nResponse: {}", url, e, text);
+            error!("Failed to parse response from {}: {}\nResponse: {}", url, e, text);
             StreamError::ParseError(format!("Failed to parse open interest: {e}"))
         })?;
 

+ 54 - 145
src/data_providers/china_futures.rs

@@ -1,3 +1,4 @@
+use tracing::warn;
 use crate::data_providers::deserialize_string_to_f32;
 use crate::data_providers::deserialize_string_to_i64;
 use std::collections::HashMap;
@@ -28,6 +29,8 @@ use crate::{Ticker, Timeframe};
 use super::str_f32_parse;
 use super::OpenInterest;
 
+use tracing::{error};
+
 #[derive(Serialize, Deserialize, Debug)]
 struct SonicDepth {
     #[serde(rename = "u")]
@@ -58,29 +61,29 @@ struct SonicTrade {
     pub is_sell: String,
 }
 
-#[derive(Deserialize, Debug, Clone)]
-pub struct SonicKline {
-    #[serde(rename = "start")]
-    pub time: u64,
-    #[serde(rename = "open")]
-    pub open: String,
-    #[serde(rename = "high")]
-    pub high: String,
-    #[serde(rename = "low")]
-    pub low: String,
-    #[serde(rename = "close")]
-    pub close: String,
-    #[serde(rename = "volume")]
-    pub volume: String,
-    #[serde(rename = "interval")]
-    pub interval: String,
-}
+// #[derive(Deserialize, Debug, Clone)]
+// pub struct SonicKline {
+//     #[serde(rename = "start")]
+//     pub time: u64,
+//     #[serde(rename = "open")]
+//     pub open: String,
+//     #[serde(rename = "high")]
+//     pub high: String,
+//     #[serde(rename = "low")]
+//     pub low: String,
+//     #[serde(rename = "close")]
+//     pub close: String,
+//     #[serde(rename = "volume")]
+//     pub volume: String,
+//     #[serde(rename = "interval")]
+//     pub interval: String,
+// }
 
 #[derive(Debug)]
 enum StreamData {
     Trade(Vec<SonicTrade>),
     Depth(SonicDepth, String, i64),
-    Kline(Ticker, Vec<SonicKline>),
+    // Kline(Ticker, Vec<SonicKline>),
 }
 
 #[derive(Debug)]
@@ -126,7 +129,7 @@ fn feed_de(
     let mut depth_wrap: Option<SonicDepth> = None;
 
     let mut data_type = String::new();
-    let mut topic_ticker = Ticker::default();
+    // let mut topic_ticker = Ticker::default();
 
     let iter: sonic_rs::ObjectJsonIter = unsafe { to_object_iter_unchecked(slice) };
 
@@ -142,23 +145,23 @@ fn feed_de(
                 }
 
                 match StreamName::from_topic(val, is_ticker, market_type) {
-                    StreamName::Depth(ticker) => {
+                    StreamName::Depth(_ticker) => {
                         stream_type = Some(StreamWrapper::Depth);
 
-                        topic_ticker = ticker;
+                        // topic_ticker = ticker;
                     }
-                    StreamName::Trade(ticker) => {
+                    StreamName::Trade(_ticker) => {
                         stream_type = Some(StreamWrapper::Trade);
 
-                        topic_ticker = ticker;
+                        // topic_ticker = ticker;
                     }
-                    StreamName::Kline(ticker) => {
+                    StreamName::Kline(_ticker) => {
                         stream_type = Some(StreamWrapper::Kline);
 
-                        topic_ticker = ticker;
+                        // topic_ticker = ticker;
                     }
                     _ => {
-                        log::error!("Unknown stream name");
+                        error!("Unknown stream name");
                     }
                 }
             }
@@ -185,14 +188,14 @@ fn feed_de(
                             .map_err(|e| StreamError::ParseError(e.to_string()))?,
                     );
                 }
-                Some(StreamWrapper::Kline) => {
-                    let kline_wrap: Vec<SonicKline> = sonic_rs::from_str(&v.as_raw_faststr())
-                        .map_err(|e| StreamError::ParseError(e.to_string()))?;
-
-                    return Ok(StreamData::Kline(topic_ticker, kline_wrap));
-                }
+                // Some(StreamWrapper::Kline) => {
+                //     let kline_wrap: Vec<SonicKline> = sonic_rs::from_str(&v.as_raw_faststr())
+                //         .map_err(|e| StreamError::ParseError(e.to_string()))?;
+                //
+                //     return Ok(StreamData::Kline(topic_ticker, kline_wrap));
+                // }
                 _ => {
-                    log::error!("Unknown stream type");
+                    error!("Unknown stream type");
                 }
             }
         } else if k == "cts" {
@@ -210,27 +213,19 @@ fn feed_de(
 }
 
 async fn connect(
-    domain: &str, 
-    market_type: MarketType
+    domain: &str,
 ) -> Result<FragmentCollector<TokioIo<Upgraded>>, StreamError> {
     let tcp_stream = setup_tcp_connection(domain).await?;
     let tls_stream = setup_tls_connection(domain, tcp_stream).await?;
-    let url = format!(
-        "wss://stream.bybit.com/v5/public/{}",
-        match market_type {
-            MarketType::Spot => "spot",
-            MarketType::LinearPerps => "linear",
-        }
-    );
+    let url = "ws://127.0.0.1:6789";
     setup_websocket_connection(domain, tls_stream, &url).await
 }
 
 async fn try_connect(
     streams: &Value,
-    market_type: MarketType,
     output: &mut futures::channel::mpsc::Sender<Event>,
 ) -> State {
-    match connect("stream.bybit.com", market_type).await {
+    match connect("127.0.0.1:6789").await {
         Ok(mut websocket) => {
             if let Err(e) = websocket
                 .write_frame(Frame::text(fastwebsockets::Payload::Borrowed(
@@ -274,7 +269,7 @@ pub fn connect_market_stream(ticker: Ticker) -> impl Stream<Item = Event> {
             symbol_str,
         );
 
-        let subscribe_message = serde_json::json!({
+        let subscribe_message = json!({
             "op": "subscribe",
             "args": [stream_1, stream_2]
         });
@@ -286,8 +281,7 @@ pub fn connect_market_stream(ticker: Ticker) -> impl Stream<Item = Event> {
             match &mut state {
                 State::Disconnected => {
                     state = try_connect(
-                        &subscribe_message, 
-                        market_type,
+                        &subscribe_message,
                         &mut output
                     ).await;
                 }
@@ -347,9 +341,6 @@ pub fn connect_market_stream(ticker: Ticker) -> impl Stream<Item = Event> {
                                                 .await;
                                         }
                                     }
-                                    _ => {
-                                        log::warn!("Unknown data: {:?}", &data);
-                                    }
                                 }
                             }
                         }
@@ -375,94 +366,12 @@ pub fn connect_market_stream(ticker: Ticker) -> impl Stream<Item = Event> {
     })
 }
 
-pub fn connect_kline_stream(
-    streams: Vec<(Ticker, Timeframe)>, 
-    market_type: MarketType
-) -> impl Stream<Item = Event> {
-    stream::channel(100, move |mut output| async move {
-        let mut state = State::Disconnected;
-
-        let stream_str = streams
-            .iter()
-            .map(|(ticker, timeframe)| {
-                let timeframe_str = timeframe.to_minutes().to_string();
-                format!("kline.{timeframe_str}.{}", ticker.get_string().0)
-            })
-            .collect::<Vec<String>>();
-    
-        let subscribe_message = serde_json::json!({
-            "op": "subscribe",
-            "args": stream_str
-        });
-
-        loop {
-            match &mut state {
-                State::Disconnected => {
-                    state = try_connect(
-                        &subscribe_message, 
-                        market_type,
-                        &mut output
-                    ).await;
-                }
-                State::Connected(websocket) => match websocket.read_frame().await {
-                    Ok(msg) => match msg.opcode {
-                        OpCode::Text => {
-                            if let Ok(StreamData::Kline(ticker, de_kline_vec)) =
-                                feed_de(&msg.payload[..], None, market_type)
-                            {
-                                for de_kline in &de_kline_vec {
-                                    let kline = Kline {
-                                        time: de_kline.time,
-                                        open: str_f32_parse(&de_kline.open),
-                                        high: str_f32_parse(&de_kline.high),
-                                        low: str_f32_parse(&de_kline.low),
-                                        close: str_f32_parse(&de_kline.close),
-                                        volume: (-1.0, str_f32_parse(&de_kline.volume)),
-                                    };
-
-                                    if let Some(timeframe) = string_to_timeframe(&de_kline.interval)
-                                    {
-                                        let _ = output
-                                            .send(Event::KlineReceived(ticker, kline, timeframe))
-                                            .await;
-                                    } else {
-                                        log::error!(
-                                            "Failed to find timeframe: {}, {:?}",
-                                            &de_kline.interval,
-                                            streams
-                                        );
-                                    }
-                                }
-                            }
-                        }
-                        OpCode::Close => {
-                            state = State::Disconnected;
-                            let _ = output
-                                .send(Event::Disconnected("Connection closed".to_string()))
-                                .await;
-                        }
-                        _ => {}
-                    }
-                    Err(e) => {
-                        state = State::Disconnected;
-                        let _ = output
-                            .send(Event::Disconnected(
-                                "Error reading frame: ".to_string() + &e.to_string(),
-                            ))
-                            .await;
-                    }
-                },
-            }
-        }
-    })
-}
-
-fn string_to_timeframe(interval: &str) -> Option<Timeframe> {
-    Timeframe::ALL
-        .iter()
-        .find(|&tf| tf.to_minutes().to_string() == interval)
-        .copied()
-}
+// fn string_to_timeframe(interval: &str) -> Option<Timeframe> {
+//     Timeframe::ALL
+//         .iter()
+//         .find(|&tf| tf.to_minutes().to_string() == interval)
+//         .copied()
+// }
 
 #[derive(Debug, Clone, Copy, PartialEq, Deserialize)]
 #[serde(rename_all = "camelCase")]
@@ -488,7 +397,7 @@ pub async fn fetch_historical_oi(
         Timeframe::H4 => "4h",
         _ => {
             let err_msg = format!("Unsupported timeframe for open interest: {}", period);
-            log::error!("{}", err_msg);
+            error!("{}", err_msg);
             return Err(StreamError::UnknownError(err_msg));
         }
     };
@@ -516,33 +425,33 @@ pub async fn fetch_historical_oi(
     let response = reqwest::get(&url)
         .await
         .map_err(|e| {
-            log::error!("Failed to fetch from {}: {}", url, e);
+            error!("Failed to fetch from {}: {}", url, e);
             StreamError::FetchError(e)
         })?;
         
     let text = response.text()
         .await
         .map_err(|e| {
-            log::error!("Failed to get response text from {}: {}", url, e);
+            error!("Failed to get response text from {}: {}", url, e);
             StreamError::FetchError(e)
         })?;
 
     let content: Value = sonic_rs::from_str(&text)
         .map_err(|e| {
-            log::error!("Failed to parse JSON from {}: {}\nResponse: {}", url, e, text);
+            error!("Failed to parse JSON from {}: {}\nResponse: {}", url, e, text);
             StreamError::ParseError(e.to_string())
         })?;
 
     let result_list = content["result"]["list"]
         .as_array()
         .ok_or_else(|| {
-            log::error!("Result list is not an array in response: {}", text);
+            error!("Result list is not an array in response: {}", text);
             StreamError::ParseError("Result list is not an array".to_string())
         })?;
     
     let bybit_oi: Vec<DeOpenInterest> = serde_json::from_value(json!(result_list))
         .map_err(|e| {
-            log::error!("Failed to parse open interest array: {}\nResponse: {}", e, text);
+            error!("Failed to parse open interest array: {}\nResponse: {}", e, text);
             StreamError::ParseError(format!("Failed to parse open interest: {e}"))
         })?;
 
@@ -555,7 +464,7 @@ pub async fn fetch_historical_oi(
         .collect();
 
     if open_interest.is_empty() {
-        log::warn!("No open interest data found for {}, from url: {}", ticker_str, url);
+        warn!("No open interest data found for {}, from url: {}", ticker_str, url);
     }
 
     Ok(open_interest)

+ 4 - 2
src/data_providers/fetcher.rs

@@ -1,6 +1,8 @@
 use uuid::Uuid;
 use std::collections::HashMap;
 
+use tracing::{error, warn};
+
 #[derive(thiserror::Error, Debug, Clone)]
 pub enum ReqError {
     #[error("Request is already completed")]
@@ -51,7 +53,7 @@ impl RequestHandler {
             let timestamp = chrono::Utc::now().timestamp_millis() as u64;
             request.status = RequestStatus::Completed(timestamp);
         } else {
-            log::warn!("Request not found: {:?}", id);
+            warn!("Request not found: {:?}", id);
         }
     }
 
@@ -59,7 +61,7 @@ impl RequestHandler {
         if let Some(request) = self.requests.get_mut(&id) {
             request.status = RequestStatus::Failed(error);
         } else {
-            log::warn!("Request not found: {:?}", id);
+            warn!("Request not found: {:?}", id);
         }
     }
 }

+ 13 - 10
src/layout.rs

@@ -1,3 +1,6 @@
+use tracing::warn;
+use tracing::error;
+use tracing::info;
 use regex::Regex;
 use chrono::NaiveDate;
 use serde::{Deserialize, Serialize};
@@ -409,7 +412,7 @@ pub fn load_saved_state(file_path: &str) -> SavedState {
                                 settings,
                             ))
                         } else {
-                            log::info!("Skipping a CandlestickChart initialization due to missing ticker info");
+                            info!("Skipping a CandlestickChart initialization due to missing ticker info");
                             Configuration::Pane(PaneState::new(vec![], PaneSettings::default()))
                         }
                     }
@@ -440,7 +443,7 @@ pub fn load_saved_state(file_path: &str) -> SavedState {
                                 settings,
                             ))
                         } else {
-                            log::info!("Skipping a FootprintChart initialization due to missing ticker info");
+                            info!("Skipping a FootprintChart initialization due to missing ticker info");
                             Configuration::Pane(PaneState::new(vec![], PaneSettings::default()))
                         }
                     }
@@ -469,7 +472,7 @@ pub fn load_saved_state(file_path: &str) -> SavedState {
                                 settings,
                             ))
                         } else {
-                            log::info!("Skipping a HeatmapChart initialization due to missing ticker info");
+                            info!("Skipping a HeatmapChart initialization due to missing ticker info");
                             Configuration::Pane(PaneState::new(vec![], PaneSettings::default()))
                         }
                     }
@@ -505,7 +508,7 @@ pub fn load_saved_state(file_path: &str) -> SavedState {
             de_state
         }
         Err(e) => {
-            log::error!(
+            error!(
                 "Failed to load/find layout state: {}. Starting with a new layout.",
                 e
             );
@@ -533,7 +536,7 @@ pub fn read_from_file(file_path: &str) -> Result<SerializableState, Box<dyn std:
 
 pub fn cleanup_old_data(data_path: &std::path::Path) -> usize {
     if !data_path.exists() {
-        log::warn!("Data path {:?} does not exist, skipping cleanup", data_path);
+        warn!("Data path {:?} does not exist, skipping cleanup", data_path);
         return 0;
     }
 
@@ -545,7 +548,7 @@ pub fn cleanup_old_data(data_path: &std::path::Path) -> usize {
     let entries = match std::fs::read_dir(data_path) {
         Ok(entries) => entries,
         Err(e) => {
-            log::error!("Failed to read data directory: {}", e);
+            error!("Failed to read data directory: {}", e);
             return 0;
         }
     };
@@ -554,7 +557,7 @@ pub fn cleanup_old_data(data_path: &std::path::Path) -> usize {
         let symbol_dir = match std::fs::read_dir(entry.path()) {
             Ok(dir) => dir,
             Err(e) => {
-                log::error!("Failed to read symbol directory {:?}: {}", entry.path(), e);
+                error!("Failed to read symbol directory {:?}: {}", entry.path(), e);
                 continue;
             }
         };
@@ -571,10 +574,10 @@ pub fn cleanup_old_data(data_path: &std::path::Path) -> usize {
                     let days_old = today.signed_duration_since(file_date).num_days();
                     if days_old > 4 {
                         if let Err(e) = std::fs::remove_file(&path) {
-                            log::error!("Failed to remove old file {}: {}", filename, e);
+                            error!("Failed to remove old file {}: {}", filename, e);
                         } else {
                             deleted_files.push(filename.to_string());
-                            log::info!("Removed old file: {}", filename);
+                            info!("Removed old file: {}", filename);
                         }
                     }
                 }
@@ -582,6 +585,6 @@ pub fn cleanup_old_data(data_path: &std::path::Path) -> usize {
         }
     }
     
-    log::info!("File cleanup completed. Deleted {} files", deleted_files.len());
+    info!("File cleanup completed. Deleted {} files", deleted_files.len());
     deleted_files.len()
 }

+ 49 - 66
src/logger.rs

@@ -1,67 +1,50 @@
-use chrono::Local;
-use std::{
-    fs::{self, File},
-    process,
-};
-
-const MAX_LOG_FILE_SIZE: u64 = 10_000_000; // 10 MB
-
-pub fn setup(is_debug: bool, log_trace: bool) -> Result<(), fern::InitError> {
-    let log_level = if log_trace {
-        log::LevelFilter::Trace
-    } else {
-        log::LevelFilter::Info
-    };
-
-    let mut logger = fern::Dispatch::new()
-        .format(|out, message, record| {
-            out.finish(format_args!(
-                "{}:{} [{}:{}] -- {}",
-                Local::now().format("%H:%M:%S%.3f"),
-                record.level(),
-                record.file().unwrap_or("unknown"),
-                record.line().unwrap_or(0),
-                message
-            ));
-        })
-        .level(log_level);
-
-    if is_debug {
-        logger = logger.chain(std::io::stdout());
-    } else {
-        let log_file_path = "output.log";
-        let log_file = File::create(log_file_path)?;
-        log_file.set_len(0)?;
-
-        std::thread::spawn(move || {
-            monitor_file_size(log_file_path, MAX_LOG_FILE_SIZE);
-        });
-
-        let log_file = fern::log_file(log_file_path)?;
-        logger = logger.chain(log_file);
-    }
-
-    logger.apply()?;
-    Ok(())
-}
-
-fn monitor_file_size(file_path: &str, max_size_bytes: u64) {
-    loop {
-        match fs::metadata(file_path) {
-            Ok(metadata) => {
-                if metadata.len() > max_size_bytes {
-                    eprintln!(
-                        "Things went south. Log file size caused panic exceeding {} MB",
-                        metadata.len() / 1_000_000,
-                    );
-                    process::exit(1);
-                }
-            }
-            Err(err) => {
-                eprintln!("Error reading log file metadata: {err}");
-                process::exit(1);
-            }
-        }
-        std::thread::sleep(std::time::Duration::from_secs(30));
-    }
+use std::io;
+use tracing_appender_timezone::non_blocking::WorkerGuard;
+use tracing_subscriber::{fmt};
+use tracing_subscriber::layer::{SubscriberExt};
+use tracing_appender_timezone::rolling::{RollingFileAppender, Rotation};
+
+pub fn final_init(level: &str) -> WorkerGuard {
+    let mut path = String::new();
+    path.push_str("./logs");
+
+    let file_appender = RollingFileAppender::builder()
+        .time_zone(8)
+        .rotation(Rotation::DAILY)
+        .filename_suffix("log")
+        .build(path)
+        .expect("initializing rolling file appender failed");
+    let (non_blocking, guard) = tracing_appender_timezone::non_blocking(file_appender);
+
+    use time::{macros::format_description, UtcOffset};
+    use tracing_subscriber::{fmt::time::OffsetTime};
+    let local_time = OffsetTime::new(
+        UtcOffset::from_hms(8, 0, 0).unwrap(),
+        format_description!("[month]-[day] [hour]:[minute]:[second].[subsecond digits:3]"),
+    );
+
+    let fmt_layer = fmt::layer()
+        .with_timer(local_time.clone())
+        .with_target(true)
+        .with_line_number(true)
+        .with_level(true)
+        .with_writer(io::stdout)
+        .with_span_events(fmt::format::FmtSpan::FULL);
+
+    let file_layer = fmt::layer()
+        .with_timer(local_time.clone())
+        .with_target(true)
+        .with_ansi(false)
+        .with_level(true)
+        .with_writer(non_blocking.clone())
+        .with_span_events(fmt::format::FmtSpan::FULL);
+
+    let layer = tracing_subscriber::Registry::default()
+        .with(fmt_layer)
+        .with(file_layer)
+        .with(tracing_subscriber::EnvFilter::new(level));
+
+    tracing::subscriber::set_global_default(layer).unwrap();
+
+    guard
 }
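
The returned `WorkerGuard` belongs to the non-blocking file writer; assuming the `tracing-appender-timezone` fork behaves like upstream `tracing-appender`, dropping the guard flushes buffered lines and stops the background writer thread, so callers have to keep it alive for the life of the program. A sketch of the intended usage, mirroring the `main.rs` change below:

    fn main() {
        // Bind the guard to a named variable that lives until shutdown;
        // `let _ = logger::final_init(...)` would drop it immediately and
        // could lose buffered file output.
        let _guard = logger::final_init("info");
        tracing::info!("logging initialized");

        // ... run the application ...
    }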

+ 12 - 9
src/main.rs

@@ -1,4 +1,4 @@
-#![windows_subsystem = "windows"]
+// #![windows_subsystem = "windows"]
 
 mod style;
 mod charts;
@@ -31,9 +31,12 @@ use iced::{
 use iced_futures::MaybeSend;
 use futures::{StreamExt, TryFutureExt};
 use std::{collections::HashMap, vec, future::Future};
+use tracing::{error, info};
 
 fn main() {
-    logger::setup(false, false).expect("Failed to initialize logger");
+    let _guard = logger::final_init("info");
+
+    info!("----------------------------面板已启动----------------------");
 
     std::thread::spawn(|| {
         let data_dir_path = std::path::Path::new("data/futures/um/daily/aggTrades");
@@ -217,7 +220,7 @@ impl State {
     fn update(&mut self, message: Message) -> Task<Message> {
         match message {
             Message::SetTickersInfo(exchange, tickers_info) => {
-                log::info!("Received tickers info for {exchange}, len: {}", tickers_info.len());
+                info!("Received tickers info for {exchange}, len: {}", tickers_info.len());
                 self.tickers_info.insert(exchange, tickers_info);
             }
             Message::MarketWsEvent(exchange, event) => {
@@ -226,10 +229,10 @@ impl State {
 
                 match event {
                     data_providers::Event::Connected(_) => {
-                        log::info!("a stream connected to {exchange} WS");
+                        info!("a stream connected to {exchange} WS");
                     }
                     data_providers::Event::Disconnected(reason) => {
-                        log::info!("a stream disconnected from {exchange} WS: {reason:?}");
+                        info!("a stream disconnected from {exchange} WS: {reason:?}");
                     }
                     data_providers::Event::DepthReceived(
                         ticker,
@@ -337,12 +340,12 @@ impl State {
                         if let Err(e) =
                             layout::write_json_to_file(&layout_str, "dashboard_state.json")
                         {
-                            log::error!("Failed to write layout state to file: {}", e);
+                            error!("Failed to write layout state to file: {}", e);
                         } else {
-                            log::info!("Successfully wrote layout state to dashboard_state.json");
+                            info!("Successfully wrote layout state to dashboard_state.json");
                         }
                     }
-                    Err(e) => log::error!("Failed to serialize layout: {}", e),
+                    Err(e) => error!("Failed to serialize layout: {}", e),
                 }
 
                 return iced::exit();
@@ -957,7 +960,7 @@ impl State {
                             Subscription::run_with_id(kline_streams_id, stream)
                         },
                         Exchange::ChinaFutures => {
-                            let stream = china_futures::connect_kline_stream(
+                            let stream = binance::connect_kline_stream(
                                 kline_streams,
                                 market_type,
                             )

+ 3 - 2
src/screen.rs

@@ -1,3 +1,4 @@
+use tracing::error;
 use std::{collections::HashMap, fmt};
 
 use iced::{
@@ -95,7 +96,7 @@ where
     F: Fn(Notification) -> M + Send + 'static,
     M: MaybeSend + 'static,
 {
-    log::error!("{err}: {report}");
+    error!("{err}: {report}");
 
     Task::done(message(
         Notification::Error(report.to_string())
@@ -217,7 +218,7 @@ impl NotificationManager {
 
     /// Handle error notifications with special fetch error logic
     pub fn handle_error(&mut self, window: window::Id, pane: pane_grid::Pane, err: DashboardError) {
-        log::error!("{:?}", err);
+        error!("{:?}", err);
 
         let notification_list = self.get_or_create_notifications(window, pane);
         notification_list.push(Notification::Error(err.to_string()));

+ 8 - 6
src/screen/dashboard.rs

@@ -3,6 +3,8 @@ pub mod pane;
 use futures::TryFutureExt;
 pub use pane::{PaneState, PaneContent, PaneSettings};
 
+use tracing::{error, info};
+
 use crate::{
     charts::{
         candlestick::CandlestickChart, footprint::FootprintChart, Message as ChartMessage
@@ -378,7 +380,7 @@ impl Dashboard {
                             }
                         }
 
-                        log::info!("{:?}", &self.pane_streams);
+                        info!("{:?}", &self.pane_streams);
 
                         // get fetch tasks for pane's content
                         if ["footprint", "candlestick", "heatmap"]
@@ -566,7 +568,7 @@ impl Dashboard {
                     }
                 }
                 Err(err) => {
-                    log::error!("{err}");
+                    error!("{err}");
                 }
             }
             Message::FetchTrades(
@@ -1248,7 +1250,7 @@ impl Dashboard {
             });
 
         if !found_match {
-            log::error!("No matching pane found for the stream: {stream_type:?}");
+            error!("No matching pane found for the stream: {stream_type:?}");
             tasks.push(Task::done(Message::RefreshStreams));
         }
 
@@ -1279,7 +1281,7 @@ impl Dashboard {
                             chart.update(&trades_buffer);
                         }
                         _ => {
-                            log::error!("No chart found for the stream: {stream_type:?}");
+                            error!("No chart found for the stream: {stream_type:?}");
                         }
                     }
                     found_match = true;
@@ -1289,7 +1291,7 @@ impl Dashboard {
         if found_match {
             Task::none()
         } else {
-            log::error!("No matching pane found for the stream: {stream_type:?}");
+            error!("No matching pane found for the stream: {stream_type:?}");
             Task::done(Message::RefreshStreams)
         }
     }
@@ -1399,7 +1401,7 @@ fn get_oi_fetch_task(
                 move |oi| Message::OIFetchEvent(req_id, oi, stream, pane, window_id),
             ),
             _ => {
-                log::error!("No OI fetch support for {exchange:?}");
+                error!("No OI fetch support for {exchange:?}");
                 Task::none()
             },
         },

+ 9 - 6
src/screen/dashboard/pane.rs

@@ -15,6 +15,9 @@ use crate::{
     }, style::{self, get_icon_text, Icon}, window::{self, Window}, StreamType
 };
 
+
+use tracing::{error};
+
 #[derive(Debug, Clone, Copy, Deserialize, Serialize, PartialEq)]
 pub enum PaneModal {
     StreamModifier,
@@ -259,7 +262,7 @@ impl PaneState {
             }
             "time&sales" => PaneContent::TimeAndSales(TimeAndSales::new()),
             _ => {
-                log::error!("content not found: {}", content_str);
+                error!("content not found: {}", content_str);
                 return Err(DashboardError::PaneSet("content not found: ".to_string() + content_str));
             }
         };
@@ -280,7 +283,7 @@ impl PaneState {
                 chart.insert_open_interest(req_id, oi);
             }
             _ => {
-                log::error!("pane content not candlestick");
+                error!("pane content not candlestick");
             }
         }
     }
@@ -326,7 +329,7 @@ impl PaneState {
                 }
             }
             _ => {
-                log::error!("pane content not candlestick or footprint");
+                error!("pane content not candlestick or footprint");
             }
         }
     }
@@ -1023,7 +1026,7 @@ impl PaneContent {
                 let indicator = match indicator_str.as_str() {
                     "Volume" => HeatmapIndicator::Volume,
                     _ => {
-                        log::error!("indicator not found: {}", indicator_str);
+                        error!("indicator not found: {}", indicator_str);
                         return
                     },
                 };
@@ -1041,7 +1044,7 @@ impl PaneContent {
                     "Volume" => FootprintIndicator::Volume,
                     "Open Interest" => FootprintIndicator::OpenInterest,
                     _ => {
-                        log::error!("indicator not found: {}", indicator_str);
+                        error!("indicator not found: {}", indicator_str);
                         return
                     },
                 };
@@ -1059,7 +1062,7 @@ impl PaneContent {
                     "Volume" => CandlestickIndicator::Volume,
                     "Open Interest" => CandlestickIndicator::OpenInterest,
                     _ => {
-                        log::error!("indicator not found: {}", indicator_str);
+                        error!("indicator not found: {}", indicator_str);
                         return
                     },
                 };

+ 4 - 2
src/widget/hsplit.rs

@@ -11,6 +11,8 @@ use std::fmt::{Debug, Formatter};
 
 use crate::style;
 
+use tracing::{error};
+
 const DRAG_SIZE: f32 = 1.0;
 
 struct State {
@@ -141,7 +143,7 @@ impl<Message> Widget<Message, Theme, Renderer> for HSplit<'_, Message, Theme, Re
         let dragger_bounds = match layout.children().nth(1) {
             Some(dragger) => dragger.bounds().expand(4.0),
             None => {
-                log::error!("Failed to find dragger bounds in HSplit layout");
+                error!("Failed to find dragger bounds in HSplit layout");
                 return;
             }
         };
@@ -227,7 +229,7 @@ impl<Message> Widget<Message, Theme, Renderer> for HSplit<'_, Message, Theme, Re
         let dragger_bounds = match layout.children().nth(1) {
             Some(dragger) => dragger.bounds().expand(4.0),
             None => {
-                log::error!("Failed to find dragger bounds in HSplit layout");
+                error!("Failed to find dragger bounds in HSplit layout");
                 return Interaction::default();
             }
         };