2020import src .TLE_constellation .constellation_entity .orbit as ORBIT
2121import matplotlib .pyplot as plt
2222import numpy as np
23+ import ruptures as rpt
24+ from scipy .ndimage import gaussian_filter1d
25+ from scipy .signal import find_peaks
26+ from sklearn .cluster import DBSCAN
2327
2428# Function to automatically detect the number of steps/orbits in RAAN data
def detect_orbit_number(raans):
    """
    Automatically detect the number of orbits based on RAAN distribution steps.

    Combines several independent detectors -- change-point detection
    (ruptures/PELT), smoothed-histogram peak counting, statistical gap
    analysis, DBSCAN density clustering, and Jenks natural breaks -- and
    returns a robust (IQR-filtered median) consensus. Each method degrades
    gracefully to a vote of 1 if its optional dependency is unavailable or
    it fails.

    Parameters
    ----------
    raans : sequence of float
        RAAN values in degrees; assumed sorted ascending by the caller.

    Returns
    -------
    int
        Estimated number of distinct orbital planes (always >= 1).
    """
    if len(raans) < 10:
        return 1

    raans = np.array(raans)

    # A single unique value means a single orbital plane.
    if len(np.unique(raans)) <= 1:
        return 1

    # --- Method 1: change-point detection via ruptures (PELT) -------------
    def ruptures_detection():
        """Return one segment-count estimate per (model, penalty) combo that succeeds."""
        results = []
        for model in ("rbf", "l1", "l2", "normal"):
            try:
                # PELT with an adaptive minimum segment size.
                algo = rpt.Pelt(model=model, min_size=max(3, len(raans) // 100))
                algo.fit(raans.reshape(-1, 1))
                # Several penalty heuristics; each may yield a different estimate.
                penalties = [
                    np.log(len(raans)) * 2,  # BIC-like
                    len(raans) ** 0.5,       # square-root penalty
                    len(raans) * 0.01,       # linear penalty
                ]
                for pen in penalties:
                    try:
                        changepoints = algo.predict(pen=pen)
                        if len(changepoints) > 1:  # exclude the trivial single-segment case
                            results.append(len(changepoints))
                    except Exception:
                        continue
            except Exception:
                # Model failed to fit (or ruptures is unavailable); try the next one.
                continue
        return results

    # --- Method 2: smoothed-histogram peak counting -----------------------
    def histogram_detection():
        """Count density peaks in a Gaussian-smoothed histogram of the RAANs."""
        n_bins = max(20, min(len(raans) // 10, 100))  # adaptive bin count
        hist, _ = np.histogram(raans, bins=n_bins)
        smoothed = gaussian_filter1d(hist.astype(float), sigma=1.0)
        peaks, _ = find_peaks(
            smoothed,
            height=np.max(smoothed) * 0.05,        # ignore peaks below 5% of max height
            distance=max(1, len(smoothed) // 50),  # require some separation between peaks
        )
        return len(peaks) if len(peaks) > 0 else 1

    # --- Method 3: statistically significant gaps -------------------------
    def gap_analysis():
        """Count gaps in the sorted RAANs that are >2-sigma outliers; planes = gaps + 1."""
        if len(raans) < 20:
            return 1
        gaps = np.diff(np.sort(raans))
        if len(gaps) == 0:
            return 1
        sigma = np.std(gaps)
        # BUGFIX: guard sigma == 0 (all gaps identical) to avoid a
        # divide-by-zero warning and NaN z-scores.
        if sigma == 0:
            return 1
        z_scores = np.abs((gaps - np.mean(gaps)) / sigma)
        significant_gaps = int(np.sum(z_scores > 2.0))  # 2 standard deviations
        return max(1, significant_gaps + 1)

    # --- Method 4: DBSCAN density clustering ------------------------------
    def density_clustering():
        """Count dense 1-D clusters found by DBSCAN (noise label -1 excluded)."""
        try:
            eps = np.std(raans) * 0.1  # adaptive neighbourhood radius
            db = DBSCAN(eps=eps, min_samples=max(2, len(raans) // 100))
            labels = db.fit_predict(raans.reshape(-1, 1))
            n_clusters = len(set(labels)) - (1 if -1 in labels else 0)
            return max(1, n_clusters)
        except Exception:
            # sklearn unavailable or clustering failed -- neutral vote.
            return 1

    # --- Method 5: Jenks natural breaks with elbow search -----------------
    def jenks_optimization():
        """Pick the class count where goodness-of-variance-fit stops improving."""
        # BUGFIX: jenkspy was referenced but never imported at module level,
        # so every call raised NameError, was silently swallowed, and this
        # method always voted "1" (with double weight). Import it here and
        # skip the method cleanly when the package is absent.
        try:
            import jenkspy  # optional dependency
        except ImportError:
            return 1

        best_gvf = 0
        best_k = 1
        max_k = min(len(np.unique(raans)), len(raans) // 5)
        for k in range(1, max_k + 1):
            try:
                breaks = jenkspy.jenks_breaks(raans, n_classes=k)
                gvf = jenkspy.goodness_of_variance_fit(raans, breaks)
                if k == 1:
                    best_gvf = gvf
                    best_k = k
                elif gvf - best_gvf > 0.03:  # significant improvement
                    best_gvf = gvf
                    best_k = k
                elif gvf > 0.85 and (gvf - best_gvf) < 0.01:  # diminishing returns
                    break
            except Exception:
                continue
        return best_k

    # --- Combine the methods into a robust consensus ----------------------
    all_results = []
    try:
        all_results.extend(ruptures_detection())
        all_results.extend([histogram_detection()] * 2)  # weighted x2
        all_results.append(gap_analysis())
        all_results.append(density_clustering())
        all_results.extend([jenks_optimization()] * 2)   # weighted x2
    except Exception as e:
        # Best-effort: keep whatever partial votes were collected.
        print(f"Error in advanced detection: {e}")

    if all_results:
        # Drop extreme outliers via the 1.5*IQR rule, then take the median
        # of the remaining votes for robustness.
        all_results = np.array(all_results)
        q75, q25 = np.percentile(all_results, [75, 25])
        iqr = q75 - q25
        in_range = (all_results >= q25 - 1.5 * iqr) & (all_results <= q75 + 1.5 * iqr)
        filtered_results = all_results[in_range]
        if len(filtered_results) > 0:
            return max(1, int(np.median(filtered_results)))

    # Final fallback: gaps above the 95th percentile delimit planes.
    gaps = np.diff(np.sort(raans))
    if len(gaps) > 0:
        threshold = np.percentile(gaps, 95)
        return max(1, int(np.sum(gaps > threshold)) + 1)

    return 1
46208
47209# Parameter :
48210# shells : a collection of shell objects that have established corresponding relationships
@@ -57,9 +219,9 @@ def satellite_to_orbit_mapping(shells):
57219 raans .append (sat .tle_json ["RA_OF_ASC_NODE" ])
58220 raans = sorted (raans )
59221
60- plt .plot (raans )
61- plt .ylabel ('RAANS' )
62- plt .show ()
222+ # plt.plot(raans)
223+ # plt.ylabel('RAANS')
224+ # plt.show()
63225
64226 # Automatically detect the number of orbits
65227 orbits_number = detect_orbit_number (raans )
0 commit comments