-
Notifications
You must be signed in to change notification settings - Fork 0
Expand file tree
/
Copy path3 python code for global EWM analysis(3).txt
More file actions
97 lines (80 loc) · 3.24 KB
/
3 python code for global EWM analysis(3).txt
File metadata and controls
97 lines (80 loc) · 3.24 KB
1
2
3
4
5
6
7
8
9
10
11
12
13
14
15
16
17
18
19
20
21
22
23
24
25
26
27
28
29
30
31
32
33
34
35
36
37
38
39
40
41
42
43
44
45
46
47
48
49
50
51
52
53
54
55
56
57
58
59
60
61
62
63
64
65
66
67
68
69
70
71
72
73
74
75
76
77
78
79
80
81
82
83
84
85
86
87
88
89
90
91
92
93
94
95
96
97
import os
import numpy as np
import pandas as pd
from IPython.display import display
# ======================
# 1. Configuration
# ======================
# Root folder for all input/output files (Windows path).
BASE_DIR = r"C:/python/spatial/"
# Source panel data: one row per city per year.
INFILE = os.path.join(BASE_DIR, "data1_spatial(1).xlsx")
# Destination workbook consumed downstream by the Moran's I analysis.
OUT_RESULTS = os.path.join(BASE_DIR, "global_indices_for_moran.xlsx")
# Identifier columns carried through (not weighted) to the output.
ID_COLS = ["city_code", "city_name", "year"]
# Indicator columns for the three subsystems (transport / population / industry).
# NOTE(review): "Ln(P1)" suggests P1 is stored pre-log-transformed — confirm in the workbook.
TRANSPORT_COLS = ["T1", "T2"]
POP_COLS = ["Ln(P1)", "P2", "P3"]
INDUSTRY_COLS = ["I1", "I2"]
# Numerical floor used to avoid division by zero and log(0).
EPS = 1e-12
# ======================
# 2. Logic (Global EWM + CCD)
# ======================
def minmax_normalize(df, cols):
    """Return a copy of *df* with each column in *cols* min-max scaled to [0, 1].

    Non-numeric cells become NaN (coerced). A column whose range is
    effectively zero is collapsed to the constant 0.0.
    """
    result = df.copy()
    for col in cols:
        series = pd.to_numeric(result[col], errors="coerce").astype(float)
        lo, hi = np.nanmin(series), np.nanmax(series)
        span = hi - lo
        # Constant columns carry no information: map them to 0 instead of dividing by ~0.
        result[col] = (series - lo) / span if span > EPS else 0.0
    return result
def entropy_weights(norm_df, cols):
    """Compute entropy-weight-method (EWM) weights for the indicator columns.

    Parameters
    ----------
    norm_df : pd.DataFrame
        Data with *cols* already min-max normalized to [0, 1].
    cols : list[str]
        Indicator column names to weight.

    Returns
    -------
    pd.Series
        Non-negative weights indexed by *cols*, summing to 1.

    Raises
    ------
    ValueError
        If fewer than two observations are supplied (entropy needs log(n) > 0).
    """
    eps = 1e-12  # local numerical floor; same value as module-level EPS
    X = norm_df[cols].astype(float).to_numpy()
    n = X.shape[0]
    if n < 2:
        # Original code divided by np.log(n), which is 0 for n == 1.
        raise ValueError("entropy_weights requires at least 2 observations")
    # Column totals, floored so the share computation below cannot divide by zero.
    col_sums = np.nansum(X, axis=0)
    col_sums = np.where(col_sums <= eps, eps, col_sums)
    # Each observation's share per indicator, floored so log(P) stays finite.
    P = X / col_sums
    P = np.where(P <= eps, eps, P)
    # Shannon entropy per column, normalized to [0, 1] by 1/log(n).
    e = -(1.0 / np.log(n)) * np.nansum(P * np.log(P), axis=0)
    d = 1.0 - e  # divergence: larger -> indicator is more informative
    total = np.sum(d)
    if total <= eps:
        # Every indicator is uniformly distributed (all d == 0): the original
        # returned NaN weights here; fall back to equal weights instead.
        return pd.Series(np.full(len(cols), 1.0 / len(cols)), index=cols)
    return pd.Series(d / total, index=cols)
def coupling_coordination(u1, u2, u3):
    """Return (C, D): coupling degree and coupling-coordination degree.

    C measures how evenly the three subsystem indices develop (1 = perfectly
    balanced); D = sqrt(C * T) blends balance with overall level T, where T
    is the arithmetic mean of the three indices.
    """
    s1, s2, s3 = (np.asarray(u, dtype=float) for u in (u1, u2, u3))
    avg = (s1 + s2 + s3) / 3.0
    # Floor the mean before cubing so an all-zero row cannot divide by zero.
    cube_of_mean = np.power(np.maximum(avg, EPS), 3)
    product = np.maximum(s1 * s2 * s3, 0.0)
    C = np.clip(np.power(product / cube_of_mean, 1 / 3), 0, 1)
    # T is the same arithmetic mean as `avg`, so reuse it directly.
    D = np.sqrt(C * avg)
    return C, D
def get_coord_level(d):
    """Map a D_coordination score to its qualitative level label."""
    # Ordered descending; the first threshold that d reaches wins.
    bands = (
        (0.7, "High [0.7, 1]"),
        (0.6, "Medium-High [0.6, 0.7)"),
        (0.5, "Medium [0.5, 0.6)"),
    )
    for lower_bound, label in bands:
        if d >= lower_bound:
            return label
    return "Low [< 0.5)"
# ======================
# 3. Execution
# ======================
def main():
    """Run the full pipeline: load, compute global EWM indices and CCD, save, display."""
    # Load panel data, keep only rows with valid identifiers in the 2011-2023 window.
    df = pd.read_excel(INFILE).dropna(subset=["city_code", "year"])
    df = df[(df["year"] >= 2011) & (df["year"] <= 2023)].copy()
    # For each subsystem: normalize its indicators over the whole panel
    # ("global" EWM), derive entropy weights, and form the weighted index.
    subsystems = (
        ("T_index", TRANSPORT_COLS),
        ("P_index", POP_COLS),
        ("I_index", INDUSTRY_COLS),
    )
    for index_name, indicator_cols in subsystems:
        normed = minmax_normalize(df, indicator_cols)
        weights = entropy_weights(normed, indicator_cols)
        df[index_name] = (normed[indicator_cols] * weights).sum(axis=1)
    # Coupling degree C and coordination degree D per city-year.
    df["C_coupling"], df["D_coordination"] = coupling_coordination(
        df["T_index"], df["P_index"], df["I_index"]
    )
    # Attach the qualitative coordination label.
    df["Coordination_Level"] = df["D_coordination"].apply(get_coord_level)
    # Persist the result set for the downstream Moran's I analysis.
    out_cols = ID_COLS + ["T_index", "P_index", "I_index", "C_coupling", "D_coordination", "Coordination_Level"]
    df_final = df[out_cols]
    df_final.to_excel(OUT_RESULTS, index=False)
    # Inline feedback when run inside Jupyter.
    print(f"Success! Result saved to: {OUT_RESULTS}")
    display(df_final.head(15))
    return df_final
# Run the pipeline only when executed as a script (not on import).
if __name__ == "__main__":
    processed_df = main()