
Datawhale Smart Ocean Construction (智慧海洋建设) - Task 2: Data Analysis


This section is the data analysis module of the Smart Ocean Construction competition. Data analysis helps us get familiar with the data and prepares the ground for the feature engineering that follows. Further discussion is very welcome.

Competition task: Smart Ocean Construction (智慧海洋建设)

Purpose of the data analysis:

The main value of EDA is to get familiar with the basic condition of the dataset (missing values, outliers) and to confirm that it can be used for the machine learning or deep learning that follows; to understand the correlations between features, their distributions, and their relationship to the target; and to provide a basis for the feature engineering to come.

Project repository: https://github.com/datawhalechina/team-learning-data-mining/tree/master/wisdomOcean
Competition page: https://tianchi.aliyun.com/competition/entrance/231768/introduction?spm=5176.12281957.1004.8.4ac63eafE1rwsY


2.1 Learning Objectives

Learn how to analyze the overall profile of the dataset, including its basic condition (missing values, outliers).
Learn about the relationships between variables and between variables and the target.
Complete the corresponding study check-in task.

2.2 Content Overview

Overall understanding of the data: read the dataset and check its size and raw feature dimensions; inspect the data types with info; take a rough look at the basic statistics of each feature.
Missing values and unique values: check the missing values; check the unique values (see the sketch below).
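A minimal sketch of these first-pass checks, assuming df is the training DataFrame returned by get_data in Assignment 1 below:

df.info()                 # data types and non-null counts per column
print(df.shape)           # dataset size and raw feature dimension
print(df.describe())      # basic statistics of each numeric feature
print(df.isnull().sum())  # missing values per column
print(df.nunique())       # number of unique values per column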

Data characteristics and feature distributions


Visualization of the trajectories of the three types of fishing vessels: coordinate-sequence visualization; speed and direction sequence visualization for the three types; distribution of speed and direction for the three types (a simple coordinate plot is sketched below).
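A minimal sketch of the coordinate visualization, assuming df is the training DataFrame from Assignment 1 below (the class names 刺网/围网/拖网 come from the data; for the preliminary-round data the lat/lon columns hold planar coordinates until transform_xy2lonlat is applied):

colors = {'刺网': 'pink', '围网': 'lightblue', '拖网': 'lightgreen'}
fig, ax = plt.subplots(figsize=(8, 8))
for kind, group in df.groupby('type'):
    ax.scatter(group['lon'], group['lat'], s=1, label=kind, color=colors[kind])
ax.set_xlabel('lon')
ax.set_ylabel('lat')
ax.legend()
plt.show()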

Assignment 1: plot the trajectories after removing anomalous points

import pandas as pd
import geopandas as gpd
from pyproj import Proj
from keplergl import KeplerGl
from tqdm import tqdm
import os
import matplotlib.pyplot as plt
import shapely
import numpy as np
from datetime import datetime
import warnings
import pickle  # needed later when reloading the saved .pkl files

warnings.filterwarnings('ignore')

plt.rcParams['font.sans-serif'] = ['SimSun']  # set the default font to SimSun so Chinese labels render correctly
plt.rcParams['axes.unicode_minus'] = False    # keep the minus sign from showing as a box when figures are saved

# Read every file in the folder and concatenate the records into one DataFrame
def get_data(file_path, model):
    assert model in ['train', 'test'], '{}: this type of file is not supported'.format(model)
    paths = os.listdir(file_path)
    # print(len(paths))
    tmp = []
    for t in tqdm(range(len(paths))):
        p = paths[t]
        with open('{}/{}'.format(file_path, p), encoding='utf-8') as f:
            next(f)  # skip the header line
            for line in f.readlines():
                tmp.append(line.strip().split(','))
    tmp_df = pd.DataFrame(tmp)
    if model == 'train':
        tmp_df.columns = ['ID', 'lat', 'lon', 'speed', 'direction', 'time', 'type']
    else:
        tmp_df['type'] = 'unknown'
        tmp_df.columns = ['ID', 'lat', 'lon', 'speed', 'direction', 'time', 'type']
    tmp_df['lat'] = tmp_df['lat'].astype(float)
    tmp_df['lon'] = tmp_df['lon'].astype(float)
    tmp_df['speed'] = tmp_df['speed'].astype(float)
    tmp_df['direction'] = tmp_df['direction'].astype(int)  # if this line fails, try upgrading pandas
    return tmp_df

# Convert planar coordinates to longitude/latitude (used for the preliminary-round data)
# Reference system: NAD83 / California zone 6 (ftUS) (EPSG:2230); lookup: CS2CS - Transform Coordinates On-line - MyGeodata Cloud
def transform_xy2lonlat(df):
    x = df['lat'].values
    y = df['lon'].values
    p = Proj('+proj=lcc +lat_1=33.88333333333333 +lat_2=32.78333333333333 +lat_0=32.16666666666666 '
             '+lon_0=-116.25 +x_0=2000000.0001016 +y_0=500000.0001016001 +datum=NAD83 +units=us-ft +no_defs')
    df['lon'], df['lat'] = p(y, x, inverse=True)
    return df

# Normalize the time strings in the data
def reformat_strtime(time_str=None, START_YEAR="2019"):
    """Reformat a time string of the form "0814 ..." to "START_YEAR-08-14 ..."."""
    time_str_split = time_str.split(" ")
    time_str_reformat = START_YEAR + "-" + time_str_split[0][:2] + "-" + time_str_split[0][2:4]
    time_str_reformat = time_str_reformat + " " + time_str_split[1]
    # time_reformat = datetime.strptime(time_str_reformat, "%Y-%m-%d %H:%M:%S")
    return time_str_reformat
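A quick check of the helper (the sample value below is only illustrative; the raw time field has the form "MMDD HH:MM:SS"):

print(reformat_strtime("0814 10:22:10"))  # -> "2019-08-14 10:22:10"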

# Haversine (great-circle) distance between two points, returned in metres
def haversine_np(lon1, lat1, lon2, lat2):
    lon1, lat1, lon2, lat2 = map(np.radians, [lon1, lat1, lon2, lat2])
    dlon = lon2 - lon1
    dlat = lat2 - lat1
    a = np.sin(dlat/2.0)**2 + np.cos(lat1) * np.cos(lat2) * np.sin(dlon/2.0)**2
    c = 2 * np.arcsin(np.sqrt(a))
    km = 6367 * c      # Earth radius taken as 6367 km
    return km * 1000   # convert to metres
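A quick sanity check with made-up coordinates: one degree of latitude should come out at roughly 111 km.

print(haversine_np(120.0, 30.0, 120.0, 31.0))  # ≈ 1.11e5 metres, i.e. about 111 km per degree of latitude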

def compute_traj_diff_time_distance(traj=None):
    """Compute the sampling-time gap and the coordinate distance between consecutive points."""
    # time difference between consecutive records, in minutes
    time_diff_array = (traj["time"].iloc[1:].reset_index(drop=True) -
                       traj["time"].iloc[:-1].reset_index(drop=True)).dt.total_seconds() / 60

    # distance between consecutive coordinates, in metres
    dist_diff_array = haversine_np(traj["lon"].values[1:],   # lon_0
                                   traj["lat"].values[1:],   # lat_0
                                   traj["lon"].values[:-1],  # lon_1
                                   traj["lat"].values[:-1])  # lat_1

    # pad the first element with the mean so the arrays match the trajectory length
    time_diff_array = [time_diff_array.mean()] + time_diff_array.tolist()
    dist_diff_array = [dist_diff_array.mean()] + dist_diff_array.tolist()

    traj.loc[list(traj.index), 'time_array'] = time_diff_array
    traj.loc[list(traj.index), 'dist_array'] = dist_diff_array
    return traj

# Remove anomalous points from a trajectory
def assign_traj_anomaly_points_nan(traj=None, speed_maximum=23,
                                   time_interval_maximum=200,
                                   coord_speed_maximum=700):
    """Assign the anomaly points in traj to np.nan."""

    def thigma_data(data_y, n):
        # 3-sigma rule: flag points more than n standard deviations from the mean
        ymean = np.mean(data_y)
        ystd = np.std(data_y)
        threshold1 = ymean - n * ystd
        threshold2 = ymean + n * ystd
        judge = []
        for data in data_y:
            if (data < threshold1) | (data > threshold2):
                judge.append(True)
            else:
                judge.append(False)
        return judge

    # Step 1: repair speed anomalies
    is_speed_anomaly = (traj["speed"] > speed_maximum) | (traj["speed"] < 0)
    traj["speed"][is_speed_anomaly] = np.nan

    # Step 2: speed derived from distance and time
    is_anomaly = np.array([False] * len(traj))
    traj["coord_speed"] = traj["dist_array"] / traj["time_array"]

    # Condition 1: drop points whose coord_speed or sampling interval violates the 3-sigma rule
    is_anomaly_tmp = pd.Series(thigma_data(traj["time_array"], 3)) | pd.Series(thigma_data(traj["coord_speed"], 3))
    is_anomaly = is_anomaly | is_anomaly_tmp
    is_anomaly.index = traj.index

    # Condition 2: 3-sigma filtering on the trajectory coordinates themselves
    traj = traj[~is_anomaly].reset_index(drop=True)
    is_anomaly = np.array([False] * len(traj))

    if len(traj) != 0:
        lon_std, lon_mean = traj["lon"].std(), traj["lon"].mean()
        lat_std, lat_mean = traj["lat"].std(), traj["lat"].mean()
        lon_low, lon_high = lon_mean - 3 * lon_std, lon_mean + 3 * lon_std
        lat_low, lat_high = lat_mean - 3 * lat_std, lat_mean + 3 * lat_std
        is_anomaly = is_anomaly | (traj["lon"] > lon_high) | (traj["lon"] < lon_low)
        is_anomaly = is_anomaly | (traj["lat"] > lat_high) | (traj["lat"] < lat_low)
        traj = traj[~is_anomaly].reset_index(drop=True)

    return traj, [len(is_speed_anomaly) - len(traj)]

df = get_data(r'C:\Users\admin\hy_round1_train_20200102', 'train')

# compute_traj_diff_time_distance uses the .dt accessor, so the time column must be datetime
df['time'] = pd.to_datetime(df['time'].apply(reformat_strtime))

# Remove anomalous points from each trajectory and linearly interpolate the resulting NaNs
ID_list = list(pd.DataFrame(df['ID'].value_counts()).index)
DF_NEW = []
Anomaly_count = []
for ID in tqdm(ID_list):
    df_id = compute_traj_diff_time_distance(df[df['ID'] == ID])
    df_new, count = assign_traj_anomaly_points_nan(df_id)
    df_new["speed"] = df_new["speed"].interpolate(method="linear", axis=0)
    df_new = df_new.fillna(method="bfill")
    df_new = df_new.fillna(method="ffill")
    df_new["speed"] = df_new["speed"].clip(0, 23)
    Anomaly_count.append(count)  # number of anomalous points removed for each ID
    DF_NEW.append(df_new)

# Save the cleaned data as a pickle file
load_save = Load_Save_Data()
load_save.save_data(DF_NEW, "C:/Users/admin/wisdomOcean/data_tmp1/total_data.pkl")
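Load_Save_Data is not defined in this excerpt (it comes from the helper code of the earlier task). A minimal stand-in, assuming the class is just a thin wrapper around pickle, could look like this:

class Load_Save_Data():
    # hypothetical minimal version; only the pickle save/load behaviour used above is assumed
    def __init__(self, file_name=None):
        self.file_name = file_name

    def save_data(self, data, file_name):
        with open(file_name, "wb") as f:
            pickle.dump(data, f)

    def load_data(self, file_name=None):
        file_name = file_name or self.file_name
        with open(file_name, "rb") as f:
            return pickle.load(f)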

Speed and direction visualization for the three vessel types

# Split the training data by class and save each class to its own file
def get_diff_data():
    Path = "C:/Users/admin/wisdomOcean/data_tmp1/total_data.pkl"
    with open(Path, "rb") as f:
        total_data = pickle.load(f)
    load_save = Load_Save_Data()
    kind_data = ["刺网", "围网", "拖网"]
    file_names = ["ciwang_data.pkl", "weiwang_data.pkl", "tuowang_data.pkl"]
    for i, datax in enumerate(kind_data):
        data_type = [data for data in total_data if data["type"].unique()[0] == datax]
        load_save.save_data(data_type, "C:/Users/admin/wisdomOcean/data_tmp1/" + file_names[i])

get_diff_data()


# For each class, randomly pick one vessel and plot its speed and direction sequences
def visualize_three_traj_speed_direction():
    fig, axes = plt.subplots(nrows=3, ncols=2, figsize=(20, 15))
    plt.subplots_adjust(wspace=0.3, hspace=0.3)

    # randomly select one trajectory per class for visualization
    file_types = ["ciwang_data", "weiwang_data", "tuowang_data"]
    speed_types = ["ciwang_speed", "weiwang_speed", "tuowang_speed"]
    directions = ["ciwang_direction", "weiwang_direction", "tuowang_direction"]
    colors = ['pink', 'lightblue', 'lightgreen']
    for i, file_name in tqdm(enumerate(file_types)):
        datax = get_random_one_traj(type=file_name)
        x_data = datax["speed"].loc[-1:].values      # column names follow get_data above
        y_data = datax["direction"].loc[-1:].values
        axes[i][0].plot(range(len(x_data)), x_data, label=speed_types[i], color=colors[i])
        axes[i][0].grid(alpha=0.2)
        axes[i][0].legend(loc="best")
        axes[i][1].plot(range(len(y_data)), y_data, label=directions[i], color=colors[i])
        axes[i][1].grid(alpha=0.2)
        axes[i][1].legend(loc="best")
    plt.show()

visualize_three_traj_speed_direction()
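get_random_one_traj is likewise not defined in this excerpt. A minimal sketch under the assumption that it simply reads the per-class pickle written by get_diff_data and returns one random trajectory:

import random

def get_random_one_traj(type="ciwang_data"):
    # hypothetical helper: load the per-class pickle and return one randomly chosen trajectory DataFrame
    path = "C:/Users/admin/wisdomOcean/data_tmp1/" + type + ".pkl"
    with open(path, "rb") as f:
        data_list = pickle.load(f)
    return random.choice(data_list)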


Assignment 2: correlation analysis

import seaborn as sns

# data_train is assumed to be the training DataFrame loaded above (df), with a numeric
# type_id column added so that the vessel type shows up in the correlation matrix
data_train = df.copy()
data_train.loc[data_train['type'] == '刺网', 'type_id'] = 1
data_train.loc[data_train['type'] == '围网', 'type_id'] = 2
data_train.loc[data_train['type'] == '拖网', 'type_id'] = 3

f, ax = plt.subplots(figsize=(9, 6))
# on pandas >= 2.0 you may need data_train.corr(numeric_only=True)
ax = sns.heatmap(np.abs(data_train.corr()), annot=True)
plt.show()

The heatmap shows clearly that longitude, latitude, and speed are relatively strongly correlated with the vessel type.
