08-MLOps与工程落地——特征存储:Feast
# Feature store: Feast — online/offline feature storage, feature reuse,
# and train/serve consistency.
#
# Part 1: Feast overview
# 1.1 What is a feature store

import warnings

import matplotlib.pyplot as plt
from matplotlib.patches import Rectangle, FancyBboxPatch  # noqa: F401 — kept from original

warnings.filterwarnings("ignore")

print("=" * 60)
print("Feast特征存储系统")
print("=" * 60)

# Feast architecture diagram: six components drawn as labelled circles,
# with arrows showing the data flow between them.
fig, ax = plt.subplots(figsize=(14, 8))
ax.axis("off")

# Component name -> (x, y) position on the unit canvas.
components = {
    "离线存储": (0.2, 0.7),
    "在线存储": (0.5, 0.7),
    "注册表": (0.8, 0.7),
    "特征定义": (0.2, 0.4),
    "特征服务": (0.5, 0.4),
    "训练数据": (0.8, 0.4),
}
for name, (x, y) in components.items():
    circle = plt.Circle((x, y), 0.08, color="lightblue", ec="black")
    ax.add_patch(circle)
    ax.text(x, y, name, ha="center", va="center", fontsize=7)

# Arrows between components.
ax.annotate("", xy=(0.5, 0.62), xytext=(0.28, 0.7),
            arrowprops=dict(arrowstyle="->", lw=1))
ax.annotate("", xy=(0.72, 0.7), xytext=(0.58, 0.7),
            arrowprops=dict(arrowstyle="->", lw=1))
ax.annotate("", xy=(0.5, 0.32), xytext=(0.5, 0.48),
            arrowprops=dict(arrowstyle="->", lw=1))

ax.set_xlim(0, 1)
ax.set_ylim(0, 1)
ax.set_title("Feast架构", fontsize=14)
plt.tight_layout()
plt.show()

print("\nFeast核心价值:")
print("  - 训练/服务特征一致性")
print("  - 特征复用和共享")
print("  - 低延迟在线特征获取")
print("  - 大规模离线特征处理")
print("  - 特征血缘追踪")


# Part 2: Installation & configuration
# 2.1 Setup

def feast_setup():
    """Print Feast installation steps and a sample feature_store.yaml."""
    print("\n" + "=" * 60)
    print("Feast安装配置")
    print("=" * 60)

    # Shell commands and YAML shown as a literal example, not executed.
    code = """
    # 1. 安装Feast
    # pip install feast

    # 2. 安装特定存储后端
    # pip install feast[redis]     # Redis在线存储
    # pip install feast[aws]       # AWS S3/Redshift
    # pip install feast[gcp]       # GCP BigQuery/GCS
    # pip install feast[postgres]  # PostgreSQL

    # 3. 初始化Feast仓库
    # feast init my_feature_repo
    # cd my_feature_repo

    # 4. 项目结构
    # my_feature_repo/
    # ├── feature_store.yaml
    # ├── features.py
    # └── data/
    #     └── driver_stats.parquet

    # 5. feature_store.yaml配置
    project: my_feature_repo
    registry: data/registry.db
    provider: local
    online_store:
        type: redis
        connection_string: localhost:6379
    offline_store:
        type: file
    """
    print(code)


feast_setup()


# Part 3: Feature definitions
# 3.1 Defining features

def feature_definition():
    """Print an example features.py: entities, sources, views, services."""
    print("\n" + "=" * 60)
    print("特征定义")
    print("=" * 60)

    # Example feature-repo definition file, shown as a literal string.
    code = """
    # features.py
    from feast import Entity, FeatureView, Feature, FeatureService, FileSource, ValueType
    from datetime import timedelta

    # 1. 定义实体
    user = Entity(
        name='user_id',
        value_type=ValueType.INT64,
        description='User identifier',
        labels={'team': 'marketing'}
    )

    item = Entity(
        name='item_id',
        value_type=ValueType.INT64,
        description='Item identifier'
    )

    # 2. 定义数据源
    user_features_source = FileSource(
        path='data/user_features.parquet',
        event_timestamp_column='event_timestamp',
        created_timestamp_column='created_timestamp',
        date_partition_column='date',
        description='User features data source'
    )

    item_features_source = FileSource(
        path='data/item_features.parquet',
        event_timestamp_column='event_timestamp'
    )

    # 3. 定义特征视图
    user_features = FeatureView(
        name='user_features',
        entities=['user_id'],
        ttl=timedelta(days=30),
        features=[
            Feature(name='age', dtype=ValueType.INT64),
            Feature(name='gender', dtype=ValueType.STRING),
            Feature(name='city', dtype=ValueType.STRING),
            Feature(name='user_activity_score', dtype=ValueType.FLOAT),
            Feature(name='user_engagement_level', dtype=ValueType.INT64),
            Feature(name='registration_days', dtype=ValueType.INT64),
        ],
        batch_source=user_features_source,
        online=True,
        tags={'team': 'marketing'},
        description='User demographic and behavioral features'
    )

    item_features = FeatureView(
        name='item_features',
        entities=['item_id'],
        ttl=timedelta(days=7),
        features=[
            Feature(name='category', dtype=ValueType.STRING),
            Feature(name='price', dtype=ValueType.FLOAT),
            Feature(name='item_rating', dtype=ValueType.FLOAT),
            Feature(name='inventory_count', dtype=ValueType.INT64),
            Feature(name='popularity_score', dtype=ValueType.FLOAT),
        ],
        batch_source=item_features_source,
        online=True
    )

    # 4. 定义特征服务
    feature_service = FeatureService(
        name='recommendation_service',
        features=[
            user_features[['age', 'gender', 'user_activity_score']],
            item_features[['category', 'price', 'item_rating']]
        ],
        tags={'team': 'recommendation'},
        description='Features for recommendation service'
    )

    # 5. 高级特征: 请求时特征
    from feast import RequestSource

    request_source = RequestSource(
        name='request_features',
        schema={
            'current_timestamp': ValueType.INT64,
            'user_location': ValueType.STRING
        }
    )

    # 6. 特征变换 (On-demand)
    from feast import on_demand_feature_view
    from feast.types import Float32, Int64

    @on_demand_feature_view(
        name='derived_features',
        sources=[user_features, request_source],
        features=[
            Feature(name='age_squared', dtype=Int64),
            Feature(name='normalized_score', dtype=Float32)
        ]
    )
    def derived_features(inputs: dict):
        return {
            'age_squared': inputs['user_features']['age'] ** 2,
            'normalized_score': inputs['user_features']['user_activity_score'] / 100.0
        }
    """
    print(code)


feature_definition()


# Part 4: Data ingestion
# 4.1 Offline data ingestion

def data_ingestion():
    """Print an example of generating data and materializing it into Feast."""
    print("\n" + "=" * 60)
    print("数据导入")
    print("=" * 60)

    code = """
    import pandas as pd
    import numpy as np
    from datetime import datetime, timedelta

    # 1. 生成示例数据
    def generate_sample_data():
        np.random.seed(42)
        n_samples = 10000
        data = pd.DataFrame({
            'user_id': np.random.randint(1, 1001, n_samples),
            'age': np.random.randint(18, 70, n_samples),
            'gender': np.random.choice(['M', 'F'], n_samples),
            'city': np.random.choice(['北京', '上海', '广州', '深圳'], n_samples),
            'user_activity_score': np.random.uniform(0, 100, n_samples),
            'event_timestamp': [datetime.now() - timedelta(days=np.random.randint(0, 30))
                                for _ in range(n_samples)]
        })
        return data

    # 2. 保存数据
    data = generate_sample_data()
    data.to_parquet('data/user_features.parquet', index=False)

    # 3. 应用特征定义
    from feast import FeatureStore
    store = FeatureStore(repo_path='.')

    # 4. 应用特征定义到注册表
    store.apply()

    # 5. 导入历史数据到离线存储
    store.materialize(
        start_date=datetime.now() - timedelta(days=30),
        end_date=datetime.now()
    )

    # 6. 导入到在线存储
    store.materialize_incremental(end_date=datetime.now())
    """
    print(code)


data_ingestion()


# Part 5: Feature retrieval
# 5.1 Training data retrieval

def training_data_retrieval():
    """Print an example of point-in-time historical feature retrieval."""
    print("\n" + "=" * 60)
    print("训练数据获取")
    print("=" * 60)

    code = """
    from feast import FeatureStore
    import pandas as pd

    # 初始化
    store = FeatureStore(repo_path='.')

    # 1. 获取历史特征 (训练数据)
    entity_df = pd.DataFrame({
        'user_id': [1, 2, 3, 4, 5],
        'item_id': [100, 200, 300, 400, 500],
        'event_timestamp': pd.date_range(start='2024-01-01', periods=5, freq='D')
    })

    training_features = store.get_historical_features(
        entity_df=entity_df,
        features=[
            'user_features:age',
            'user_features:gender',
            'user_features:user_activity_score',
            'item_features:category',
            'item_features:price',
            'item_features:item_rating'
        ]
    ).to_df()

    print(training_features.head())

    # 2. 获取特征服务
    feature_service = store.get_feature_service('recommendation_service')

    training_data = store.get_historical_features(
        entity_df=entity_df,
        features=feature_service
    ).to_df()

    # 3. 获取训练数据 (带标签)
    entity_df_with_label = pd.DataFrame({
        'user_id': [1, 2, 3, 4, 5],
        'item_id': [100, 200, 300, 400, 500],
        'event_timestamp': pd.date_range(start='2024-01-01', periods=5, freq='D'),
        'label': [1, 0, 1, 0, 1]  # 点击标签
    })

    training_df = store.get_historical_features(
        entity_df=entity_df_with_label,
        features=feature_service
    ).to_df()

    # 4. 特征验证
    from feast.infra.offline_stores.file import FileOfflineStoreConfig

    # 检查特征完整性
    missing_features = training_df.isnull().sum()
    print(f'Missing features:\\n{missing_features}')

    # 特征统计
    feature_stats = training_df.describe()
    print(f'Feature statistics:\\n{feature_stats}')
    """
    print(code)


training_data_retrieval()


# 5.2 Online feature retrieval

def online_features():
    """Print examples of low-latency online feature lookups and caching."""
    print("\n" + "=" * 60)
    print("在线特征获取")
    print("=" * 60)

    code = """
    from feast import FeatureStore
    import time

    store = FeatureStore(repo_path='.')

    # 1. 单个实体特征
    features = store.get_online_features(
        features=[
            'user_features:age',
            'user_features:gender',
            'user_features:user_activity_score',
            'item_features:price',
            'item_features:item_rating'
        ],
        entity_rows=[
            {'user_id': 123, 'item_id': 456},
        ]
    ).to_dict()

    print(f'Online features: {features}')

    # 2. 批量获取
    feature_service = store.get_feature_service('recommendation_service')
    entity_rows = [
        {'user_id': 123, 'item_id': 456},
        {'user_id': 124, 'item_id': 457},
        {'user_id': 125, 'item_id': 458},
    ]

    batch_features = store.get_online_features(
        features=feature_service,
        entity_rows=entity_rows
    ).to_df()

    print(f'Batch features shape: {batch_features.shape}')

    # 3. 性能测试
    def benchmark_online_features(n_requests=100):
        start = time.time()
        for i in range(n_requests):
            features = store.get_online_features(
                features=['user_features:age'],
                entity_rows=[{'user_id': i}]
            )
        elapsed = time.time() - start
        print(f'Average latency: {elapsed/n_requests*1000:.2f}ms')
        print(f'Throughput: {n_requests/elapsed:.2f} req/s')

    benchmark_online_features()

    # 4. 缓存策略
    from functools import lru_cache

    @lru_cache(maxsize=1000)
    def get_cached_features(user_id, item_id):
        '''缓存特征结果'''
        return store.get_online_features(
            features=feature_service,
            entity_rows=[{'user_id': user_id, 'item_id': item_id}]
        ).to_dict()
    """
    print(code)


online_features()


# Part 6: Feature-server deployment
# 6.1 Feature server

def feature_server():
    """Print feature-server deployment examples: gRPC, REST, Docker, K8s."""
    print("\n" + "=" * 60)
    print("特征服务部署")
    print("=" * 60)

    code = """
    # 1. 启动特征服务
    # feast serve -h 0.0.0.0 -p 6566

    # 2. gRPC客户端
    import grpc
    from feast.serving import ServingService_pb2, ServingService_pb2_grpc

    channel = grpc.insecure_channel('localhost:6566')
    stub = ServingService_pb2_grpc.ServingServiceStub(channel)

    # 构建请求
    request = ServingService_pb2.GetOnlineFeaturesRequest(
        features=['user_features:age', 'user_features:gender'],
        entities=[
            ServingService_pb2.GetOnlineFeaturesRequest.EntityRow(
                fields={'user_id': grpc.Value(int64_value=123)}
            )
        ]
    )
    response = stub.GetOnlineFeatures(request)

    # 3. HTTP/REST API
    import requests

    response = requests.post(
        'http://localhost:6566/api/v1/features',
        json={
            'features': ['user_features:age', 'user_features:gender'],
            'entities': [{'user_id': 123}]
        }
    )
    print(response.json())

    # 4. Docker部署
    # Dockerfile
    FROM feastdev/feature-server:latest
    COPY feature_store.yaml /feature_store.yaml
    # 启动服务
    CMD ["feast", "serve", "-h", "0.0.0.0", "-p", "6566"]

    # docker build -t feature-server .
    # docker run -p 6566:6566 feature-server

    # 5. Kubernetes部署
    apiVersion: apps/v1
    kind: Deployment
    metadata:
      name: feature-server
    spec:
      replicas: 3
      selector:
        matchLabels:
          app: feature-server
      template:
        metadata:
          labels:
            app: feature-server
        spec:
          containers:
          - name: feature-server
            image: feature-server:latest
            ports:
            - containerPort: 6566
            env:
            - name: REDIS_HOST
              value: redis-service
            - name: REDIS_PORT
              value: "6379"
            resources:
              requests:
                memory: 512Mi
                cpu: 250m
              limits:
                memory: 1Gi
                cpu: 500m
    ---
    apiVersion: v1
    kind: Service
    metadata:
      name: feature-server
    spec:
      selector:
        app: feature-server
      ports:
      - port: 6566
        targetPort: 6566
      type: LoadBalancer
    """
    print(code)


feature_server()


# Part 7: Complete workflow
# 7.1 End-to-end example

def complete_workflow():
    """Print an end-to-end example: features -> training -> online inference."""
    print("\n" + "=" * 60)
    print("完整工作流示例")
    print("=" * 60)

    code = """
    from feast import FeatureStore
    import pandas as pd
    from sklearn.ensemble import RandomForestClassifier
    import mlflow

    # 1. 初始化
    store = FeatureStore(repo_path='.')

    # 2. 定义训练实体
    entity_df = pd.DataFrame({
        'user_id': [1, 2, 3, 4, 5],
        'item_id': [100, 200, 300, 400, 500],
        'event_timestamp': pd.date_range(start='2024-01-01', periods=5, freq='D'),
        'label': [1, 0, 1, 0, 1]
    })

    # 3. 获取特征
    feature_service = store.get_feature_service('recommendation_service')
    training_df = store.get_historical_features(
        entity_df=entity_df,
        features=feature_service
    ).to_df()

    # 4. 准备训练数据
    X = training_df.drop(['label', 'event_timestamp', 'user_id', 'item_id'], axis=1)
    y = training_df['label']

    # 5. 训练模型
    model = RandomForestClassifier(n_estimators=100)
    model.fit(X, y)

    # 6. 保存模型
    with mlflow.start_run():
        mlflow.log_param('model_type', 'random_forest')
        mlflow.log_metric('accuracy', model.score(X, y))
        mlflow.sklearn.log_model(model, 'model')

    # 7. 在线推理
    def predict(user_id, item_id):
        # 获取在线特征
        features = store.get_online_features(
            features=feature_service,
            entity_rows=[{'user_id': user_id, 'item_id': item_id}]
        ).to_df()

        # 预测
        X_pred = features.drop(['user_id', 'item_id'], axis=1)
        prediction = model.predict(X_pred)
        return prediction[0]

    # 8. 推理示例
    result = predict(123, 456)
    print(f'Prediction: {result}')
    """
    print(code)


complete_workflow()


# Part 8: Summary
#
# | Component     | Storage type        | Purpose                      |
# |---------------|---------------------|------------------------------|
# | Offline store | Data lake/warehouse | Batch feature processing     |
# | Online store  | Redis/DynamoDB      | Low-latency feature serving  |
# | Registry      | Database            | Feature metadata management  |
#
# Feast best practices:
# - Set TTLs sensibly to avoid stale data.
# - Use feature services to manage groups of features.
# - Implement feature monitoring and validation.
# - Tune online-store performance.
# - Periodically clean up expired features.