# data-analysis
Data analysis and statistical computation. Use when user needs "数据分析/统计/计算指标/数据洞察". Supports general analysis, financial data (stocks, returns), business data (sales, users), and scientific research. Uses pandas/numpy/scikit-learn for processing. Automatically activates data-base for data acquisition.
Install:

```bash
npx skill4agent add jinfanzheng/kode-sdk-csharp data-analysis
```

Example data files: `data.csv`, `sales_report_2025.xlsx`, `analysis_results.json`, `销售数据.csv`, `数据文件.xlsx`, `報表.json`

Depends on skill: `data-base`

Run scripts through the skill's virtual environment:

```bash
cd skills/data-analysis
if [ ! -f ".venv/bin/python" ]; then
    echo "Creating Python environment..."
    ./setup.sh
fi
.venv/bin/python your_script.py
```

General analysis:

```python
import pandas as pd
# Load and summarize
df = pd.read_csv('data.csv')
summary = df.describe()
correlations = df.corr()
```

Financial analysis:

```python
# Calculate returns
```
```python
df['return'] = df['price'].pct_change()
# Risk metrics
volatility = df['return'].std() * (252 ** 0.5)
sharpe = df['return'].mean() / df['return'].std() * (252 ** 0.5)
```

Business analysis:

```python
# Group by category
```
```python
grouped = df.groupby('category').agg({
    'revenue': ['sum', 'mean', 'count']
})
# Growth rate
df['growth'] = df['revenue'].pct_change()
```

Statistical analysis:

```python
from scipy import stats
```
```python
# T-test
t_stat, p_value = stats.ttest_ind(group_a, group_b)
# Regression
from sklearn.linear_model import LinearRegression
model = LinearRegression()
model.fit(X, y)
```

Output files:

```python
import time
```
```python
from datetime import datetime
# Use timestamp for unique filenames (avoid conflicts)
timestamp = datetime.now().strftime('%Y%m%d_%H%M%S')
# Charts and temporary files
plt.savefig(f'analysis_{timestamp}.png')  # → $KODE_AGENT_DIR/analysis_20250115_143022.png
df.to_csv(f'results_{timestamp}.csv')     # → $KODE_AGENT_DIR/results_20250115_143022.csv
```

Example output names: `analysis_20250115_143022.png`, `sales_report_q1_2025.csv`, `script_{random.randint(1000,9999)}.py`

User data lives under `$KODE_USER_DIR`:

```python
import os
```
```python
user_dir = os.getenv('KODE_USER_DIR')
# Save to user memory
memory_file = f"{user_dir}/.memory/facts/preferences.jsonl"
# Read from knowledge base
knowledge_dir = f"{user_dir}/.knowledge/docs"
```

Related environment variables: `KODE_AGENT_DIR`, `KODE_USER_DIR`. Quick inspection helpers: `df.head()`, `df.info()`, `df.describe()`. User files live under `$KODE_USER_DIR`.

Setup:

```bash
# Navigate to the skill directory
```
```bash
cd apps/assistant/skills/data-analysis
# Run the setup script (creates venv and installs dependencies)
./setup.sh
# Activate the environment
source .venv/bin/activate
```

The environment is created in `.venv/`.

Running scripts:

```bash
# Use the virtual environment's Python
```
```bash
.venv/bin/python script.py
# Or activate first, then run normally
source .venv/bin/activate
python script.py
```