Backend code
This commit is contained in:
parent 023cc64d20
commit 62e0ab2da7
@ -8,5 +8,12 @@
|
||||
<jdbc-url>jdbc:mysql://lawyer5.cn:3308/wangche</jdbc-url>
|
||||
<working-dir>$ProjectFileDir$</working-dir>
|
||||
</data-source>
|
||||
<data-source source="LOCAL" name="@localhost" uuid="7b52e754-e1c2-4a7c-ae09-3c939af31a22">
|
||||
<driver-ref>mysql.8</driver-ref>
|
||||
<synchronize>true</synchronize>
|
||||
<jdbc-driver>com.mysql.cj.jdbc.Driver</jdbc-driver>
|
||||
<jdbc-url>jdbc:mysql://localhost:3306</jdbc-url>
|
||||
<working-dir>$ProjectFileDir$</working-dir>
|
||||
</data-source>
|
||||
</component>
|
||||
</project>
|
@ -3,5 +3,5 @@
|
||||
<component name="Black">
|
||||
<option name="sdkName" value="Python 3.7" />
|
||||
</component>
|
||||
<component name="ProjectRootManager" version="2" project-jdk-name="Python 3.10 (wance_data)" project-jdk-type="Python SDK" />
|
||||
<component name="ProjectRootManager" version="2" project-jdk-name="Python 3.11 (testenv) (2)" project-jdk-type="Python SDK" />
|
||||
</project>
|
@ -4,7 +4,7 @@
|
||||
<content url="file://$MODULE_DIR$">
|
||||
<excludeFolder url="file://$MODULE_DIR$/venv" />
|
||||
</content>
|
||||
<orderEntry type="inheritedJdk" />
|
||||
<orderEntry type="jdk" jdkName="Python 3.11 (testenv) (2)" jdkType="Python SDK" />
|
||||
<orderEntry type="sourceFolder" forTests="false" />
|
||||
</component>
|
||||
<component name="PackageRequirementsSettings">
|
||||
|
0
src/akshare_data/__init__.py
Normal file
915
src/akshare_data/router.py
Normal file
@ -0,0 +1,915 @@
|
||||
from fastapi import APIRouter, Query, FastAPI, Body
|
||||
from starlette.middleware.cors import CORSMiddleware
|
||||
import akshare as ak
|
||||
import pymysql
|
||||
import json
|
||||
from pydantic import BaseModel
|
||||
from typing import List, Dict, Optional
|
||||
import math
|
||||
router = APIRouter() # 创建一个 FastAPI 路由器实例
|
||||
|
||||
|
||||
# 数据库测试
|
||||
@router.get("/userstrategy")
|
||||
async def userstrategy():
|
||||
# 创建数据库连接
|
||||
conn = pymysql.connect(
|
||||
host='cqxqg.tech', # MySQL服务器地址
|
||||
user='wangche', # MySQL用户名
|
||||
password='fN7sXX8saiQKXWbG', # MySQL密码
|
||||
database='wangche', # 要连接的数据库名
|
||||
port=3308,
|
||||
charset='utf8mb4', # 字符集,确保支持中文等
|
||||
cursorclass=pymysql.cursors.DictCursor # 使用字典形式返回结果
|
||||
)
|
||||
|
||||
# 使用 cursor() 方法创建一个游标对象 cursor
|
||||
cursor = conn.cursor()
|
||||
|
||||
# 使用 execute()方法执行 SQL 查询
|
||||
# 通配符 *,意思是查询表里所有内容
|
||||
cursor.execute("select * from user_strategy where user_id = 100096")
|
||||
|
||||
# 使用 fetchone() 方法获取一行数据.
|
||||
# data = cursor.fetchone()
|
||||
data = cursor.fetchall()
|
||||
# print(data)
|
||||
|
||||
# 关闭数据库连接
|
||||
cursor.close()
|
||||
# 将strategy_request字段中的JSON字符串转换为Python字典
|
||||
for i in range(0, len(data)):
|
||||
strategy_request = data[i]['strategy_request']
|
||||
# 将JSON字符串转换为Python字典
|
||||
data_dict = json.loads(strategy_request)
|
||||
data[i]['strategy_request'] = data_dict
|
||||
return data
|
||||
|
||||
|
||||
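Reviewer note (not part of the commit): the handler above and every endpoint below repeat the same pymysql.connect(...) block and close the connection by hand. A minimal sketch of a shared helper, assuming the connection parameters shown above (the name get_db_conn is hypothetical; in practice the credentials belong in settings, not in source):

from contextlib import contextmanager
import pymysql

@contextmanager
def get_db_conn():
    # Same parameters as the handlers in this file.
    conn = pymysql.connect(
        host='cqxqg.tech',
        user='wangche',
        password='fN7sXX8saiQKXWbG',
        database='wangche',
        port=3308,
        charset='utf8mb4',
        cursorclass=pymysql.cursors.DictCursor,
    )
    try:
        yield conn
    finally:
        conn.close()

# Usage sketch: with get_db_conn() as conn: cursor = conn.cursor(); ...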
# 定义Pydantic模型
|
||||
class InfoItem(BaseModel):
|
||||
code: str
|
||||
name: str
|
||||
market: str
|
||||
newprice: float
|
||||
amplitudetype: bool
|
||||
amplitude: float
|
||||
type: str
|
||||
|
||||
class MessageItem(BaseModel):
|
||||
id: str
|
||||
label: str
|
||||
value: str
|
||||
state: bool
|
||||
info: List[InfoItem]
|
||||
inputClass: str
|
||||
fixchange: bool
|
||||
|
||||
class StrategyRequest(BaseModel):
|
||||
id: int
|
||||
strategy_name: str
|
||||
message: Optional[List[MessageItem]] = None
|
||||
info: Optional[List[InfoItem]] = None
|
||||
|
||||
class MyData(BaseModel):
|
||||
mes: List[StrategyRequest]
|
||||
|
||||
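For reference, a purely illustrative payload that validates against the models above (field names come from the code, all values are made up):

example = StrategyRequest(
    id=100096,
    strategy_name="demo",
    message=[
        MessageItem(
            id="1", label="ma", value="5", state=True, inputClass="num", fixchange=False,
            info=[InfoItem(code="600000", name="demo-stock", market="hs",
                           newprice=7.5, amplitudetype=True, amplitude=1.2, type="hushen")],
        )
    ],
    info=[],
)
print(example.dict()["strategy_name"])  # -> "demo"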
# 新加的new_user_strategy数据库表
|
||||
@router.post("/newuserstrategy")
|
||||
async def newuserstrategy(strategy: StrategyRequest = Body(...)):
|
||||
# 创建数据库连接
|
||||
conn = pymysql.connect(
|
||||
host='cqxqg.tech', # MySQL服务器地址
|
||||
user='wangche', # MySQL用户名
|
||||
password='fN7sXX8saiQKXWbG', # MySQL密码
|
||||
database='wangche', # 要连接的数据库名
|
||||
port=3308,
|
||||
charset='utf8mb4', # 字符集,确保支持中文等
|
||||
cursorclass=pymysql.cursors.DictCursor # 使用字典形式返回结果
|
||||
)
|
||||
# 使用 cursor() 方法创建一个游标对象 cursor
|
||||
cursor = conn.cursor()
|
||||
# cursor1 = conn.cursor()
|
||||
# cursor2 = conn.cursor()
|
||||
# --------------------new_user_strategy数据------------------
|
||||
# SQL 查询语句
|
||||
sql1 = "SELECT strategy_name FROM new_user_strategy WHERE id = %s"
|
||||
# 执行查询
|
||||
cursor.execute(sql1, ('100096',))
|
||||
# 获取所有数据
|
||||
result1 = cursor.fetchall()
|
||||
# 提取 strategy_name 列并转换为列表
|
||||
strategy_names1 = [row['strategy_name'] for row in result1]
|
||||
# print(strategy_names1)
|
||||
|
||||
# --------------------user_strategy数据------------------
|
||||
# SQL 查询语句
|
||||
sql2 = "SELECT strategy_name FROM user_strategy WHERE user_id = %s"
|
||||
# 执行查询
|
||||
cursor.execute(sql2, ('100096',))
|
||||
# 获取所有数据
|
||||
result2 = cursor.fetchall()
|
||||
# 提取 strategy_name 列并转换为列表
|
||||
strategy_names2 = [row['strategy_name'] for row in result2]
|
||||
# print(strategy_names2)
|
||||
|
||||
|
||||
# --------------------获取整个请求数据--------------------
|
||||
request_data = strategy.dict()
|
||||
# print(request_data)
|
||||
|
||||
# 准备SQL插入语句(注意:没有包含id列,因为它可能是自动递增的)
|
||||
sql = "INSERT INTO new_user_strategy (id, strategy_name, message, info) VALUES (%s, %s, %s, %s)"
|
||||
# 要插入的数据(确保数据类型与数据库表列匹配)
|
||||
# 将message和info转换为JSON字符串
|
||||
|
||||
message_json = json.dumps(request_data['message'])
|
||||
info_json = json.dumps(request_data['info'])
|
||||
values = (request_data['id'], request_data['strategy_name'], message_json, info_json)
|
||||
|
||||
# 是否进行写入数据库表中
|
||||
set_strategy_names1 = set(strategy_names1)
|
||||
set_strategy_names2 = set(strategy_names2)
|
||||
|
||||
if request_data['strategy_name'] in strategy_names1:
|
||||
# print("信息已存在")
|
||||
conn.close()
|
||||
else:
|
||||
# 执行SQL语句
|
||||
cursor.execute(sql, values)
|
||||
# 提交事务到数据库执行
|
||||
conn.commit()
|
||||
print("数据插入成功")
|
||||
# 关闭数据库连接
|
||||
conn.close()
|
||||
return {"message": "数据插入成功"}
|
||||
# for i in range(0, len(result2)):
|
||||
# if result2[i] not in strategy_names1:
|
||||
# # 执行SQL语句
|
||||
# cursor.execute(sql, values)
|
||||
# # 提交事务到数据库执行
|
||||
# conn.commit()
|
||||
# print("数据插入成功")
|
||||
# # 关闭数据库连接
|
||||
# conn.close()
|
||||
# return {"message": "数据插入成功"}
|
||||
|
||||
|
||||
# conn.close()
|
||||
|
||||
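The handler above only reaches conn.close() on some paths and repeats its SQL strings. A sketch of the same insert flow with the duplicate check pushed into SQL and the close guaranteed (the helper name insert_strategy is hypothetical; tables and columns are those used above):

def insert_strategy(conn, request_data):
    try:
        with conn.cursor() as cursor:
            cursor.execute(
                "SELECT 1 FROM new_user_strategy WHERE id = %s AND strategy_name = %s",
                (request_data["id"], request_data["strategy_name"]),
            )
            if cursor.fetchone():
                return {"message": "信息已存在"}
            cursor.execute(
                "INSERT INTO new_user_strategy (id, strategy_name, message, info) VALUES (%s, %s, %s, %s)",
                (request_data["id"], request_data["strategy_name"],
                 json.dumps(request_data["message"]), json.dumps(request_data["info"])),
            )
            conn.commit()
            return {"message": "数据插入成功"}
    finally:
        conn.close()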
@router.post("/newadd")
|
||||
async def newadd(strategy: StrategyRequest = Body(...)):
|
||||
conn = pymysql.connect(
|
||||
host='cqxqg.tech', # MySQL服务器地址
|
||||
user='wangche', # MySQL用户名
|
||||
password='fN7sXX8saiQKXWbG', # MySQL密码
|
||||
database='wangche', # 要连接的数据库名
|
||||
port=3308,
|
||||
charset='utf8mb4', # 字符集,确保支持中文等
|
||||
cursorclass=pymysql.cursors.DictCursor # 使用字典形式返回结果
|
||||
)
|
||||
|
||||
# 使用 cursor() 方法创建一个游标对象 cursor
|
||||
cursor = conn.cursor()
|
||||
|
||||
# SQL 查询语句
|
||||
sql1 = "SELECT strategy_name FROM new_user_strategy WHERE id = %s"
|
||||
# 执行查询
|
||||
cursor.execute(sql1, ('100096',))
|
||||
# 获取所有数据
|
||||
result1 = cursor.fetchall()
|
||||
|
||||
# 获取整个请求数据
|
||||
request_data = strategy.dict()
|
||||
print(request_data)
|
||||
|
||||
|
||||
message_json = json.dumps(request_data['message'])
|
||||
info_json = json.dumps(request_data['info'])
|
||||
|
||||
print(result1)
|
||||
for item in result1:
|
||||
if request_data['strategy_name'] == item['strategy_name']:
|
||||
return {
|
||||
"code": 204,
|
||||
"message": "该分组已经存在"
|
||||
}
|
||||
|
||||
sql = "INSERT INTO new_user_strategy (id, strategy_name, message, info) VALUES (%s, %s, %s, %s)"
|
||||
# # 执行 SQL
|
||||
cursor.execute(sql, (request_data["id"], request_data['strategy_name'], message_json, info_json))
|
||||
#
|
||||
# # 提交事务到数据库执行
|
||||
conn.commit()
|
||||
# print("更新数据成功")
|
||||
# 关闭数据库连接
|
||||
conn.close()
|
||||
return {
|
||||
"code": 200,
|
||||
"message": "新建分组成功!"
|
||||
}
|
||||
|
||||
|
||||
|
||||
# 获取数据
|
||||
@router.get("/newget")
|
||||
async def newget():
|
||||
# 创建数据库连接
|
||||
conn = pymysql.connect(
|
||||
host='cqxqg.tech', # MySQL服务器地址
|
||||
user='wangche', # MySQL用户名
|
||||
password='fN7sXX8saiQKXWbG', # MySQL密码
|
||||
database='wangche', # 要连接的数据库名
|
||||
port=3308,
|
||||
charset='utf8mb4', # 字符集,确保支持中文等
|
||||
cursorclass=pymysql.cursors.DictCursor # 使用字典形式返回结果
|
||||
)
|
||||
|
||||
# 使用 cursor() 方法创建一个游标对象 cursor
|
||||
cursor = conn.cursor()
|
||||
|
||||
# 使用 execute()方法执行 SQL 查询
|
||||
# 通配符 *,意思是查询表里所有内容
|
||||
cursor.execute("select * from new_user_strategy where id = 100096")
|
||||
|
||||
# 使用 fetchone() 方法获取一行数据.
|
||||
# data = cursor.fetchone()
|
||||
data = cursor.fetchall()
|
||||
# print(data)
|
||||
|
||||
# 关闭数据库连接
|
||||
cursor.close()
|
||||
# 将strategy_request字段中的JSON字符串转换为Python字典
|
||||
for i in range(0, len(data)):
|
||||
strategy_request1 = data[i]['message']
|
||||
strategy_request2 = data[i]['info']
|
||||
# 将JSON字符串转换为Python字典
|
||||
data_dict1 = json.loads(strategy_request1)
|
||||
data_dict2 = json.loads(strategy_request2)
|
||||
data[i]['message'] = data_dict1
|
||||
data[i]['info'] = data_dict2
|
||||
return {
|
||||
"code": 200,
|
||||
"data": data
|
||||
}
|
||||
|
||||
|
||||
|
||||
# 新增分组
|
||||
@router.post("/newupdata")
|
||||
async def newupdata(strategy: StrategyRequest = Body(...)):
|
||||
# 创建数据库连接
|
||||
conn = pymysql.connect(
|
||||
host='cqxqg.tech', # MySQL服务器地址
|
||||
user='wangche', # MySQL用户名
|
||||
password='fN7sXX8saiQKXWbG', # MySQL密码
|
||||
database='wangche', # 要连接的数据库名
|
||||
port=3308,
|
||||
charset='utf8mb4', # 字符集,确保支持中文等
|
||||
cursorclass=pymysql.cursors.DictCursor # 使用字典形式返回结果
|
||||
)
|
||||
|
||||
# 使用 cursor() 方法创建一个游标对象 cursor
|
||||
cursor = conn.cursor()
|
||||
|
||||
# 获取整个请求数据
|
||||
request_data = strategy.dict()
|
||||
print(request_data)
|
||||
|
||||
|
||||
message_json = json.dumps(request_data['message'])
|
||||
info_json = json.dumps(request_data['info'])
|
||||
|
||||
|
||||
# SQL 语句
|
||||
sql = """
|
||||
UPDATE new_user_strategy
|
||||
SET message = %s, info = %s
|
||||
WHERE strategy_name = %s;
|
||||
"""
|
||||
|
||||
# 执行 SQL
|
||||
cursor.execute(sql, (message_json, info_json, request_data['strategy_name']))
|
||||
|
||||
|
||||
# 提交事务到数据库执行
|
||||
conn.commit()
|
||||
print("更新数据成功")
|
||||
# 关闭数据库连接
|
||||
conn.close()
|
||||
return "更新成功"
|
||||
|
||||
|
||||
|
||||
class delItem(BaseModel):
|
||||
strategy_name: str
|
||||
# 删除分组
|
||||
@router.post("/newdel")
|
||||
async def newdel(delitem: delItem):
|
||||
delitem = delitem.strategy_name
|
||||
# 创建数据库连接
|
||||
conn = pymysql.connect(
|
||||
host='cqxqg.tech', # MySQL服务器地址
|
||||
user='wangche', # MySQL用户名
|
||||
password='fN7sXX8saiQKXWbG', # MySQL密码
|
||||
database='wangche', # 要连接的数据库名
|
||||
port=3308,
|
||||
charset='utf8mb4', # 字符集,确保支持中文等
|
||||
cursorclass=pymysql.cursors.DictCursor # 使用字典形式返回结果
|
||||
)
|
||||
|
||||
# 使用 cursor() 方法创建一个游标对象 cursor
|
||||
cursor = conn.cursor()
|
||||
|
||||
sql1 = "DELETE FROM new_user_strategy WHERE strategy_name = %s"
|
||||
|
||||
sql2 = "DELETE FROM user_strategy WHERE strategy_name = %s"
|
||||
|
||||
cursor.execute(sql1, (delitem,))
|
||||
cursor.execute(sql2, (delitem,))
|
||||
# 提交事务到数据库执行
|
||||
conn.commit()
|
||||
print("数据删除成功")
|
||||
# 关闭数据库连接
|
||||
conn.close()
|
||||
return "删除成功"
|
||||
|
||||
|
||||
|
||||
@router.get("/newmodify")
|
||||
async def newmodify(
|
||||
strategy_name: str = Query(..., description="原始值"),
|
||||
new_strategy_name: str = Query(..., description="更改值")
|
||||
):
|
||||
print(strategy_name)
|
||||
print(new_strategy_name)
|
||||
# return "success"
|
||||
# pass
|
||||
# 创建数据库连接
|
||||
conn = pymysql.connect(
|
||||
host='cqxqg.tech', # MySQL服务器地址
|
||||
user='wangche', # MySQL用户名
|
||||
password='fN7sXX8saiQKXWbG', # MySQL密码
|
||||
database='wangche', # 要连接的数据库名
|
||||
port=3308,
|
||||
charset='utf8mb4', # 字符集,确保支持中文等
|
||||
cursorclass=pymysql.cursors.DictCursor # 使用字典形式返回结果
|
||||
)
|
||||
|
||||
# 使用 cursor() 方法创建一个游标对象 cursor
|
||||
cursor = conn.cursor()
|
||||
|
||||
# 更新 strategy_name
|
||||
update_sql = "UPDATE new_user_strategy SET strategy_name = %s WHERE strategy_name = %s"
|
||||
cursor.execute(update_sql, (new_strategy_name, strategy_name))
|
||||
# 提交事务到数据库执行
|
||||
conn.commit()
|
||||
print("重命名成功")
|
||||
# 关闭数据库连接
|
||||
conn.close()
|
||||
return "重命名成功"
|
||||
|
||||
|
||||
|
||||
|
||||
# 侧边栏webview数据
|
||||
@router.post("/asidestrinfo/")
|
||||
async def asidestrinfo():
|
||||
pass
|
||||
|
||||
|
||||
|
||||
# 股票数据
|
||||
stock_data = None
|
||||
@router.get("/stock")
|
||||
async def stock(
|
||||
symbol: str = Query(..., description="股票代码"),
|
||||
start_date: str = Query(..., description="起始日期"),
|
||||
end_date: str = Query(..., description="结束日期"),
|
||||
):
|
||||
# 获取股票日线行情数据
|
||||
# print(symbol, start_date, end_date)
|
||||
# print(symbol)
|
||||
global stock_data
|
||||
try:
|
||||
stock_zh_a_daily_df = ak.stock_zh_a_daily(symbol=symbol, start_date=start_date, end_date=end_date, adjust="qfq")
|
||||
|
||||
# 获取所有的code
|
||||
all_dates = stock_zh_a_daily_df['date']
|
||||
# 如果你想要一个列表而不是Pandas Series
|
||||
dates_list = all_dates.tolist()
|
||||
|
||||
all_opens = stock_zh_a_daily_df['open']
|
||||
opens_list = all_opens.tolist()
|
||||
cleaned_opens_list = [
|
||||
value if not (math.isnan(value) or math.isinf(value)) else 0.00
|
||||
for value in opens_list
|
||||
]
|
||||
|
||||
all_closes = stock_zh_a_daily_df['close']
|
||||
close_list = all_closes.tolist()
|
||||
cleaned_close_list = [
|
||||
value if not (math.isnan(value) or math.isinf(value)) else 0.00
|
||||
for value in close_list
|
||||
]
|
||||
|
||||
all_highs = stock_zh_a_daily_df['high']
|
||||
high_list = all_highs.tolist()
|
||||
cleaned_high_list = [
|
||||
value if not (math.isnan(value) or math.isinf(value)) else 0.00
|
||||
for value in high_list
|
||||
]
|
||||
|
||||
all_lows = stock_zh_a_daily_df['low']
|
||||
low_list = all_lows.tolist()
|
||||
cleaned_low_list = [
|
||||
value if not (math.isnan(value) or math.isinf(value)) else 0.00
|
||||
for value in low_list
|
||||
]
|
||||
|
||||
all_volumes = stock_zh_a_daily_df['volume']
|
||||
volume_list = all_volumes.tolist()
|
||||
cleaned_volume_list = [
|
||||
value if not (math.isnan(value) or math.isinf(value)) else 0.00
|
||||
for value in volume_list
|
||||
]
|
||||
|
||||
all_amounts = stock_zh_a_daily_df['amount']
|
||||
amount_lists = all_amounts.tolist()
|
||||
cleaned_amount_list = [
|
||||
value if not (math.isnan(value) or math.isinf(value)) else 0.00
|
||||
for value in amount_lists
|
||||
]
|
||||
|
||||
stock_data = {
|
||||
"amount": cleaned_amount_list,
|
||||
"close": cleaned_close_list,
|
||||
"date": dates_list,
|
||||
"high": cleaned_high_list,
|
||||
"low": cleaned_low_list,
|
||||
"open": cleaned_opens_list,
|
||||
"outstanding_share": [],
|
||||
"turnover": [],
|
||||
"volume": cleaned_volume_list
|
||||
}
|
||||
except Exception as e:
|
||||
print(e)
|
||||
print("无法使用该方式请求股票数据,当前股票可能不是A股")
|
||||
stock_data = {
|
||||
"amount": [],
|
||||
"close": [],
|
||||
"date": [],
|
||||
"high": [],
|
||||
"low": [],
|
||||
"open": [],
|
||||
"outstanding_share": [],
|
||||
"turnover": [],
|
||||
"volume": []
|
||||
}
|
||||
finally:
|
||||
return {"message": stock_data}
|
||||
|
||||
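The same NaN/inf-cleaning comprehension is repeated for every column in the endpoint above and in the ones below. A small sketch of a helper that captures the pattern (assuming the input is a pandas Series, as returned by the akshare DataFrames used here):

import math

def clean_column(series):
    # Convert a pandas Series to a plain list, replacing NaN/inf with 0.00 so the JSON response stays valid.
    return [
        value if not (math.isnan(value) or math.isinf(value)) else 0.00
        for value in series.tolist()
    ]

# e.g. cleaned_opens_list = clean_column(stock_zh_a_daily_df['open'])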
# 前端获取数据接口
|
||||
@router.get("/kdata")
|
||||
async def kdata():
|
||||
global stock_data
|
||||
if stock_data is None:
|
||||
stock_data = {
|
||||
"amount": [],
|
||||
"close": [],
|
||||
"date": [],
|
||||
"high": [],
|
||||
"low": [],
|
||||
"open": [],
|
||||
"outstanding_share": [],
|
||||
"turnover": [],
|
||||
"volume": []
|
||||
}
|
||||
return {"message": stock_data}
|
||||
else:
|
||||
return {"message": stock_data}
|
||||
|
||||
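A sketch of an equivalent /kdata handler that returns the same response without reassigning the global (EMPTY_KDATA is a hypothetical module-level constant):

EMPTY_KDATA = {"amount": [], "close": [], "date": [], "high": [], "low": [],
               "open": [], "outstanding_share": [], "turnover": [], "volume": []}

@router.get("/kdata")
async def kdata():
    return {"message": stock_data if stock_data is not None else EMPTY_KDATA}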
# ---------------------------------------------------------------------
|
||||
# 港股代码数据
|
||||
@router.get("/ganggudata")
|
||||
async def ganggudata():
|
||||
stock_hk_spot_em_df = ak.stock_hk_spot_em()
|
||||
# print(stock_hk_spot_em_df)
|
||||
# 获取所有的code
|
||||
all_codes = stock_hk_spot_em_df['代码']
|
||||
# 如果你想要一个列表而不是Pandas Series
|
||||
codes_list = all_codes.tolist()
|
||||
|
||||
all_names = stock_hk_spot_em_df['名称']
|
||||
names_list = all_names.tolist()
|
||||
|
||||
all_prices = stock_hk_spot_em_df['最新价']
|
||||
price_list = all_prices.tolist()
|
||||
# 清理非法浮点数值(NaN, Infinity, -Infinity)
|
||||
cleaned_price_list = [
|
||||
value if not (math.isnan(value) or math.isinf(value)) else 0.00
|
||||
for value in price_list
|
||||
]
|
||||
|
||||
all_amplitudes = stock_hk_spot_em_df['涨跌幅']
|
||||
amplitudes_list = all_amplitudes.tolist()
|
||||
# 清理非法浮点数值(NaN, Infinity, -Infinity)
|
||||
cleaned_amplitudes_list = [
|
||||
value if not (math.isnan(value) or math.isinf(value)) else 0.00
|
||||
for value in amplitudes_list
|
||||
]
|
||||
|
||||
# 返回的数据
|
||||
ggstocking = []
|
||||
for i in range(9):
|
||||
if cleaned_price_list[i] >= 0:
|
||||
flag = True
|
||||
else:
|
||||
flag = False
|
||||
ggstocking.append({
|
||||
'code': codes_list[i],
|
||||
'name': names_list[i],
|
||||
'market': '港股',
|
||||
'newprice': cleaned_price_list[i],
|
||||
'amplitudetype': flag,
|
||||
'amplitude': cleaned_amplitudes_list[i],
|
||||
'type': 'ganggu'
|
||||
})
|
||||
|
||||
# 返回清理后的列表
|
||||
return ggstocking
|
||||
|
||||
# 港股K线图历史数据
|
||||
@router.get("/ganggudataK")
|
||||
async def ganggudataK(
|
||||
symbol: str = Query(..., description="股票代码"),
|
||||
start_date: str = Query(..., description="起始日期"),
|
||||
end_date: str = Query(..., description="结束日期"),
|
||||
):
|
||||
try:
|
||||
stock_hk_hist_df = ak.stock_hk_hist(symbol=symbol, period="daily", start_date=start_date, end_date=end_date,
|
||||
adjust="")
|
||||
|
||||
# 获取所有的code
|
||||
all_dates = stock_hk_hist_df['日期']
|
||||
# 如果你想要一个列表而不是Pandas Series
|
||||
dates_list = all_dates.tolist()
|
||||
|
||||
all_opens = stock_hk_hist_df['开盘']
|
||||
opens_list = all_opens.tolist()
|
||||
cleaned_opens_list = [
|
||||
value if not (math.isnan(value) or math.isinf(value)) else 0.00
|
||||
for value in opens_list
|
||||
]
|
||||
|
||||
all_closes = stock_hk_hist_df['收盘']
|
||||
close_list = all_closes.tolist()
|
||||
cleaned_close_list = [
|
||||
value if not (math.isnan(value) or math.isinf(value)) else 0.00
|
||||
for value in close_list
|
||||
]
|
||||
|
||||
all_highs = stock_hk_hist_df['最高']
|
||||
high_list = all_highs.tolist()
|
||||
cleaned_high_list = [
|
||||
value if not (math.isnan(value) or math.isinf(value)) else 0.00
|
||||
for value in high_list
|
||||
]
|
||||
|
||||
all_lows = stock_hk_hist_df['最低']
|
||||
low_list = all_lows.tolist()
|
||||
cleaned_low_list = [
|
||||
value if not (math.isnan(value) or math.isinf(value)) else 0.00
|
||||
for value in low_list
|
||||
]
|
||||
|
||||
all_volumes = stock_hk_hist_df['成交量']
|
||||
volume_list = all_volumes.tolist()
|
||||
cleaned_volume_list = [
|
||||
value if not (math.isnan(value) or math.isinf(value)) else 0.00
|
||||
for value in volume_list
|
||||
]
|
||||
|
||||
all_amounts = stock_hk_hist_df['成交额']
|
||||
amount_list = all_amounts.tolist()
|
||||
cleaned_amount_list = [
|
||||
value if not (math.isnan(value) or math.isinf(value)) else 0.00
|
||||
for value in amount_list
|
||||
]
|
||||
global stock_data
|
||||
stock_data = {
|
||||
"amount": cleaned_amount_list,
|
||||
"close": cleaned_close_list,
|
||||
"date": dates_list,
|
||||
"high": cleaned_high_list,
|
||||
"low": cleaned_low_list,
|
||||
"open": cleaned_opens_list,
|
||||
"outstanding_share": [],
|
||||
"turnover": [],
|
||||
"volume": cleaned_volume_list
|
||||
}
|
||||
except Exception as e:
|
||||
print(e)
|
||||
stock_data = {
|
||||
"amount": [],
|
||||
"close": [],
|
||||
"date": [],
|
||||
"high": [],
|
||||
"low": [],
|
||||
"open": [],
|
||||
"outstanding_share": [],
|
||||
"turnover": [],
|
||||
"volume": []
|
||||
}
|
||||
finally:
|
||||
return {"message": stock_data}
|
||||
|
||||
|
||||
|
||||
# ---------------------------------------------------------------------
|
||||
# 美股代码数据
|
||||
@router.get("/meigudata")
|
||||
async def meigudata():
|
||||
stock_us_spot_em_df = ak.stock_us_spot_em()
|
||||
# print(stock_us_spot_em_df)
|
||||
all_codes = stock_us_spot_em_df['代码']
|
||||
# 如果你想要一个列表而不是Pandas Series
|
||||
codes_list = all_codes.tolist()
|
||||
|
||||
all_names = stock_us_spot_em_df['名称']
|
||||
names_list = all_names.tolist()
|
||||
|
||||
all_prices = stock_us_spot_em_df['最新价']
|
||||
price_list = all_prices.tolist()
|
||||
# 清理非法浮点数值(NaN, Infinity, -Infinity)
|
||||
cleaned_price_list = [
|
||||
value if not (math.isnan(value) or math.isinf(value)) else 0.00
|
||||
for value in price_list
|
||||
]
|
||||
|
||||
all_amplitudes = stock_us_spot_em_df['涨跌幅']
|
||||
amplitudes_list = all_amplitudes.tolist()
|
||||
# 清理非法浮点数值(NaN, Infinity, -Infinity)
|
||||
cleaned_amplitudes_list = [
|
||||
value if not (math.isnan(value) or math.isinf(value)) else 0.00
|
||||
for value in amplitudes_list
|
||||
]
|
||||
# 返回的数据
|
||||
mgstocking = []
|
||||
for i in range(9):
|
||||
if cleaned_price_list[i] >= 0:
|
||||
flag = True
|
||||
else:
|
||||
flag = False
|
||||
mgstocking.append({
|
||||
'code': codes_list[i],
|
||||
'name': names_list[i],
|
||||
'market': '美股',
|
||||
'newprice': cleaned_price_list[i],
|
||||
'amplitudetype': flag,
|
||||
'amplitude': cleaned_amplitudes_list[i],
|
||||
'type': 'meigu'
|
||||
})
|
||||
|
||||
# 返回清理后的列表
|
||||
return mgstocking
|
||||
# 美股K线图历史数据
|
||||
@router.get("/meigudataK")
|
||||
async def meigudataK(
|
||||
symbol: str = Query(..., description="股票代码"),
|
||||
start_date: str = Query(..., description="起始日期"),
|
||||
end_date: str = Query(..., description="结束日期"),
|
||||
):
|
||||
try:
|
||||
stock_us_hist_df = ak.stock_us_hist(symbol=symbol, period="daily", start_date=start_date, end_date=end_date,
|
||||
adjust="qfq")
|
||||
|
||||
# 获取所有的code
|
||||
all_dates = stock_us_hist_df['日期']
|
||||
# 如果你想要一个列表而不是Pandas Series
|
||||
dates_list = all_dates.tolist()
|
||||
|
||||
all_opens = stock_us_hist_df['开盘']
|
||||
opens_list = all_opens.tolist()
|
||||
cleaned_opens_list = [
|
||||
value if not (math.isnan(value) or math.isinf(value)) else 0.00
|
||||
for value in opens_list
|
||||
]
|
||||
|
||||
all_closes = stock_us_hist_df['收盘']
|
||||
close_list = all_closes.tolist()
|
||||
cleaned_close_list = [
|
||||
value if not (math.isnan(value) or math.isinf(value)) else 0.00
|
||||
for value in close_list
|
||||
]
|
||||
|
||||
all_highs = stock_us_hist_df['最高']
|
||||
high_list = all_highs.tolist()
|
||||
cleaned_high_list = [
|
||||
value if not (math.isnan(value) or math.isinf(value)) else 0.00
|
||||
for value in high_list
|
||||
]
|
||||
|
||||
all_lows = stock_us_hist_df['最低']
|
||||
low_list = all_lows.tolist()
|
||||
cleaned_low_list = [
|
||||
value if not (math.isnan(value) or math.isinf(value)) else 0.00
|
||||
for value in low_list
|
||||
]
|
||||
|
||||
all_volumes = stock_us_hist_df['成交量']
|
||||
volume_list = all_volumes.tolist()
|
||||
cleaned_volume_list = [
|
||||
value if not (math.isnan(value) or math.isinf(value)) else 0.00
|
||||
for value in volume_list
|
||||
]
|
||||
|
||||
all_amounts = stock_us_hist_df['成交额']
|
||||
amount_list = all_amounts.tolist()
|
||||
cleaned_amount_list = [
|
||||
value if not (math.isnan(value) or math.isinf(value)) else 0.00
|
||||
for value in amount_list
|
||||
]
|
||||
global stock_data
|
||||
stock_data = {
|
||||
"amount": cleaned_amount_list,
|
||||
"close": cleaned_close_list,
|
||||
"date": dates_list,
|
||||
"high": cleaned_high_list,
|
||||
"low": cleaned_low_list,
|
||||
"open": cleaned_opens_list,
|
||||
"outstanding_share": [],
|
||||
"turnover": [],
|
||||
"volume": cleaned_volume_list
|
||||
}
|
||||
except Exception as e:
|
||||
print(e)
|
||||
stock_data = {
|
||||
"amount": [],
|
||||
"close": [],
|
||||
"date": [],
|
||||
"high": [],
|
||||
"low": [],
|
||||
"open": [],
|
||||
"outstanding_share": [],
|
||||
"turnover": [],
|
||||
"volume": []
|
||||
}
|
||||
finally:
|
||||
return {"message": stock_data}
|
||||
|
||||
# ---------------------------------------------------------------------
|
||||
# 沪深代码数据
|
||||
@router.get("/hushendata")
|
||||
async def hushendata():
|
||||
try:
|
||||
stock_zh_a_spot_df = ak.stock_kc_a_spot_em()
|
||||
except Exception as e:
|
||||
print(e)
return []  # avoid a NameError below if the fetch failed
|
||||
# print(stock_zh_a_spot_df)
|
||||
all_codes = stock_zh_a_spot_df['代码']
|
||||
# 如果你想要一个列表而不是Pandas Series
|
||||
codes_list = all_codes.tolist()
|
||||
|
||||
all_names = stock_zh_a_spot_df['名称']
|
||||
names_list = all_names.tolist()
|
||||
|
||||
all_prices = stock_zh_a_spot_df['最新价']
|
||||
price_list = all_prices.tolist()
|
||||
# 清理非法浮点数值(NaN, Infinity, -Infinity)
|
||||
cleaned_price_list = [
|
||||
value if not (math.isnan(value) or math.isinf(value)) else 0.00
|
||||
for value in price_list
|
||||
]
|
||||
|
||||
all_amplitudes = stock_zh_a_spot_df['涨跌幅']
|
||||
amplitudes_list = all_amplitudes.tolist()
|
||||
# 清理非法浮点数值(NaN, Infinity, -Infinity)
|
||||
cleaned_amplitudes_list = [
|
||||
value if not (math.isnan(value) or math.isinf(value)) else 0.00
|
||||
for value in amplitudes_list
|
||||
]
|
||||
# 返回的数据
|
||||
hsstocking = []
|
||||
|
||||
# for i in range(len(codes_list)):
|
||||
# if cleaned_price_list[i] >= 0:
|
||||
# flag = True
|
||||
# else:
|
||||
# flag = False
|
||||
# hsstocking.append({
|
||||
# 'code': codes_list[i],
|
||||
# 'name': names_list[i],
|
||||
# 'market': '港股',
|
||||
# 'newprice': cleaned_price_list[i],
|
||||
# 'amplitudetype': flag,
|
||||
# 'amplitude': cleaned_amplitudes_list[i],
|
||||
# })
|
||||
|
||||
|
||||
for i in range(9):
|
||||
if cleaned_price_list[i] >= 0:
|
||||
flag = True
|
||||
else:
|
||||
flag = False
|
||||
hsstocking.append({
|
||||
'code': codes_list[i],
|
||||
'name': names_list[i],
|
||||
'market': '沪深',
|
||||
'newprice': cleaned_price_list[i],
|
||||
'amplitudetype': flag,
|
||||
'amplitude': cleaned_amplitudes_list[i],
|
||||
'type': 'hushen'
|
||||
})
|
||||
|
||||
# 返回清理后的列表
|
||||
return hsstocking
|
||||
@router.get("/hushendataK")
|
||||
async def hushendataK(
|
||||
symbol: str = Query(..., description="股票代码"),
|
||||
start_date: str = Query(..., description="起始日期"),
|
||||
end_date: str = Query(..., description="结束日期"),
|
||||
):
|
||||
try:
|
||||
stock_zh_a_daily_qfq_df = ak.stock_zh_a_daily(symbol='sh' + symbol, start_date=start_date, end_date=end_date,
|
||||
adjust="qfq")
|
||||
|
||||
# 获取所有的code
|
||||
all_dates = stock_zh_a_daily_qfq_df['date']
|
||||
# 如果你想要一个列表而不是Pandas Series
|
||||
dates_list = all_dates.tolist()
|
||||
|
||||
all_opens = stock_zh_a_daily_qfq_df['open']
|
||||
opens_list = all_opens.tolist()
|
||||
cleaned_opens_list = [
|
||||
value if not (math.isnan(value) or math.isinf(value)) else 0.00
|
||||
for value in opens_list
|
||||
]
|
||||
|
||||
all_closes = stock_zh_a_daily_qfq_df['close']
|
||||
close_list = all_closes.tolist()
|
||||
cleaned_close_list = [
|
||||
value if not (math.isnan(value) or math.isinf(value)) else 0.00
|
||||
for value in close_list
|
||||
]
|
||||
|
||||
all_highs = stock_zh_a_daily_qfq_df['high']
|
||||
high_list = all_highs.tolist()
|
||||
cleaned_high_list = [
|
||||
value if not (math.isnan(value) or math.isinf(value)) else 0.00
|
||||
for value in high_list
|
||||
]
|
||||
|
||||
all_lows = stock_zh_a_daily_qfq_df['low']
|
||||
low_list = all_lows.tolist()
|
||||
cleaned_low_list = [
|
||||
value if not (math.isnan(value) or math.isinf(value)) else 0.00
|
||||
for value in low_list
|
||||
]
|
||||
|
||||
all_volumes = stock_zh_a_daily_qfq_df['volume']
|
||||
volume_list = all_volumes.tolist()
|
||||
cleaned_volume_list = [
|
||||
value if not (math.isnan(value) or math.isinf(value)) else 0.00
|
||||
for value in volume_list
|
||||
]
|
||||
|
||||
all_amounts = stock_zh_a_daily_qfq_df['amount']
|
||||
amount_lists = all_amounts.tolist()
|
||||
cleaned_amount_list = [
|
||||
value if not (math.isnan(value) or math.isinf(value)) else 0.00
|
||||
for value in amount_lists
|
||||
]
|
||||
global stock_data
|
||||
stock_data = {
|
||||
"amount": cleaned_amount_list,
|
||||
"close": cleaned_close_list,
|
||||
"date": dates_list,
|
||||
"high": cleaned_high_list,
|
||||
"low": cleaned_low_list,
|
||||
"open": cleaned_opens_list,
|
||||
"outstanding_share": [],
|
||||
"turnover": [],
|
||||
"volume": cleaned_volume_list
|
||||
}
|
||||
except Exception as e:
|
||||
print(e)
|
||||
stock_data = {
|
||||
"amount": [],
|
||||
"close": [],
|
||||
"date": [],
|
||||
"high": [],
|
||||
"low": [],
|
||||
"open": [],
|
||||
"outstanding_share": [],
|
||||
"turnover": [],
|
||||
"volume": []
|
||||
}
|
||||
finally:
|
||||
return {"message": stock_data}
|
||||
|
||||
|
||||
|
||||
|
||||
|
||||
|
6
src/akshare_data/service.py
Normal file
@ -0,0 +1,6 @@
|
||||
from fastapi import FastAPI,Query
|
||||
from starlette.middleware.cors import CORSMiddleware
|
||||
import akshare as ak
|
||||
|
||||
async def get_day_k_data():
|
||||
pass
|
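get_day_k_data is still a stub. A purely hypothetical sketch of what it could return, reusing the ak.stock_zh_a_daily call already used in router.py (parameter names are illustrative, not part of the commit):

async def get_day_k_data(symbol: str, start_date: str, end_date: str):
    # Hypothetical: fetch daily candles and return them column-wise, mirroring router.py's response shape.
    df = ak.stock_zh_a_daily(symbol=symbol, start_date=start_date, end_date=end_date, adjust="qfq")
    return df.to_dict(orient="list")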
@ -1,3 +1,4 @@
|
||||
|
||||
from typing import Any
|
||||
|
||||
from fastapi import FastAPI, HTTPException, Request, status
|
||||
|
@ -8,7 +8,7 @@ from src.responses import response_list_response
|
||||
financial_reports_router = APIRouter()
|
||||
|
||||
@financial_reports_router.post("/query")
|
||||
async def financial_repoets_query(request: FinancialReportQuery )-> JSONResponse:
|
||||
async def financial_repoets_query(request: FinancialReportQuery) -> JSONResponse:
|
||||
"""
|
||||
搜索接口
|
||||
"""
|
||||
|
0
src/klinedata/__init__.py
Normal file
24
src/main.py
@ -11,7 +11,6 @@ import asyncio
|
||||
from fastapi import FastAPI
|
||||
from datetime import datetime
|
||||
|
||||
|
||||
from starlette.middleware.cors import CORSMiddleware
|
||||
|
||||
from src.exceptions import register_exception_handler
|
||||
@ -25,11 +24,13 @@ from src.financial_reports.router import financial_reports_router
|
||||
from src.utils.generate_pinyin_abbreviation import generate_pinyin_abbreviation
|
||||
from src.composite.router import composite_router
|
||||
|
||||
from src.akshare_data.router import router as akshare_data_router
|
||||
|
||||
from xtquant import xtdata
|
||||
from src.settings.config import app_configs, settings
|
||||
|
||||
import adata
|
||||
import akshare as ak
|
||||
# import adata
|
||||
# import akshare as ak
|
||||
|
||||
|
||||
app = FastAPI(**app_configs)
|
||||
@ -43,6 +44,7 @@ app.include_router(backtest_router, prefix="/backtest", tags=["回测接口"])
|
||||
app.include_router(combine_router, prefix="/combine", tags=["组合接口"])
|
||||
app.include_router(financial_reports_router, prefix="/financial-reports", tags=["财报接口"])
|
||||
app.include_router(composite_router, prefix="/composite", tags=["vacode组合接口"])
|
||||
app.include_router(akshare_data_router, prefix="/akshare", tags=["数据接口"])
|
||||
|
||||
if settings.ENVIRONMENT.is_deployed:
|
||||
sentry_sdk.init(
|
||||
@ -52,11 +54,14 @@ if settings.ENVIRONMENT.is_deployed:
|
||||
|
||||
app.add_middleware(
|
||||
CORSMiddleware,
|
||||
allow_origins=settings.CORS_ORIGINS,
|
||||
allow_origin_regex=settings.CORS_ORIGINS_REGEX,
|
||||
# allow_origins=settings.CORS_ORIGINS,
|
||||
# allow_origin_regex=settings.CORS_ORIGINS_REGEX,
|
||||
allow_origins=["*"],
|
||||
allow_origin_regex=None,
|
||||
allow_credentials=True,
|
||||
allow_methods=("GET", "POST", "PUT", "PATCH", "DELETE", "OPTIONS"),
|
||||
allow_headers=settings.CORS_HEADERS,
|
||||
# allow_headers=settings.CORS_HEADERS,
|
||||
allow_headers=["*"], # 允许所有请求头
|
||||
)
|
||||
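Note on the change above: browsers reject Access-Control-Allow-Origin: * for credentialed requests, so allow_origins=["*"] together with allow_credentials=True may not behave as intended depending on the Starlette version. A sketch with an explicit origin list (the origin value is a placeholder, not from the commit):

app.add_middleware(
    CORSMiddleware,
    allow_origins=["http://localhost:5173"],  # placeholder frontend origin
    allow_credentials=True,
    allow_methods=["GET", "POST", "PUT", "PATCH", "DELETE", "OPTIONS"],
    allow_headers=["*"],
)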
|
||||
|
||||
@ -64,12 +69,13 @@ app.add_middleware(
|
||||
async def root():
|
||||
return {"message": "Hello, FastAPI!"}
|
||||
|
||||
|
||||
# 定时检查和数据抓取函数
|
||||
async def run_data_fetcher():
|
||||
print("财报抓取启动")
|
||||
while True:
|
||||
# 获取数据库中最新记录的时间
|
||||
latest_record = await FinancialReport .all().order_by("-created_at").first()
|
||||
latest_record = await FinancialReport.all().order_by("-created_at").first()
|
||||
latest_db_date = latest_record.created_at if latest_record else pd.Timestamp("1970-01-01")
|
||||
|
||||
# 将最新数据库日期设为无时区,以便比较
|
||||
@ -88,6 +94,7 @@ async def run_data_fetcher():
|
||||
# 休眠 12 小时(43200 秒),然后再次检查
|
||||
await asyncio.sleep(43200)
|
||||
|
||||
|
||||
async def test_liang_hua_ku():
|
||||
print("量化库测试函数启动")
|
||||
|
||||
@ -100,6 +107,5 @@ async def lifespan():
|
||||
asyncio.create_task(run_data_fetcher())
|
||||
|
||||
|
||||
|
||||
if __name__ == "__main__":
|
||||
uvicorn.run('src.main:app', host="0.0.0.0", port=8012, reload=True)
|
||||
uvicorn.run('src.main:app', host="127.0.0.1", port=8012, reload=True)
|
||||
|
@ -44,9 +44,3 @@ app_configs: dict[str, Any] = {
|
||||
"docs_url": "/api/docs",
|
||||
}
|
||||
|
||||
# app_configs['debug'] = True
|
||||
# if settings.ENVIRONMENT.is_deployed:
|
||||
# app_configs["root_path"] = f"/v{settings.APP_VERSION}"
|
||||
#
|
||||
# if not settings.ENVIRONMENT.is_debug:
|
||||
# app_configs["openapi_url"] = None # hide docs
|
||||
|