feat: add 迅投trader
This commit is contained in:
parent
edf68935cc
commit
45827f4ae5
1
.python-version
Normal file
1
.python-version
Normal file
@ -0,0 +1 @@
|
|||||||
|
3.10.11
|
6
main.py
Normal file
6
main.py
Normal file
@ -0,0 +1,6 @@
|
|||||||
|
def main():
|
||||||
|
print("Hello from real-trader!")
|
||||||
|
|
||||||
|
|
||||||
|
if __name__ == "__main__":
|
||||||
|
main()
|
12
pyproject.toml
Normal file
12
pyproject.toml
Normal file
@ -0,0 +1,12 @@
|
|||||||
|
[project]
|
||||||
|
name = "real-trader"
|
||||||
|
version = "0.1.0"
|
||||||
|
description = "Add your description here"
|
||||||
|
readme = "README.md"
|
||||||
|
requires-python = ">=3.10.11"
|
||||||
|
dependencies = [
|
||||||
|
"flask>=3.1.0",
|
||||||
|
"flask-limiter>=3.12",
|
||||||
|
"requests>=2.32.3",
|
||||||
|
"schedule>=1.2.2",
|
||||||
|
]
|
@ -1,6 +0,0 @@
|
|||||||
Flask
|
|
||||||
Flask-Limiter
|
|
||||||
pywin32
|
|
||||||
requests
|
|
||||||
schedule
|
|
||||||
pytesseract
|
|
@ -7,9 +7,9 @@ class Config:
|
|||||||
DEBUG = False
|
DEBUG = False
|
||||||
|
|
||||||
# Trading hours
|
# Trading hours
|
||||||
MARKET_OPEN_TIME = "09:00"
|
MARKET_OPEN_TIME = "09:20"
|
||||||
MARKET_ACTIVE_TIME = "09:15"
|
MARKET_ACTIVE_TIME = "09:15"
|
||||||
MARKET_CLOSE_TIME = "15:30"
|
MARKET_CLOSE_TIME = "15:10"
|
||||||
|
|
||||||
# Logging
|
# Logging
|
||||||
LOG_DIR = "logs"
|
LOG_DIR = "logs"
|
||||||
@ -21,3 +21,7 @@ class Config:
|
|||||||
# API Rate limiting
|
# API Rate limiting
|
||||||
RATE_LIMIT_REQUESTS = 100
|
RATE_LIMIT_REQUESTS = 100
|
||||||
RATE_LIMIT_PERIOD = 60 # seconds
|
RATE_LIMIT_PERIOD = 60 # seconds
|
||||||
|
|
||||||
|
# XtQuant 相关配置
|
||||||
|
XT_ACCOUNT = os.getenv('XT_ACCOUNT', '80391818')
|
||||||
|
XT_PATH = os.getenv('XT_PATH', r'C:\\江海证券QMT实盘_交易\\userdata_mini')
|
||||||
|
@ -1,99 +0,0 @@
|
|||||||
import easytrader
|
|
||||||
import time
|
|
||||||
import os
|
|
||||||
|
|
||||||
"""
|
|
||||||
需要32位python
|
|
||||||
"""
|
|
||||||
|
|
||||||
|
|
||||||
class RealTrader:
|
|
||||||
def __init__(self):
|
|
||||||
self._ACCOUNT = os.environ.get("ACCOUNT")
|
|
||||||
self._PASSWORD = os.environ.get("PASSWORD")
|
|
||||||
self._exe_path = r"C:\\ths\\xiadan.exe"
|
|
||||||
pass
|
|
||||||
|
|
||||||
def login(self):
|
|
||||||
print("准备登录:", self._ACCOUNT, self._PASSWORD)
|
|
||||||
# self.trader = easytrader.use("universal_client")
|
|
||||||
self.trader = easytrader.use("ths5.19")
|
|
||||||
# 使用修改版交易客户端时, 不会超时
|
|
||||||
try:
|
|
||||||
self.trader.enable_type_keys_for_editor()
|
|
||||||
self.trader.prepare(
|
|
||||||
user=self._ACCOUNT, password=self._PASSWORD, exe_path=self._exe_path
|
|
||||||
)
|
|
||||||
except Exception as e:
|
|
||||||
time.sleep(1)
|
|
||||||
|
|
||||||
def logout(self):
|
|
||||||
self.trader.exit()
|
|
||||||
|
|
||||||
def get_balance(self):
|
|
||||||
return self.trader.balance
|
|
||||||
|
|
||||||
def get_positions(self):
|
|
||||||
return self.trader.position
|
|
||||||
|
|
||||||
# 查询当日成交
|
|
||||||
def get_today_trades(self):
|
|
||||||
return self.trader.today_trades
|
|
||||||
|
|
||||||
# 查询当日委托
|
|
||||||
def get_today_entrust(self):
|
|
||||||
return self.trader.today_entrusts
|
|
||||||
|
|
||||||
# 刷新数据
|
|
||||||
def refresh(self):
|
|
||||||
self.trader.refresh()
|
|
||||||
|
|
||||||
def buy(self, code, price, amount):
|
|
||||||
return self.trader.buy(code, price, amount)
|
|
||||||
|
|
||||||
def sell(self, code, price, amount):
|
|
||||||
return self.trader.sell(code, price, amount)
|
|
||||||
|
|
||||||
def cancel(self, entrust_no):
|
|
||||||
return self.trader.cancel_entrust(entrust_no)
|
|
||||||
|
|
||||||
|
|
||||||
if __name__ == "__main__":
|
|
||||||
# 创建RealTrader实例
|
|
||||||
trader = RealTrader()
|
|
||||||
|
|
||||||
try:
|
|
||||||
# 测试登录
|
|
||||||
print("正在登录...")
|
|
||||||
trader.login()
|
|
||||||
print("登录成功!")
|
|
||||||
|
|
||||||
# 获取账户余额测试
|
|
||||||
balance = trader.get_balance()
|
|
||||||
print("账户余额信息:", balance)
|
|
||||||
|
|
||||||
# 获取持仓信息
|
|
||||||
positions = trader.get_positions()
|
|
||||||
print("持仓信息:", positions)
|
|
||||||
|
|
||||||
# 获取当日成交
|
|
||||||
today_trades = trader.get_today_trades()
|
|
||||||
print("当日成交:", today_trades)
|
|
||||||
|
|
||||||
# 获取当日委托
|
|
||||||
today_entrust = trader.get_today_entrust()
|
|
||||||
print("当日委托:", today_entrust)
|
|
||||||
|
|
||||||
# 刷新数据
|
|
||||||
trader.refresh()
|
|
||||||
print("数据已刷新")
|
|
||||||
|
|
||||||
except Exception as e:
|
|
||||||
print("发生错误:", str(e))
|
|
||||||
# finally:
|
|
||||||
# # 确保退出登录
|
|
||||||
# try:
|
|
||||||
# trader.logout()
|
|
||||||
# print("已安全退出!")
|
|
||||||
# except:
|
|
||||||
# pass
|
|
@ -1,7 +1,7 @@
|
|||||||
import schedule
|
import schedule
|
||||||
import threading
|
import threading
|
||||||
import time
|
import time
|
||||||
from real_trader import RealTrader
|
from xt_trader import XtTrader
|
||||||
from flask import Flask, request, abort, jsonify
|
from flask import Flask, request, abort, jsonify
|
||||||
from flask_limiter import Limiter
|
from flask_limiter import Limiter
|
||||||
from flask_limiter.util import get_remote_address
|
from flask_limiter.util import get_remote_address
|
||||||
@ -69,7 +69,7 @@ def run_pending_tasks():
|
|||||||
|
|
||||||
# Run the task scheduler in a new thread
|
# Run the task scheduler in a new thread
|
||||||
threading.Thread(target=run_pending_tasks).start()
|
threading.Thread(target=run_pending_tasks).start()
|
||||||
trader = RealTrader()
|
trader = XtTrader()
|
||||||
trader.login()
|
trader.login()
|
||||||
|
|
||||||
|
|
||||||
@ -234,21 +234,6 @@ def get_today_entrust():
|
|||||||
abort(500, description="Internal server error")
|
abort(500, description="Internal server error")
|
||||||
|
|
||||||
|
|
||||||
@app.route("/yu/refresh", methods=["GET"])
|
|
||||||
def refresh():
|
|
||||||
"""Refresh the account."""
|
|
||||||
logger.info("Received refresh request")
|
|
||||||
try:
|
|
||||||
trader.refresh()
|
|
||||||
logger.info("Account data refreshed successfully")
|
|
||||||
|
|
||||||
response = {"success": True, "data": "Account data refreshed successfully."}
|
|
||||||
return jsonify(response), 200
|
|
||||||
except Exception as e:
|
|
||||||
logger.error(f"Error processing refresh request: {str(e)}")
|
|
||||||
abort(500, description="Internal server error")
|
|
||||||
|
|
||||||
|
|
||||||
if __name__ == "__main__":
|
if __name__ == "__main__":
|
||||||
logger.info(f"Server starting on {Config.HOST}:{Config.PORT}")
|
logger.info(f"Server starting on {Config.HOST}:{Config.PORT}")
|
||||||
app.run(debug=Config.DEBUG, host=Config.HOST, port=Config.PORT)
|
app.run(debug=Config.DEBUG, host=Config.HOST, port=Config.PORT)
|
||||||
|
179
src/xt_trader.py
Normal file
179
src/xt_trader.py
Normal file
@ -0,0 +1,179 @@
|
|||||||
|
import os
|
||||||
|
import random
|
||||||
|
import logging
|
||||||
|
from logging.handlers import TimedRotatingFileHandler
|
||||||
|
from config import Config
|
||||||
|
from xtquant.xttrader import XtQuantTrader
|
||||||
|
from xtquant.xttype import StockAccount
|
||||||
|
from xtquant import xtconstant
|
||||||
|
|
||||||
|
# 日志配置
|
||||||
|
LOG_DIR = "log"
|
||||||
|
if not os.path.exists(LOG_DIR):
|
||||||
|
os.makedirs(LOG_DIR)
|
||||||
|
log_path = os.path.join(LOG_DIR, "%Y-%m-%d.log")
|
||||||
|
logger = logging.getLogger("xt_trader")
|
||||||
|
logger.setLevel(logging.INFO)
|
||||||
|
handler = TimedRotatingFileHandler(
|
||||||
|
os.path.join(LOG_DIR, "xt_trader.log"), when="midnight", interval=1, backupCount=7, encoding="utf-8"
|
||||||
|
)
|
||||||
|
handler.suffix = "%Y-%m-%d"
|
||||||
|
formatter = logging.Formatter('%(asctime)s - %(levelname)s - %(message)s')
|
||||||
|
handler.setFormatter(formatter)
|
||||||
|
if not logger.handlers:
|
||||||
|
logger.addHandler(handler)
|
||||||
|
|
||||||
|
class MyXtQuantTraderCallback:
|
||||||
|
def on_connected(self):
|
||||||
|
logger.info("连接成功")
|
||||||
|
def on_disconnected(self):
|
||||||
|
logger.warning("连接断开")
|
||||||
|
def on_account_status(self, status):
|
||||||
|
logger.info(f"账号状态: {status.account_id} {status.status}")
|
||||||
|
def on_stock_asset(self, asset):
|
||||||
|
logger.info(f"资金变动: {asset.account_id} {asset.cash} {asset.total_asset}")
|
||||||
|
def on_stock_order(self, order):
|
||||||
|
logger.info(f"委托回报: {order.stock_code} {order.order_status} {order.order_sysid}")
|
||||||
|
def on_stock_trade(self, trade):
|
||||||
|
logger.info(f"成交变动: {trade.account_id} {trade.stock_code} {trade.order_id}")
|
||||||
|
def on_stock_position(self, position):
|
||||||
|
logger.info(f"持仓变动: {position.stock_code} {position.volume}")
|
||||||
|
def on_order_error(self, order_error):
|
||||||
|
logger.error(f"委托失败: {order_error.order_id} {order_error.error_id} {order_error.error_msg}")
|
||||||
|
def on_cancel_error(self, cancel_error):
|
||||||
|
logger.error(f"撤单失败: {cancel_error.order_id} {cancel_error.error_id} {cancel_error.error_msg}")
|
||||||
|
def on_order_stock_async_response(self, response):
|
||||||
|
logger.info(f"异步下单反馈: {response.order_id}")
|
||||||
|
def on_cancel_order_stock_async_response(self, response):
|
||||||
|
logger.info(f"异步撤单反馈: {response.order_id}")
|
||||||
|
def on_smt_appointment_async_response(self, response):
|
||||||
|
logger.info(f"约券异步反馈: {response.seq}")
|
||||||
|
|
||||||
|
class XtTrader:
|
||||||
|
def __init__(self):
|
||||||
|
self._ACCOUNT = Config.XT_ACCOUNT
|
||||||
|
self._PATH = Config.XT_PATH
|
||||||
|
self._SESSION_ID = random.randint(100000, 99999999)
|
||||||
|
self._account_type = os.environ.get("XT_ACCOUNT_TYPE", "STOCK")
|
||||||
|
self._strategy_name = os.environ.get("XT_STRATEGY_NAME", "xt_strategy")
|
||||||
|
self._remark = os.environ.get("XT_REMARK", "remark")
|
||||||
|
self._callback = MyXtQuantTraderCallback()
|
||||||
|
self.xt_trader = XtQuantTrader(self._PATH, self._SESSION_ID)
|
||||||
|
self.account = StockAccount(self._ACCOUNT, self._account_type)
|
||||||
|
self.xt_trader.register_callback(self._callback)
|
||||||
|
self.started = False
|
||||||
|
self.connected = False
|
||||||
|
self.subscribed = False
|
||||||
|
|
||||||
|
def login(self):
|
||||||
|
if not self.started:
|
||||||
|
self.xt_trader.start()
|
||||||
|
self.started = True
|
||||||
|
if not self.connected:
|
||||||
|
result = self.xt_trader.connect()
|
||||||
|
self.connected = (result == 0)
|
||||||
|
if not self.subscribed:
|
||||||
|
result = self.xt_trader.subscribe(self.account)
|
||||||
|
self.subscribed = (result == 0)
|
||||||
|
return self.connected and self.subscribed
|
||||||
|
|
||||||
|
def logout(self):
|
||||||
|
if self.started:
|
||||||
|
self.xt_trader.stop()
|
||||||
|
self.started = False
|
||||||
|
self.connected = False
|
||||||
|
self.subscribed = False
|
||||||
|
|
||||||
|
def get_balance(self):
|
||||||
|
asset = self.xt_trader.query_stock_asset(self.account)
|
||||||
|
if asset:
|
||||||
|
return {
|
||||||
|
"account_id": asset.account_id,
|
||||||
|
"cash": asset.cash,
|
||||||
|
"frozen_cash": asset.frozen_cash,
|
||||||
|
"market_value": asset.market_value,
|
||||||
|
"total_asset": asset.total_asset
|
||||||
|
}
|
||||||
|
return None
|
||||||
|
|
||||||
|
def get_positions(self):
|
||||||
|
positions = self.xt_trader.query_stock_positions(self.account)
|
||||||
|
if positions:
|
||||||
|
return [
|
||||||
|
{
|
||||||
|
"account_id": p.account_id,
|
||||||
|
"stock_code": p.stock_code,
|
||||||
|
"volume": p.volume,
|
||||||
|
"can_use_volume": p.can_use_volume,
|
||||||
|
"open_price": p.open_price,
|
||||||
|
"market_value": p.market_value,
|
||||||
|
"frozen_volume": p.frozen_volume,
|
||||||
|
"on_road_volume": p.on_road_volume,
|
||||||
|
"yesterday_volume": p.yesterday_volume,
|
||||||
|
"avg_price": p.avg_price
|
||||||
|
} for p in positions
|
||||||
|
]
|
||||||
|
return []
|
||||||
|
|
||||||
|
def get_today_trades(self):
|
||||||
|
trades = self.xt_trader.query_stock_trades(self.account)
|
||||||
|
if trades:
|
||||||
|
return [
|
||||||
|
{
|
||||||
|
"account_id": t.account_id,
|
||||||
|
"stock_code": t.stock_code,
|
||||||
|
"order_id": t.order_id,
|
||||||
|
"traded_id": t.traded_id,
|
||||||
|
"traded_time": t.traded_time,
|
||||||
|
"traded_price": t.traded_price,
|
||||||
|
"traded_volume": t.traded_volume,
|
||||||
|
"traded_amount": t.traded_amount
|
||||||
|
} for t in trades
|
||||||
|
]
|
||||||
|
return []
|
||||||
|
|
||||||
|
def get_today_entrust(self):
|
||||||
|
orders = self.xt_trader.query_stock_orders(self.account)
|
||||||
|
if orders:
|
||||||
|
return [
|
||||||
|
{
|
||||||
|
"account_id": o.account_id,
|
||||||
|
"stock_code": o.stock_code,
|
||||||
|
"order_id": o.order_id,
|
||||||
|
"order_time": o.order_time,
|
||||||
|
"order_type": o.order_type,
|
||||||
|
"order_volume": o.order_volume,
|
||||||
|
"price_type": o.price_type,
|
||||||
|
"price": o.price,
|
||||||
|
"traded_volume": o.traded_volume,
|
||||||
|
"traded_price": o.traded_price,
|
||||||
|
"order_status": o.order_status,
|
||||||
|
"status_msg": o.status_msg
|
||||||
|
} for o in orders
|
||||||
|
]
|
||||||
|
return []
|
||||||
|
|
||||||
|
def buy(self, code, price, amount):
|
||||||
|
order_id = self.xt_trader.order_stock(
|
||||||
|
self.account, code, xtconstant.STOCK_BUY, amount, xtconstant.FIX_PRICE, price, self._strategy_name, self._remark
|
||||||
|
)
|
||||||
|
return {"order_id": order_id}
|
||||||
|
|
||||||
|
def sell(self, code, price, amount):
|
||||||
|
order_id = self.xt_trader.order_stock(
|
||||||
|
self.account, code, xtconstant.STOCK_SELL, amount, xtconstant.FIX_PRICE, price, self._strategy_name, self._remark
|
||||||
|
)
|
||||||
|
return {"order_id": order_id}
|
||||||
|
|
||||||
|
def cancel(self, entrust_no):
|
||||||
|
# 撤单接口需要订单编号
|
||||||
|
result = self.xt_trader.cancel_order_stock(self.account, int(entrust_no))
|
||||||
|
return {"cancel_result": result}
|
||||||
|
|
||||||
|
if __name__ == "__main__":
|
||||||
|
trader = XtTrader()
|
||||||
|
trader.login()
|
||||||
|
logger.info(f"账户余额: {trader.get_balance()}")
|
||||||
|
logger.info(f"持仓: {trader.get_positions()}")
|
||||||
|
logger.info(f"当日成交: {trader.get_today_trades()}")
|
||||||
|
logger.info(f"当日委托: {trader.get_today_entrust()}")
|
27
src/xtquant/__init__.py
Normal file
27
src/xtquant/__init__.py
Normal file
@ -0,0 +1,27 @@
|
|||||||
|
#coding: utf-8
|
||||||
|
|
||||||
|
__version__ = "xtquant"
|
||||||
|
|
||||||
|
|
||||||
|
def check_for_update(package_name):
|
||||||
|
import requests
|
||||||
|
from pkg_resources import get_distribution
|
||||||
|
# 获取当前安装的版本
|
||||||
|
current_version = get_distribution(package_name).version
|
||||||
|
# 查询PyPI的API获取最新版本信息
|
||||||
|
response = requests.get(f"https://pypi.org/pypi/{package_name}/json", timeout = 10)
|
||||||
|
if response.status_code == 200:
|
||||||
|
latest_version = response.json()['info']['version']
|
||||||
|
if current_version != latest_version:
|
||||||
|
print(f"xtquant{latest_version}已经发布,前往 http://dict.thinktrader.net/nativeApi/download_xtquant.html 查看更新说明\n")
|
||||||
|
else:
|
||||||
|
print("xtquant文档地址:http://dict.thinktrader.net/nativeApi/start_now.html")
|
||||||
|
else:
|
||||||
|
pass
|
||||||
|
|
||||||
|
|
||||||
|
try:
|
||||||
|
check_for_update("xtquant")
|
||||||
|
except:
|
||||||
|
pass
|
||||||
|
|
185
src/xtquant/config/MarketTime.ini
Normal file
185
src/xtquant/config/MarketTime.ini
Normal file
@ -0,0 +1,185 @@
|
|||||||
|
[IF]
|
||||||
|
tradetime=093000,113000,130000,150000
|
||||||
|
opentime=093000,130000
|
||||||
|
closetime=113000,150000
|
||||||
|
nightopentime=
|
||||||
|
nightclosetime=
|
||||||
|
indexopentime=
|
||||||
|
indexclosetime=
|
||||||
|
|
||||||
|
[SH]
|
||||||
|
tradetime=093000,113000,130000,150000
|
||||||
|
opentime=093000,130000
|
||||||
|
closetime=113000,150000
|
||||||
|
nightopentime=
|
||||||
|
nightclosetime=
|
||||||
|
indexopentime=
|
||||||
|
indexclosetime=
|
||||||
|
bondsRepuropentime=093000,130000
|
||||||
|
bondsRepuroclosetime=113000,153000
|
||||||
|
|
||||||
|
[SZ]
|
||||||
|
tradetime=093000,113000,130000,150000
|
||||||
|
opentime=093000,130000
|
||||||
|
closetime=113000,150000
|
||||||
|
nightopentime=
|
||||||
|
nightclosetime=
|
||||||
|
indexopentime=
|
||||||
|
indexclosetime=
|
||||||
|
bondsRepuropentime=093000,130000
|
||||||
|
bondsRepuroclosetime=113000,153000
|
||||||
|
|
||||||
|
[SHO]
|
||||||
|
tradetime=093000,113000,130000,150000
|
||||||
|
opentime=093000,130000
|
||||||
|
closetime=113000,150000
|
||||||
|
nightopentime=
|
||||||
|
nightclosetime=
|
||||||
|
indexopentime=
|
||||||
|
indexclosetime=
|
||||||
|
|
||||||
|
[SZO]
|
||||||
|
tradetime=093000,113000,130000,150000
|
||||||
|
opentime=093000,130000
|
||||||
|
closetime=113000,150000
|
||||||
|
nightopentime=
|
||||||
|
nightclosetime=
|
||||||
|
indexopentime=
|
||||||
|
indexclosetime=
|
||||||
|
|
||||||
|
[HGT]
|
||||||
|
tradetime=093000,120000,130000,161000
|
||||||
|
opentime=093000,130000
|
||||||
|
closetime=120000,161000
|
||||||
|
nightopentime=
|
||||||
|
nightclosetime=
|
||||||
|
indexopentime=
|
||||||
|
indexclosetime=
|
||||||
|
|
||||||
|
[SGT]
|
||||||
|
tradetime=093000,120000,130000,161000
|
||||||
|
opentime=093000,130000
|
||||||
|
closetime=120000,161000
|
||||||
|
nightopentime=
|
||||||
|
nightclosetime=
|
||||||
|
indexopentime=
|
||||||
|
indexclosetime=
|
||||||
|
|
||||||
|
[NEEQ]
|
||||||
|
tradetime=093000,113000,130000,150000
|
||||||
|
opentime=093000,130000
|
||||||
|
closetime=113000,150000
|
||||||
|
nightopentime=
|
||||||
|
nightclosetime=
|
||||||
|
indexopentime=
|
||||||
|
indexclosetime=
|
||||||
|
|
||||||
|
[SF]
|
||||||
|
tradetime=-30000,23000,90000,101500,103000,113000,133000,150000
|
||||||
|
opentime=090000,103000,133000
|
||||||
|
closetime=101500,113000,150000
|
||||||
|
nightopentime=210000,000000
|
||||||
|
nightclosetime=235959,023000
|
||||||
|
indexopentime=
|
||||||
|
indexclosetime=
|
||||||
|
|
||||||
|
[DF]
|
||||||
|
tradetime=-30000,-10000,90000,101500,103000,113000,133000,150000
|
||||||
|
opentime=090000,103000,133000
|
||||||
|
closetime=101500,113000,150000
|
||||||
|
nightopentime=210000
|
||||||
|
nightclosetime=230000
|
||||||
|
indexopentime=
|
||||||
|
indexclosetime=
|
||||||
|
|
||||||
|
[ZF]
|
||||||
|
tradetime=-30000,-10000,90000,101500,103000,113000,133000,150000
|
||||||
|
opentime=090000,103000,133000
|
||||||
|
closetime=101500,113000,150000
|
||||||
|
nightopentime=210000
|
||||||
|
nightclosetime=230000
|
||||||
|
indexopentime=
|
||||||
|
indexclosetime=
|
||||||
|
|
||||||
|
[YSWP]
|
||||||
|
tradetime=-76000,23000,90000,113000,130000,150000
|
||||||
|
opentime=090000
|
||||||
|
closetime=155500
|
||||||
|
nightopentime=164000,000000
|
||||||
|
nightclosetime=235959,020000
|
||||||
|
indexopentime=093000,130000
|
||||||
|
indexclosetime=113000,150000
|
||||||
|
|
||||||
|
[INE]
|
||||||
|
tradetime=-30000,23000,90000,101500,103000,113000,133000,150000
|
||||||
|
opentime=090000,103000,133000
|
||||||
|
closetime=101500,113000,150000
|
||||||
|
nightopentime=210000,000000
|
||||||
|
nightclosetime=235959,023000
|
||||||
|
indexopentime=
|
||||||
|
indexclosetime=
|
||||||
|
|
||||||
|
[SI]
|
||||||
|
tradetime=093000,113000,130000,150000
|
||||||
|
opentime=093000,130000
|
||||||
|
closetime=113000,150000
|
||||||
|
nightopentime=
|
||||||
|
nightclosetime=
|
||||||
|
indexopentime=
|
||||||
|
indexclosetime=
|
||||||
|
|
||||||
|
[BKZS]
|
||||||
|
tradetime=093000,113000,130000,150000
|
||||||
|
opentime=093000,130000
|
||||||
|
closetime=113000,150000
|
||||||
|
nightopentime=
|
||||||
|
nightclosetime=
|
||||||
|
indexopentime=
|
||||||
|
indexclosetime=
|
||||||
|
futureindexopentime=090000,103000,133000
|
||||||
|
futureindexclosetime=101500,113000,150000
|
||||||
|
|
||||||
|
[HK]
|
||||||
|
tradetime=093000,120000,130000,160000
|
||||||
|
opentime=093000,130000
|
||||||
|
closetime=120000,160000
|
||||||
|
nightopentime=
|
||||||
|
nightclosetime=
|
||||||
|
indexopentime=
|
||||||
|
indexclosetime=
|
||||||
|
|
||||||
|
[]
|
||||||
|
tradetime=000000,240000
|
||||||
|
opentime=000000
|
||||||
|
closetime=235959
|
||||||
|
nightopentime=
|
||||||
|
nightclosetime=
|
||||||
|
indexopentime=
|
||||||
|
indexclosetime=
|
||||||
|
|
||||||
|
[WP]
|
||||||
|
tradetime=060000,290000
|
||||||
|
opentime=060000
|
||||||
|
closetime=290000
|
||||||
|
nightopentime=
|
||||||
|
nightclosetime=
|
||||||
|
indexopentime=
|
||||||
|
indexclosetime=
|
||||||
|
|
||||||
|
[GF]
|
||||||
|
tradetime=090000,101500,103000,113000,133000,150000
|
||||||
|
opentime=090000,103000,133000
|
||||||
|
closetime=101500,113000,150000
|
||||||
|
nightopentime=
|
||||||
|
nightclosetime=
|
||||||
|
indexopentime=
|
||||||
|
indexclosetime=
|
||||||
|
|
||||||
|
[BJ]
|
||||||
|
tradetime=093000,113000,130000,150000
|
||||||
|
opentime=093000,130000
|
||||||
|
closetime=113000,150000
|
||||||
|
nightopentime=
|
||||||
|
nightclosetime=
|
||||||
|
indexopentime=
|
||||||
|
indexclosetime=
|
7
src/xtquant/config/StockInfo.lua
Normal file
7
src/xtquant/config/StockInfo.lua
Normal file
@ -0,0 +1,7 @@
|
|||||||
|
--[[--------------------------------------------------------------
|
||||||
|
股票/期货代码的基本信息
|
||||||
|
cnname : 股票/期货中文名称
|
||||||
|
----------------------------------------------------------------]]
|
||||||
|
StockBasicInfo = {
|
||||||
|
{label = "if1209", cnname = "股指1209", blocks = {"某板块"},},
|
||||||
|
}
|
7
src/xtquant/config/captial_structure_1.ini
Normal file
7
src/xtquant/config/captial_structure_1.ini
Normal file
@ -0,0 +1,7 @@
|
|||||||
|
股本表|CAPITALSTRUCTURE
|
||||||
|
变动日期|m_timetag|changeDate
|
||||||
|
公告日期|m_anntime|declareDate
|
||||||
|
总股本|total_capital|totalCapital
|
||||||
|
已上市流通A股|circulating_capital|floatCapitalA
|
||||||
|
其他流通股份|restrict_circulating_capital|otherCapital
|
||||||
|
自由流通股份|freeFloatCapital|freeFloatCapital
|
109
src/xtquant/config/cashflow_new_1.ini
Normal file
109
src/xtquant/config/cashflow_new_1.ini
Normal file
@ -0,0 +1,109 @@
|
|||||||
|
现金流量表|ASHARECASHFLOW
|
||||||
|
披露日期|m_anntime|m_annTime
|
||||||
|
截止日期|m_timetag|m_endTime
|
||||||
|
收到原保险合同保费取得的现金|cash_received_ori_ins_contract_pre|m_cashReceivedOriInsContractPre
|
||||||
|
收到再保险业务现金净额|net_cash_received_rei_ope|m_netCashReceivedReiOpe
|
||||||
|
保户储金及投资款净增加额|net_increase_insured_funds|m_netIncreaseInsuredFunds
|
||||||
|
处置交易性金融资产净增加额|net_increase_in_disposal|m_netIncreaseInDisposal
|
||||||
|
收取利息、手续费及佣金的现金|cash_for_interest|m_cashForInterest
|
||||||
|
回购业务资金净增加额|net_increase_in_repurchase_funds|m_netIncreaseInRepurchaseFunds
|
||||||
|
支付原保险合同赔付款项的现金|cash_for_payment_original_insurance|m_cashForPaymentOriginalInsurance
|
||||||
|
支付保单红利的现金|cash_payment_policy_dividends|m_cashPaymentPolicyDividends
|
||||||
|
处置子公司及其他收到的现金|disposal_other_business_units|m_disposalOtherBusinessUnits
|
||||||
|
减少质押和定期存款所收到的现金|cash_received_from_pledges|m_cashReceivedFromPledges
|
||||||
|
投资所支付的现金|cash_paid_for_investments|m_cashPaidForInvestments
|
||||||
|
质押贷款净增加额|net_increase_in_pledged_loans|m_netIncreaseInPledgedLoans
|
||||||
|
取得子公司及其他营业单位支付的现金净额|cash_paid_by_subsidiaries|m_cashPaidBySubsidiaries
|
||||||
|
增加质押和定期存款所支付的现金|increase_in_cash_paid|m_increaseInCashPaid
|
||||||
|
其中子公司吸收现金|cass_received_sub_abs|m_cassReceivedSubAbs
|
||||||
|
其中:子公司支付给少数股东的股利、利润|cass_received_sub_investments|m_cassReceivedSubInvestments
|
||||||
|
少数股东损益|minority_shareholder_profit_loss|m_minorityShareholderProfitLoss
|
||||||
|
未确认的投资损失|unrecognized_investment_losses|m_unrecognizedInvestmentLosses
|
||||||
|
递延收益增加(减:减少)|ncrease_deferred_income|m_ncreaseDeferredIncome
|
||||||
|
预计负债|projected_liability|m_projectedLiability
|
||||||
|
经营性应付项目的增加|increase_operational_payables|m_increaseOperationalPayables
|
||||||
|
已完工尚未结算款的减少(减:增加)|reduction_outstanding_amounts_less|m_reductionOutstandingAmountsLess
|
||||||
|
已结算尚未完工款的增加(减:减少)|reduction_outstanding_amounts_more|m_reductionOutstandingAmountsMore
|
||||||
|
销售商品、提供劳务收到的现金|goods_sale_and_service_render_cash|m_goodsSaleAndServiceRenderCash
|
||||||
|
客户存款和同业存放款项净增加额|net_incr_dep_cob|m_netIncrDepCob
|
||||||
|
向中央银行借款净增加额(万元|net_incr_loans_central_bank|m_netIncrLoansCentralBank
|
||||||
|
向其他金融机构拆入资金净增加额|net_incr_fund_borr_ofi|m_netIncrFundBorrOfi
|
||||||
|
拆入资金净增加额|net_incr_fund_borr_ofi|m_netIncrFundBorrOfi
|
||||||
|
收到的税费与返还|tax_levy_refund|m_taxLevyRefund
|
||||||
|
投资支付的现金|cash_paid_invest|m_cashPaidInvest
|
||||||
|
收到的其他与经营活动有关的现金|other_cash_recp_ral_oper_act|m_otherCashRecpRalOperAct
|
||||||
|
经营活动现金流入小计|stot_cash_inflows_oper_act|m_stotCashInflowsOperAct
|
||||||
|
购买商品、接受劳务支付的现金|goods_and_services_cash_paid|m_goodsAndServicesCashPaid
|
||||||
|
客户贷款及垫款净增加额|net_incr_clients_loan_adv|m_netIncrClientsLoanAdv
|
||||||
|
存放中央银行和同业款项净增加额|net_incr_dep_cbob|m_netIncrDepCbob
|
||||||
|
支付利息、手续费及佣金的现金|handling_chrg_paid|m_handlingChrgPaid
|
||||||
|
支付给职工以及为职工支付的现金|cash_pay_beh_empl|m_cashPayBehEmpl
|
||||||
|
支付的各项税费|pay_all_typ_tax|m_payAllTypTax
|
||||||
|
支付其他与经营活动有关的现金|other_cash_pay_ral_oper_act|m_otherCashPayRalOperAct
|
||||||
|
经营活动现金流出小计|stot_cash_outflows_oper_act|m_stotCashOutflowsOperAct
|
||||||
|
经营活动产生的现金流量净额|net_cash_flows_oper_act|m_netCashFlowsOperAct
|
||||||
|
收回投资所收到的现金|cash_recp_disp_withdrwl_invest|m_cashRecpDispWithdrwlInvest
|
||||||
|
取得投资收益所收到的现金|cash_recp_return_invest|m_cashRecpReturnInvest
|
||||||
|
处置固定资产、无形资产和其他长期投资收到的现金|net_cash_recp_disp_fiolta|m_netCashecpDispFiolta
|
||||||
|
收到的其他与投资活动有关的现金|other_cash_recp_ral_inv_act|m_otherCashRecpRalInvAct
|
||||||
|
投资活动现金流入小计|stot_cash_inflows_inv_act|m_stotCashInflowsInvAct
|
||||||
|
购建固定资产、无形资产和其他长期投资支付的现金|cash_pay_acq_const_fiolta|m_cashPayAcqConstFiolta
|
||||||
|
投资活动现金流出小计|stot_cash_outflows_inv_act|m_stotCashOutflowsInvAct
|
||||||
|
投资活动产生的现金流量净额|net_cash_flows_inv_acm_netCashFlowsInvAct|m_netCashFlowsInvAct
|
||||||
|
吸收投资收到的现金|cash_recp_cap_contrib|m_cashRecpCapContrib
|
||||||
|
取得借款收到的现金|cash_recp_borrow|m_cashRecpBorrow
|
||||||
|
发行债券收到的现金|proc_issue_bonds|m_procIssueBonds
|
||||||
|
收到其他与筹资活动有关的现金|other_cash_recp_ral_fnc_act|m_otherCashRecpRalFncAct
|
||||||
|
筹资活动现金流入小计|stot_cash_inflows_fnc_act|m_stotCashInflowsFncAct
|
||||||
|
偿还债务支付现金|cash_prepay_amt_borr|m_cashPrepayAmtBorr
|
||||||
|
分配股利、利润或偿付利息支付的现金|cash_pay_dist_dpcp_int_exp|m_cashPayDistDpcpIntExp
|
||||||
|
支付其他与筹资的现金|other_cash_pay_ral_fnc_act|m_otherCashPayRalFncAct
|
||||||
|
筹资活动现金流出小计|stot_cash_outflows_fnc_act|m_stotCashOutflowsFncAct
|
||||||
|
筹资活动产生的现金流量净额|net_cash_flows_fnc_act|m_netCashFlowsFncAct
|
||||||
|
汇率变动对现金的影响|eff_fx_flu_cash|m_effFxFluCash
|
||||||
|
现金及现金等价物净增加额|net_incr_cash_cash_equ|m_netIncrCashCashEqu
|
||||||
|
期初现金及现金等价物余额|cash_cash_equ_beg_period|m_cashCashEquBegPeriod
|
||||||
|
期末现金及现金等价物余额|cash_cash_equ_end_period|m_cashCashEquEndPeriod
|
||||||
|
净利润|net_profit|m_netProfit
|
||||||
|
资产减值准备|plus_prov_depr_assets|m_plusProvDeprAssets
|
||||||
|
固定资产折旧、油气资产折耗、生产性物资折旧|depr_fa_coga_dpba|m_deprFaCogaDpba
|
||||||
|
无形资产摊销|amort_intang_assets|m_amortIntangAssets
|
||||||
|
长期待摊费用摊销|amort_lt_deferred_exp|m_amortLtDeferredExp
|
||||||
|
待摊费用的减少|decr_deferred_exp|m_decrDeferredExp
|
||||||
|
预提费用的增加|incr_acc_exp|m_incrAccExp
|
||||||
|
处置固定资产、无形资产和其他长期资产的损失|loss_disp_fiolta|m_lossDispFiolta
|
||||||
|
固定资产报废损失|loss_scr_fa|m_lossScrFa
|
||||||
|
公允价值变动损失|loss_fv_chg|m_lossFvChg
|
||||||
|
财务费用|fin_exp|m_finExp
|
||||||
|
投资损失|invest_loss|m_investLoss
|
||||||
|
递延所得税资产减少|decr_deferred_inc_tax_assets|m_decrDeferredIncTaxAssets
|
||||||
|
递延所得税负债增加|incr_deferred_inc_tax_liab|m_incrDeferredIncTaxLiab
|
||||||
|
存货的减少|decr_inventories|m_decrInventories
|
||||||
|
经营性应收项目的减少|decr_oper_payable|m_decrOperPayable
|
||||||
|
其他|others|m_others
|
||||||
|
经营活动产生现金流量净额|im_net_cash_flows_oper_act|m_imNetCashFlowsOperAct
|
||||||
|
债务转为资本|conv_debt_into_cap|m_convDebtIntoCap
|
||||||
|
一年内到期的可转换公司债券|conv_corp_bonds_due_within_1y|m_convCorpBondsDueWithin1y
|
||||||
|
融资租入固定资产|fa_fnc_leases|m_faFncLeases
|
||||||
|
现金的期末余额|end_bal_cash|m_endBalCash
|
||||||
|
现金的期初余额|less_beg_bal_cash|m_lessBegBalCash
|
||||||
|
现金等价物的期末余额|plus_end_bal_cash_equ|m_plusEndBalCashEqu
|
||||||
|
现金等价物的期初余额|less_beg_bal_cash_equ|m_lessBegBalCashEqu
|
||||||
|
现金及现金等价物的净增加额|im_net_incr_cash_cash_equ|m_imNetIncrCashCashEqu
|
||||||
|
|
||||||
|
销售商品、提供劳务收到的现金|m_cashSellingProvidingServices
|
||||||
|
拆出资金净减少额|m_netDecreaseUnwindingFunds
|
||||||
|
买入返售款项净减少额|m_netReductionPurchaseRebates
|
||||||
|
存放中央银行和同业款项净增加额|m_netIncreaseDepositsBanks
|
||||||
|
支付再保业务现金净额|m_netCashReinsuranceBusiness
|
||||||
|
保户储金及投资款净减少额|m_netReductionDeposInveFunds
|
||||||
|
拆出资金净增加额|m_netIncreaseUnwindingFunds
|
||||||
|
拆入资金净减少额|m_netReductionAmountBorrowedFunds
|
||||||
|
卖出回购款项净减少额|m_netReductionSaleRepurchaseProceeds
|
||||||
|
投资支付的现金|m_investmentPaidInCash
|
||||||
|
支付其他与投资活动有关的现金|m_paymentOtherCashRelated
|
||||||
|
投资活动产生的现金流出小计|m_cashOutFlowsInvesactivities
|
||||||
|
吸收权益性投资收到的现金|m_absorbCashEquityInv
|
||||||
|
其他对现金的影响|m_otherImpactsOnCash
|
||||||
|
经营性应收项目的增加|m_addOperatingReceivableItems
|
||||||
|
|
446
src/xtquant/config/config.lua
Normal file
446
src/xtquant/config/config.lua
Normal file
@ -0,0 +1,446 @@
|
|||||||
|
local __config_lua_path = debug.getinfo(1, "S").source:sub(2)
|
||||||
|
local __config_lua_dir = __config_lua_path:match("(.-)[\\/][^\\/]-$") .. "/"
|
||||||
|
local function testDofile(path)
|
||||||
|
local abs_path = __config_lua_dir .. path
|
||||||
|
local file = io.open(abs_path, "r")
|
||||||
|
if file ~= nil then
|
||||||
|
dofile(abs_path)
|
||||||
|
return true
|
||||||
|
else
|
||||||
|
return false
|
||||||
|
end
|
||||||
|
end
|
||||||
|
|
||||||
|
g_is_address_from_daemon = 0
|
||||||
|
g_is_server = true
|
||||||
|
g_is_report_logcenter = false
|
||||||
|
g_system_tag = ""
|
||||||
|
g_is_topology_logcenter = false
|
||||||
|
|
||||||
|
g_ftCategory = {
|
||||||
|
["2101"]="ag",
|
||||||
|
["2102"]="al",
|
||||||
|
["2103"]="au",
|
||||||
|
["2104"]="bu",
|
||||||
|
["2105"]="cu",
|
||||||
|
["2106"]="fu",
|
||||||
|
["2107"]="hc",
|
||||||
|
["2108"]="pb",
|
||||||
|
["2109"]="rb",
|
||||||
|
["2110"]="ru",
|
||||||
|
["2111"]="wr",
|
||||||
|
["2112"]="zn",
|
||||||
|
["2113"]="ni",
|
||||||
|
["2114"]="sn",
|
||||||
|
|
||||||
|
["2151"]="IF",
|
||||||
|
["2152"]="T",
|
||||||
|
["2153"]="TF",
|
||||||
|
["2154"]="IC",
|
||||||
|
["2155"]="IH",
|
||||||
|
|
||||||
|
["2201"]="SP a&a",
|
||||||
|
["2202"]="SP b&b",
|
||||||
|
["2203"]="SP bb&bb",
|
||||||
|
["2204"]="SP c&c",
|
||||||
|
["2205"]="SP cs&cs",
|
||||||
|
["2206"]="SP fb&fb",
|
||||||
|
["2207"]="SP i&i",
|
||||||
|
["2208"]="SP j&j",
|
||||||
|
["2209"]="SP jd&jd",
|
||||||
|
["2210"]="SP jm&jm",
|
||||||
|
["2211"]="SP l&l",
|
||||||
|
["2212"]="SP m&m",
|
||||||
|
["2213"]="SP p&p",
|
||||||
|
["2214"]="SP pp&pp",
|
||||||
|
["2215"]="SP v&v",
|
||||||
|
["2216"]="SP y&y",
|
||||||
|
["2217"]="SPC a&m",
|
||||||
|
["2218"]="SPC c&cs",
|
||||||
|
["2219"]="SPC fb&bb",
|
||||||
|
["2220"]="SPC i&j",
|
||||||
|
["2221"]="SPC i&jm",
|
||||||
|
["2222"]="SPC j&jm",
|
||||||
|
["2223"]="SPC l&pp",
|
||||||
|
["2224"]="SPC l&v",
|
||||||
|
["2225"]="SPC v&pp",
|
||||||
|
["2226"]="SPC y&p",
|
||||||
|
["2227"]="a",
|
||||||
|
["2228"]="b",
|
||||||
|
["2229"]="bb",
|
||||||
|
["2230"]="c",
|
||||||
|
["2231"]="cs",
|
||||||
|
["2232"]="fb",
|
||||||
|
["2233"]="i",
|
||||||
|
["2234"]="j",
|
||||||
|
["2235"]="jd",
|
||||||
|
["2236"]="jm",
|
||||||
|
["2237"]="l",
|
||||||
|
["2238"]="m",
|
||||||
|
["2239"]="p",
|
||||||
|
["2240"]="pp",
|
||||||
|
["2241"]="v",
|
||||||
|
["2242"]="y",
|
||||||
|
|
||||||
|
["2251"]="CF",
|
||||||
|
["2252"]="FG",
|
||||||
|
["2253"]="IPS SF&SM",
|
||||||
|
["2254"]="JR",
|
||||||
|
["2255"]="LR",
|
||||||
|
["2256"]="MA",
|
||||||
|
["2257"]="ME",
|
||||||
|
["2258"]="OI",
|
||||||
|
["2259"]="PM",
|
||||||
|
["2260"]="RI",
|
||||||
|
["2261"]="RM",
|
||||||
|
["2262"]="RS",
|
||||||
|
["2263"]="SF",
|
||||||
|
["2264"]="SM",
|
||||||
|
["2265"]="SPD CF&CF",
|
||||||
|
["2266"]="SPD FG&FG",
|
||||||
|
["2267"]="SPD JR&JR",
|
||||||
|
["2268"]="SPD LR&LR",
|
||||||
|
["2269"]="SPD MA&MA",
|
||||||
|
["2270"]="SPD ME&ME",
|
||||||
|
["2271"]="SPD OI&OI",
|
||||||
|
["2272"]="SPD PM&PM",
|
||||||
|
["2273"]="SPD RI&RI",
|
||||||
|
["2274"]="SPD RM&RM",
|
||||||
|
["2275"]="SPD RS&RS",
|
||||||
|
["2276"]="SPD SF&SF",
|
||||||
|
["2277"]="SPD SM&SM",
|
||||||
|
["2278"]="SPD SR&SR",
|
||||||
|
["2279"]="SPD TA&TA",
|
||||||
|
["2280"]="SPD TC&TC",
|
||||||
|
["2281"]="SPD WH&WH",
|
||||||
|
["2282"]="SR",
|
||||||
|
["2283"]="TA",
|
||||||
|
["2284"]="TC",
|
||||||
|
["2285"]="WH"
|
||||||
|
}
|
||||||
|
|
||||||
|
testDofile("../config/platform.lua")
|
||||||
|
testDofile("../config/serverEnv.lua")
|
||||||
|
if testDofile("../config/clientEnv.lua") or testDofile("../config/itsmClientEnv.lua") then g_is_server = false end
|
||||||
|
testDofile("../config/env.lua")
|
||||||
|
testDofile("../config/xtdaemon.lua")
|
||||||
|
testDofile("../config/clientEnv.lua")
|
||||||
|
testDofile("../config/itsmClientEnv.lua")
|
||||||
|
testDofile("../config/serverEnv.lua")
|
||||||
|
testDofile("../config/fairplaytables.lua")
|
||||||
|
testDofile("../config/configHelper.lua")
|
||||||
|
testDofile("../config/xtstocktype.lua")
|
||||||
|
|
||||||
|
function getFutureOrderLimits()
|
||||||
|
return table2json({content = g_future_order_limits})
|
||||||
|
end
|
||||||
|
|
||||||
|
function getFuturePlatforms()
|
||||||
|
return table2json({content = g_future_platforms})
|
||||||
|
end
|
||||||
|
|
||||||
|
function getStockPlatforms()
|
||||||
|
return table2json({content = g_stock_platforms})
|
||||||
|
end
|
||||||
|
|
||||||
|
function getCreditPlatforms()
|
||||||
|
return table2json({content = g_credit_platforms})
|
||||||
|
end
|
||||||
|
|
||||||
|
function getHGTPlatforms()
|
||||||
|
return table2json({content = g_hgt_platforms})
|
||||||
|
end
|
||||||
|
|
||||||
|
function getHGTQuotePlatforms()
|
||||||
|
return table2json({content = g_hgt_quote_platforms})
|
||||||
|
end
|
||||||
|
|
||||||
|
function getFutureQuotePlatforms()
|
||||||
|
return table2json({content = g_future_quote_platforms})
|
||||||
|
end
|
||||||
|
|
||||||
|
function getStockQuotePlatforms()
|
||||||
|
return table2json({content = g_stock_quote_platforms})
|
||||||
|
end
|
||||||
|
|
||||||
|
function getStockOptionPlatforms()
|
||||||
|
return table2json({content = g_stockoption_platforms})
|
||||||
|
end
|
||||||
|
|
||||||
|
function getStockOptionQuotePlatforms()
|
||||||
|
return table2json({content = g_stockoption_quote_platforms})
|
||||||
|
end
|
||||||
|
|
||||||
|
function getNew3BoardPlatforms()
|
||||||
|
return table2json({content = g_new3board_platforms})
|
||||||
|
end
|
||||||
|
|
||||||
|
function getNew3BoardQuotePlatforms()
|
||||||
|
return table2json({content = g_new3board_quote_platforms})
|
||||||
|
end
|
||||||
|
|
||||||
|
function getGoldPlatforms()
|
||||||
|
return table2json({content = g_gold_platforms})
|
||||||
|
end
|
||||||
|
|
||||||
|
function getGoldQuotePlatforms()
|
||||||
|
return table2json({content = g_gold_quote_platforms})
|
||||||
|
end
|
||||||
|
|
||||||
|
function getBanks()
|
||||||
|
return table2json({content = g_banks})
|
||||||
|
end
|
||||||
|
|
||||||
|
function getTTServiceGlobalConfig()
|
||||||
|
return table2json(g_ttservice_global_config)
|
||||||
|
end
|
||||||
|
|
||||||
|
function getMysqlConfig()
|
||||||
|
return table2json(g_mysql_config)
|
||||||
|
end
|
||||||
|
|
||||||
|
function getMysqlConfigWhiteListFlowControl()
|
||||||
|
return table2json(g_mysql_config_white_list_flow_control)
|
||||||
|
end
|
||||||
|
|
||||||
|
function getRabbitMqConfig()
|
||||||
|
return table2json(g_rabbitMq_config)
|
||||||
|
end
|
||||||
|
|
||||||
|
function getBatchOrderConfig()
|
||||||
|
return table2json(g_batchOrder_config)
|
||||||
|
end
|
||||||
|
|
||||||
|
function getPlatformInfo()
|
||||||
|
return getConfigByAppName("calcConfigEnv")
|
||||||
|
end
|
||||||
|
|
||||||
|
function getSystemTag()
|
||||||
|
return g_system_tag
|
||||||
|
end
|
||||||
|
|
||||||
|
-- 获取股票分类
|
||||||
|
function getXtStockType()
|
||||||
|
return toIni(g_stocktype_info)
|
||||||
|
end
|
||||||
|
|
||||||
|
function getBrokerAddressWithReInit(brokerType, platformId, brokerId, accountId, reInit)
|
||||||
|
if reInit then print("true") else print("false") end
|
||||||
|
local key = "xtbroker_" .. brokerType .. "_" .. platformId .. "_" ..brokerId
|
||||||
|
local address = g_defaultPorts[key]
|
||||||
|
if address == nil then
|
||||||
|
key = "xtbroker_" .. brokerType .. "_" .. platformId
|
||||||
|
address = g_defaultPorts[key]
|
||||||
|
if address == nil then
|
||||||
|
key = "xtbroker_" .. brokerType
|
||||||
|
address = g_defaultPorts[key]
|
||||||
|
if address == nil then
|
||||||
|
key = "xtbroker"
|
||||||
|
address = g_defaultPorts[key]
|
||||||
|
end
|
||||||
|
end
|
||||||
|
end
|
||||||
|
|
||||||
|
if address == nil then
|
||||||
|
if reInit then
|
||||||
|
g_brokerPorts = genBrokerInfos()
|
||||||
|
mergeBrokerInfos(g_brokerPorts)
|
||||||
|
address = getBrokerAddressWithReInit(brokerType, platformId, brokerId, accountId, false)
|
||||||
|
end
|
||||||
|
end
|
||||||
|
if address == nil then address = "" end
|
||||||
|
return address
|
||||||
|
end
|
||||||
|
|
||||||
|
function getBrokerAddress(brokerType, platformId, brokerId, accountId)
|
||||||
|
return getBrokerAddressWithReInit(brokerType, platformId, brokerId, accountId, true)
|
||||||
|
end
|
||||||
|
|
||||||
|
-- tag即platformId
|
||||||
|
function getBrokerConfig(tag)
|
||||||
|
return getConfigByAppName("xtbroker", {tag})
|
||||||
|
end
|
||||||
|
|
||||||
|
function getSfitMdquoterConfig(tag)
|
||||||
|
return getConfigByAppName("sfitMdquoter", {tag})
|
||||||
|
end
|
||||||
|
|
||||||
|
function getXtQuoterConfig()
|
||||||
|
return getConfigByAppName("xtquoter")
|
||||||
|
end
|
||||||
|
|
||||||
|
-- 取TTService配置
|
||||||
|
function getXtServiceConfig()
|
||||||
|
return getConfigByAppName("xtservice")
|
||||||
|
end
|
||||||
|
|
||||||
|
-- 取交易服务配置
|
||||||
|
function getXtTraderServiceConfig()
|
||||||
|
return getConfigByAppName("xttraderservice")
|
||||||
|
end
|
||||||
|
|
||||||
|
-- 取风控服务配置
|
||||||
|
function getXtRiskControlConfig()
|
||||||
|
return getConfigByAppName("xtriskcontrol")
|
||||||
|
end
|
||||||
|
|
||||||
|
-- 取MysqlService配置
|
||||||
|
function getXtMysqlServiceConfig()
|
||||||
|
return getConfigByAppName("xtmysqlservice")
|
||||||
|
end
|
||||||
|
|
||||||
|
function getXtSourceConfig()
|
||||||
|
return getConfigByAppName("xtsource")
|
||||||
|
end
|
||||||
|
|
||||||
|
function getXtTaskConfig(tag)
|
||||||
|
return getConfigByAppName("xttask", {tag})
|
||||||
|
end
|
||||||
|
|
||||||
|
function getXtMobileServiceConfig()
|
||||||
|
return getConfigByAppName("xtmobileservice")
|
||||||
|
end
|
||||||
|
|
||||||
|
function getParam(param)
|
||||||
|
return table2json(_G[param])
|
||||||
|
end
|
||||||
|
|
||||||
|
function getXtClientConfig()
|
||||||
|
return getConfigByAppName("xtclient")
|
||||||
|
end
|
||||||
|
|
||||||
|
function getXtMiniQmtConfig()
|
||||||
|
return getConfigByAppName("xtminiqmt")
|
||||||
|
end
|
||||||
|
|
||||||
|
function getXtMiniQuoteConfig()
|
||||||
|
return getConfigByAppName("xtminiquote")
|
||||||
|
end
|
||||||
|
|
||||||
|
function getXtQuantServiceConfig()
|
||||||
|
return getConfigByAppName("xtquantservice")
|
||||||
|
end
|
||||||
|
|
||||||
|
function getXtItsmClientConfig()
|
||||||
|
return getConfigByAppName("xtitsmclient")
|
||||||
|
end
|
||||||
|
|
||||||
|
function getXtItsmServiceConfig()
|
||||||
|
return getConfigByAppName("xtitsmservice")
|
||||||
|
end
|
||||||
|
|
||||||
|
function getXtQueryBrokerConfig()
|
||||||
|
return getConfigByAppName("xtquerybroker")
|
||||||
|
end
|
||||||
|
|
||||||
|
function getXtOtpConfig()
|
||||||
|
return getConfigByAppName("xtotpservice")
|
||||||
|
end
|
||||||
|
|
||||||
|
function getXtLogCenterConfig()
|
||||||
|
return getConfigByAppName("xtlogcenter")
|
||||||
|
end
|
||||||
|
|
||||||
|
function getCtpServiceConfig()
|
||||||
|
return getConfigByAppName("xtctpservice")
|
||||||
|
end
|
||||||
|
|
||||||
|
function getXtApiServiceConfig()
|
||||||
|
return getConfigByAppName("xtapiservice")
|
||||||
|
end
|
||||||
|
|
||||||
|
function getXtClearServiceConfig()
|
||||||
|
return getConfigByAppName("xtclearservice")
|
||||||
|
end
|
||||||
|
|
||||||
|
function getDelegateServiceConfig()
|
||||||
|
return getConfigByAppName("xtdelegateservice")
|
||||||
|
end
|
||||||
|
|
||||||
|
function getFtProduct()
|
||||||
|
return table2json(g_ftCategory)
|
||||||
|
end
|
||||||
|
|
||||||
|
function getAlgoAdapterServiceConfig()
|
||||||
|
return getConfigByAppName("xtalgoadapterservice")
|
||||||
|
end
|
||||||
|
|
||||||
|
function getXtFairPlayServiceConfig(tag)
|
||||||
|
return getConfigByAppName("xtfairplayservice", {tag} )
|
||||||
|
end
|
||||||
|
|
||||||
|
function getXtNonStandardServiceConfig()
|
||||||
|
return getConfigByAppName("xtnonstandardservice")
|
||||||
|
end
|
||||||
|
|
||||||
|
function getModules()
|
||||||
|
modules = getModulesHelper()
|
||||||
|
return table2json(modules["modules"])
|
||||||
|
end
|
||||||
|
--获取客服经理的信息
|
||||||
|
function getCustomerServiceConfig()
|
||||||
|
return getConfigByAppName("customerservice")
|
||||||
|
end
|
||||||
|
|
||||||
|
function getBrokerProxy()
|
||||||
|
return getConfigByAppName("xtbrokerproxy")
|
||||||
|
end
|
||||||
|
|
||||||
|
function getHttpUrlConfig()
|
||||||
|
return getConfigByAppName("xthttpurlconfig")
|
||||||
|
end
|
||||||
|
|
||||||
|
--require "std"
|
||||||
|
--require "io"
|
||||||
|
local function main()
|
||||||
|
if arg == nil then
|
||||||
|
return ""
|
||||||
|
end
|
||||||
|
|
||||||
|
if arg[1] ~= nil then
|
||||||
|
local d = _G[ arg[1] ]
|
||||||
|
if d ~= nil then
|
||||||
|
if type(d) == "function" then
|
||||||
|
local newArg = {}
|
||||||
|
for i = 1, 100 do
|
||||||
|
if arg[1 + i] ~= nil then
|
||||||
|
table.insert(newArg, arg[1 + i])
|
||||||
|
else
|
||||||
|
break
|
||||||
|
end
|
||||||
|
end
|
||||||
|
return d(unpack(newArg))
|
||||||
|
elseif type(d) == "table" then
|
||||||
|
return table2json(d)
|
||||||
|
end
|
||||||
|
else
|
||||||
|
local newArg = {}
|
||||||
|
for i = 1, 100 do
|
||||||
|
if arg[1 + i] ~= nil then
|
||||||
|
table.insert(newArg, arg[1 + i])
|
||||||
|
else
|
||||||
|
break
|
||||||
|
end
|
||||||
|
end
|
||||||
|
return getConfigByAppName(arg[1], newArg)
|
||||||
|
end
|
||||||
|
end
|
||||||
|
end
|
||||||
|
|
||||||
|
print(main())
|
||||||
|
--print(getXtClientConfig())
|
||||||
|
--[[
|
||||||
|
print(main())
|
||||||
|
print(getPlatform(""))
|
||||||
|
print("=====================")
|
||||||
|
print(getXtServiceConfig())
|
||||||
|
print("=====================")
|
||||||
|
print(getXTTraderServiceConfig())
|
||||||
|
print("=====================")
|
||||||
|
print(getXtQuoterConfig())
|
||||||
|
print("=====================")
|
||||||
|
print(getXtTaskConfig("xttask"))
|
||||||
|
print("=====================")
|
||||||
|
print(getBrokerConfig("1_21001_1001"))
|
||||||
|
--print(getConfigByAppName("sfit"))
|
||||||
|
]]
|
545
src/xtquant/config/configHelper.lua
Normal file
545
src/xtquant/config/configHelper.lua
Normal file
@ -0,0 +1,545 @@
|
|||||||
|
local __config_helper_lua_path = debug.getinfo(1, "S").source:sub(2)
|
||||||
|
local __config_helper_lua_dir = __config_helper_lua_path:match("(.-)[\\/][^\\/]-$") .. "/"
|
||||||
|
dofile(__config_helper_lua_dir .. "table2json.lua")
|
||||||
|
|
||||||
|
-- lua文件使用%作为分隔符
|
||||||
|
-- ini文件使用&作为分隔符
|
||||||
|
local function eval(str)
|
||||||
|
if type(str) == "string" then
|
||||||
|
if #str > 0 then
|
||||||
|
return loadstring("return " .. str)()
|
||||||
|
end
|
||||||
|
elseif type(str) == "number" then
|
||||||
|
return loadstring("return " .. tostring(str))()
|
||||||
|
else
|
||||||
|
error("is not a string")
|
||||||
|
end
|
||||||
|
end
|
||||||
|
|
||||||
|
local function split(str,sep)
|
||||||
|
local ret={}
|
||||||
|
local n=1
|
||||||
|
for w in str:gmatch("([^" .. sep .. "]*)") do
|
||||||
|
ret[n]=ret[n] or w -- only set once (so the blank after a string is ignored)
|
||||||
|
if w=="" then n=n+1 end -- step forwards on a blank but not a string
|
||||||
|
end
|
||||||
|
return ret
|
||||||
|
end
|
||||||
|
|
||||||
|
function tableMerge(t1, t2)
|
||||||
|
for k,v in pairs(t2) do
|
||||||
|
if type(v) == "table" then
|
||||||
|
if type(t1[k] or false) == "table" then
|
||||||
|
tableMerge(t1[k] or {}, t2[k] or {})
|
||||||
|
else
|
||||||
|
t1[k] = v
|
||||||
|
end
|
||||||
|
else
|
||||||
|
t1[k] = v
|
||||||
|
end
|
||||||
|
end
|
||||||
|
return t1
|
||||||
|
end
|
||||||
|
|
||||||
|
local function genMap(key, value)
|
||||||
|
local m = {}
|
||||||
|
local items = split(key, "%.")
|
||||||
|
local d = nil
|
||||||
|
for i = 0, (#items -1) do
|
||||||
|
local str = items[#items - i]
|
||||||
|
if i == 0 then
|
||||||
|
d = {}
|
||||||
|
d[str] = eval(value)
|
||||||
|
else
|
||||||
|
local raw = d
|
||||||
|
d = {}
|
||||||
|
d[str] = raw
|
||||||
|
end
|
||||||
|
end
|
||||||
|
for k, v in pairs(d) do
|
||||||
|
m[k] = v
|
||||||
|
end
|
||||||
|
return m
|
||||||
|
end
|
||||||
|
|
||||||
|
function parse(param, localMap)
|
||||||
|
local ret = param
|
||||||
|
for w in string.gmatch(param, "{{([^}]*)}}") do
|
||||||
|
local v = localMap[w]
|
||||||
|
if v == nil then v = eval(w) end
|
||||||
|
if v ~= nil then
|
||||||
|
ret = string.gsub(ret, "{{" .. w .. "}}", v)
|
||||||
|
end
|
||||||
|
end
|
||||||
|
return ret
|
||||||
|
end
|
||||||
|
|
||||||
|
local function getLocalFileMap(filePath)
|
||||||
|
local ret = {}
|
||||||
|
local file = io.open(__config_helper_lua_dir..filePath, "r")
|
||||||
|
if file ~= nil then
|
||||||
|
local content = file:read("*a")
|
||||||
|
local loadRet = loadstring(content)
|
||||||
|
if loadRet == nil then
|
||||||
|
loadRet = loadstring("return " .. content)
|
||||||
|
end
|
||||||
|
if loadRet ~= nil then
|
||||||
|
ret = loadRet()
|
||||||
|
end
|
||||||
|
end
|
||||||
|
return ret
|
||||||
|
end
|
||||||
|
|
||||||
|
local function getLocalMap()
|
||||||
|
m1 = getLocalFileMap("../config_local/customer.lua")
|
||||||
|
m2 = getLocalFileMap("../config_local/machine.lua")
|
||||||
|
return tableMerge(m1, m2)
|
||||||
|
end
|
||||||
|
|
||||||
|
function mergeLocal()
|
||||||
|
local m = getLocalMap()
|
||||||
|
local g_localMap = {}
|
||||||
|
local g_globalMap = {}
|
||||||
|
for k, v in pairs(m) do
|
||||||
|
local r1, r2 = k:find("g_")
|
||||||
|
if r1 == 1 then
|
||||||
|
g_globalMap[k] = v
|
||||||
|
else
|
||||||
|
g_localMap[k] = v
|
||||||
|
end
|
||||||
|
end
|
||||||
|
|
||||||
|
_G = tableMerge(_G, g_globalMap)
|
||||||
|
_G["g_localMap"] = g_localMap
|
||||||
|
|
||||||
|
end
|
||||||
|
|
||||||
|
function toIni(map)
|
||||||
|
local ret = ""
|
||||||
|
for key, value in pairs(map) do
|
||||||
|
str = "[" .. key .. "]\n"
|
||||||
|
for itemKey, itemValue in pairs(value) do
|
||||||
|
if type(itemValue) == type("") then
|
||||||
|
if itemValue:len() > 0 then
|
||||||
|
str = str .. itemKey .. "=" .. itemValue .. "\n"
|
||||||
|
end
|
||||||
|
elseif type(itemValue) == type(true) then
|
||||||
|
local v = 1
|
||||||
|
if itemValue then v = 1 else v = 0 end
|
||||||
|
str = str .. itemKey .. "=" .. v .. "\n"
|
||||||
|
else
|
||||||
|
str = str .. itemKey .. "=" .. itemValue .. "\n"
|
||||||
|
end
|
||||||
|
end
|
||||||
|
ret = ret .. str .. "\n"
|
||||||
|
end
|
||||||
|
return ret
|
||||||
|
end
|
||||||
|
|
||||||
|
function getClientMap(tag)
|
||||||
|
local CLINET_CONFIG = {
|
||||||
|
tagTemplate = "", --标签模版, 可以做正则匹配
|
||||||
|
address = "127.0.0.1:80", -- 地址
|
||||||
|
isGetdAddressFromNameServer = g_is_address_from_daemon, -- 是否从NameServer取得地址
|
||||||
|
isUsedAloneIO = 0, -- 是否单独使用一个ioservice
|
||||||
|
timeoutSecond = 600, -- 超时检测时间
|
||||||
|
keepAliveCheckSecond = 5, -- 保活包
|
||||||
|
reconnectSecond = 3, -- 断线重连时间间隔
|
||||||
|
requestTimeoutSecond = 600, -- 请求超时时间
|
||||||
|
isUseSSL = 0, --是否使用SSL
|
||||||
|
sslCaPath = "", -- SSL证书地址
|
||||||
|
proxyType = "0", -- 代理类型, 0表示无, 1表示http, 2表示socket4, 3表示socket5
|
||||||
|
proxyIp = "", -- 代理地址
|
||||||
|
proxyPort = 80, -- 代理端口
|
||||||
|
proxyNeedCheck = 0, -- 是否需要验证
|
||||||
|
proxyUserName = "", -- 代理用户名
|
||||||
|
proxyPassword = "", -- 代理密码
|
||||||
|
packageDir = "", -- 存放网络包目录
|
||||||
|
}
|
||||||
|
local tagKey = "client_" .. tag
|
||||||
|
local ret = {
|
||||||
|
[tagKey] = CLINET_CONFIG
|
||||||
|
}
|
||||||
|
local address = g_defaultPorts[tag]
|
||||||
|
if address == nil then address = "127.0.0.1:8000" end
|
||||||
|
local m = {
|
||||||
|
[tagKey] = {
|
||||||
|
tagTemplate = tag,
|
||||||
|
address = g_defaultPorts[tag]
|
||||||
|
}
|
||||||
|
}
|
||||||
|
ret = tableMerge(ret, m)
|
||||||
|
return ret
|
||||||
|
end
|
||||||
|
|
||||||
|
|
||||||
|
function getServerMap(tag)
|
||||||
|
local SERVER_CONFIG = {
|
||||||
|
tag = "", -- 标签模版, 可以做正则匹配
|
||||||
|
address = "0.0.0.0:80", -- 地址
|
||||||
|
isGetdAddressFromNameServer = 0,-- 是否从NameServer取得地址
|
||||||
|
timeoutSecond = 600, -- 超时检测时间
|
||||||
|
maxConnectionNum = 1000000, -- 最大连接数
|
||||||
|
isAutoBind = g_is_address_from_daemon, -- 是否自动绑定(即端口无法监听, 监听下一端口)
|
||||||
|
isUseSSL = 0, -- 是否启用SSL
|
||||||
|
crtPath = "",
|
||||||
|
serverKeyPath = "",
|
||||||
|
tempDhPath = "",
|
||||||
|
sslPassword = "",
|
||||||
|
packageDir = "",
|
||||||
|
}
|
||||||
|
local port = 80
|
||||||
|
local address = g_defaultPorts[tag]
|
||||||
|
if address ~= nil then
|
||||||
|
port = string.sub(address, string.find(address, ":") +1 ,#address)
|
||||||
|
end
|
||||||
|
|
||||||
|
local ret = {
|
||||||
|
["server_" .. tag] = SERVER_CONFIG
|
||||||
|
}
|
||||||
|
local m = {
|
||||||
|
["server_" .. tag] = {
|
||||||
|
tag = tag,
|
||||||
|
address = "0.0.0.0:" .. port,
|
||||||
|
}
|
||||||
|
}
|
||||||
|
ret = tableMerge(ret, m)
|
||||||
|
ret["server"] = ret["server_" .. tag]
|
||||||
|
return ret
|
||||||
|
end
|
||||||
|
|
||||||
|
function getLocalAppMap(tag)
|
||||||
|
local ret = {
|
||||||
|
app = {
|
||||||
|
appName = tag,
|
||||||
|
netThreadNum = 1, -- 线程数
|
||||||
|
dispatcherThreadNum = 1, -- 处理线程数
|
||||||
|
logPath = "", -- 日志文件路径
|
||||||
|
reportSeconds = 60, -- 状态报告时间
|
||||||
|
isReportLogCenter = 1 and g_is_report_logcenter or 0, -- 是否日志打印到logCenter
|
||||||
|
serverDeployType = g_server_deploy_type, -- 部署类型
|
||||||
|
host_ip = g_host_ip, -- 主机IP
|
||||||
|
zkRunningDir = g_running_dir, -- 运行目录
|
||||||
|
topology = 1 and g_is_topology_logcenter or 0, --是否发送拓扑数据
|
||||||
|
topologyInterval = 20, --发送拓扑数据时间间隔
|
||||||
|
},
|
||||||
|
client_NameService = {
|
||||||
|
tagTemplate = "NameService",
|
||||||
|
address = g_defaultPorts["xtdaemon"],
|
||||||
|
reconnectSecond = 3,
|
||||||
|
},
|
||||||
|
quoter_config = {
|
||||||
|
is_use_proxy_all_push = g_use_proxy_whole_quoter,
|
||||||
|
is_use_future_all_push = g_use_future_whole_quoter,
|
||||||
|
timeoutsec = 20,
|
||||||
|
},
|
||||||
|
}
|
||||||
|
return ret
|
||||||
|
end
|
||||||
|
|
||||||
|
function getAppMap(serverTag, clients)
|
||||||
|
local ret = getLocalAppMap(serverTag)
|
||||||
|
if serverTag ~= nil then
|
||||||
|
local serverMap = getServerMap(serverTag)
|
||||||
|
ret = tableMerge(ret, serverMap)
|
||||||
|
end
|
||||||
|
if clients ~= nil then
|
||||||
|
for i, v in pairs(clients) do
|
||||||
|
local map = getClientMap(v)
|
||||||
|
ret = tableMerge(ret, map)
|
||||||
|
end
|
||||||
|
end
|
||||||
|
return ret
|
||||||
|
end
|
||||||
|
|
||||||
|
function getLog4cxx(tag)
|
||||||
|
local d = [[
|
||||||
|
log4j.logger.TTStdFile=INFO,fa
|
||||||
|
log4j.logger.TTDbgFile=DEBUG,fa2
|
||||||
|
|
||||||
|
# 文件输出
|
||||||
|
log4j.appender.fa=org.apache.log4j.DailyRollingFileAppender
|
||||||
|
log4j.appender.fa.MaxFileSize=500MB
|
||||||
|
log4j.appender.fa.datePattern='.'yyyy-MM-dd
|
||||||
|
log4j.appender.fa.File=../userdata/log/{{tag}}.log
|
||||||
|
log4j.appender.fa.Append=true
|
||||||
|
log4j.appender.fa.layout=org.apache.log4j.PatternLayout
|
||||||
|
log4j.appender.fa.layout.ConversionPattern=%d [%p] [%t] %m%n
|
||||||
|
|
||||||
|
# 文件输出2
|
||||||
|
log4j.appender.fa2=org.apache.log4j.FileAppender
|
||||||
|
log4j.appender.fa2.MaxFileSize=500MB
|
||||||
|
log4j.appender.fa2.MaxBackupIndex=10
|
||||||
|
log4j.appender.fa2.File=../userdata/log/{{tag}}_debug.log
|
||||||
|
log4j.appender.fa2.Append=true
|
||||||
|
log4j.appender.fa2.layout=org.apache.log4j.PatternLayout
|
||||||
|
log4j.appender.fa2.layout.ConversionPattern=%d [%p] [%t] %m%n
|
||||||
|
|
||||||
|
# 控制台输出
|
||||||
|
log4j.appender.ca=org.apache.log4j.ConsoleAppender
|
||||||
|
log4j.appender.ca.layout=org.apache.log4j.PatternLayout
|
||||||
|
log4j.appender.ca.layout.ConversionPattern=%d [%p] [%t] %m%n
|
||||||
|
]]
|
||||||
|
d = parse(d, {["tag"] = tag})
|
||||||
|
return d
|
||||||
|
end
|
||||||
|
|
||||||
|
local function getTableDepth(t, depth)
|
||||||
|
if type(t) == "table" then
|
||||||
|
depth = depth + 1
|
||||||
|
local maxDepth = depth
|
||||||
|
for k, v in pairs(t) do
|
||||||
|
if type(v) == "table" then
|
||||||
|
local d = getTableDepth(v, depth)
|
||||||
|
if d > maxDepth then maxDepth = d end
|
||||||
|
end
|
||||||
|
end
|
||||||
|
depth = maxDepth
|
||||||
|
end
|
||||||
|
return depth
|
||||||
|
end
|
||||||
|
|
||||||
|
-- 根据App名称取配置
|
||||||
|
local function getConfigTableByAppName(param, m)
|
||||||
|
if m == nil then m = {} end
|
||||||
|
if type(m) ~= "table" then
|
||||||
|
m = {m}
|
||||||
|
end
|
||||||
|
local paramObj = _G[param]
|
||||||
|
if paramObj == nil then
|
||||||
|
local file = io.open(__config_helper_lua_dir .. param .. ".lua", "r")
|
||||||
|
if file ~= nil then
|
||||||
|
local content = file:read("*a")
|
||||||
|
if content ~= nil then
|
||||||
|
local loadRet = loadstring(content)
|
||||||
|
if loadRet == nil then
|
||||||
|
loadRet = loadstring("return " .. content)
|
||||||
|
end
|
||||||
|
if loadRet ~= nil then
|
||||||
|
paramObj = loadRet()
|
||||||
|
_G[param] = paramObj
|
||||||
|
end
|
||||||
|
end
|
||||||
|
end
|
||||||
|
end
|
||||||
|
if paramObj == nil then paramObj = {} end
|
||||||
|
local t = {}
|
||||||
|
if type(paramObj) == "function" then
|
||||||
|
t = paramObj(unpack(m))
|
||||||
|
elseif type(paramObj) == "table" then
|
||||||
|
t = paramObj
|
||||||
|
end
|
||||||
|
|
||||||
|
-- 合并本地数据
|
||||||
|
local localMap = {}
|
||||||
|
if g_localMap ~= nil then
|
||||||
|
localMap = g_localMap[param]
|
||||||
|
end
|
||||||
|
if localMap == nil then localMap = {} end
|
||||||
|
t = tableMerge(t, localMap)
|
||||||
|
return t
|
||||||
|
end
|
||||||
|
|
||||||
|
-- 根据App名称取配置
|
||||||
|
function getConfigByAppName(param, m)
|
||||||
|
local t = getConfigTableByAppName(param, m)
|
||||||
|
local depth = getTableDepth(t, 0)
|
||||||
|
if depth == 2 then
|
||||||
|
return toIni(t)
|
||||||
|
else
|
||||||
|
return table2json(t)
|
||||||
|
end
|
||||||
|
end
|
||||||
|
|
||||||
|
function getModulesHelper()
|
||||||
|
return getLocalFileMap("../config_local/modules.lua")
|
||||||
|
end
|
||||||
|
|
||||||
|
-- 取Broker模块配置
|
||||||
|
function getBrokerModuleConfig(configTag, moduleTag)
|
||||||
|
local t = getConfigTableByAppName(configTag)
|
||||||
|
if moduleTag ~= nil then
|
||||||
|
local tag = moduleTag
|
||||||
|
local index = moduleTag:find("_")
|
||||||
|
if index ~= nil then
|
||||||
|
tag = moduleTag:sub(moduleTag:find("_", index+1) + 1)
|
||||||
|
end
|
||||||
|
tag = configTag .. "/" .. tag
|
||||||
|
local t1 = getConfigTableByAppName(tag)
|
||||||
|
t = tableMerge(t, t1)
|
||||||
|
end
|
||||||
|
local depth = getTableDepth(t, 0)
|
||||||
|
if depth == 2 then
|
||||||
|
return toIni(t)
|
||||||
|
else
|
||||||
|
return table2json(t)
|
||||||
|
end
|
||||||
|
end
|
||||||
|
|
||||||
|
function getPlatform(tag)
|
||||||
|
for k, v in pairs(g_allPlatforms) do
|
||||||
|
local tTag = v["m_nType"] .. "_" .. v["m_nId"] .. "_" .. v["m_brokerId"]
|
||||||
|
local r1, r2 = tag:find(tTag)
|
||||||
|
if r1 == 1 then
|
||||||
|
return v
|
||||||
|
end
|
||||||
|
end
|
||||||
|
return nil
|
||||||
|
end
|
||||||
|
|
||||||
|
local function genPlatformInfos()
|
||||||
|
local allTypes = {g_future_platforms, g_stock_platforms, g_credit_platforms, g_stockoption_platforms, g_new3board_platforms, g_hgt_platforms, g_gold_platforms}
|
||||||
|
for tk, tv in pairs(allTypes) do
|
||||||
|
for k, v in pairs(tv) do
|
||||||
|
table.insert(g_allPlatforms, v)
|
||||||
|
end
|
||||||
|
end
|
||||||
|
end
|
||||||
|
|
||||||
|
function genBrokerInfos()
|
||||||
|
local key = "xtbroker_1_21001_9999"
|
||||||
|
local ret = {}
|
||||||
|
ret[key] = "127.0.0.1:" .. (58000 + 3)
|
||||||
|
|
||||||
|
return ret
|
||||||
|
end
|
||||||
|
|
||||||
|
-- 参数:待分割的字符串,分割字符
|
||||||
|
-- 返回:子串表.(含有空串)
|
||||||
|
function lua_string_split(str, split_char)
|
||||||
|
local sub_str_tab = {};
|
||||||
|
|
||||||
|
while (true) do
|
||||||
|
local pos = string.find(str, split_char);
|
||||||
|
if (not pos) then
|
||||||
|
local size_t = table.getn(sub_str_tab)
|
||||||
|
table.insert(sub_str_tab,size_t+1,str);
|
||||||
|
break;
|
||||||
|
end
|
||||||
|
|
||||||
|
local sub_str = string.sub(str, 1, pos - 1);
|
||||||
|
local size_t = table.getn(sub_str_tab);
|
||||||
|
table.insert(sub_str_tab,size_t+1,sub_str);
|
||||||
|
local t = string.len(str);
|
||||||
|
str = string.sub(str, pos + 1, t);
|
||||||
|
end
|
||||||
|
return sub_str_tab;
|
||||||
|
end
|
||||||
|
|
||||||
|
function do_checkVersion(version, minVersion)
|
||||||
|
local tableVersion = lua_string_split(version, "%.")
|
||||||
|
local tempMinVersion = minVersion
|
||||||
|
local tableGMinVersion = lua_string_split(tempMinVersion , "%.")
|
||||||
|
local ret = false
|
||||||
|
local tablesize = 0
|
||||||
|
local isVShort = false
|
||||||
|
if table.getn(tableVersion) < table.getn(tableGMinVersion) then
|
||||||
|
isVShort = true
|
||||||
|
tablesize = table.getn(tableVersion)
|
||||||
|
else
|
||||||
|
tablesize = table.getn(tableGMinVersion)
|
||||||
|
end
|
||||||
|
|
||||||
|
for i = 1, tablesize , 1 do
|
||||||
|
tableVersion[i] = tonumber(tableVersion[i])
|
||||||
|
tableGMinVersion[i] = tonumber(tableGMinVersion[i])
|
||||||
|
if tableVersion[i] < tableGMinVersion[i] then
|
||||||
|
ret = true
|
||||||
|
return ret
|
||||||
|
elseif tableVersion[i] > tableGMinVersion[i] then
|
||||||
|
return ret
|
||||||
|
end
|
||||||
|
end
|
||||||
|
|
||||||
|
if isVShort then
|
||||||
|
ret = true
|
||||||
|
end
|
||||||
|
|
||||||
|
return ret
|
||||||
|
end
|
||||||
|
|
||||||
|
function checkVersion(version)
|
||||||
|
local ret = do_checkVersion(version, g_minVersion)
|
||||||
|
local msg = ""
|
||||||
|
if ret then
|
||||||
|
msg = "您的客户端版本过低, 请联系" .. g_company .."获取最新版本!"
|
||||||
|
end
|
||||||
|
return msg
|
||||||
|
end
|
||||||
|
|
||||||
|
function checkMobileVersion(version)
|
||||||
|
local ret = do_checkVersion(version, g_minMobileVersion)
|
||||||
|
local msg = ""
|
||||||
|
if ret then
|
||||||
|
msg = "您的移动客户端版本过低, 请联系" .. g_company .."获取最新版本!"
|
||||||
|
end
|
||||||
|
return msg
|
||||||
|
end
|
||||||
|
|
||||||
|
-- 取地址Ip
|
||||||
|
function getIp(address)
|
||||||
|
local ip = address:sub(1, address:find(":") - 1)
|
||||||
|
return ip
|
||||||
|
end
|
||||||
|
|
||||||
|
-- 取地址端口
|
||||||
|
function getPort(address)
|
||||||
|
local port = address:sub(address:find(":") + 1, address:len())
|
||||||
|
return port
|
||||||
|
end
|
||||||
|
|
||||||
|
function genFairyPlayUnitInfos()
|
||||||
|
if g_fairplay_units == nil then return end
|
||||||
|
for k, v in pairs(g_fairplay_units) do
|
||||||
|
table.insert(g_fairPlayUnits, v)
|
||||||
|
end
|
||||||
|
end
|
||||||
|
|
||||||
|
function getFairPlayUnit(tag)
|
||||||
|
for k, v in pairs(g_fairPlayUnits) do
|
||||||
|
local tTag = v["m_nType"] .. "_" .. v["m_nId"]
|
||||||
|
local r1, r2 = tag:find(tTag)
|
||||||
|
if r1 == 1 then
|
||||||
|
return v
|
||||||
|
end
|
||||||
|
end
|
||||||
|
return nil
|
||||||
|
end
|
||||||
|
|
||||||
|
function mergeBrokerInfos(brokerPorts)
|
||||||
|
if g_allBrokers == nil then
|
||||||
|
g_allBrokers = {}
|
||||||
|
end
|
||||||
|
|
||||||
|
for k, v in pairs(brokerPorts) do
|
||||||
|
if g_defaultPorts[k] == nil then
|
||||||
|
g_defaultPorts[k] = v
|
||||||
|
end
|
||||||
|
table.insert(g_allBrokers, k)
|
||||||
|
end
|
||||||
|
end
|
||||||
|
|
||||||
|
-- 合并本地数据
|
||||||
|
mergeLocal()
|
||||||
|
|
||||||
|
if not g_is_address_from_daemon then
|
||||||
|
-- 产生平台信息
|
||||||
|
genPlatformInfos()
|
||||||
|
|
||||||
|
-- 产生券商信息
|
||||||
|
if g_brokerPorts == nil then
|
||||||
|
g_brokerPorts = genBrokerInfos()
|
||||||
|
end
|
||||||
|
mergeBrokerInfos(g_brokerPorts)
|
||||||
|
|
||||||
|
--产生公平交易单元信息
|
||||||
|
genFairyPlayUnitInfos()
|
||||||
|
end
|
||||||
|
|
||||||
|
function getExtraThreadPools(key)
|
||||||
|
local threadNum = g_extra_thread_pools[key]
|
||||||
|
if threadNum ~= nil then
|
||||||
|
return threadNum
|
||||||
|
end
|
||||||
|
return 0
|
||||||
|
end
|
159
src/xtquant/config/env.lua
Normal file
159
src/xtquant/config/env.lua
Normal file
@ -0,0 +1,159 @@
|
|||||||
|
|
||||||
|
-- Top-level environment flags for the xtquant services.
-- NOTE(review): flag semantics below are inferred from names where obvious
-- and hedged otherwise; confirm against the consuming services.
g_minVersion = "2.0.1.600"          -- minimum supported client version (presumably)
g_minMobileVersion = "1.0.0.0"      -- minimum supported mobile client version (presumably)
g_company = "睿智融科"              -- vendor name (runtime string; do not translate)
g_is_address_from_daemon = false    -- whether service addresses come from xtdaemon (see bootstrap below)
g_use_proxy_whole_quoter = 1        -- presumably toggles proxy-based whole-market quoting — TODO confirm
g_use_future_whole_quoter = 0       -- presumably toggles futures whole-market quoting — TODO confirm
g_server_deploy_type = 0            -- deployment mode selector; values not documented here — TODO confirm
|
||||||
|
|
||||||
|
-- Default "ip:port" endpoints for every xtquant service, keyed by service
-- name. Entries may be overridden at runtime: mergeBrokerInfos() only
-- fills keys that are still nil. Addresses outside 127.0.0.1 (proxy,
-- proxy_backup, xtmarketinfo) are remote defaults.
g_defaultPorts = {
    xtdaemon="127.0.0.1:55000",
    xtservice="127.0.0.1:56000",
    xtindex="127.0.0.1:56001",
    xtmonitor="127.0.0.1:56002",
    xtwebservice="127.0.0.1:56003",
    xttraderservice="127.0.0.1:57000",
    xtquoter="127.0.0.1:59000",
    xtriskcontrol="127.0.0.1:60000",
    proxy="210.14.136.66:55300",
    proxy_backup="203.156.205.182:55300",
    xtcounter="127.0.0.1:61100",
    xtgateway="127.0.0.1:62100",
    xtsource="127.0.0.1:63000",
    xtitsmservice="127.0.0.1:63500",
    xttask="127.0.0.1:61000",
    xtquerybroker="127.0.0.1:65000",
    xtotp="127.0.0.1:64200",
    xtlogcenter="127.0.0.1:65100",
    xtctpservice="127.0.0.1:65200",
    xtapiservice="127.0.0.1:65300",
    xtclearservice="127.0.0.1:64100",
    xtdelegateservice="127.0.0.1:64300",
    xtalgoadapterservice="127.0.0.1:64500",
    xtmarket = "127.0.0.1:60100",
    xtfairplayservice="127.0.0.1:64600",
    xtnonstandardservice="127.0.0.1:64703",
    xtantisharefinancingservice = "127.0.0.1:64800",
    xtmysqlservice="127.0.0.1:64704",
    xtmobileservice="127.0.0.1:65400",
    xtmarketinfo="210.14.136.69:59500",
}
|
||||||
|
|
||||||
|
-- Runtime registries, populated later by genPlatformInfos(),
-- mergeBrokerInfos() and genFairyPlayUnitInfos().
g_allPlatforms = {}
g_allBrokers = {}
g_fairPlayUnits = {}

-- Global configuration for the ttservice process.
-- NOTE(review): field meanings inferred from names; values are defaults
-- consumed elsewhere — confirm against ttservice before relying on them.
g_ttservice_global_config = {
    m_maxClientCount=1,               -- max simultaneous clients (presumably)
    m_logCfg="ttservice.log4cxx",     -- log4cxx configuration file
    m_listenIP="0.0.0.0",             -- listen on all interfaces
    m_nListenPort=56100,
    m_proxyIP="210.14.136.66",
    m_nProxyPort=55808,
    m_nWorkFlowPort=63000,
    m_workFlowIP="127.0.0.1",
    m_redisHost="127.0.0.1",
    m_redisPort=6379,
    m_nPortalThread=5,                -- portal thread-pool size (presumably)
    m_addrsPath="",
    m_nProductMaxPortfilio=100,       -- (sic) max portfolios per product — TODO confirm
    m_debugAccounts="",
    m_nUseMd5=0,
}
|
||||||
|
|
||||||
|
g_future_quote_platforms = {
|
||||||
|
{m_nId=20001, m_strName="CTP实盘", m_strAbbrName="sqsp", m_strLogo="broker_logo_1", m_strBrokerTag="xtbroker", m_strQuoterTag="xtquoter", m_nType=1,},
|
||||||
|
{m_nId=20002, m_strName="恒生实盘", m_strAbbrName="hssp", m_strLogo="broker_logo_1", m_strBrokerTag="xtbroker", m_strQuoterTag="xtquoter", m_nType=1,},
|
||||||
|
{m_nId=21018, m_strName="v8t实盘", m_strAbbrName="sqsp", m_strLogo="broker_logo_1", m_strBrokerTag="xtbroker", m_strQuoterTag="xtquoter", m_nType=1,},
|
||||||
|
{m_nId=21001, m_strName="CTP模拟", m_strAbbrName="gdmn", m_strLogo="broker_logo_1", m_strBrokerTag="xtbroker", m_strQuoterTag="xtquoter", m_nType=1,},
|
||||||
|
{m_nId=21002, m_strName="恒生模拟", m_strAbbrName="hsmn", m_strLogo="broker_logo_1", m_strBrokerTag="xtbroker", m_strQuoterTag="xtquoter", m_nType=1,},
|
||||||
|
{m_nId=21003, m_strName="v8t模拟", m_strAbbrName="gdmn", m_strLogo="broker_logo_1", m_strBrokerTag="xtbroker", m_strQuoterTag="xtquoter", m_nType=1,},
|
||||||
|
{m_nId=20000, m_strName="迅投高级行情", m_strAbbrName="xtgj", m_strLogo="broker_logo_1", m_strBrokerTag="xtbroker", m_strQuoterTag="xtquoter", m_nType=1,},
|
||||||
|
{m_nId=21111, m_strName="资管实盘", m_strAbbrName="xtgj", m_strLogo="broker_logo_1", m_strBrokerTag="xtbroker", m_strQuoterTag="xtquoter", m_nType=1,},
|
||||||
|
{m_nId=21112, m_strName="资管模拟", m_strAbbrName="xtgj", m_strLogo="broker_logo_1", m_strBrokerTag="xtbroker", m_strQuoterTag="xtquoter", m_nType=1,},
|
||||||
|
{m_nId=20013, m_strName="恒生实盘", m_strAbbrName="hshl", m_strLogo="broker_logo_1", m_strBrokerTag="xtbroker", m_strQuoterTag="xtquoter", m_nType=1,},
|
||||||
|
{m_nId=21013, m_strName="恒生模拟", m_strAbbrName="hshl", m_strLogo="broker_logo_1", m_strBrokerTag="xtbroker", m_strQuoterTag="xtquoter", m_nType=1,},
|
||||||
|
{m_nId=21015, m_strName="恒生大越", m_strAbbrName="hsdy", m_strLogo="broker_logo_1", m_strBrokerTag="xtbroker", m_strQuoterTag="xtquoter", m_nType=1,},
|
||||||
|
{m_nId=21014, m_strName="恒生英大", m_strAbbrName="hsyd", m_strLogo="broker_logo_1", m_strBrokerTag="xtbroker", m_strQuoterTag="xtquoter", m_nType=1,},
|
||||||
|
{m_nId=21017, m_strName="恒生金谷", m_strAbbrName="hsjg", m_strLogo="broker_logo_1", m_strBrokerTag="xtbroker", m_strQuoterTag="xtquoter", m_nType=1,},
|
||||||
|
{m_nId=21019, m_strName="恒生中原", m_strAbbrName="hszy", m_strLogo="broker_logo_1", m_strBrokerTag="xtbroker", m_strQuoterTag="xtquoter", m_nType=1,},
|
||||||
|
{m_nId=20015, m_strName="恒生大越实盘", m_strAbbrName="hsdysp", m_strLogo="broker_logo_1", m_strBrokerTag="xtbroker", m_strQuoterTag="xtquoter", m_nType=1,},
|
||||||
|
{m_nId=20014, m_strName="恒生英大实盘", m_strAbbrName="hsydsp", m_strLogo="broker_logo_1", m_strBrokerTag="xtbroker", m_strQuoterTag="xtquoter", m_nType=1,},
|
||||||
|
{m_nId=20017, m_strName="恒生金谷实盘", m_strAbbrName="hsjgsp", m_strLogo="broker_logo_1", m_strBrokerTag="xtbroker", m_strQuoterTag="xtquoter", m_nType=1,},
|
||||||
|
{m_nId=20019, m_strName="恒生中原实盘", m_strAbbrName="hszysp", m_strLogo="broker_logo_1", m_strBrokerTag="xtbroker", m_strQuoterTag="xtquoter", m_nType=1,},
|
||||||
|
}
|
||||||
|
|
||||||
|
g_futureoption_quote_platforms = {
|
||||||
|
{m_nId=70001, m_strName="CTP实盘", m_strAbbrName="sqsp", m_strLogo="broker_logo_1", m_strBrokerTag="xtbroker", m_strQuoterTag="xtquoter", m_nType=5,},
|
||||||
|
{m_nId=71001, m_strName="CTP模拟", m_strAbbrName="gdmn", m_strLogo="broker_logo_1", m_strBrokerTag="xtbroker", m_strQuoterTag="xtquoter", m_nType=5,},
|
||||||
|
{m_nId=71111, m_strName="资管实盘", m_strAbbrName="xtgj", m_strLogo="broker_logo_1", m_strBrokerTag="xtbroker", m_strQuoterTag="xtquoter", m_nType=5,},
|
||||||
|
{m_nId=71112, m_strName="资管模拟", m_strAbbrName="xtgj", m_strLogo="broker_logo_1", m_strBrokerTag="xtbroker", m_strQuoterTag="xtquoter", m_nType=5,},
|
||||||
|
}
|
||||||
|
|
||||||
|
g_stock_quote_platforms = {
|
||||||
|
{m_nId=10000, m_strName="迅投高级行情", m_strAbbrName="xtgj", m_strLogo="broker_logo_1", m_strBrokerTag="xtbroker", m_strQuoterTag="xtquoter", m_nType=2,},
|
||||||
|
{m_nId=1111, m_strName="资管实盘", m_strAbbrName="xtgj", m_strLogo="broker_logo_1", m_strBrokerTag="xtbroker", m_strQuoterTag="xtquoter", m_nType=2,},
|
||||||
|
{m_nId=1112, m_strName="资管模拟", m_strAbbrName="xtgj", m_strLogo="broker_logo_1", m_strBrokerTag="xtbroker", m_strQuoterTag="xtquoter", m_nType=2,},
|
||||||
|
}
|
||||||
|
|
||||||
|
g_credit_quote_platforms = {
|
||||||
|
{m_nId=10000, m_strName="迅投高级行情", m_strAbbrName="xtgj", m_strLogo="broker_logo_1", m_strBrokerTag="xtbroker", m_strQuoterTag="xtquoter", m_nType=3,},
|
||||||
|
}
|
||||||
|
|
||||||
|
g_stockoption_quote_platforms = {
|
||||||
|
{m_nId=10001, m_strName="迅投高级行情", m_strAbbrName="xtgj", m_strLogo="broker_logo_1", m_strBrokerTag="xtbroker", m_strQuoterTag="xtquoter", m_nType=6,},
|
||||||
|
{m_nId=1211, m_strName="资管实盘", m_strAbbrName="xtgj", m_strLogo="broker_logo_1", m_strBrokerTag="xtbroker", m_strQuoterTag="xtquoter", m_nType=6,},
|
||||||
|
{m_nId=1212, m_strName="资管模拟", m_strAbbrName="xtgj", m_strLogo="broker_logo_1", m_strBrokerTag="xtbroker", m_strQuoterTag="xtquoter", m_nType=6,},
|
||||||
|
}
|
||||||
|
|
||||||
|
g_hgt_quote_platforms = {
|
||||||
|
{m_nId=10003, m_strName="迅投高级行情", m_strAbbrName="hgtmn", m_strLogo="broker_logo_1", m_strBrokerTag="xtbroker", m_strQuoterTag="xtquoter", m_nType=7,},
|
||||||
|
{m_nId=1411, m_strName="资管实盘", m_strAbbrName="xtgj", m_strLogo="broker_logo_1", m_strBrokerTag="xtbroker", m_strQuoterTag="xtquoter", m_nType=7,},
|
||||||
|
{m_nId=1412, m_strName="资管模拟", m_strAbbrName="xtgj", m_strLogo="broker_logo_1", m_strBrokerTag="xtbroker", m_strQuoterTag="xtquoter", m_nType=7,},
|
||||||
|
}
|
||||||
|
|
||||||
|
g_new3board_quote_platforms = {
|
||||||
|
{m_nId=10002, m_strName="迅投高级行情", m_strAbbrName="neeq", m_strLogo="broker_logo_1", m_strBrokerTag="xtbroker", m_strQuoterTag="xtquoter", m_nType=10,},
|
||||||
|
{m_nId=1311, m_strName="资管实盘", m_strAbbrName="neeq", m_strLogo="broker_logo_1", m_strBrokerTag="xtbroker", m_strQuoterTag="xtquoter", m_nType=10,},
|
||||||
|
{m_nId=1312, m_strName="资管模拟", m_strAbbrName="neeq", m_strLogo="broker_logo_1", m_strBrokerTag="xtbroker", m_strQuoterTag="xtquoter", m_nType=10,},
|
||||||
|
}
|
||||||
|
|
||||||
|
g_gold_quote_platforms = {
|
||||||
|
{m_nId=31003, m_strName="迅投高级行情", m_strAbbrName="zxjtgold", m_strLogo="broker_logo_1", m_strBrokerTag="xtbroker", m_strQuoterTag="xtquoter", m_nType=4,},
|
||||||
|
{m_nId=31111, m_strName="资管实盘", m_strAbbrName="zxjtgold", m_strLogo="broker_logo_1", m_strBrokerTag="xtbroker", m_strQuoterTag="xtquoter", m_nType=4,},
|
||||||
|
{m_nId=31112, m_strName="资管模拟", m_strAbbrName="zxjtgold", m_strLogo="broker_logo_1", m_strBrokerTag="xtbroker", m_strQuoterTag="xtquoter", m_nType=4,},
|
||||||
|
}
|
||||||
|
|
||||||
|
g_future_order_limits = {
|
||||||
|
{m_strProductID="IF", m_nLimit=200},
|
||||||
|
{m_strProductID="AU", m_nLimit=100},
|
||||||
|
}
|
||||||
|
|
||||||
|
g_banks = {
|
||||||
|
{m_strLogo="bank_logo_1", m_strId="1", m_strName="工商银行",},
|
||||||
|
{m_strLogo="bank_logo_2", m_strId="2", m_strName="农业银行",},
|
||||||
|
{m_strLogo="bank_logo_3", m_strId="3", m_strName="中国银行",},
|
||||||
|
{m_strLogo="bank_logo_4", m_strId="4", m_strName="建设银行",},
|
||||||
|
{m_strLogo="bank_logo_5", m_strId="5", m_strName="交通银行",},
|
||||||
|
{m_strLogo="bank_logo_6", m_strId="6", m_strName="深圳建行",},
|
||||||
|
{m_strLogo="bank_logo_Z", m_strId="Z", m_strName="其它银行",}
|
||||||
|
}
|
||||||
|
|
||||||
|
-- Batch-order / throttling configuration for order submission.
g_batchOrder_config = {
    -- Whether to batch ordinary orders; 0 means disabled
    is_batch_ordinaryOrder = 1,
    -- When batching, flush the buffer every N milliseconds
    buffer_clear_duration_milli_sec = 100,
    buffer_clear_max_order_num = 100,
    -- apiserver: maximum number of orders per time window
    api_order_upper_limit = 1000,
    -- Window length (ms) for the cap above; requests hitting the cap
    -- within this window are rejected
    api_order_duration_milli_sec = 1000,
    -- Minimum interval between algorithm orders: 0.5 s
    api_min_algorithm_order_duration_milli_sec = 500,
    -- Minimum interval between group (basket) orders: 10 s
    api_min_group_order_duration_milli_sec = 10000,
    max_order_duration_milli_sec = -1,  -- -1 presumably means "unlimited" — TODO confirm
    max_order_count = -1,               -- -1 presumably means "unlimited" — TODO confirm
}
|
3032
src/xtquant/config/metaInfo.json
Normal file
3032
src/xtquant/config/metaInfo.json
Normal file
File diff suppressed because it is too large
Load Diff
29
src/xtquant/config/pershare_new.ini
Normal file
29
src/xtquant/config/pershare_new.ini
Normal file
@ -0,0 +1,29 @@
|
|||||||
|
主要指标|PERSHAREINDEX
|
||||||
|
每股经营活动现金流量|s_fa_ocfps|m_sFaOcfps
|
||||||
|
每股净资产|s_fa_bps|m_sFaBps
|
||||||
|
基本每股收益|s_fa_eps_basic|m_sFaEpsBasic
|
||||||
|
稀释每股收益|s_fa_eps_diluted|m_sFaEpsDiluted
|
||||||
|
每股未分配利润|s_fa_undistributedps|m_sFaUndistributedps
|
||||||
|
每股资本公积金|s_fa_surpluscapitalps|m_sFaSurpluscapitalps
|
||||||
|
扣非每股收益|adjusted_earnings_per_share|m_adjustedEarningsPerShare
|
||||||
|
净资产收益率|du_return_on_equity|m_duReturnOnEquity
|
||||||
|
销售毛利率|sales_gross_profit|m_salesGrossProfit
|
||||||
|
主营收入同比增长|inc_revenue_rate|m_incRevenueRate
|
||||||
|
净利润同比增长|du_profit_rate|m_duProfitRate
|
||||||
|
归属于母公司所有者的净利润同比增长|inc_net_profit_rate|m_incNetProfitRate
|
||||||
|
扣非净利润同比增长|adjusted_net_profit_rate|m_adjustedNetProfitRate
|
||||||
|
营业总收入滚动环比增长|inc_total_revenue_annual|m_incTotalRevenueAnnual
|
||||||
|
归属净利润滚动环比增长|inc_net_profit_to_shareholders_annual|m_incNetProfitToShareholdersAnnual
|
||||||
|
扣非净利润滚动环比增长|adjusted_profit_to_profit_annual|m_adjustedProfitToProfitAnnual
|
||||||
|
加权净资产收益率|equity_roe|m_equityRoe
|
||||||
|
摊薄净资产收益率|net_roe|m_netRoe
|
||||||
|
摊薄总资产收益率|total_roe|m_totalRoe
|
||||||
|
毛利率|gross_profit|m_grossProfit
|
||||||
|
净利率|net_profit|m_netProfit
|
||||||
|
实际税率|actual_tax_rate|m_actualTaxRate
|
||||||
|
预收款/营业收入|pre_pay_operate_income|m_prePayOperateIncome
|
||||||
|
销售现金流/营业收入|sales_cash_flow|m_salesCashFlow
|
||||||
|
资产负债比率|gear_ratio|m_gearRatio
|
||||||
|
存货周转率|inventory_turnover|m_inventoryTurnover
|
||||||
|
公告日|m_anntime|m_anntime
|
||||||
|
报告截止日|m_timetag|m_endtime
|
143
src/xtquant/config/sharebalance_new_1.ini
Normal file
143
src/xtquant/config/sharebalance_new_1.ini
Normal file
@ -0,0 +1,143 @@
|
|||||||
|
资产负债表|ASHAREBALANCESHEET
|
||||||
|
披露日期|m_anntime|m_annTime
|
||||||
|
截止日期|m_timetag|m_endTime
|
||||||
|
内部应收款|internal_shoule_recv|m_internalShouldRecv
|
||||||
|
固定资产清理|fixed_capital_clearance|m_fixedCapitalClearance
|
||||||
|
应付分保账款|should_pay_money|m_shouldPayMony
|
||||||
|
结算备付金|settlement_payment|m_settlementPayment
|
||||||
|
应收保费|receivable_premium|m_receivablePremium
|
||||||
|
应收分保账款|accounts_receivable_reinsurance|m_accountsReceivableReinsurance
|
||||||
|
应收分保合同准备金|reinsurance_contract_reserve|m_reinsuranceContractReserve
|
||||||
|
应收股利|dividends_payable|m_dividendsPayable
|
||||||
|
应收出口退税|tax_rebate_for_export|m_taxRebateForExport
|
||||||
|
应收补贴款|subsidies_receivable|m_subsidiesReceivable
|
||||||
|
应收保证金|deposit_receivable|m_depositReceivable
|
||||||
|
待摊费用|apportioned_cost|m_apportionedCost
|
||||||
|
待处理流动资产损益|profit_and_current_assets_with_deal|m_profitAndCurrentAssetsWithDeal
|
||||||
|
一年内到期的非流动资产|current_assets_one_year|m_currentAssetsOneYear
|
||||||
|
长期应收款|long_term_receivables|m_longTermReceivables
|
||||||
|
其他长期投资|other_long_term_investments|m_otherLongTermInvestments
|
||||||
|
固定资产原值|original_value_of_fixed_assets|m_originalValueOfFixedAssets
|
||||||
|
固定资产净值|net_value_of_fixed_assets|m_netValueOfFixedAssets
|
||||||
|
固定资产减值准备|depreciation_reserves_of_fixed_assets|m_depreciationReservesOfFixedAssets
|
||||||
|
生产性生物资产|productive_biological_assets|m_productiveBiologicalAssets
|
||||||
|
公益性生物资产|public_welfare_biological_assets|m_publicWelfareBiologicalAssets
|
||||||
|
油气资产|oil_and_gas_assets|m_oilAndGasAssets
|
||||||
|
开发支出|development_expenditure|m_developmentExpenditure
|
||||||
|
股权分置流通权|right_of_split_share_distribution|m_rightSplitShareDistribution
|
||||||
|
其他非流动资产|other_non_mobile_assets|m_otherNonMobileEssets
|
||||||
|
应付手续费及佣金|handling_fee_and_commission|m_handlingFeeAndCommission
|
||||||
|
其他应交款|other_payables|m_otherPayables
|
||||||
|
应付保证金|margin_payable|m_marginPayable
|
||||||
|
内部应付款|internal_accounts_payable|m_internalAccountsPayable
|
||||||
|
预提费用|advance_cost|m_advanceCost
|
||||||
|
保险合同准备金|insurance_contract_reserve|m_insuranceContractReserve
|
||||||
|
代理买卖证券款|broker_buying_and_selling_securities|m_brokerBuyingSellingSecurities
|
||||||
|
代理承销证券款|acting_underwriting_securities|m_actingUnderwritingSecurities
|
||||||
|
国际票证结算|international_ticket_settlement|m_internationalTicketSettlement
|
||||||
|
国内票证结算|domestic_ticket_settlement|m_domesticTicketSettlement
|
||||||
|
递延收益|deferred_income|m_deferredIncome
|
||||||
|
应付短期债券|short_term_bonds_payable|m_shortTermBondsPayable
|
||||||
|
长期递延收益|long_term_deferred_income|m_longTermDeferredIncome
|
||||||
|
未确定的投资损失|undetermined_investment_losses|m_undeterminedInvestmentLosses
|
||||||
|
拟分配现金股利|quasi_distribution_of_cash_dividends|m_quasiDistributionCashDividends
|
||||||
|
预计负债|provisions_not|m_provisionsNot
|
||||||
|
吸收存款及同业存放|cust_bank_dep|m_custBankDep
|
||||||
|
预计流动负债|provisions|m_provisions
|
||||||
|
减:库存股|less_tsy_stk|m_lessTsyStk
|
||||||
|
货币资金|cash_equivalents|m_cashEquivalents
|
||||||
|
拆出资金|loans_to_oth_banks|m_loansToOthBanks
|
||||||
|
交易性金融资产|tradable_fin_assets|m_tradableFinAssets
|
||||||
|
衍生金融资产|derivative_fin_assets|m_derivativeFinAssets
|
||||||
|
应收票据|bill_receivable|m_billReceivable
|
||||||
|
应收账款|account_receivable|m_accountReceivable
|
||||||
|
预付款项|advance_payment|m_advancePayment
|
||||||
|
应收利息|int_rcv|m_intRcv
|
||||||
|
其他应收款|other_receivable|m_otherReceivable
|
||||||
|
买入返售金融资产|red_monetary_cap_for_sale|m_redMonetaryCapForSale
|
||||||
|
以公允价值计量且其变动计入当期损益的金融资产|agency_bus_assets|m_agencyBusAssets
|
||||||
|
存货|inventories|m_inventories
|
||||||
|
其他流动资产|other_current_assets|m_otherCurrentAssets
|
||||||
|
流动资产合计|total_current_assets|m_totalCurrentAssets
|
||||||
|
发放贷款及垫款|loans_and_adv_granted|m_loansAndAdvGranted
|
||||||
|
可供出售金融资产|fin_assets_avail_for_sale|m_finAssetsAvailForSale
|
||||||
|
持有至到期投资|held_to_mty_invest|m_heldToMtyInvest
|
||||||
|
长期股权投资|long_term_eqy_invest|m_longTermEqyInvest
|
||||||
|
投资性房地产|invest_real_estate|m_investRealEstate
|
||||||
|
累计折旧|accumulated_depreciation|m_accumulatedDepreciation
|
||||||
|
固定资产|fix_assets|m_fixAssets
|
||||||
|
在建工程|constru_in_process|m_construInProcess
|
||||||
|
工程物资|construction_materials|m_constructionMaterials
|
||||||
|
长期负债|long_term_liabilities|m_longTermLiabilities
|
||||||
|
无形资产|intang_assets|m_intangAssets
|
||||||
|
商誉|goodwill|m_goodwill
|
||||||
|
长期待摊费用|long_deferred_expense|m_longDeferredExpense
|
||||||
|
递延所得税资产|deferred_tax_assets|m_deferredTaxAssets
|
||||||
|
非流动资产合计|total_non_current_assets|m_totalNonCurrentAssets
|
||||||
|
资产总计|tot_assets|m_totAssets
|
||||||
|
短期借款|shortterm_loan|m_shorttermLoan
|
||||||
|
向中央银行借款|borrow_central_bank|m_borrowCentralBank
|
||||||
|
拆入资金|loans_oth_banks|m_loansOthBanks
|
||||||
|
交易性金融负债|tradable_fin_liab|m_tradableFinLiab
|
||||||
|
衍生金融负债|derivative_fin_liab|m_derivativeFinLiab
|
||||||
|
应付票据|notes_payable|m_notesPayable
|
||||||
|
应付账款|accounts_payable|m_accountsPayable
|
||||||
|
预收账款|advance_peceipts|m_advancePeceipts
|
||||||
|
卖出回购金融资产款|fund_sales_fin_assets_rp|m_fundSalesFinAssetsRp
|
||||||
|
应付职工薪酬|empl_ben_payable|m_emplBenPayable
|
||||||
|
应交税费|taxes_surcharges_payable|m_taxesSurchargesPayable
|
||||||
|
应付利息|int_payable|m_intPayable
|
||||||
|
应付股利|dividend_payable|m_dividendPayable
|
||||||
|
其他应付款|other_payable|m_otherPayable
|
||||||
|
一年内到期的非流动负债|non_current_liability_in_one_year|m_nonCurrentLiabilityInOneYear
|
||||||
|
其他流动负债|other_current_liability|m_otherCurrentLiability
|
||||||
|
流动负债合计|total_current_liability|m_totalCurrentLiability
|
||||||
|
长期借款|long_term_loans|m_longTermLoans
|
||||||
|
应付债券|bonds_payable|m_bondsPayable
|
||||||
|
长期应付款|longterm_account_payable|m_longtermAccountPayable
|
||||||
|
专项应付款|grants_received|m_grantsReceived
|
||||||
|
递延所得税负债|deferred_tax_liab|m_deferredTaxLiab
|
||||||
|
其他非流动负债|other_non_current_liabilities|m_otherNonCurrentLiabilities
|
||||||
|
非流动负债合计|non_current_liabilities|m_nonCurrentLiabilities
|
||||||
|
负债合计|tot_liab|m_totLiab
|
||||||
|
实收资本(或股本)|cap_stk|m_capStk
|
||||||
|
资本公积|cap_rsrv|m_capRsrv
|
||||||
|
专项储备|specific_reserves|m_specificReserves
|
||||||
|
盈余公积|surplus_rsrv|m_surplusRsrv
|
||||||
|
一般风险准备|prov_nom_risks|m_provNomRisks
|
||||||
|
未分配利润|undistributed_profit|m_undistributedProfit
|
||||||
|
外币报表折算差额|cnvd_diff_foreign_curr_stat|m_cnvdDiffForeignCurrStat
|
||||||
|
归属于母公司股东权益合计|tot_shrhldr_eqy_excl_min_int|m_totShrhldrEqyExclMinInt
|
||||||
|
少数股东权益|minority_int|m_minorityInt
|
||||||
|
所有者权益合计|total_equity|m_totalEquity
|
||||||
|
负债和股东权益总计|tot_liab_shrhldr_eqy|m_totLiabShrhldrEqy
|
||||||
|
|
||||||
|
现金及存放中央银行款项:企业持有的现金、存放中央银行款项等总额|m_cashAdepositsCentralBank
|
||||||
|
贵金属:企业(金融)持有的黄金、白银等贵金属存货的成本|m_nobleMetal
|
||||||
|
存放同业和其它金融机构款项:企业(银行)存放于境内、境外银行和非银行金融机构的款项|m_depositsOtherFinancialInstitutions
|
||||||
|
短期投资|m_currentInvestment
|
||||||
|
买入返售金融资产|m_redemptoryMonetaryCapitalSale
|
||||||
|
应收代位追偿款净额|m_netAmountSubrogation
|
||||||
|
存出保证金|m_refundableDeposits
|
||||||
|
保户质押贷款净额|m_netAmountLoanPledged
|
||||||
|
定期存款|m_fixedTimeDeposit
|
||||||
|
长期债权投资净额|m_netLongtermDebtInvestments
|
||||||
|
长期投资|m_permanentInvestment
|
||||||
|
存出资本保证金|m_depositForcapitalRecognizance
|
||||||
|
在建工程净额|m_netBalConstructionProgress
|
||||||
|
独立账户资产|m_separateAccountAssets
|
||||||
|
代理业务资产|m_capitalInvicariousBussiness
|
||||||
|
其他资产|m_otherAssets
|
||||||
|
其中:同业及其他金融机构存放款项|m_depositsWithBanksOtherFinancialIns
|
||||||
|
应付赔付款|m_indemnityPayable
|
||||||
|
应付保单红利|m_policyDividendPayable
|
||||||
|
保户储金及投资款|m_guaranteeInvestmentFunds
|
||||||
|
预收保费|m_premiumsReceivedAdvance
|
||||||
|
保单负债|m_insuranceLiabilities
|
||||||
|
独立账户负债|m_liabilitiesIndependentAccounts
|
||||||
|
代理业务负债|m_liabilitiesVicariousBusiness
|
||||||
|
其他负债|m_otherLiablities
|
||||||
|
资本溢价|m_capitalPremium
|
||||||
|
保留溢利|m_petainedProfit
|
||||||
|
交易风险准备|m_provisionTransactionRisk
|
||||||
|
其他储备|m_otherReserves
|
9
src/xtquant/config/shareholder_new_1.ini
Normal file
9
src/xtquant/config/shareholder_new_1.ini
Normal file
@ -0,0 +1,9 @@
|
|||||||
|
股东数|SHAREHOLDER
|
||||||
|
公告日期|declareDate|declareDate
|
||||||
|
截止日期|endDate|endDate
|
||||||
|
股东总数|shareholder|shareholder
|
||||||
|
A股东户数|shareholderA|shareholderA
|
||||||
|
B股东户数|shareholderB|shareholderB
|
||||||
|
H股东户数|shareholderH|shareholderH
|
||||||
|
已流通股东户数|shareholderFloat|shareholderFloat
|
||||||
|
未流通股东户数|shareholderOther|shareholderOther
|
68
src/xtquant/config/shareincome_new_1.ini
Normal file
68
src/xtquant/config/shareincome_new_1.ini
Normal file
@ -0,0 +1,68 @@
|
|||||||
|
利润表|ASHAREINCOME
|
||||||
|
披露日期|m_anntime|m_annTime
|
||||||
|
截止日期|m_timetag|m_endTime
|
||||||
|
营业收入|revenue_inc|m_revenueInc
|
||||||
|
已赚保费|earned_premium|m_earnedPremium
|
||||||
|
房地产销售收入|real_estate_sales_income|m_realEstateSalesIncome
|
||||||
|
营业总成本|total_operating_cost|m_totalOperatingCost
|
||||||
|
房地产销售成本|real_estate_sales_cost|m_realEstateSalesCost
|
||||||
|
研发费用|research_expenses|m_researchExpenses
|
||||||
|
退保金|surrender_value|m_surrenderValue
|
||||||
|
赔付支出净额|net_payments|m_netPayments
|
||||||
|
提取保险合同准备金净额|net_withdrawal_ins_con_res|m_netWithdrawalInsConRes
|
||||||
|
保单红利支出|policy_dividend_expenses|m_policyDividendExpenses
|
||||||
|
分保费用|reinsurance_cost|m_reinsuranceCost
|
||||||
|
公允价值变动收益|change_income_fair_value|m_changeIncomeFairvalue
|
||||||
|
期货损益|futures_loss|m_futuresLoss
|
||||||
|
托管收益|trust_income|m_trustIncome
|
||||||
|
补贴收入|subsidize_revenue|m_subsidizeRevenue
|
||||||
|
其他业务利润|other_business_profits|m_otherBusinessProfits
|
||||||
|
被合并方在合并前实现净利润|net_profit_excl_merged_int_inc|m_netProfitExclMergedIntInc
|
||||||
|
利息收入|int_inc|m_intInc
|
||||||
|
手续费及佣金收入|handling_chrg_comm_inc|m_handlingChrgCommInc
|
||||||
|
手续费及佣金支出|less_handling_chrg_comm_exp|m_lessHandlingChrgCommExp
|
||||||
|
其他业务成本|other_bus_cost|m_otherBusCost
|
||||||
|
汇兑收益|plus_net_gain_fx_trans|m_plusNetGainFxTrans
|
||||||
|
非流动资产处置收益|il_net_loss_disp_noncur_asset|m_ilNetLossDispNoncurAsset
|
||||||
|
所得税费用|inc_tax|m_incTax
|
||||||
|
未确认投资损失|unconfirmed_invest_loss|m_unconfirmedInvestLoss
|
||||||
|
归属于母公司所有者的净利润|net_profit_excl_min_int_inc|m_netProfitExclMinIntInc
|
||||||
|
利息支出|less_int_exp|m_lessIntExp
|
||||||
|
其他业务收入|other_bus_inc|m_otherBusInc
|
||||||
|
营业总收入|revenue|m_revenue
|
||||||
|
营业成本|total_expense|m_totalExpense
|
||||||
|
营业税金及附加|less_taxes_surcharges_ops|m_lessTaxesSurchargesOps
|
||||||
|
销售费用|sale_expense|m_saleExpense
|
||||||
|
管理费用|less_gerl_admin_exp|m_lessGerlAdminExp
|
||||||
|
财务费用|financial_expense|m_financialExpense
|
||||||
|
资产减值损失|less_impair_loss_assets|m_lessImpairLossAssets
|
||||||
|
投资收益|plus_net_invest_inc|m_plusNetInvestInc
|
||||||
|
联营企业和合营企业的投资收益|incl_inc_invest_assoc_jv_entp|m_inclIncInvestAssocJvEntp
|
||||||
|
营业利润|oper_profit|m_operProfit
|
||||||
|
营业外收入|plus_non_oper_rev|m_plusNonOperRev
|
||||||
|
营业外支出|less_non_oper_exp|m_lessNonOperExp
|
||||||
|
利润总额|tot_profit|m_totProfit
|
||||||
|
净利润|net_profit_incl_min_int_inc|m_netProfitInclMinIntInc
|
||||||
|
净利润(扣除非经常性损益后)|net_profit_incl_min_int_inc_after|m_netProfitInclMinIntIncAfter
|
||||||
|
少数股东损益|minority_int_inc|m_minorityIntInc
|
||||||
|
基本每股收益|s_fa_eps_basic|m_sFaEpsBasic
|
||||||
|
稀释每股收益|s_fa_eps_diluted|m_sFaEpsDiluted
|
||||||
|
综合收益总额|total_income|m_totalIncome
|
||||||
|
归属于少数股东的综合收益总额|total_income_minority|m_totalIncomeMinority
|
||||||
|
其他收益|other_compreh_inc|m_otherComprehInc
|
||||||
|
|
||||||
|
利息净收入|m_netinterestIncome
|
||||||
|
手续费及佣金净收入|m_netFeesCommissions
|
||||||
|
保险业务收入|m_insuranceBusiness
|
||||||
|
减:分出保费|m_separatePremium
|
||||||
|
减:提取未到期责任准备金|m_asideReservesUndueLiabilities
|
||||||
|
赔付支出|m_paymentsInsuranceClaims
|
||||||
|
减:摊回赔付支出|m_amortizedCompensationExpenses
|
||||||
|
提取保险责任准备金净额|m_netReserveInsuranceLiability
|
||||||
|
提取保险责任准备金|m_policyReserve
|
||||||
|
减:摊回保险责任准备金|m_amortizeInsuranceReserve
|
||||||
|
保险业务手续费及佣金支出|m_nsuranceFeesCommissionExpenses
|
||||||
|
业务及管理费|m_operationAdministrativeExpense
|
||||||
|
减:摊回分保费用|m_amortizedReinsuranceExpenditure
|
||||||
|
其中:非流动资产处置净损益|m_netProfitLossdisposalNonassets
|
||||||
|
影响净利润的其他项目|m_otherItemsAffectingNetProfit
|
25
src/xtquant/config/table2json.lua
Normal file
25
src/xtquant/config/table2json.lua
Normal file
@ -0,0 +1,25 @@
|
|||||||
|
|
||||||
|
-- Convert a Lua table into a JSON string.
-- Limitations visible in the code:
--  * string keys/values are NOT escaped (embedded quotes/backslashes break output);
--  * entries whose key is neither string nor number, or whose value is of an
--    unsupported type (function, userdata, nil), are silently skipped
--    (the and/or chain yields a falsy key/value, so nothing is appended);
--  * a table with no positive integer keys serializes as a JSON object "{}",
--    otherwise as an array "[...]" — but array entries still carry "key:"
--    prefixes for string keys, so mixed tables produce invalid JSON;
--  * table.maxn is Lua 5.1 only (removed in 5.2+) — TODO confirm target runtime.
function table2json(t)
    -- Recursive worker; serializes one table level into a string.
    local function serialize(tbl)
        local tmp = {}
        for k, v in pairs(tbl) do
            local k_type = type(k)
            local v_type = type(v)
            -- string key -> "key": prefix; numeric key -> no prefix (array slot)
            local key = (k_type == "string" and "\"" .. k .. "\":")
                or (k_type == "number" and "")
            -- value rendered per type; unsupported types leave `value` falsy
            local value = (v_type == "table" and serialize(v))
                or (v_type == "boolean" and tostring(v))
                or (v_type == "string" and "\"" .. v .. "\"")
                or (v_type == "number" and v)
            -- append only when both key and value resolved
            tmp[#tmp + 1] = key and value and tostring(key) .. tostring(value) or nil
        end
        -- no positive integer keys -> treat as object, else as array
        if table.maxn(tbl) == 0 then
            return "{" .. table.concat(tmp, ",") .. "}"
        else
            return "[" .. table.concat(tmp, ",") .. "]"
        end
    end
    assert(type(t) == "table")
    return serialize(t)
end
|
10
src/xtquant/config/top10holder_new_1.ini
Normal file
10
src/xtquant/config/top10holder_new_1.ini
Normal file
@ -0,0 +1,10 @@
|
|||||||
|
十大股东|十大流通股东|TOP10HOLDER|TOP10FLOWHOLDER
|
||||||
|
公告日期|declareDate|declareDate
|
||||||
|
截止日期|endDate|endDate
|
||||||
|
股东名称|name|name
|
||||||
|
股东类型|type|type
|
||||||
|
持股数量|quantity|quantity
|
||||||
|
变动原因|reason|reason
|
||||||
|
持股比例|ratio|ratio
|
||||||
|
股份性质|nature|nature
|
||||||
|
持股排名|rank|rank
|
370
src/xtquant/config/tradeTime.txt
Normal file
370
src/xtquant/config/tradeTime.txt
Normal file
@ -0,0 +1,370 @@
|
|||||||
|
// AG晚上21:00--02:30 白天9:00--10:15 10:30-11:30 13:30--15:00
|
||||||
|
// ZN晚上21:00--01:00 白天9:00--11:30 13:30--15:00
|
||||||
|
// SC晚上21:00--02:30 白天9:00--10:15 10:30-11:30 13:30--15:00
|
||||||
|
|
||||||
|
// 上期所 白银(AG),黄金(AU) 晚上21:00--02:30
|
||||||
|
// 上期所 锌(ZN),铜(CU),铅(PB),铝(AL),镍(NI),锡(SN),不锈钢(SS),氧化铝(AO) 晚上21:00--01:00
|
||||||
|
// 上期所 螺纹钢(RB),热轧卷板(HC),石油沥青(BU,BUY) 晚上21:00--23:00
|
||||||
|
// 上期所 天然橡胶(RU),燃料油(FU),纸浆(SP) 晚上21:00--23:00
|
||||||
|
|
||||||
|
// 大商所 棕榈油(P),豆油(Y) 晚上21:00--23:30
|
||||||
|
// 大商所 豆粕(M),豆油(Y),焦炭(J),黄大豆一号(A,AX,AY) 晚上21:00--23:00
|
||||||
|
// 黄大豆二号(B),焦煤(JM),铁矿石(I),聚乙烯(L),聚氯乙烯(V)晚上21:00--23:00
|
||||||
|
// 聚丙烯(PP),乙二醇(EG),玉米(C),玉米淀粉(CS) 晚上21:00--23:00
|
||||||
|
// 苯乙烯(EB),液化石油气(PG),粳米(RR) 晚上21:00--23:00
|
||||||
|
|
||||||
|
// 郑商所 白糖(SR,SRX,SRY),棉花(CF),菜粕(RM),甲醇(MA,ME),PTA(TA)晚上21:00--23:00
|
||||||
|
// 动力煤(ZC),菜籽油(OI),玻璃(FG),棉纱(CY) 晚上21:00--23:00
|
||||||
|
// 纯碱(SA),短纤(PF)烧碱(SH)二甲苯(PX) 晚上21:00--23:00
|
||||||
|
// 能源中心 原油(SC,SCW,SCX,SCY,SCZ) 晚上21:00--02:30
|
||||||
|
// 国际铜(BC) 晚上21:00--01:00
|
||||||
|
// 低硫燃料油(LU),20号胶(NR) 晚上21:00--23:00
|
||||||
|
|
||||||
|
// 1: 交易时间 2: 夜盘开闭盘时间
|
||||||
|
|
||||||
|
|
||||||
|
1|SF|AG|555|1261-1440|1-150|541-615|631-690|811-900
|
||||||
|
1|SF|AU|555|1261-1440|1-150|541-615|631-690|811-900
|
||||||
|
1|SF|AUX|555|1261-1440|1-150|541-615|631-690|811-900
|
||||||
|
1|SF|AUY|555|1261-1440|1-150|541-615|631-690|811-900
|
||||||
|
|
||||||
|
1|SF|ZN|465|1261-1440|1-60|541-615|631-690|811-900
|
||||||
|
1|SF|CU|465|1261-1440|1-60|541-615|631-690|811-900
|
||||||
|
1|SF|PB|465|1261-1440|1-60|541-615|631-690|811-900
|
||||||
|
1|SF|AL|465|1261-1440|1-60|541-615|631-690|811-900
|
||||||
|
|
||||||
|
1|SF|SN|465|1261-1440|1-60|541-615|631-690|811-900
|
||||||
|
1|SF|NI|465|1261-1440|1-60|541-615|631-690|811-900
|
||||||
|
1|SF|SS|465|1261-1440|1-60|541-615|631-690|811-900
|
||||||
|
1|SF|AO|465|1261-1440|1-60|541-615|631-690|811-900
|
||||||
|
|
||||||
|
1|SF|RB|345|1261-1380|541-615|631-690|811-900
|
||||||
|
1|SF|HC|345|1261-1380|541-615|631-690|811-900
|
||||||
|
1|SF|BU|345|1261-1380|541-615|631-690|811-900
|
||||||
|
1|SF|BUY|345|1261-1380|541-615|631-690|811-900
|
||||||
|
1|SF|RU|345|1261-1380|541-615|631-690|811-900
|
||||||
|
1|SF|FU|345|1261-1380|541-615|631-690|811-900
|
||||||
|
1|SF|SP|345|1261-1380|541-615|631-690|811-900
|
||||||
|
|
||||||
|
1|DF|J|345|1261-1380|541-615|631-690|811-900
|
||||||
|
1|DF|P|345|1261-1380|541-615|631-690|811-900
|
||||||
|
|
||||||
|
1|DF|M|345|1261-1380|541-615|631-690|811-900
|
||||||
|
1|DF|Y|345|1261-1380|541-615|631-690|811-900
|
||||||
|
1|DF|A|345|1261-1380|541-615|631-690|811-900
|
||||||
|
1|DF|AX|345|1261-1380|541-615|631-690|811-900
|
||||||
|
1|DF|AY|345|1261-1380|541-615|631-690|811-900
|
||||||
|
1|DF|B|345|1261-1380|541-615|631-690|811-900
|
||||||
|
1|DF|JM|345|1261-1380|541-615|631-690|811-900
|
||||||
|
1|DF|I|345|1261-1380|541-615|631-690|811-900
|
||||||
|
|
||||||
|
1|DF|L|345|1261-1380|541-615|631-690|811-900
|
||||||
|
1|DF|V|345|1261-1380|541-615|631-690|811-900
|
||||||
|
1|DF|PP|345|1261-1380|541-615|631-690|811-900
|
||||||
|
1|DF|EG|345|1261-1380|541-615|631-690|811-900
|
||||||
|
1|DF|C|345|1261-1380|541-615|631-690|811-900
|
||||||
|
1|DF|CS|345|1261-1380|541-615|631-690|811-900
|
||||||
|
1|DF|EB|345|1261-1380|541-615|631-690|811-900
|
||||||
|
1|DF|PG|345|1261-1380|541-615|631-690|811-900
|
||||||
|
1|DF|RR|345|1261-1380|541-615|631-690|811-900
|
||||||
|
|
||||||
|
1|ZF|SR|345|1261-1380|541-615|631-690|811-900
|
||||||
|
1|ZF|SRX|345|1261-1380|541-615|631-690|811-900
|
||||||
|
1|ZF|SRY|345|1261-1380|541-615|631-690|811-900
|
||||||
|
1|ZF|CF|345|1261-1380|541-615|631-690|811-900
|
||||||
|
1|ZF|RM|345|1261-1380|541-615|631-690|811-900
|
||||||
|
1|ZF|MA|345|1261-1380|541-615|631-690|811-900
|
||||||
|
1|ZF|ME|345|1261-1380|541-615|631-690|811-900
|
||||||
|
1|ZF|TA|345|1261-1380|541-615|631-690|811-900
|
||||||
|
1|ZF|ZC|345|1261-1380|541-615|631-690|811-900
|
||||||
|
1|ZF|FG|345|1261-1380|541-615|631-690|811-900
|
||||||
|
1|ZF|OI|345|1261-1380|541-615|631-690|811-900
|
||||||
|
1|ZF|CY|345|1261-1380|541-615|631-690|811-900
|
||||||
|
1|ZF|SA|345|1261-1380|541-615|631-690|811-900
|
||||||
|
1|ZF|PF|345|1261-1380|541-615|631-690|811-900
|
||||||
|
1|ZF|SH|345|1261-1380|541-615|631-690|811-900
|
||||||
|
1|ZF|PX|345|1261-1380|541-615|631-690|811-900
|
||||||
|
|
||||||
|
1|YSWP|CN|976|540-955|1001-1440|1-120
|
||||||
|
1|YSWP|ID|976|540-955|1001-1440|1-120
|
||||||
|
1|YSWP|IN|976|540-955|1001-1440|1-120
|
||||||
|
1|YSWP|SG|976|540-955|1001-1440|1-120
|
||||||
|
1|YSWP|SPDCN|976|540-955|1001-1440|1-120
|
||||||
|
1|YSWP|SPDID|976|540-955|1001-1440|1-120
|
||||||
|
1|YSWP|SPDIN|976|540-955|1001-1440|1-120
|
||||||
|
1|YSWP|SPDSG|976|540-955|1001-1440|1-120
|
||||||
|
|
||||||
|
1|INE|LU|345|1261-1380|541-615|631-690|811-900
|
||||||
|
1|INE|NR|345|1261-1380|541-615|631-690|811-900
|
||||||
|
1|INE|SC|555|1261-1440|1-150|541-615|631-690|811-900
|
||||||
|
1|INE|SCW|555|1261-1440|1-150|541-615|631-690|811-900
|
||||||
|
1|INE|SCX|555|1261-1440|1-150|541-615|631-690|811-900
|
||||||
|
1|INE|SCY|555|1261-1440|1-150|541-615|631-690|811-900
|
||||||
|
1|INE|SCZ|555|1261-1440|1-150|541-615|631-690|811-900
|
||||||
|
1|INE|BC|465|1261-1440|1-60|541-615|631-690|811-900
|
||||||
|
|
||||||
|
2|SF|AG,AU,AUX,AUY|210000-235959|000000-023000
|
||||||
|
2|SF|ZN,CU,PB,AL,SN,NI,SS,AO|210000-235959|000000-010000
|
||||||
|
2|SF|RB,HC,BU,BUY,RU,FU,SP,BR|210000-230000
|
||||||
|
|
||||||
|
2|DF|A,AX,AY,B,M,Y,J,JM,I,L,V,PP,EG,C,CS,P,EB,PG,RR|210000-230000
|
||||||
|
|
||||||
|
2|ZF|SR,SRX,SRY,CF,RM,MA,ME,TA,ZC,FG,OI,CY,SA,PF,RI,SH,PX|210000-230000
|
||||||
|
|
||||||
|
2|YSWP|CN,ID,IN,SG,SPDCN,SPDID,SPDIN,SPDSG,CNINDEX|164000-235959|000000-020000
|
||||||
|
|
||||||
|
2|INE|LU,NR|210000-230000
|
||||||
|
2|INE|SC,SCW,SCX,SCY,SCZ|210000-235959|000000-023000
|
||||||
|
2|INE|BC|210000-235959|000000-010000
|
||||||
|
|
||||||
|
1|WP|NYMEX:NG|1381|360-1440|1-300
|
||||||
|
1|WP|NYMEX:CL|1381|360-1440|1-300
|
||||||
|
1|WP|NYMEX:BZ|1381|360-1440|1-300
|
||||||
|
1|WP|NYMEX:QG|1381|360-1440|1-300
|
||||||
|
1|WP|NYMEX:QM|1381|360-1440|1-300
|
||||||
|
1|WP|NYMEX:HO|1381|360-1440|1-300
|
||||||
|
1|WP|NYMEX:RB|1381|360-1440|1-300
|
||||||
|
1|WP|NYMEX:MCL|1381|360-1440|1-300
|
||||||
|
1|WP|NYMEX:PA|1381|360-1440|1-300
|
||||||
|
1|WP|NYMEX:PL|1381|360-1440|1-300
|
||||||
|
1|WP|CME:6L|1381|360-1440|1-300
|
||||||
|
1|WP|CME:GE|1381|360-1440|1-300
|
||||||
|
1|WP|CME:6A|1381|360-1440|1-300
|
||||||
|
1|WP|CME:6B|1381|360-1440|1-300
|
||||||
|
1|WP|CME:6C|1381|360-1440|1-300
|
||||||
|
1|WP|CME:6E|1381|360-1440|1-300
|
||||||
|
1|WP|CME:6J|1381|360-1440|1-300
|
||||||
|
1|WP|CME:6S|1381|360-1440|1-300
|
||||||
|
1|WP|CME:6Z|1381|360-1440|1-300
|
||||||
|
1|WP|CME:SIR|1381|360-1440|1-300
|
||||||
|
1|WP|CME:6M|1381|360-1440|1-300
|
||||||
|
1|WP|CME:NKD|1381|360-1440|1-300
|
||||||
|
1|WP|CME:ES|1381|360-1440|1-300
|
||||||
|
1|WP|CME:MIR|1381|360-1440|1-300
|
||||||
|
1|WP|CME:BTC|1381|360-1440|1-300
|
||||||
|
1|WP|CME:ETH|1381|360-1440|1-300
|
||||||
|
1|WP|CME:MBT|1381|360-1440|1-300
|
||||||
|
1|WP|CME:MET|1381|360-1440|1-300
|
||||||
|
1|WP|CME:NQ|1381|360-1440|1-300
|
||||||
|
1|WP|CME:RP|1381|360-1440|1-300
|
||||||
|
1|WP|CME:6N|1381|360-1440|1-300
|
||||||
|
1|WP|CME:PJY|1381|360-1440|1-300
|
||||||
|
1|WP|CME:RY|1381|360-1440|1-300
|
||||||
|
1|WP|CME:EMD|1381|360-1440|1-300
|
||||||
|
1|WP|CME:M2K|1381|360-1440|1-300
|
||||||
|
1|WP|CME:MES|1381|360-1440|1-300
|
||||||
|
1|WP|CME:MNQ|1381|360-1440|1-300
|
||||||
|
1|WP|CME:RTY|1381|360-1440|1-300
|
||||||
|
1|WP|CME:M6A|1381|360-1440|1-300
|
||||||
|
1|WP|CME:M6B|1381|360-1440|1-300
|
||||||
|
1|WP|CME:M6E|1381|360-1440|1-300
|
||||||
|
1|WP|CME:M6J|1381|360-1440|1-300
|
||||||
|
1|WP|CME:MCD|1381|360-1440|1-300
|
||||||
|
1|WP|CME:MSF|1381|360-1440|1-300
|
||||||
|
1|WP|CME:RU|1381|360-1440|1-300
|
||||||
|
1|WP|CME:HE|276|1290-1440|1-125
|
||||||
|
1|WP|CME:GF|276|1290-1440|1-125
|
||||||
|
1|WP|CME:LE|276|1290-1440|1-125
|
||||||
|
1|WP|HKFE:MCS|1066|1155-1440|1-180|511-1110
|
||||||
|
1|WP|HKFE:CUS|1066|1155-1440|1-180|511-1110
|
||||||
|
1|WP|HKFE:CEU|1066|1155-1440|1-180|511-1110
|
||||||
|
1|WP|HKFE:CJP|1066|1155-1440|1-180|511-1110
|
||||||
|
1|WP|HKFE:CAU|1066|1155-1440|1-180|511-1110
|
||||||
|
1|WP|HKFE:UCN|1066|1155-1440|1-180|511-1110
|
||||||
|
1|WP|HKFE:HHI|961|1035-1440|1-180|556-720|781-990
|
||||||
|
1|WP|HKFE:MCH|961|1035-1440|1-180|556-720|781-990
|
||||||
|
1|WP|HKFE:HSI|961|1035-1440|1-180|556-720|781-990
|
||||||
|
1|WP|HKFE:MHI|961|1035-1440|1-180|556-720|781-990
|
||||||
|
1|WP|HKFE:HTI|961|1035-1440|1-180|556-720|781-990
|
||||||
|
1|WP|HKFE:CHI|1036|1035-1440|1-180|541-990
|
||||||
|
1|WP|HKFE:MCA|1036|1035-1440|1-180|541-990
|
||||||
|
1|WP|HKFE:MPS|1036|1035-1440|1-180|541-990
|
||||||
|
1|WP|HKFE:MVI|1036|1035-1440|1-180|541-990
|
||||||
|
1|WP|HKFE:MEI|1036|1155-1440|1-180|541-1110
|
||||||
|
1|WP|HKFE:MIA|1036|1155-1440|1-180|541-1110
|
||||||
|
1|WP|HKFE:MMA|1036|1155-1440|1-180|541-1110
|
||||||
|
1|WP|HKFE:MDI|1036|1155-1440|1-180|541-1110
|
||||||
|
1|WP|HKFE:MTD|1036|1155-1440|1-180|541-1110
|
||||||
|
1|WP|HKFE:CHN|1036|1155-1440|1-180|541-1110
|
||||||
|
1|WP|HKFE:EMN|1036|1155-1440|1-180|541-1110
|
||||||
|
1|WP|HKFE:MPN|1036|1155-1440|1-180|541-1110
|
||||||
|
1|WP|HKFE:MVN|1036|1155-1440|1-180|541-1110
|
||||||
|
1|WP|HKFE:MSN|1036|1155-1440|1-180|541-1110
|
||||||
|
1|WP|HKFE:EAN|1036|1155-1440|1-180|541-1110
|
||||||
|
1|WP|HKFE:MDN|1036|1155-1440|1-180|541-1110
|
||||||
|
1|WP|HKFE:MIN|1036|1155-1440|1-180|541-1110
|
||||||
|
1|WP|HKFE:MJU|1036|1155-1440|1-180|541-1110
|
||||||
|
1|WP|HKFE:MMN|1036|1155-1440|1-180|541-1110
|
||||||
|
1|WP|HKFE:MTN|1036|1155-1440|1-180|541-1110
|
||||||
|
1|WP|HKFE:MXJ|1036|1155-1440|1-180|541-1110
|
||||||
|
1|WP|HKFE:MAN|1036|1155-1440|1-180|541-1110
|
||||||
|
1|WP|HKFE:MCN|1036|1155-1440|1-180|541-1110
|
||||||
|
1|WP|HKFE:MTW|1051|870-1440|1-180|526-825
|
||||||
|
1|WP|HKFE:MWN|1051|1155-1440|1-180|526-1110
|
||||||
|
1|WP|HKFE:GDU|1066|1035-1440|1-180|511-990
|
||||||
|
1|WP|HKFE:FEM|1036|1035-1440|1-180|541-990
|
||||||
|
1|WP|HKFE:LUA|1036|1035-1440|1-180|541-990
|
||||||
|
1|WP|HKFE:LUC|1036|1035-1440|1-180|541-990
|
||||||
|
1|WP|HKFE:LUN|1036|1035-1440|1-180|541-990
|
||||||
|
1|WP|HKFE:LUP|1036|1035-1440|1-180|541-990
|
||||||
|
1|WP|HKFE:LUS|1036|1035-1440|1-180|541-990
|
||||||
|
1|WP|HKFE:LUZ|1036|1035-1440|1-180|541-990
|
||||||
|
1|WP|COMEX:ALI|1381|360-1440|1-300
|
||||||
|
1|WP|COMEX:HG|1381|360-1440|1-300
|
||||||
|
1|WP|COMEX:MHG|1381|360-1440|1-300
|
||||||
|
1|WP|COMEX:QC|1381|360-1440|1-300
|
||||||
|
1|WP|COMEX:GC|1381|360-1440|1-300
|
||||||
|
1|WP|COMEX:SI|1381|360-1440|1-300
|
||||||
|
1|WP|COMEX:MGC|1381|360-1440|1-300
|
||||||
|
1|WP|COMEX:QO|1381|360-1440|1-300
|
||||||
|
1|WP|COMEX:QI|1381|360-1440|1-300
|
||||||
|
1|WP|COMEX:SGU|1381|360-1440|1-300
|
||||||
|
1|WP|COMEX:SIL|1381|360-1440|1-300
|
||||||
|
1|WP|ICE.IFEU:T|1321|480-1440|1-360
|
||||||
|
1|WP|ICE.IFEU:B|1321|480-1440|1-360
|
||||||
|
1|WP|ICE.IFEU:G|1321|480-1440|1-360
|
||||||
|
1|WP|CBOT:ZL|1056|481-1245|1290-1440|1-140
|
||||||
|
1|WP|CBOT:ZM|1056|481-1245|1290-1440|1-140
|
||||||
|
1|WP|CBOT:XK|1056|481-1245|1290-1440|1-140
|
||||||
|
1|WP|CBOT:ZS|1056|481-1245|1290-1440|1-140
|
||||||
|
1|WP|CBOT:XC|1056|481-1245|1290-1440|1-140
|
||||||
|
1|WP|CBOT:ZC|1056|481-1245|1290-1440|1-140
|
||||||
|
1|WP|CBOT:KE|1056|481-1245|1290-1440|1-140
|
||||||
|
1|WP|CBOT:XW|1056|481-1245|1290-1440|1-140
|
||||||
|
1|WP|CBOT:ZW|1056|481-1245|1290-1440|1-140
|
||||||
|
1|WP|CBOT:ZO|1056|481-1245|1290-1440|1-140
|
||||||
|
1|WP|CBOT:ZR|1056|481-1245|1290-1440|1-140
|
||||||
|
1|WP|CBOT:MYM|1381|360-1440|1-300
|
||||||
|
1|WP|CBOT:YM|1381|360-1440|1-300
|
||||||
|
1|WP|CBOT:TN|1381|360-1440|1-300
|
||||||
|
1|WP|CBOT:UB|1381|360-1440|1-300
|
||||||
|
1|WP|CBOT:ZB|1381|360-1440|1-300
|
||||||
|
1|WP|CBOT:ZF|1381|360-1440|1-300
|
||||||
|
1|WP|CBOT:ZN|1381|360-1440|1-300
|
||||||
|
1|WP|CBOT:ZT|1381|360-1440|1-300
|
||||||
|
1|WP|CBOT:10Y|1381|360-1440|1-300
|
||||||
|
1|WP|CBOT:2YY|1381|360-1440|1-300
|
||||||
|
1|WP|CBOT:30Y|1381|360-1440|1-300
|
||||||
|
1|WP|CBOT:5YY|1381|360-1440|1-300
|
||||||
|
1|WP|SGX:FEF|1296|1215-1440|1-315|446-1200
|
||||||
|
1|WP|SGX:NK|1276|895-1440|1-315|451-865
|
||||||
|
1|WP|SGX:UC|1291|1095-1440|1-315|446-1075
|
||||||
|
1|WP|SGX:IU|1291|1190-1440|1-315|446-1170
|
||||||
|
1|WP|SGX:KU|1291|1190-1440|1-315|446-1170
|
||||||
|
1|WP|SGX:TF|605|476-1080
|
||||||
|
1|WP|SGX:US|1291|1190-1440|1-315|446-1170
|
||||||
|
1|WP|SGX:FCH|1186|1020-1440|1-315|541-990
|
||||||
|
1|WP|SGX:IN|1186|1120-1440|1-315|541-1090
|
||||||
|
1|WP|SGX:CN|1186|1020-1440|1-315|541-990
|
||||||
|
1|WP|SGX:SGP|1246|1070-1440|1-345|511-1040
|
||||||
|
1|WP|ICE.IFUS:MME|1321|480-1440|1-360
|
||||||
|
1|WP|ICE.IFUS:OJ|361|1200-1440|1-120
|
||||||
|
1|WP|ICE.IFUS:CT|1041|540-1440|1-140
|
||||||
|
1|WP|ICE.IFUS:KC|556|975-1440|1-90
|
||||||
|
1|WP|ICE.IFUS:MFS|1321|480-1440|1-360
|
||||||
|
1|WP|ICE.IFUS:SB|571|930-1440|1-60
|
||||||
|
1|WP|ICE.IFUS:RS|1101|480-1440|1-140
|
||||||
|
1|WP|ICE.IFUS:SF|241|1260-1440|1-60
|
||||||
|
1|WP|ICE.IFUS:CC|526|1005-1440|1-90
|
||||||
|
1|WP|ICE.IFUS:DX|1261|480-1440|1-300
|
||||||
|
1|WP|ICE.IFUS:FNG|1321|480-1440|1-360
|
||||||
|
1|WP|ICE.IFUS:QA|1261|480-1440|1-300
|
||||||
|
1|WP|EUREX:DAX|1186|495-1440|1-240
|
||||||
|
1|WP|EUREX:DXM|1186|495-1440|1-240
|
||||||
|
1|WP|EUREX:ESX|1186|495-1440|1-240
|
||||||
|
1|WP|EUREX:MHK|1186|495-1440|1-240
|
||||||
|
1|WP|EUREX:MIN|1186|495-1440|1-240
|
||||||
|
1|WP|EUREX:MTH|1186|495-1440|1-240
|
||||||
|
1|WP|EUREX:SMI|1186|495-1440|1-240
|
||||||
|
1|WP|EUREX:VS|1186|495-1440|1-240
|
||||||
|
1|WP|EUREX:BON|1186|495-1440|1-240
|
||||||
|
1|WP|EUREX:BTP|1186|495-1440|1-240
|
||||||
|
1|WP|EUREX:CRQ|1186|495-1440|1-240
|
||||||
|
1|WP|EUREX:ESB|1186|495-1440|1-240
|
||||||
|
1|WP|EUREX:GBL|1186|495-1440|1-240
|
||||||
|
1|WP|EUREX:GBM|1186|495-1440|1-240
|
||||||
|
1|WP|EUREX:GBS|1186|495-1440|1-240
|
||||||
|
1|WP|EUREX:GBX|1186|495-1440|1-240
|
||||||
|
1|WP|EUREX:OAT|1186|495-1440|1-240
|
||||||
|
1|WP|EUREX:STX|1186|495-1440|1-240
|
||||||
|
1|WP|EUREX:TDX|1186|495-1440|1-240
|
||||||
|
1|WP|ICE.IFLL:I|1201|480-1440|1-240
|
||||||
|
1|WP|ICE.IFLL:Z|1201|480-1440|1-240
|
||||||
|
1|WP|ICE.IFLL:R|601|900-1440|1-60
|
||||||
|
1|WP|ICE.IFLX:C|445|991-1435
|
||||||
|
1|WP|ICE.IFLX:RC|511|960-1440|1-30
|
||||||
|
1|WP|ICE.IFLX:W|556|945-1440|1-60
|
||||||
|
1|WP|BAN:BTC|1439|1-1439
|
||||||
|
1|WP|BAN:ETH|1439|1-1439
|
||||||
|
2|WP|NG,CL,BZ,QG,QM,HO,RB,MCL,PA,PL|060000-235959|000000-050000
|
||||||
|
2|WP|6L,GE,6A,6B,6C,6E,6J,6S,6Z,SIR,6M,NKD,ES,MIR,BTC,ETH,MBT,MET,NQ,RP,6N,PJY,RY,EMD,M2K,MES,MNQ,RTY,M6A,M6B,M6E,M6J,MCD,MSF,RU|060000-235959|000000-050000
|
||||||
|
2|WP|HE,GF,LE|213000-235959|000000-020500
|
||||||
|
2|WP|MCS,CUS,CEU,CJP,CAU,UCN,MEI,MIA,MMA,MDI,MTD,CHN,EMN,MPN,MVN,MSN,EAN,MDN,MIN,MJU,MMN,MTN,MXJ,MAN,MCN,MWN|191500-235959|000000-030000
|
||||||
|
2|WP|HHI,MCH,HSI,MHI,HTI,CHI,MCA,MPS,MVI,GDU,FEM,LUA,LUC,LUN,LUP,LUS,LUZ|171500-235959|000000-030000
|
||||||
|
2|WP|MTW|143000-235959|000000-030000
|
||||||
|
2|WP|ALI,HG,MHG,QC,GC,SI,MGC,QO,QI,SGU,SIL|060000-235959|000000-050000
|
||||||
|
2|WP|T,B,G|080000-235959|000000-060000
|
||||||
|
2|WP|ZL,ZM,XK,ZS,XC,ZC,KE,XW,ZW,ZO,ZR|080000-235959|000000-022000
|
||||||
|
2|WP|MYM,YM,TN,UB,ZB,ZF,ZN,ZT,10Y,2YY,30Y,5YY|060000-235959|000000-050000
|
||||||
|
2|WP|FEF|201500-235959|000000-051500
|
||||||
|
2|WP|NK|145500-235959|000000-051500
|
||||||
|
2|WP|UC|181500-235959|000000-051500
|
||||||
|
2|WP|IU,KU,US|195000-235959|000000-051500
|
||||||
|
2|WP|FCH,CN|170000-235959|000000-051500
|
||||||
|
2|WP|IN|184000-235959|000000-051500
|
||||||
|
2|WP|SGP|175000-235959|000000-054500
|
||||||
|
2|WP|MME,MFS,FNG|080000-235959|000000-060000
|
||||||
|
2|WP|OJ|200000-235959|000000-020000
|
||||||
|
2|WP|CT|090000-235959|000000-022000
|
||||||
|
2|WP|KC|161500-235959|000000-013000
|
||||||
|
2|WP|SB|153000-235959|000000-010000
|
||||||
|
2|WP|RS|080000-235959|000000-022000
|
||||||
|
2|WP|SF|210000-235959|000000-010000
|
||||||
|
2|WP|CC|164500-235959|000000-013000
|
||||||
|
2|WP|DX,QA|080000-235959|000000-050000
|
||||||
|
2|WP|DAX,DXM,ESX,MHK,MIN,MTH,SMI,VS,BON,BTP,CRQ,ESB,GBL,GBM,GBS,GBX,OAT,STX,TDX|081500-235959|000000-040000
|
||||||
|
2|WP|I,Z|080000-235959|000000-040000
|
||||||
|
2|WP|R|150000-235959|000000-010000
|
||||||
|
2|WP|RC|160000-235959|000000-003000
|
||||||
|
2|WP|W|154500-235959|000000-010000
|
||||||
|
|
||||||
|
1|BKZS|290000|555|1261-1440|1-150|541-615|631-690|811-900
|
||||||
|
1|BKZS|290006|555|1261-1440|1-150|541-615|631-690|811-900
|
||||||
|
1|BKZS|290020|555|1261-1440|1-150|541-615|631-690|811-900
|
||||||
|
1|BKZS|290002|465|1261-1440|1-60|541-615|631-690|811-900
|
||||||
|
1|BKZS|290001|345|1261-1380|541-615|631-690|811-900
|
||||||
|
1|BKZS|290003|345|1261-1380|541-615|631-690|811-900
|
||||||
|
1|BKZS|290004|345|1261-1380|541-615|631-690|811-900
|
||||||
|
1|BKZS|290005|345|1261-1380|541-615|631-690|811-900
|
||||||
|
1|BKZS|290007|345|1261-1380|541-615|631-690|811-900
|
||||||
|
1|BKZS|290008|345|1261-1380|541-615|631-690|811-900
|
||||||
|
1|BKZS|290009|345|1261-1380|541-615|631-690|811-900
|
||||||
|
1|BKZS|290010|345|1261-1380|541-615|631-690|811-900
|
||||||
|
1|BKZS|290012|345|1261-1380|541-615|631-690|811-900
|
||||||
|
1|BKZS|290013|345|1261-1380|541-615|631-690|811-900
|
||||||
|
1|BKZS|290014|345|1261-1380|541-615|631-690|811-900
|
||||||
|
1|BKZS|290015|345|1261-1380|541-615|631-690|811-900
|
||||||
|
1|BKZS|290016|345|1261-1380|541-615|631-690|811-900
|
||||||
|
1|BKZS|290017|345|1261-1380|541-615|631-690|811-900
|
||||||
|
1|BKZS|290018|345|1261-1380|541-615|631-690|811-900
|
||||||
|
1|BKZS|290019|345|1261-1380|541-615|631-690|811-900
|
||||||
|
2|BKZS|290000|210000-235959|000000-023000
|
||||||
|
2|BKZS|290006|210000-235959|000000-023000
|
||||||
|
2|BKZS|290020|210000-235959|000000-023000
|
||||||
|
2|BKZS|290001|210000-230000
|
||||||
|
2|BKZS|290003|210000-230000
|
||||||
|
2|BKZS|290004|210000-230000
|
||||||
|
2|BKZS|290005|210000-230000
|
||||||
|
2|BKZS|290007|210000-230000
|
||||||
|
2|BKZS|290008|210000-230000
|
||||||
|
2|BKZS|290009|210000-230000
|
||||||
|
2|BKZS|290010|210000-230000
|
||||||
|
2|BKZS|290012|210000-230000
|
||||||
|
2|BKZS|290013|210000-230000
|
||||||
|
2|BKZS|290014|210000-230000
|
||||||
|
2|BKZS|290015|210000-230000
|
||||||
|
2|BKZS|290016|210000-230000
|
||||||
|
2|BKZS|290017|210000-230000
|
||||||
|
2|BKZS|290018|210000-230000
|
||||||
|
2|BKZS|290019|210000-230000
|
||||||
|
2|BKZS|290002|210000-235959|000000-010000
|
0
src/xtquant/config/user/root2/lua/ConstFunc.lua
Normal file
0
src/xtquant/config/user/root2/lua/ConstFunc.lua
Normal file
117
src/xtquant/config/user/root2/lua/FunIndex.lua
Normal file
117
src/xtquant/config/user/root2/lua/FunIndex.lua
Normal file
@ -0,0 +1,117 @@
|
|||||||
|
------------------------------------------------------------
|
||||||
|
-- 指标函数
|
||||||
|
-- 由脚本引擎预先定义,如果有性能问题,可用C++改写
|
||||||
|
-- @author zhangjin
|
||||||
|
-- @since 2012-10-8
|
||||||
|
-----------------------------------------------------------
|
||||||
|
function c_sar()
|
||||||
|
local cache = FormulaCacheContainer()
|
||||||
|
function sar(N, S, M, timetag, __formula)
|
||||||
|
return sar_c(cache, N, S, M, timetag, __formula)
|
||||||
|
end
|
||||||
|
return sar
|
||||||
|
end
|
||||||
|
|
||||||
|
function c_sarturn()
|
||||||
|
local cache = FormulaCacheContainer()
|
||||||
|
function sarturn(N, S, M, timetag, __formula)
|
||||||
|
return sarturn_c(cache, N, S, M, timetag, __formula)
|
||||||
|
end
|
||||||
|
return sarturn
|
||||||
|
end
|
||||||
|
|
||||||
|
function callstock2()
|
||||||
|
local container = FormulaCacheContainer()
|
||||||
|
function funcimpl(stockcode, metaID, fieldID, period, offset, timetag, formula)
|
||||||
|
return callstock2_c(container, stockcode, metaID, fieldID, period, offset, timetag, formula)
|
||||||
|
end
|
||||||
|
return funcimpl
|
||||||
|
end
|
||||||
|
|
||||||
|
function getstocklist()
|
||||||
|
local container = FormulaCacheContainer()
|
||||||
|
function funcimpl(sector, timetag, formula)
|
||||||
|
return getstocklist_c(container, sector, timetag, formula)
|
||||||
|
end
|
||||||
|
return funcimpl
|
||||||
|
end
|
||||||
|
|
||||||
|
function getinitgroup()
|
||||||
|
local container = FormulaCacheContainer()
|
||||||
|
function funcimpl(timetag, formula)
|
||||||
|
return getinitgroup_c(container, timetag, formula)
|
||||||
|
end
|
||||||
|
return funcimpl
|
||||||
|
end
|
||||||
|
|
||||||
|
function c_getspotprodgroup()
|
||||||
|
local container = FormulaCacheContainer()
|
||||||
|
function funcimpl(productcode, timetag, formula)
|
||||||
|
return getspotprodgroup_c(container, productcode, timetag, formula)
|
||||||
|
end
|
||||||
|
return funcimpl
|
||||||
|
end
|
||||||
|
|
||||||
|
function c_getspotprodinst()
|
||||||
|
local container = FormulaCacheContainer()
|
||||||
|
function funcimpl(productcode, stockindex, timetag, formula)
|
||||||
|
return getspotprodinst_c(container, productcode, stockindex, timetag, formula)
|
||||||
|
end
|
||||||
|
return funcimpl
|
||||||
|
end
|
||||||
|
|
||||||
|
function c_getwarehousereceipt()
|
||||||
|
local container = FormulaCacheContainer()
|
||||||
|
function funcimpl(productcode, warehousecode, timetag, formula)
|
||||||
|
return getwarehousereceipt_c(container, productcode, warehousecode, timetag, formula)
|
||||||
|
end
|
||||||
|
return funcimpl
|
||||||
|
end
|
||||||
|
|
||||||
|
function c_getwarehousename()
|
||||||
|
local container = FormulaCacheContainer()
|
||||||
|
function funcimpl(productcode, warehouseindex, timetag, formula)
|
||||||
|
return getwarehousename_c(container, productcode, warehouseindex, timetag, formula)
|
||||||
|
end
|
||||||
|
return funcimpl
|
||||||
|
end
|
||||||
|
|
||||||
|
function c_getfutureseats()
|
||||||
|
local container = FormulaCacheContainer()
|
||||||
|
function funcimpl(stockcode, field, rank, timetag, formula)
|
||||||
|
return getfutureseats_c(container, stockcode, field, rank, timetag, formula)
|
||||||
|
end
|
||||||
|
return funcimpl
|
||||||
|
end
|
||||||
|
|
||||||
|
function c_getfutureseatsname()
|
||||||
|
local container = FormulaCacheContainer()
|
||||||
|
function funcimpl(stockcode, field, rank, timetag, formula)
|
||||||
|
return getfutureseatsname_c(container, stockcode, field, rank, timetag, formula)
|
||||||
|
end
|
||||||
|
return funcimpl
|
||||||
|
end
|
||||||
|
|
||||||
|
function c_findfutureseats()
|
||||||
|
local container = FormulaCacheContainer()
|
||||||
|
function funcimpl(stockcode, field, member, timetag, formula)
|
||||||
|
return findfutureseats_c(container, stockcode, field, member, timetag, formula)
|
||||||
|
end
|
||||||
|
return funcimpl
|
||||||
|
end
|
||||||
|
|
||||||
|
function c_stocktype()
|
||||||
|
local container = FormulaCacheContainer()
|
||||||
|
function funcimpl(stockcode, timetag, formula)
|
||||||
|
return stocktype_c(container, stockcode, timetag, formula)
|
||||||
|
end
|
||||||
|
return funcimpl
|
||||||
|
end
|
||||||
|
|
||||||
|
function c_convindex()
|
||||||
|
local container = FormulaCacheContainer()
|
||||||
|
function funcimpl(stockcode, type, timetag, formula)
|
||||||
|
return convindex_c(container, stockcode, type, timetag, formula)
|
||||||
|
end
|
||||||
|
return funcimpl
|
||||||
|
end
|
238
src/xtquant/config/user/root2/lua/FunLogic.lua
Normal file
238
src/xtquant/config/user/root2/lua/FunLogic.lua
Normal file
@ -0,0 +1,238 @@
|
|||||||
|
------------------------------------------------------------
|
||||||
|
-- 逻辑函数
|
||||||
|
-- 由脚本引擎预先定义,如果有性能问题,可用C++改写
|
||||||
|
-- @author zhangjin
|
||||||
|
-- @since 2012-9-18
|
||||||
|
-----------------------------------------------------------
|
||||||
|
function c_any()
|
||||||
|
local count = 0
|
||||||
|
local history = FormulaDataCacheBool(1)
|
||||||
|
local ret
|
||||||
|
function any(condition, N, timetag, type)
|
||||||
|
count, ret = all_c(not(condition), N, count, timetag, type, history)
|
||||||
|
return not(ret)
|
||||||
|
end
|
||||||
|
return any
|
||||||
|
end
|
||||||
|
|
||||||
|
function c_exist()
|
||||||
|
return c_any()
|
||||||
|
end
|
||||||
|
|
||||||
|
function c_all()
|
||||||
|
local count = 0
|
||||||
|
local history = FormulaDataCacheBool(1)
|
||||||
|
local ret
|
||||||
|
function all(condition, N, timetag, type)
|
||||||
|
count, ret = all_c(condition, N, count, timetag, type, history)
|
||||||
|
return ret
|
||||||
|
end
|
||||||
|
return all
|
||||||
|
end
|
||||||
|
|
||||||
|
--条件跟随函数
|
||||||
|
function c_valuewhen()
|
||||||
|
local lastValue = 0 / 0
|
||||||
|
local ret
|
||||||
|
local first = true
|
||||||
|
local lastTimetag = 0
|
||||||
|
function valuewhen(condition, value, timetag)
|
||||||
|
if condition then
|
||||||
|
ret = value
|
||||||
|
else
|
||||||
|
ret = lastValue
|
||||||
|
end
|
||||||
|
if (lastTimetag ~= timetag) then
|
||||||
|
lastValue = ret
|
||||||
|
lastTimetag = timetag
|
||||||
|
end
|
||||||
|
return ret
|
||||||
|
end
|
||||||
|
return valuewhen
|
||||||
|
end
|
||||||
|
|
||||||
|
function c_cross()
|
||||||
|
local lastV1 = 0
|
||||||
|
local lastV2 = -1
|
||||||
|
local lastTime = -1
|
||||||
|
local t1 = 0
|
||||||
|
local t2 = -1
|
||||||
|
local count = 0
|
||||||
|
function cross(v1, v2, timetag)
|
||||||
|
if timetag ~= lastTime then
|
||||||
|
lastTime = timetag
|
||||||
|
count = 0
|
||||||
|
t1 = lastV1
|
||||||
|
t2 = lastV2
|
||||||
|
end
|
||||||
|
count = count + 1
|
||||||
|
if count > 1 then
|
||||||
|
lastV1 = t1
|
||||||
|
lastV2 = t2
|
||||||
|
end
|
||||||
|
local ret = cross_c(v1, v2, lastV1, lastV2)
|
||||||
|
lastV1 = v1
|
||||||
|
lastV2 = v2
|
||||||
|
return ret
|
||||||
|
end
|
||||||
|
return cross
|
||||||
|
end
|
||||||
|
|
||||||
|
function iff(condition, v1, v2)
|
||||||
|
--print(type(v1),type(v2))
|
||||||
|
if condition then
|
||||||
|
return v1;
|
||||||
|
else
|
||||||
|
return v2;
|
||||||
|
end
|
||||||
|
end
|
||||||
|
|
||||||
|
function ifelse(condition, v1, v2)
|
||||||
|
if condition then
|
||||||
|
return v1;
|
||||||
|
else
|
||||||
|
return v2;
|
||||||
|
end
|
||||||
|
end
|
||||||
|
|
||||||
|
function ifn(X, A, B)
|
||||||
|
if X then
|
||||||
|
return B
|
||||||
|
else
|
||||||
|
return A
|
||||||
|
end
|
||||||
|
end
|
||||||
|
|
||||||
|
function valid(value)
|
||||||
|
return isValid(value)
|
||||||
|
end
|
||||||
|
|
||||||
|
--todo: 当A, B, C中出现无效值时,金字塔返回无效值,该函数返回false
|
||||||
|
--function between(A, B, C)
|
||||||
|
--if (A - B >= 1e-6 and A - C <= 1e-6) or (A - B <= 1e-6 and A - C >= 1e-6) then
|
||||||
|
--return true
|
||||||
|
--else
|
||||||
|
--return false
|
||||||
|
--end
|
||||||
|
--end
|
||||||
|
|
||||||
|
--todo 这三个函数是隐藏的行情函数
|
||||||
|
--function isdown(timetag, __formula)
|
||||||
|
--if close(timetag, __formula) - open(timetag, __formula) < 1e-6 then
|
||||||
|
--return true
|
||||||
|
--else
|
||||||
|
--return false
|
||||||
|
--end
|
||||||
|
--end
|
||||||
|
--
|
||||||
|
--function isequal(timetag, __formula)
|
||||||
|
----if close(timetag, __formula) == open(timetag, __formula) then
|
||||||
|
--if math.fabs(close(timetag, __formula) - open(timetag, __formula)) < 1e-6 then
|
||||||
|
--return true
|
||||||
|
--else
|
||||||
|
--return false
|
||||||
|
--end
|
||||||
|
--end
|
||||||
|
--
|
||||||
|
--function isup(timetag, __formula)
|
||||||
|
--if close(timetag, __formula) - open(timetag, __formula) > 1e-6 then
|
||||||
|
--return true
|
||||||
|
--else
|
||||||
|
--return false
|
||||||
|
--end
|
||||||
|
--end
|
||||||
|
|
||||||
|
function islastbar(timetag, __formula)
|
||||||
|
return timetag == __formula:getLastBar()
|
||||||
|
end
|
||||||
|
|
||||||
|
function c_last()
|
||||||
|
local history = FormulaDataCacheBool(1)
|
||||||
|
local count = 0
|
||||||
|
local ret
|
||||||
|
function last(X, A, B, timetag)
|
||||||
|
count, ret = last_c(X, A, B, timetag, count, history)
|
||||||
|
return ret
|
||||||
|
end
|
||||||
|
return last
|
||||||
|
end
|
||||||
|
--[[
|
||||||
|
function c_longcross(A, B, N)
|
||||||
|
local historyA = FormulaDataCacheDouble(0)
|
||||||
|
local historyB = FormulaDataCacheDouble(0)
|
||||||
|
local lessCount = 0
|
||||||
|
function longcross(A, B, N, type)
|
||||||
|
lessCount, ret = longcross_c(A, B, N, type, historyA, historyB, lessCount)
|
||||||
|
return ret
|
||||||
|
end
|
||||||
|
return longcross
|
||||||
|
end
|
||||||
|
]]--
|
||||||
|
|
||||||
|
function c_longcross(A, B, N) --jch
|
||||||
|
local lessCount = 0
|
||||||
|
local tmplessCount = 0
|
||||||
|
local lastTimetag = -1
|
||||||
|
function longcross(A, B, N, timetag)
|
||||||
|
local ret = false
|
||||||
|
local condition = A < B
|
||||||
|
if lastTimetag ~= timetag then
|
||||||
|
tmplessCount = lessCount
|
||||||
|
lastTimetag = timetag
|
||||||
|
end
|
||||||
|
if condition then
|
||||||
|
lessCount = tmplessCount + 1
|
||||||
|
else
|
||||||
|
if lessCount >= N then
|
||||||
|
ret = true
|
||||||
|
end
|
||||||
|
lessCount = 0
|
||||||
|
end
|
||||||
|
return ret
|
||||||
|
end
|
||||||
|
return longcross
|
||||||
|
end
|
||||||
|
|
||||||
|
function range(A, B, C)
|
||||||
|
if A - B > 1e-6 and A - C< 1e-6 then
|
||||||
|
return true
|
||||||
|
else
|
||||||
|
return false
|
||||||
|
end
|
||||||
|
end
|
||||||
|
|
||||||
|
function c_orderdirection()
|
||||||
|
local cache = LastValueCache()
|
||||||
|
function orderdirection(timetag,formula)
|
||||||
|
return orderdirection_c(timetag, formula, cache)
|
||||||
|
end
|
||||||
|
return orderdirection
|
||||||
|
end
|
||||||
|
|
||||||
|
function c_isbuyorder()
|
||||||
|
local cache = LastValueCache()
|
||||||
|
function orderdirection(timetag,formula)
|
||||||
|
local val = orderdirection_c(timetag, formula, cache)
|
||||||
|
if val == 1 then
|
||||||
|
return 1
|
||||||
|
else
|
||||||
|
return 0
|
||||||
|
end
|
||||||
|
return 0
|
||||||
|
end
|
||||||
|
return orderdirection
|
||||||
|
end
|
||||||
|
|
||||||
|
function c_issellorder()
|
||||||
|
local cache = LastValueCache()
|
||||||
|
function issellorder(timetag,formula)
|
||||||
|
local val = orderdirection_c(timetag, formula, cache)
|
||||||
|
if val == -1 then
|
||||||
|
return 1
|
||||||
|
else
|
||||||
|
return 0
|
||||||
|
end
|
||||||
|
return 0
|
||||||
|
end
|
||||||
|
return issellorder
|
||||||
|
end
|
24
src/xtquant/config/user/root2/lua/FunMath.lua
Normal file
24
src/xtquant/config/user/root2/lua/FunMath.lua
Normal file
@ -0,0 +1,24 @@
|
|||||||
|
------------------------------------------------------------
|
||||||
|
-- 数学函数
|
||||||
|
-- 由脚本引擎预先定义,如果有性能问题,可用C++改写
|
||||||
|
-- @author jiangchanghao
|
||||||
|
-- @since 2012-9-18
|
||||||
|
-----------------------------------------------------------
|
||||||
|
|
||||||
|
|
||||||
|
function sgn(val)
|
||||||
|
if (type(val) == "boolean")
|
||||||
|
then if (val)
|
||||||
|
then return 1
|
||||||
|
else return 0
|
||||||
|
end
|
||||||
|
else if (val > 0)
|
||||||
|
then return 1
|
||||||
|
else if (val == 0)
|
||||||
|
then return 0
|
||||||
|
else return -1
|
||||||
|
end
|
||||||
|
end
|
||||||
|
end
|
||||||
|
return -1
|
||||||
|
end
|
831
src/xtquant/config/user/root2/lua/FunOther.lua
Normal file
831
src/xtquant/config/user/root2/lua/FunOther.lua
Normal file
@ -0,0 +1,831 @@
|
|||||||
|
------------------------------------------------------------
|
||||||
|
-- 指标函数--与通达信相关
|
||||||
|
-- 由脚本引擎预先定义,如果有性能问题,可用C++改写
|
||||||
|
-- @author
|
||||||
|
-- @since 2017-3-1
|
||||||
|
-----------------------------------------------------------
|
||||||
|
function c_const()
|
||||||
|
local ret
|
||||||
|
function const(X)
|
||||||
|
if not(X) then
|
||||||
|
ret = 0
|
||||||
|
else
|
||||||
|
ret = X
|
||||||
|
end
|
||||||
|
return ret;
|
||||||
|
end
|
||||||
|
return const
|
||||||
|
end
|
||||||
|
|
||||||
|
function c_inblock()
|
||||||
|
local container = FormulaCacheContainer()
|
||||||
|
function inblock(sector, timetag, __formula)
|
||||||
|
return inblock_c(sector, container, timetag, __formula)
|
||||||
|
end
|
||||||
|
return inblock
|
||||||
|
end
|
||||||
|
|
||||||
|
function c_inblock2()
|
||||||
|
local container = FormulaCacheContainer()
|
||||||
|
function inblock(sector, stockcode, timetag, __formula)
|
||||||
|
return inblock2_c(sector, stockcode, container, timetag, __formula)
|
||||||
|
end
|
||||||
|
return inblock
|
||||||
|
end
|
||||||
|
|
||||||
|
function c_sellvol()
|
||||||
|
function sellvol(timetag, __formula)
|
||||||
|
return buysellvol_c(2, timetag, __formula)
|
||||||
|
end
|
||||||
|
return sellvol
|
||||||
|
end
|
||||||
|
|
||||||
|
function c_buyvol()
|
||||||
|
function buyvol(timetag, __formula)
|
||||||
|
return buysellvol_c(1, timetag, __formula)
|
||||||
|
end
|
||||||
|
return buyvol
|
||||||
|
end
|
||||||
|
|
||||||
|
function c_upnday()
|
||||||
|
local history = FormulaDataCacheDouble(0,0)
|
||||||
|
local his=FormulaDataCacheDouble(0,0)
|
||||||
|
local turn=1
|
||||||
|
function upnday(X, M, timetag, style)
|
||||||
|
turn =1
|
||||||
|
for i=0,M-1,1 do
|
||||||
|
if back(X,i, timetag,history,style) < back(X,i+1, timetag,his,style) then
|
||||||
|
turn = 0
|
||||||
|
break
|
||||||
|
end
|
||||||
|
end
|
||||||
|
return turn
|
||||||
|
end
|
||||||
|
return upnday
|
||||||
|
end
|
||||||
|
|
||||||
|
function c_downnday()
|
||||||
|
local history = FormulaDataCacheDouble(0, 0)
|
||||||
|
local his=FormulaDataCacheDouble(0, 0)
|
||||||
|
function downnday(X, M, timetag, style)
|
||||||
|
turn =1
|
||||||
|
for i=0,M-1,1 do
|
||||||
|
if back(X,i, timetag,history,style) > back(X,i+1, timetag,his,style) then
|
||||||
|
turn = 0
|
||||||
|
break
|
||||||
|
end
|
||||||
|
end
|
||||||
|
return turn
|
||||||
|
end
|
||||||
|
return downnday
|
||||||
|
end
|
||||||
|
|
||||||
|
function c_nday()
|
||||||
|
local history = FormulaDataCacheDouble(0,0)
|
||||||
|
local turn=1
|
||||||
|
function nday(X, M, timetag, style)
|
||||||
|
turn =1
|
||||||
|
for i=0,M-1,1 do
|
||||||
|
if back(X,i, timetag,history,style)==0 then
|
||||||
|
turn = 0
|
||||||
|
break
|
||||||
|
end
|
||||||
|
end
|
||||||
|
return turn
|
||||||
|
end
|
||||||
|
return nday
|
||||||
|
end
|
||||||
|
|
||||||
|
function c_turn()
|
||||||
|
local cache = TurnDataCache();
|
||||||
|
function turn(value, N, timetag, formula)
|
||||||
|
return turn_c(value,N,timetag, formula, cache)
|
||||||
|
end
|
||||||
|
return turn
|
||||||
|
end
|
||||||
|
|
||||||
|
function c_transcationstatic()
|
||||||
|
local cache = TransactionCache();
|
||||||
|
function transcationstatic(stockcode, type, timetag, formula)
|
||||||
|
return transcationstatic_c(stockcode, type, timetag, formula, cache)
|
||||||
|
end
|
||||||
|
return transcationstatic
|
||||||
|
end
|
||||||
|
|
||||||
|
function c_transcationstaticl1()
|
||||||
|
local cache = TransactionCache();
|
||||||
|
function transcationstaticl1(stockcode, type, timetag, formula)
|
||||||
|
return transcationstaticl1_c(stockcode, type, timetag, formula, cache)
|
||||||
|
end
|
||||||
|
return transcationstaticl1
|
||||||
|
end
|
||||||
|
|
||||||
|
function c_get_cb_convert_price()
|
||||||
|
local cache = CBConvertPriceCache();
|
||||||
|
function getCbConvertPrice(bondCode,timetag, formula)
|
||||||
|
return get_cb_convert_price_c(bondCode,timetag, formula, cache)
|
||||||
|
end
|
||||||
|
return getCbConvertPrice
|
||||||
|
end
|
||||||
|
|
||||||
|
|
||||||
|
function c_external_data()
|
||||||
|
local cache = ExternalCache();
|
||||||
|
function external_data(data_name,field,period,N,timetag, formula)
|
||||||
|
return external_data_c(data_name,field, period,N,timetag, formula, cache)
|
||||||
|
end
|
||||||
|
return external_data
|
||||||
|
end
|
||||||
|
|
||||||
|
local __pydatacache = {}
|
||||||
|
function c_callpython()
|
||||||
|
local first = true
|
||||||
|
function callpython(script, period, stockcode, timetag, formula)
|
||||||
|
local _dckey = string.sub(script, 1, string.find(script, '%.'))..period..'.'..stockcode
|
||||||
|
local cache = __pydatacache[_dckey]
|
||||||
|
if not cache then
|
||||||
|
__pydatacache[_dckey] = DataCache()
|
||||||
|
cache = __pydatacache[_dckey]
|
||||||
|
else
|
||||||
|
first = false
|
||||||
|
end
|
||||||
|
local ret = callpython_c(script, period , stockcode, timetag, formula, first, cache)
|
||||||
|
return ret
|
||||||
|
end
|
||||||
|
return callpython
|
||||||
|
end
|
||||||
|
|
||||||
|
function c_getfindata()
|
||||||
|
local cache = FinDataCache()
|
||||||
|
function getfindata(value1, value2, session,timetag, formula)
|
||||||
|
return getfindata_c(value1,value2,session,timetag, formula, cache)
|
||||||
|
end
|
||||||
|
return getfindata
|
||||||
|
end
|
||||||
|
|
||||||
|
function c_getfindatabyperiod()
|
||||||
|
local container = FormulaCacheContainer()
|
||||||
|
function getfindatabyperiod(tablename, colname, year, period, announce, timetag, formula)
|
||||||
|
return getfindatabyperiod_c(container, tablename, colname, year, period, announce, timetag, formula)
|
||||||
|
end
|
||||||
|
return getfindatabyperiod
|
||||||
|
end
|
||||||
|
|
||||||
|
function c_getfindatayear()
|
||||||
|
local cache = FormulaDataCacheDouble(0,0)
|
||||||
|
function getfindatayear(value1, value2, timetag, formula)
|
||||||
|
return getfindatayear_c(value1,value2,timetag, formula, cache)
|
||||||
|
end
|
||||||
|
return getfindatayear
|
||||||
|
end
|
||||||
|
|
||||||
|
function c_get_longhubang()
|
||||||
|
local cache = LonghubangDataCache()
|
||||||
|
--value1:filed value2:direction value3: rank
|
||||||
|
function getlonghubang(value1, value2, value3, timetag,formula)
|
||||||
|
return get_longhubang_c(value1, value2, value3, timetag, formula, cache)
|
||||||
|
end
|
||||||
|
return getlonghubang
|
||||||
|
end
|
||||||
|
|
||||||
|
function c_get_holderNumber()
|
||||||
|
local cache = HolderNumberCache()
|
||||||
|
--value:filed
|
||||||
|
function getholdernum(value,timetag,formula)
|
||||||
|
return get_holder_num_c(value,timetag, formula, cache)
|
||||||
|
end
|
||||||
|
return getholdernum
|
||||||
|
end
|
||||||
|
|
||||||
|
|
||||||
|
function c_get_top10shareholder()
|
||||||
|
local cache = Top10shareholderCache()
|
||||||
|
--value1: type ,value2 : filed value3 rank
|
||||||
|
function gettop10shareholder(value1, value2, value3, timetag,formula)
|
||||||
|
return get_top10shareholder_c(value1, value2, value3, timetag, formula, cache)
|
||||||
|
end
|
||||||
|
return gettop10shareholder
|
||||||
|
end
|
||||||
|
|
||||||
|
function c_get_top10shareholderbyperiod()
|
||||||
|
local container = FormulaCacheContainer()
|
||||||
|
function gettop10shareholderbyperiod(tablename, colname, rank, year, period, announce, type, timetag,formula)
|
||||||
|
return get_top10shareholderbyperiod_c(container, tablename, colname, rank, year, period, announce, type, timetag, formula)
|
||||||
|
end
|
||||||
|
return gettop10shareholderbyperiod
|
||||||
|
end
|
||||||
|
|
||||||
|
function c_gethismaincontract()
|
||||||
|
local cache = GetHisMainContractCache()
|
||||||
|
function gethismaincontract(value1, timetag, formula)
|
||||||
|
return get_his_main_contract_c(value1, timetag, formula, cache)
|
||||||
|
end
|
||||||
|
return gethismaincontract
|
||||||
|
end
|
||||||
|
|
||||||
|
|
||||||
|
function c_getrealcontract()
|
||||||
|
local cache = GetHisMainContractCache()
|
||||||
|
function getrealcontract(value1, timetag, formula)
|
||||||
|
return get_real_contract_c(value1, timetag, formula, cache)
|
||||||
|
end
|
||||||
|
return getrealcontract
|
||||||
|
end
|
||||||
|
|
||||||
|
function c_maincontractchange()
|
||||||
|
local cache = GetHisMainContractCache()
|
||||||
|
function maincontractchange(value1,timetag,formula)
|
||||||
|
return main_contract_change_c(value1,timetag,formula,cache)
|
||||||
|
end
|
||||||
|
return maincontractchange
|
||||||
|
end
|
||||||
|
|
||||||
|
function c_tickvoldistribution()
|
||||||
|
local volnum = 0
|
||||||
|
local N = 0
|
||||||
|
local cache = TickVolDataCache()
|
||||||
|
local ret = 0
|
||||||
|
local stayTimetag = 0
|
||||||
|
local ratio = 0
|
||||||
|
local isFirst = true
|
||||||
|
function tickvoldistribution(seconds, ratio, direction, timetag, __formula, style)
|
||||||
|
if timetag==0 then
|
||||||
|
midret = findoptimumvol(0, ratio, seconds, stayTimetag,__formula, cache, isFirst)
|
||||||
|
isFirst = false
|
||||||
|
end
|
||||||
|
|
||||||
|
if ratio<=0 or ratio >1 then
|
||||||
|
return -1
|
||||||
|
end
|
||||||
|
if (direction==2 and close(timetag, __formula)==bidprice(timetag, __formula)) then
|
||||||
|
volnum = vol(timetag,__formula)
|
||||||
|
else
|
||||||
|
if(direction==1 and close(timetag, __formula)==askprice(timetag, __formula)) then
|
||||||
|
volnum = vol(timetag,__formula)
|
||||||
|
else
|
||||||
|
if(direction == 0) then
|
||||||
|
volnum = vol(timetag,__formula)
|
||||||
|
else
|
||||||
|
volnum = 0
|
||||||
|
end
|
||||||
|
end
|
||||||
|
end
|
||||||
|
|
||||||
|
--用C++找最优解,提高速度
|
||||||
|
if volnum > 0 then
|
||||||
|
midret = findoptimumvol(volnum, ratio, seconds, stayTimetag,__formula, cache, isFirst)
|
||||||
|
end
|
||||||
|
|
||||||
|
|
||||||
|
if midret ~= -1 then
|
||||||
|
ret = midret
|
||||||
|
end
|
||||||
|
|
||||||
|
if volnum > 0 then
|
||||||
|
stayTimetag = stayTimetag + 1
|
||||||
|
end
|
||||||
|
return ret
|
||||||
|
end
|
||||||
|
return tickvoldistribution
|
||||||
|
end
|
||||||
|
|
||||||
|
function c_finance()
|
||||||
|
local cache = FinDataCache()
|
||||||
|
function finance(value,timetag, formula)
|
||||||
|
return finance_c(value,timetag,formula,cache)
|
||||||
|
end
|
||||||
|
return finance
|
||||||
|
end
|
||||||
|
|
||||||
|
function c_buysellvols()
|
||||||
|
local cache = QuoterDataCache()
|
||||||
|
function buysellvols(value,timetag,formula)
|
||||||
|
return buysellvols_c(value, timetag, formula, cache)
|
||||||
|
end
|
||||||
|
return buysellvols
|
||||||
|
end
|
||||||
|
|
||||||
|
function c_iopv()
|
||||||
|
local cache = QuoterDataCache()
|
||||||
|
function iopv(timetag,formula)
|
||||||
|
return iopv_c(cache, timetag, formula)
|
||||||
|
end
|
||||||
|
return iopv
|
||||||
|
end
|
||||||
|
|
||||||
|
function c_getopenamount()
|
||||||
|
local cache = QuoterDataCache()
|
||||||
|
function getopenamount(formula)
|
||||||
|
return getopenamount_c(cache,formula)
|
||||||
|
end
|
||||||
|
return getopenamount
|
||||||
|
end
|
||||||
|
|
||||||
|
function c_getopenvol()
|
||||||
|
local cache = QuoterDataCache()
|
||||||
|
function getopenvol(formula)
|
||||||
|
return getopenvol_c(cache,formula)
|
||||||
|
end
|
||||||
|
return getopenvol
|
||||||
|
end
|
||||||
|
|
||||||
|
function c_blkname()
|
||||||
|
local container = FormulaCacheContainer()
|
||||||
|
function blkname(formula)
|
||||||
|
return blkname_c(container, "申万一级行业板块", formula)
|
||||||
|
end
|
||||||
|
return blkname
|
||||||
|
end
|
||||||
|
|
||||||
|
function c_findblock()
|
||||||
|
local container = FormulaCacheContainer()
|
||||||
|
function findblock(folder, formula)
|
||||||
|
return blkname_c(container, folder, formula)
|
||||||
|
end
|
||||||
|
return findblock
|
||||||
|
end
|
||||||
|
|
||||||
|
function c_findindex()
|
||||||
|
local indexTable = {}
|
||||||
|
function findindex(sector, stockcode, timetag, formula)
|
||||||
|
key = sector..stockcode
|
||||||
|
for k, v in pairs(indexTable) do
|
||||||
|
if k == key then
|
||||||
|
return v
|
||||||
|
end
|
||||||
|
end
|
||||||
|
|
||||||
|
index = findindex_c(sector, stockcode, timetag, formula)
|
||||||
|
indexTable[key] = index
|
||||||
|
return index
|
||||||
|
end
|
||||||
|
return findindex
|
||||||
|
end
|
||||||
|
|
||||||
|
function c_switchindex()
|
||||||
|
local container = FormulaCacheContainer()
|
||||||
|
function funcimpl(stockcode, suffix, timetag, formula)
|
||||||
|
return switchindex_c(stockcode, suffix, container, timetag, formula)
|
||||||
|
end
|
||||||
|
return funcimpl
|
||||||
|
end
|
||||||
|
|
||||||
|
function c_extdatablockrank()
|
||||||
|
local cache = ExtFormulaCache()
|
||||||
|
function extdatablockrank(name, stockcode, sector, timetag, formula)
|
||||||
|
return extdatablockrank_c(name, stockcode, sector, cache, timetag, formula)
|
||||||
|
end
|
||||||
|
return extdatablockrank
|
||||||
|
end
|
||||||
|
|
||||||
|
function c_extdatablocksum()
|
||||||
|
local cache = ExtFormulaCache()
|
||||||
|
function extdatablocksum(name, sector, timetag, formula)
|
||||||
|
return extdatablocksum_c(name, sector, cache, timetag, formula)
|
||||||
|
end
|
||||||
|
return extdatablocksum
|
||||||
|
end
|
||||||
|
|
||||||
|
function c_extdatablocksumrange()
|
||||||
|
local cache = ExtFormulaCache()
|
||||||
|
function funcimpl(name, sector, range, timetag, formula)
|
||||||
|
return extdatablocksumrange_c(name, sector, range, cache, timetag, formula)
|
||||||
|
end
|
||||||
|
return funcimpl
|
||||||
|
end
|
||||||
|
|
||||||
|
function c_extblockranktocode()
|
||||||
|
local cache = ExtFormulaCache()
|
||||||
|
function funcimpl(name, sector, rate, timetag, formula)
|
||||||
|
return extblockranktocode_c(name, sector, rate, cache, timetag, formula)
|
||||||
|
end
|
||||||
|
return funcimpl
|
||||||
|
end
|
||||||
|
|
||||||
|
function c_blocksize()
|
||||||
|
local container = FormulaCacheContainer()
|
||||||
|
function blocksize(sector, ...)
|
||||||
|
return blocksize_c(container, sector, ...)
|
||||||
|
end
|
||||||
|
return blocksize
|
||||||
|
end
|
||||||
|
|
||||||
|
function c_stockbyblockrank()
|
||||||
|
local container = FormulaCacheContainer()
|
||||||
|
function stockbyblockrank(sector, fieldID, rate, timetag, formula)
|
||||||
|
return stockbyblockrank_c(sector, fieldID, rate, container, timetag, formula)
|
||||||
|
end
|
||||||
|
return stockbyblockrank
|
||||||
|
end
|
||||||
|
|
||||||
|
function c_blocksum()
|
||||||
|
local container = FormulaCacheContainer()
|
||||||
|
function blocksum(sector, fieldID, timetag, formula)
|
||||||
|
return blocksum_c(sector, fieldID, container, timetag, formula)
|
||||||
|
end
|
||||||
|
return blocksum
|
||||||
|
end
|
||||||
|
|
||||||
|
function c_blockrank()
|
||||||
|
local container = FormulaCacheContainer()
|
||||||
|
function wrapper(sector, stockcode, fieldID, timetag, formula)
|
||||||
|
return blockrank_c(sector, stockcode, fieldID, container, timetag, formula)
|
||||||
|
end
|
||||||
|
return wrapper
|
||||||
|
end
|
||||||
|
|
||||||
|
function c_paramcombcalc()
|
||||||
|
local container = FormulaCacheContainer()
|
||||||
|
function paramcombcalc(...)
|
||||||
|
local a = -1
|
||||||
|
local b = -1
|
||||||
|
local c = bvector()
|
||||||
|
for k,v in ipairs({...}) do
|
||||||
|
if k == 1 then
|
||||||
|
a = v
|
||||||
|
elseif k == 2 then
|
||||||
|
b = v
|
||||||
|
else
|
||||||
|
c:push_back(v)
|
||||||
|
end
|
||||||
|
end
|
||||||
|
return paramcombcalc_c(a, b, c, container)
|
||||||
|
end
|
||||||
|
return paramcombcalc
|
||||||
|
end
|
||||||
|
|
||||||
|
function c_getoptinfo()
|
||||||
|
local container = FormulaCacheContainer()
|
||||||
|
function wrapper(optcode, timetag, formula)
|
||||||
|
return getoptinfo_c(container, optcode, timetag, formula)
|
||||||
|
end
|
||||||
|
return wrapper
|
||||||
|
end
|
||||||
|
|
||||||
|
function c_getoptcodebyundl()
|
||||||
|
local container = FormulaCacheContainer()
|
||||||
|
function wrapper(undlCode, index, timetag, formula)
|
||||||
|
return getoptcodebyundl_c(container, undlCode, index, timetag, formula)
|
||||||
|
end
|
||||||
|
return wrapper
|
||||||
|
end
|
||||||
|
|
||||||
|
function c_getoptcode()
|
||||||
|
local container = FormulaCacheContainer()
|
||||||
|
function wrapper(optcode, side, price, contractType, timetag, formula)
|
||||||
|
return getoptcode_c(container, optcode, side, price, contractType, timetag, formula)
|
||||||
|
end
|
||||||
|
return wrapper
|
||||||
|
end
|
||||||
|
|
||||||
|
function c_getoptundlcode()
|
||||||
|
local container = FormulaCacheContainer()
|
||||||
|
function wrapper(optcode, timetag, formula)
|
||||||
|
return getoptundlcode_c(container, optcode, timetag, formula)
|
||||||
|
end
|
||||||
|
return wrapper
|
||||||
|
end
|
||||||
|
|
||||||
|
function c_getoptcodebyno()
|
||||||
|
local container = FormulaCacheContainer()
|
||||||
|
function wrapper(undlCode, side, contractType, no, day, contractType1, mode, period, timetag, formula)
|
||||||
|
local param = ivector()
|
||||||
|
param:push_back(contractType)
|
||||||
|
param:push_back(no)
|
||||||
|
param:push_back(day)
|
||||||
|
param:push_back(contractType1)
|
||||||
|
param:push_back(mode)
|
||||||
|
param:push_back(period)
|
||||||
|
param:push_back(1)
|
||||||
|
return getoptcodebyno_c(container, undlCode, side, param, timetag, formula)
|
||||||
|
end
|
||||||
|
return wrapper
|
||||||
|
end
|
||||||
|
|
||||||
|
function c_getoptcodebyno2()
|
||||||
|
local container = FormulaCacheContainer()
|
||||||
|
function wrapper(undlCode, side, contractType, no, day, contractType1, mode, period, timetag, formula)
|
||||||
|
local param = ivector()
|
||||||
|
param:push_back(contractType)
|
||||||
|
param:push_back(no)
|
||||||
|
param:push_back(day)
|
||||||
|
param:push_back(contractType1)
|
||||||
|
param:push_back(mode)
|
||||||
|
param:push_back(period)
|
||||||
|
param:push_back(0)
|
||||||
|
return getoptcodebyno_c(container, undlCode, side, param, timetag, formula)
|
||||||
|
end
|
||||||
|
return wrapper
|
||||||
|
end
|
||||||
|
|
||||||
|
function c_getexerciseinterval()
|
||||||
|
local container = FormulaCacheContainer()
|
||||||
|
function wrapper(undlCode, contractType, timetag, formula)
|
||||||
|
return getexerciseinterval_c(container, undlCode, contractType, timetag, formula)
|
||||||
|
end
|
||||||
|
return wrapper
|
||||||
|
end
|
||||||
|
|
||||||
|
function c_tdate()
|
||||||
|
local container = FormulaCacheContainer()
|
||||||
|
function wrapper(timetag, formula)
|
||||||
|
return tdate_c(container, timetag, formula)
|
||||||
|
end
|
||||||
|
return wrapper
|
||||||
|
end
|
||||||
|
|
||||||
|
function c_tweekday()
|
||||||
|
local container = FormulaCacheContainer()
|
||||||
|
function wrapper(timetag, formula)
|
||||||
|
return tweekday_c(container, timetag, formula)
|
||||||
|
end
|
||||||
|
return wrapper
|
||||||
|
end
|
||||||
|
|
||||||
|
function c_timerat()
|
||||||
|
local container = FormulaCacheContainer()
|
||||||
|
function wrapper(dateNum, timeNum, timetag, formula)
|
||||||
|
return timerat_c(container, dateNum, timeNum, timetag, formula)
|
||||||
|
end
|
||||||
|
return wrapper
|
||||||
|
end
|
||||||
|
|
||||||
|
function c_timerafter()
|
||||||
|
local container = FormulaCacheContainer()
|
||||||
|
function wrapper(hh, mm, ss, timetag, formula)
|
||||||
|
return timerafter_c(container, hh, mm, ss, timetag, formula)
|
||||||
|
end
|
||||||
|
return wrapper
|
||||||
|
end
|
||||||
|
|
||||||
|
function c_deliveryinterval()
|
||||||
|
local container = FormulaCacheContainer()
|
||||||
|
function wrapper(timetag, formula)
|
||||||
|
return deliveryinterval_c(container, timetag, formula)
|
||||||
|
end
|
||||||
|
return wrapper
|
||||||
|
end
|
||||||
|
|
||||||
|
function c_deliveryinterval2()
|
||||||
|
local container = FormulaCacheContainer()
|
||||||
|
function wrapper(stock, timetag, formula)
|
||||||
|
return deliveryinterval2_c(container, stock, timetag, formula)
|
||||||
|
end
|
||||||
|
return wrapper
|
||||||
|
end
|
||||||
|
|
||||||
|
function c_deliveryinterval3()
|
||||||
|
local container = FormulaCacheContainer()
|
||||||
|
function wrapper(timetag, formula)
|
||||||
|
return deliveryinterval3_c(container, timetag, formula)
|
||||||
|
end
|
||||||
|
return wrapper
|
||||||
|
end
|
||||||
|
|
||||||
|
function c_getcbconversionvalue()
|
||||||
|
local container = FormulaCacheContainer()
|
||||||
|
function wrapper(code, timetag, formula)
|
||||||
|
return getcbconversionvalue_c(container, code, timetag, formula)
|
||||||
|
end
|
||||||
|
return wrapper
|
||||||
|
end
|
||||||
|
|
||||||
|
function c_getcbconversionpremium()
|
||||||
|
local container = FormulaCacheContainer()
|
||||||
|
function wrapper(code, timetag, formula)
|
||||||
|
return getcbconversionpremium_c(container, code, timetag, formula)
|
||||||
|
end
|
||||||
|
return wrapper
|
||||||
|
end
|
||||||
|
|
||||||
|
function c_getorderflowdetail()
|
||||||
|
local container = FormulaCacheContainer()
|
||||||
|
function wrapper(price, index, timetag, formula)
|
||||||
|
return getorderflowdetail_c(container, price, index, timetag, formula)
|
||||||
|
end
|
||||||
|
return wrapper
|
||||||
|
end
|
||||||
|
|
||||||
|
function c_getorderflow()
|
||||||
|
local container = FormulaCacheContainer()
|
||||||
|
function wrapper(index, timetag, formula)
|
||||||
|
return getorderflow_c(container, index, timetag, formula)
|
||||||
|
end
|
||||||
|
return wrapper
|
||||||
|
end
|
||||||
|
|
||||||
|
function c_getorderflowunbalance()
|
||||||
|
local container = FormulaCacheContainer()
|
||||||
|
function wrapper(threshold, thresholdTimes, barcount, timetag, formula)
|
||||||
|
return getorderflowunbalance_c(container, threshold, thresholdTimes, barcount, timetag, formula)
|
||||||
|
end
|
||||||
|
return wrapper
|
||||||
|
end
|
||||||
|
|
||||||
|
function c_getorderflowunbalancepricein()
|
||||||
|
local container = FormulaCacheContainer()
|
||||||
|
function wrapper(threshold, thresholdTimes, barcount, price1, price2, timetag, formula)
|
||||||
|
return getorderflowunbalancepricein_c(container, threshold, thresholdTimes, barcount, price1, price2, timetag, formula)
|
||||||
|
end
|
||||||
|
return wrapper
|
||||||
|
end
|
||||||
|
|
||||||
|
function c_getorderflowpoc()
|
||||||
|
local container = FormulaCacheContainer()
|
||||||
|
function wrapper(timetag, formula)
|
||||||
|
return getorderflowpoc_c(container, timetag, formula)
|
||||||
|
end
|
||||||
|
return wrapper
|
||||||
|
end
|
||||||
|
|
||||||
|
function c_getorderflowdelta()
|
||||||
|
local container = FormulaCacheContainer()
|
||||||
|
function wrapper(timetag, formula)
|
||||||
|
return getorderflowdelta_c(container, timetag, formula)
|
||||||
|
end
|
||||||
|
return wrapper
|
||||||
|
end
|
||||||
|
|
||||||
|
function c_getlastfuturemonth()
|
||||||
|
local container = FormulaCacheContainer()
|
||||||
|
function wrapper(code, index, timetag, formula)
|
||||||
|
return getlastfuturemonth_c(container, code, index, timetag, formula)
|
||||||
|
end
|
||||||
|
return wrapper
|
||||||
|
end
|
||||||
|
|
||||||
|
function c_getlastfuturecode()
|
||||||
|
local container = FormulaCacheContainer()
|
||||||
|
function wrapper(code, index, timetag, formula)
|
||||||
|
return getlastfuturecode_c(container, code, index, timetag, formula)
|
||||||
|
end
|
||||||
|
return wrapper
|
||||||
|
end
|
||||||
|
|
||||||
|
function c_extdatablocksplitavg()
|
||||||
|
local cache = ExtFormulaCache()
|
||||||
|
function wrapper(name, sector, total, index, timetag, formula)
|
||||||
|
return extdatablocksplitavg_c(name, sector, total, index, cache, timetag, formula)
|
||||||
|
end
|
||||||
|
return wrapper
|
||||||
|
end
|
||||||
|
|
||||||
|
function c_getcapitalflow()
|
||||||
|
local container = FormulaCacheContainer()
|
||||||
|
function wrapper(filed, rank, timetag, formula)
|
||||||
|
return getcapitalflow_c(filed, rank, timetag, formula, container)
|
||||||
|
end
|
||||||
|
return wrapper
|
||||||
|
end
|
||||||
|
|
||||||
|
function c_getcapitalflowbyholder()
|
||||||
|
local container = FormulaCacheContainer()
|
||||||
|
function wrapper(sharedholder, filed, timetag, formula)
|
||||||
|
return getcapitalflowbyholder_c(sharedholder, filed, timetag, formula, container)
|
||||||
|
end
|
||||||
|
return wrapper
|
||||||
|
end
|
||||||
|
|
||||||
|
function c_getfuturecode()
|
||||||
|
local container = FormulaCacheContainer()
|
||||||
|
function wrapper(code, timetag, formula)
|
||||||
|
return getfuturecode_c(container, code, timetag, formula)
|
||||||
|
end
|
||||||
|
return wrapper
|
||||||
|
end
|
||||||
|
|
||||||
|
function c_winner()
|
||||||
|
local container = FormulaCacheContainer()
|
||||||
|
function wrapper(price,timetag,formula)
|
||||||
|
return winner_cost_c(price,0,container,timetag,formula)
|
||||||
|
end
|
||||||
|
return wrapper
|
||||||
|
end
|
||||||
|
|
||||||
|
function c_cost()
|
||||||
|
local container = FormulaCacheContainer()
|
||||||
|
function wrapper(price,timetag,formula)
|
||||||
|
return winner_cost_c(price,1,container,timetag,formula)
|
||||||
|
end
|
||||||
|
return wrapper
|
||||||
|
end
|
||||||
|
|
||||||
|
function c_findblocklist()
|
||||||
|
local container = FormulaCacheContainer()
|
||||||
|
function wrapper(folder, formula)
|
||||||
|
return findblocklist_c(container, folder, formula)
|
||||||
|
end
|
||||||
|
return wrapper
|
||||||
|
end
|
||||||
|
|
||||||
|
function c_unitofquantity()
|
||||||
|
local container = FormulaCacheContainer()
|
||||||
|
function wrapper(code, formula)
|
||||||
|
return unitofquantity_c(code, container, formula)
|
||||||
|
end
|
||||||
|
return wrapper
|
||||||
|
end
|
||||||
|
|
||||||
|
function c_equalweightindex()
|
||||||
|
local container = FormulaCacheContainer()
|
||||||
|
function wrapper(code, formula)
|
||||||
|
return equalweightindex_c(code, container, formula)
|
||||||
|
end
|
||||||
|
return wrapper
|
||||||
|
end
|
||||||
|
|
||||||
|
function c_isindexorglr()
|
||||||
|
local container = FormulaCacheContainer()
|
||||||
|
function wrapper(code, formula)
|
||||||
|
return isindexorglr_c(code, container, formula)
|
||||||
|
end
|
||||||
|
return wrapper
|
||||||
|
end
|
||||||
|
|
||||||
|
function c_isetfcode()
|
||||||
|
local container = FormulaCacheContainer()
|
||||||
|
function wrapper(code, formula)
|
||||||
|
return isetfcode_c(container, code, formula)
|
||||||
|
end
|
||||||
|
return wrapper
|
||||||
|
end
|
||||||
|
|
||||||
|
function c_isindexcode()
|
||||||
|
local container = FormulaCacheContainer()
|
||||||
|
function wrapper(code, formula)
|
||||||
|
return isindexcode_c(container, code, formula)
|
||||||
|
end
|
||||||
|
return wrapper
|
||||||
|
end
|
||||||
|
|
||||||
|
function c_isfuturecode()
|
||||||
|
local container = FormulaCacheContainer()
|
||||||
|
function wrapper(code, formula)
|
||||||
|
return isfuturecode_c(container, code, formula)
|
||||||
|
end
|
||||||
|
return wrapper
|
||||||
|
end
|
||||||
|
|
||||||
|
function c_upstopprice()
|
||||||
|
local container = FormulaCacheContainer()
|
||||||
|
function upstopprice(stockcode, timetag, formula)
|
||||||
|
return stopprice_c(container, stockcode, 1, timetag, formula)
|
||||||
|
end
|
||||||
|
return upstopprice
|
||||||
|
end
|
||||||
|
|
||||||
|
function c_downstopprice()
|
||||||
|
local container = FormulaCacheContainer()
|
||||||
|
function downstopprice(stockcode, timetag, formula)
|
||||||
|
return stopprice_c(container, stockcode, 2, timetag, formula)
|
||||||
|
end
|
||||||
|
return downstopprice
|
||||||
|
end
|
||||||
|
|
||||||
|
function c_dividfactor()
|
||||||
|
local container = FormulaCacheContainer()
|
||||||
|
function func(type, timetag, formula)
|
||||||
|
return dividfactor_c(container, type, timetag, formula)
|
||||||
|
end
|
||||||
|
return func
|
||||||
|
end
|
||||||
|
|
||||||
|
function c_getinstrumentdetail()
|
||||||
|
local container = FormulaCacheContainer()
|
||||||
|
function wrapper(stockcode, fieldname, timetag, formula)
|
||||||
|
return getinstrumentdetail_c(container, stockcode, fieldname, timetag, formula)
|
||||||
|
end
|
||||||
|
return wrapper
|
||||||
|
end
|
||||||
|
|
||||||
|
function c_limitupperformance()
|
||||||
|
local container = FormulaCacheContainer()
|
||||||
|
function wrapper(stockcode, type, timetag, formula)
|
||||||
|
return limitupperformance_c(container, stockcode, type, timetag, formula)
|
||||||
|
end
|
||||||
|
return wrapper
|
||||||
|
end
|
||||||
|
|
||||||
|
function c_fundnetvalue()
|
||||||
|
local container = FormulaCacheContainer()
|
||||||
|
function wrapper(stockcode, type, timetag, formula)
|
||||||
|
return fundnetvalue_c(container, stockcode, type, timetag, formula)
|
||||||
|
end
|
||||||
|
return wrapper
|
||||||
|
end
|
||||||
|
|
||||||
|
function c_get_etf_statistics()
|
||||||
|
local container = FormulaCacheContainer()
|
||||||
|
function fun(stockcode, field, timetag, formula)
|
||||||
|
return get_etf_statistics_c(container, 1, stockcode, field, timetag, formula)
|
||||||
|
end
|
||||||
|
return fun
|
||||||
|
end
|
||||||
|
|
||||||
|
function c_get_etf_statisticsl2()
|
||||||
|
local container = FormulaCacheContainer()
|
||||||
|
function fun(stockcode, field, timetag, formula)
|
||||||
|
return get_etf_statistics_c(container, 2, stockcode, field, timetag, formula)
|
||||||
|
end
|
||||||
|
return fun
|
||||||
|
end
|
657
src/xtquant/config/user/root2/lua/FunRef.lua
Normal file
657
src/xtquant/config/user/root2/lua/FunRef.lua
Normal file
@ -0,0 +1,657 @@
|
|||||||
|
------------------------------------------------------------
|
||||||
|
-- 引用函数
|
||||||
|
-- 由脚本引擎预先定义,如果有性能问题,可用C++改写
|
||||||
|
-- @author zhangjin
|
||||||
|
-- @since 2012-9-18
|
||||||
|
-----------------------------------------------------------
|
||||||
|
|
||||||
|
function c_ref()
|
||||||
|
local history = FormulaDataCacheDouble(0, 0)
|
||||||
|
function ref(X, distance, timetag, style)
|
||||||
|
if not(X) then
|
||||||
|
X = 0
|
||||||
|
end
|
||||||
|
return ref_c(X, distance, timetag, history, style)
|
||||||
|
end
|
||||||
|
return ref
|
||||||
|
end
|
||||||
|
|
||||||
|
function c_barslast()
|
||||||
|
local lastTrue = -1
|
||||||
|
local lastTrue_1 = -1
|
||||||
|
local curTimetag = -1
|
||||||
|
function barslast(condition, timetag)
|
||||||
|
if curTimetag ~= timetag then
|
||||||
|
curTimetag = timetag
|
||||||
|
lastTrue_1 = lastTrue
|
||||||
|
else
|
||||||
|
lastTrue = lastTrue_1
|
||||||
|
end
|
||||||
|
|
||||||
|
if not(condition) then
|
||||||
|
condition = false
|
||||||
|
end
|
||||||
|
|
||||||
|
if condition then
|
||||||
|
lastTrue = curTimetag
|
||||||
|
end
|
||||||
|
|
||||||
|
if lastTrue == -1 then
|
||||||
|
return 0
|
||||||
|
else
|
||||||
|
return curTimetag - lastTrue
|
||||||
|
end
|
||||||
|
end
|
||||||
|
return barslast
|
||||||
|
end
|
||||||
|
|
||||||
|
function c_barslasts()
|
||||||
|
local container = FormulaCacheContainer()
|
||||||
|
function wrapper(condition, N, timetag)
|
||||||
|
return barslasts_c(condition, N, container, timetag)
|
||||||
|
end
|
||||||
|
return wrapper
|
||||||
|
end
|
||||||
|
|
||||||
|
function c_count()
|
||||||
|
local history = FormulaDataCacheBool(0, 0)
|
||||||
|
function count(condition, N, timetag, type)
|
||||||
|
if not(condition) then
|
||||||
|
condition = false
|
||||||
|
end
|
||||||
|
return count_c(condition, N, timetag, history, type)
|
||||||
|
end
|
||||||
|
return count
|
||||||
|
end
|
||||||
|
|
||||||
|
function c_ma()
|
||||||
|
local history = FormulaDataCacheDouble(0, 0)
|
||||||
|
function ma(X, N, timetag, type)
|
||||||
|
if not(X) then
|
||||||
|
X = 0
|
||||||
|
end
|
||||||
|
local avg = ma_c(X, N, timetag, history, type)
|
||||||
|
return avg;
|
||||||
|
end
|
||||||
|
return ma
|
||||||
|
end
|
||||||
|
|
||||||
|
function c_xma()
|
||||||
|
local history = FormulaDataCacheDouble(0, 0)
|
||||||
|
function xma(X, N, timetag, __formula, type)
|
||||||
|
if not(X) then
|
||||||
|
X = 0
|
||||||
|
end
|
||||||
|
local avg = xma_c(X, N, timetag, history, __formula, type)
|
||||||
|
return avg;
|
||||||
|
end
|
||||||
|
return xma
|
||||||
|
end
|
||||||
|
|
||||||
|
function c_ima()
|
||||||
|
local history = FormulaDataCacheDouble(0, 0)
|
||||||
|
function ima(X, N, S, timetag, type)
|
||||||
|
if not(X) then
|
||||||
|
X = 0
|
||||||
|
end
|
||||||
|
return ima_c(X, N, S, timetag, history, type)
|
||||||
|
end
|
||||||
|
return ima
|
||||||
|
end
|
||||||
|
|
||||||
|
|
||||||
|
--求动态移动平均 jch
|
||||||
|
function c_dma()
|
||||||
|
local last
|
||||||
|
local lasttimetag = -1
|
||||||
|
local ret = math.huge
|
||||||
|
function dma(X, A, timetag)
|
||||||
|
if not(X) then
|
||||||
|
X = 0
|
||||||
|
end
|
||||||
|
if lasttimetag ~= timetag then --主推数据
|
||||||
|
last = ret
|
||||||
|
lasttimetag = timetag
|
||||||
|
end
|
||||||
|
if not(isValid(last)) then
|
||||||
|
last = X
|
||||||
|
end
|
||||||
|
if (A > 1 or A <= 0) then
|
||||||
|
A = 1
|
||||||
|
end
|
||||||
|
ret = A * X + (1 - A) * last
|
||||||
|
return ret;
|
||||||
|
end
|
||||||
|
return dma
|
||||||
|
end
|
||||||
|
|
||||||
|
--[[求动态移动平均
|
||||||
|
function c_dma()
|
||||||
|
local last = 0
|
||||||
|
local ret
|
||||||
|
function dma(X, A, timetag)
|
||||||
|
last, ret = dma_c(X, last, A, timetag);
|
||||||
|
return ret;
|
||||||
|
end
|
||||||
|
return dma
|
||||||
|
end]]--
|
||||||
|
|
||||||
|
--求指数平滑移动平均 jch
|
||||||
|
-- Exponential moving average: ret = (2*X + (N-1)*last) / (N+1).
-- Same live-push commit pattern as c_dma: state advances only when
-- timetag changes, so re-pushed bars do not compound.
function c_ema()
    local last              -- value committed at the previous bar
    local lasttimetag = -1  -- timetag of the last call
    local ret = math.huge   -- running result; math.huge marks "not yet valid"
    function ema(X, N, timetag)
        if not(X) then
            X = 0
        end
        if lasttimetag ~= timetag then -- new bar: commit previous result (live-push data re-sends the same timetag)
            last = ret
            lasttimetag = timetag
        end
        if not(isValid(last)) then
            last = X -- seed with the first valid input
        end
        ret = (2 * X + (N - 1) * last) / (N + 1)
        return ret;
    end
    return ema
end
|
||||||
|
|
||||||
|
--[[求指数平滑移动平均
|
||||||
|
function c_ema()
|
||||||
|
local history = FormulaDataCacheDouble(0, 0)
|
||||||
|
function ema(X, N, timetag)
|
||||||
|
ret = ema_c(X, last, N, timetag);
|
||||||
|
return ret;
|
||||||
|
end
|
||||||
|
return ema
|
||||||
|
end
|
||||||
|
function c_sma()
|
||||||
|
local last = math.huge
|
||||||
|
local lasttimetag = -1
|
||||||
|
local ret = 0
|
||||||
|
function sma(X, N, M, timetag)
|
||||||
|
if timetag == 0 and isValid(X) then
|
||||||
|
last = X
|
||||||
|
end
|
||||||
|
if not(isValid(X)) then
|
||||||
|
X = math.huge
|
||||||
|
end
|
||||||
|
local ret = (X * M + (N - M) * last) / N;
|
||||||
|
if isValid(X) and not(isValid(ret)) then
|
||||||
|
last = X
|
||||||
|
else
|
||||||
|
last = ret
|
||||||
|
end
|
||||||
|
return ret
|
||||||
|
end
|
||||||
|
return sma
|
||||||
|
end
|
||||||
|
]]--
|
||||||
|
--移动平均 jch
|
||||||
|
-- Weighted moving average (TDX SMA): ret = (M*X + (N-M)*last) / N.
-- Live-push safe: state advances only when timetag changes.
function c_sma()
    local last = math.huge  -- value committed at the previous bar
    local lasttimetag = -1  -- timetag of the last call
    local ret = math.huge   -- running result; math.huge marks "not yet valid"
    function sma(X, N, M, timetag)
        if not(X) then
            X = 0
        end
        if lasttimetag ~= timetag then -- new bar: commit previous result
            last = ret
            lasttimetag = timetag
        end
        if not(isValid(last)) then
            last = X -- seed with the first valid input
        end
        ret = (M * X + (N - M) * last) / N
        return ret
    end
    return sma
end
|
||||||
|
|
||||||
|
--递归移动平均 jch
|
||||||
|
-- Recursive moving average (TDX TMA): ret = N*last + M*X.
-- NOTE(review): there is no normalization here — N and M appear to be
-- raw weights (typically expected to sum to 1); confirm caller contract.
function c_tma()
    local last              -- value committed at the previous bar
    local lasttimetag = -1  -- timetag of the last call
    local ret = math.huge   -- running result; math.huge marks "not yet valid"
    function tma(X, N, M, timetag)
        if not(X) then
            X = 0
        end
        if lasttimetag ~= timetag then -- new bar: commit previous result
            last = ret
            lasttimetag = timetag
        end
        if not(isValid(last)) then
            last = X -- seed with the first valid input
        end
        ret = N * last + M * X
        return ret
    end
    return tma
end
|
||||||
|
--[[递归移动平均
|
||||||
|
function c_tma()
|
||||||
|
local last = 0
|
||||||
|
function tma(X, N, M, timetag)
|
||||||
|
if not(isValid(X)) then
|
||||||
|
X = math.huge;
|
||||||
|
end
|
||||||
|
local ret = M * X + N * last;
|
||||||
|
last = ret
|
||||||
|
return ret
|
||||||
|
end
|
||||||
|
return tma
|
||||||
|
end]]--
|
||||||
|
|
||||||
|
-- Factory for SUM over the last N bars; windowing done by native sum_c.
function c_sum()
    local cache = FormulaDataCacheDouble(1)
    function sum(X, N, timetag, type)
        X = X or 0 -- missing input counts as zero
        return sum_c(X, N, cache, timetag, type)
    end
    return sum
end
|
||||||
|
|
||||||
|
-- Factory for HHV: highest value of X over the last N bars.
-- hhv_c returns (barsAgo, highest); HHV keeps the value.
function c_hhv()
    local cache = FormulaDataCacheDouble(-math.huge, -1)
    function hhv(X, N, timetag, style)
        X = X or 0 -- missing input counts as zero
        local _, highest = hhv_c(X, N, timetag, cache, style)
        return highest
    end
    return hhv
end
|
||||||
|
|
||||||
|
-- Factory for HHVBARS: bars since the highest value in the last N bars.
-- hhv_c returns (barsAgo, highest); HHVBARS keeps the position.
function c_hhvbars()
    local cache = FormulaDataCacheDouble(-math.huge, -1)
    function hhvbars(X, N, timetag, style)
        X = X or 0 -- missing input counts as zero
        local barsAgo = hhv_c(X, N, timetag, cache, style)
        return barsAgo
    end
    return hhvbars
end
|
||||||
|
|
||||||
|
-- Factory for LLV: lowest value of X over the last N bars.
-- llv_c returns (barsAgo, lowest); LLV keeps the value.
function c_llv()
    local cache = FormulaDataCacheDouble(math.huge, -1)
    function llv(X, N, timetag, type)
        X = X or 0 -- missing input counts as zero
        local _, lowest = llv_c(X, N, timetag, cache, type)
        return lowest
    end
    return llv
end
|
||||||
|
|
||||||
|
-- Factory for LLVBARS: bars since the lowest value in the last N bars.
-- llv_c returns (barsAgo, lowest); LLVBARS keeps the position.
function c_llvbars()
    local cache = FormulaDataCacheDouble(math.huge, -1)
    function llvbars(X, N, timetag, type)
        X = X or 0 -- missing input counts as zero
        local barsAgo = llv_c(X, N, timetag, cache, type)
        return barsAgo
    end
    return llvbars
end
|
||||||
|
|
||||||
|
-- FILTER(val, N): once a non-zero signal fires, suppress further signals
-- for the next N bars.  Uses a committed/provisional pair so a live-pushed
-- bar (same timetag re-sent) does not double-commit.
function c_filter()
    local lastTrue = -1         -- bar of last accepted signal (committed)
    local lastTimetag = -1      -- last bar seen
    local realTimeLastTrue = -1 -- bar of last accepted signal (provisional)
    function filter(val, N, timetag)
        local ret = 0
        if timetag ~= lastTimetag then
            lastTrue = realTimeLastTrue -- new bar: commit provisional state
        end
        if timetag - lastTrue > N then
            ret = val
            if val > 0 then
                realTimeLastTrue = timetag
            else
                realTimeLastTrue = lastTrue
            end
        end
        lastTimetag = timetag
        return ret
    end
    return filter
end
|
||||||
|
|
||||||
|
-- SFILTER(X, cond): signal filter gated by a condition.  While cond is
-- false, X is suppressed if a signal already fired after the last cond bar.
function c_sfilter()
    local lastX = 0    -- timetag of the last true X
    local lastCond = 0 -- timetag of the last true cond
    function sfilter(X, cond, timetag)
        if cond then
            lastCond = timetag
            if X then
                lastX = timetag
            end
            return X
        else
            if lastX > lastCond then
                return false -- a signal already fired since the last cond: suppress
            else
                if X then
                    lastX = timetag
                end
                return X
            end
        end
    end
    return sfilter
end
|
||||||
|
|
||||||
|
-- BARSCOUNT(X): 1-based count of bars since X first became valid;
-- NaN before any valid data.
-- NOTE(review): the local boolean `isValid` shadows the global isValid()
-- helper used elsewhere in this file, and the check below calls `valid(X)`
-- rather than isValid(X) — confirm a global `valid` actually exists.
function c_barscount()
    local isValid = false -- becomes true once a valid X has been seen
    local first = 0       -- timetag of the first valid X
    function barscount(X, timetag)
        if isValid then
            return timetag - first + 1
        elseif valid(X) then
            isValid = true
            first = timetag
            return 1
        else
            return 0 / 0 -- NaN: no valid data yet
        end
    end
    return barscount
end
|
||||||
|
|
||||||
|
-- BARSSINCEN(cond, n): bars since cond last held, looking back at most n
-- bars.  Timetags where cond held are queued in indexArray between
-- indFirst and indLast (indFirst >= indLast means the queue is empty).
function c_barssincen()
    local isFirst = true  -- true until cond has held at least once
    local index = 0       -- timetag of the oldest queued cond inside the window
    local ret = 0
    local indexArray = {} -- queue of timetags where cond held
    local indFirst = 0    -- queue head; indFirst >= indLast means empty
    local indLast = 0     -- queue tail
    function barssincen(condition,n,timetag)
        if n < 2 then
            return 0
        end
        ret = 0
        if timetag >= n-1 and (not isFirst) then -- timetag is 0-based
            if timetag - index > n-1 then
                -- oldest hit fell out of the window: advance to the next one
                if indFirst < indLast then
                    indFirst = indFirst + 1
                    index = indexArray[indFirst]
                    ret = timetag - index
                    indexArray[indFirst] = nil -- free the slot
                end
            else
                ret = timetag - index
            end
        end
        if condition then
            indLast = indLast + 1
            indexArray[indLast] = timetag
            if isFirst then
                isFirst = false
                index = timetag
                if index == 0 then -- if the very first bar matches, drop it from the queue so index=0 is not re-read next period
                    indexArray[indLast] = nil
                    indLast = indLast - 1
                end
            end
        end

        return ret
    end
    return barssincen
end
|
||||||
|
|
||||||
|
-- BARSSINCE(cond): bars elapsed since cond first held; 0 until then.
function c_barssince()
    local isFirst = false -- true once cond has held at least once (name reads inverted)
    local index = 0       -- timetag of the first cond
    function barssince(condition, timetag)
        if isFirst then
            return timetag - index
        elseif condition then
            isFirst = true
            index = timetag
            return 0
        else
            return 0
        end
    end
    return barssince
end
|
||||||
|
|
||||||
|
-- NOTE(review): this is an identical duplicate of c_barssincen defined
-- earlier in this file; this later definition wins at load time.
-- Consider deleting one copy (kept here to avoid load-order surprises).
-- BARSSINCEN(cond, n): bars since cond last held, looking back at most n bars.
function c_barssincen()
    local isFirst = true  -- true until cond has held at least once
    local index = 0       -- timetag of the oldest queued cond inside the window
    local ret = 0
    local indexArray = {} -- queue of timetags where cond held
    local indFirst = 0    -- queue head; indFirst >= indLast means empty
    local indLast = 0     -- queue tail
    function barssincen(condition,n,timetag)
        if n < 2 then
            return 0
        end
        ret = 0
        if timetag >= n-1 and (not isFirst) then -- timetag is 0-based
            if timetag - index > n-1 then
                -- oldest hit fell out of the window: advance to the next one
                if indFirst < indLast then
                    indFirst = indFirst + 1
                    index = indexArray[indFirst]
                    ret = timetag - index
                    indexArray[indFirst] = nil -- free the slot
                end
            else
                ret = timetag - index
            end
        end
        if condition then
            indLast = indLast + 1
            indexArray[indLast] = timetag
            if isFirst then
                isFirst = false
                index = timetag
                if index == 0 then -- if the very first bar matches, drop it from the queue so index=0 is not re-read next period
                    indexArray[indLast] = nil
                    indLast = indLast - 1
                end
            end
        end

        return ret
    end
    return barssincen
end
|
||||||
|
|
||||||
|
-- CURRBARSCOUNT: bars from this bar to the last bar, inclusive.
function currbarscount(timetag, __formula)
    return __formula:getLastBar() - timetag + 1
end
|
||||||
|
|
||||||
|
-- DRAWNULL: NaN sentinel telling the renderer to draw nothing.
function drawnull()
    local nan = 0 / 0
    return nan
end
|
||||||
|
|
||||||
|
-- True Range: max(high-low, high-prevClose, prevClose-low).
-- prevClose starts as NaN, so the NaN comparisons make the very first
-- bar return NaN; real values flow from the second bar onward.
function c_tr()
    local lastClose = 0 / 0 -- previous bar's close; NaN until one exists
    function tr(timetag, __formula)
        local c = close(timetag, __formula)
        local h = high(timetag, __formula)
        local l = low(timetag, __formula)
        local v1 = h - l         -- intraday range
        local v2 = h - lastClose -- gap up component
        local v3 = lastClose - l -- gap down component
        lastClose = c
        -- three-way max written as explicit comparisons
        if v1 > v2 then
            if v1 > v3 then
                return v1
            else
                return v3
            end
        else
            if v2 > v3 then
                return v2
            else
                return v3
            end
        end
    end
    return tr
end
|
||||||
|
|
||||||
|
-- Factory for WMA: linearly weighted moving average via native wma_c.
function c_wma()
    local cache = FormulaDataCacheDouble(2)
    function wma(X, N, timetag, tp)
        X = X or 0 -- missing input counts as zero
        return wma_c(X, N, timetag, cache, tp)
    end
    return wma
end
|
||||||
|
|
||||||
|
-- Factory for TRMA (triangular MA): an MA of an MA, with the window
-- split according to the parity of N.
function c_trma()
    local firstPass = c_ma()
    local secondPass = c_ma()
    function trma(X, N, timetag, type)
        X = X or 0 -- missing input counts as zero
        if N % 2 == 1 then
            local half = (N + 1) / 2
            local smoothed = firstPass(X, half, timetag, type)
            return secondPass(smoothed, half, timetag, type)
        end
        local half = N / 2
        local smoothed = firstPass(X, half, timetag, type)
        return secondPass(smoothed, half + 1, timetag, type)
    end
    return trma
end
|
||||||
|
|
||||||
|
-- Factory for RET: delegates to the native ret_c binding.
function c_ret()
    local cache = FormulaDataCacheDouble(0)
    function ret(X, A, timetag, __formula, type)
        X = X or 0 -- missing input counts as zero
        return ret_c(X, A, timetag, __formula, cache, type)
    end
    return ret
end
|
||||||
|
|
||||||
|
-- Factory for NEWHBARS: bars since a new high, via native newhbars_c.
function c_newhbars()
    local cache = FormulaDataCacheDouble(0)
    function newhbars(X, N, timetag, type)
        X = X or 0 -- missing input counts as zero
        return newhbars_c(X, N, timetag, cache, type)
    end
    return newhbars
end
|
||||||
|
|
||||||
|
-- Factory for NEWLBARS: bars since a new low. Implemented by negating
-- the input and reusing the new-high native binding.
function c_newlbars()
    local cache = FormulaDataCacheDouble(0)
    function newlbars(X, N, timetag, type)
        X = X or 0 -- missing input counts as zero
        return newhbars_c( -1 * X, N, timetag, cache, type)
    end
    return newlbars
end
|
||||||
|
|
||||||
|
-- Factory for HOD: rank of the current value among the last N highs.
function c_hod()
    local cache = FormulaDataCacheDouble(0)
    function hod(X, N, timetag)
        X = X or 0 -- missing input counts as zero
        return hod_c(X, N, timetag, cache)
    end
    return hod
end
|
||||||
|
|
||||||
|
-- Factory for LOD: rank among the last N lows, via the negated
-- input trick on the HOD native binding.
function c_lod()
    local cache = FormulaDataCacheDouble(0)
    function lod(X, N, timetag)
        X = X or 0 -- missing input counts as zero
        return hod_c( -1 * X, N, timetag, cache)
    end
    return lod
end
|
||||||
|
--[[
|
||||||
|
function c_sumbars()
|
||||||
|
local history = FormulaDataCacheDouble(2)
|
||||||
|
local sum = 0.0
|
||||||
|
local period = 0
|
||||||
|
function sumbars(X, A)
|
||||||
|
sum, period = sumbars_c(X, A, sum, period, history)
|
||||||
|
return period
|
||||||
|
end
|
||||||
|
return sumbars
|
||||||
|
end]]--
|
||||||
|
-- Factory for SUMBARS: bars needed for the running sum of X to reach A.
function c_sumbars()
    local cache = FormulaDataCacheDouble(0,0)
    function sumbars(X, A, timetag)
        X = X or 0 -- missing input counts as zero
        return sumbars_c(X, A, timetag, cache)
    end
    return sumbars
end
|
||||||
|
|
||||||
|
-- Factory for BARSLASTCOUNT: consecutive bars the condition has held.
function c_barslastcount()
    local cache = FormulaCacheContainer()
    function wrapper(condition, timetag, __formula)
        return barslastcount_c(cache, condition, timetag, __formula)
    end
    return wrapper
end
|
||||||
|
|
||||||
|
-- Modified EMA (Wilder smoothing): ret = (X + (N-1)*last) / N.
-- Same live-push commit pattern as c_dma/c_ema.
function c_mema()
    local last              -- value committed at the previous bar
    local lasttimetag = -1  -- timetag of the last call
    local ret = math.huge   -- running result; math.huge marks "not yet valid"
    function mema(X, N, timetag)
        if not(X) then
            X = 0
        end
        if lasttimetag ~= timetag then -- new bar: commit previous result (live-push data re-sends the same timetag)
            last = ret
            lasttimetag = timetag
        end
        if not(isValid(last)) then
            last = X -- seed with the first valid input
        end
        ret = (X + (N - 1) * last) / N
        return ret;
    end
    return mema
end
|
||||||
|
|
395
src/xtquant/config/user/root2/lua/FunStatistic.lua
Normal file
395
src/xtquant/config/user/root2/lua/FunStatistic.lua
Normal file
@ -0,0 +1,395 @@
|
|||||||
|
------------------------------------------------------------
|
||||||
|
-- 统计函数
|
||||||
|
-- 由脚本引擎预先定义,如果有性能问题,可用C++改写
|
||||||
|
-- @author zhangjin
|
||||||
|
-- @since 2012-9-18
|
||||||
|
-----------------------------------------------------------
|
||||||
|
|
||||||
|
--判别调用
|
||||||
|
-- Poisson distribution dispatcher: a numeric cumulative flag v3 is
-- collapsed to a boolean (v3 > 0) before delegating to native poisson_c;
-- any other type is passed straight through.
function poisson(v1, v2, v3, timetag, formula)
    if type(v3) == "number" then
        return poisson_c(v1, v2, v3 > 0, timetag, formula)
    end
    return poisson_c(v1, v2, v3, timetag, formula)
end
|
||||||
|
|
||||||
|
-- Weibull distribution: thin pass-through to the native weibull_c binding.
function weibull(v1, v2, v3, v4, timetag, formula)
    local result = weibull_c(v1, v2, v3, v4, timetag, formula)
    return result
end
|
||||||
|
|
||||||
|
-- Exponential distribution: thin pass-through to native expondist_c.
function expondist(v1, v2, v3, timetag, formula)
    local result = expondist_c(v1, v2, v3, timetag, formula)
    return result
end
|
||||||
|
|
||||||
|
-- Binomial distribution: thin pass-through to native binomdist_c.
function binomdist(v1, v2, v3, v4, timetag, formula)
    local result = binomdist_c(v1, v2, v3, v4, timetag, formula)
    return result
end
|
||||||
|
|
||||||
|
|
||||||
|
-- Default slot count for the FormulaDataCacheDouble buffers used by the
-- statistic wrappers below (deliberately global: shared by this module).
cacheDoubleNum = 4
|
||||||
|
--drl2 曲线回归偏离度
|
||||||
|
-- Factory for DRL2: quadratic-regression deviation, three rolling caches,
-- math in the native drl2 binding.
function c_drl2()
    local bufA = FormulaDataCacheDouble(15)
    local bufB = FormulaDataCacheDouble(15)
    local bufC = FormulaDataCacheDouble(15)
    function drl2_func(value, N, timetag, __formula)
        return drl2(value, bufA, bufB, bufC, N, timetag, __formula)
    end
    return drl2_func
end
|
||||||
|
--forecast2 二次曲线回归预测值。
|
||||||
|
-- Factory for FORECAST2: quadratic-regression forecast via native forecast2.
function c_forecast2()
    local bufA = FormulaDataCacheDouble(10)
    local bufB = FormulaDataCacheDouble(10)
    local bufC = FormulaDataCacheDouble(10)
    function forecast2_func(value, N, timetag, __formula)
        return forecast2(value, bufA, bufB, bufC, N, timetag, __formula)
    end
    return forecast2_func
end
|
||||||
|
--slope 曲线回归相关系数
|
||||||
|
-- Factory for SLOPE20: quadratic-regression coefficient via native slope20.
function c_slope20()
    local bufA = FormulaDataCacheDouble(10)
    local bufB = FormulaDataCacheDouble(10)
    local bufC = FormulaDataCacheDouble(10)
    function slope20_func(value, N, timetag, __formula)
        return slope20(value, bufA, bufB, bufC, N, timetag, __formula)
    end
    return slope20_func
end
|
||||||
|
-- Factory for SLOPE21: quadratic-regression coefficient via native slope21.
function c_slope21()
    local bufA = FormulaDataCacheDouble(10)
    local bufB = FormulaDataCacheDouble(10)
    local bufC = FormulaDataCacheDouble(10)
    function slope21_func(value, N, timetag, __formula)
        return slope21(value, bufA, bufB, bufC, N, timetag, __formula)
    end
    return slope21_func
end
|
||||||
|
-- Factory for SLOPE22: quadratic-regression coefficient via native slope22.
function c_slope22()
    local bufA = FormulaDataCacheDouble(10)
    local bufB = FormulaDataCacheDouble(10)
    local bufC = FormulaDataCacheDouble(10)
    function slope22_func(value, N, timetag, __formula)
        return slope22(value, bufA, bufB, bufC, N, timetag, __formula)
    end
    return slope22_func
end
|
||||||
|
--drl 直线回归偏离度
|
||||||
|
-- Factory for DRL: linear-regression deviation via the native drl binding.
function c_drl()
    local yBuf = FormulaDataCacheDouble(cacheDoubleNum)
    local xBuf = FormulaDataCacheDouble(cacheDoubleNum)
    function drl_func(value, N, timetag, __formula)
        return drl(value, yBuf, xBuf, N, timetag, __formula)
    end
    return drl_func
end
|
||||||
|
--forecast 线性回归预测值。
|
||||||
|
-- Factory for FORECAST: linear-regression forecast via native forecast.
function c_forecast()
    local yBuf = FormulaDataCacheDouble(cacheDoubleNum)
    local xBuf = FormulaDataCacheDouble(cacheDoubleNum)
    function forecast_func(value, N, timetag, __formula)
        return forecast(value, yBuf, xBuf, N, timetag, __formula)
    end
    return forecast_func
end
|
||||||
|
--slope 线性回归斜率
|
||||||
|
-- Factory for SLOPE: linear-regression slope via the native slope binding.
function c_slope()
    local yBuf = FormulaDataCacheDouble(cacheDoubleNum)
    local xBuf = FormulaDataCacheDouble(cacheDoubleNum)
    function slope_func(value, N, timetag, __formula)
        return slope(value, yBuf, xBuf, N, timetag, __formula)
    end
    return slope_func
end
|
||||||
|
--percentrank 返回特定数值在数据集中的百分比排位
|
||||||
|
-- Factory for PERCENTRANK: percentile rank of x within the last N values.
function c_percentrank()
    local orderBuf = FormulaOrderCache()
    function percentrank_func(value, N, x, significance, timetag, __formula)
        return percentrank(value, orderBuf, N, x, significance, timetag, __formula)
    end
    return percentrank_func
end
|
||||||
|
--percentile 返回区域中数值的第 K 个百分点的值
|
||||||
|
-- Factory for PERCENTILE: k-th percentile of the last N values.
function c_percentile()
    local orderBuf = FormulaOrderCache()
    function percentile_func(value, N, k, timetag, __formula)
        return percentile(value, orderBuf, N, k, timetag, __formula)
    end
    return percentile_func
end
|
||||||
|
--median 返回区域中数值的中位数
|
||||||
|
-- Factory for MEDIAN: the 0.5 percentile of the last N values,
-- reusing the native percentile binding.
function c_median()
    local orderBuf = FormulaOrderCache()
    function median_func(value, N, timetag, __formula)
        return percentile(value, orderBuf, N, 0.5, timetag, __formula)
    end
    return median_func
end
|
||||||
|
--trimmean 返回数据的内部平均值
|
||||||
|
-- Factory for TRIMMEAN: interior mean of the last N values.
function c_trimmean()
    local orderBuf = FormulaOrderCache()
    function trimmean_func(value, N, percent, timetag, __formula)
        return trimmean(value, orderBuf, N, percent, timetag, __formula)
    end
    return trimmean_func
end
|
||||||
|
--quartile 返回数据的四分位数
|
||||||
|
-- Factory for QUARTILE: quart-th quartile of the last N values.
function c_quartile()
    local orderBuf = FormulaOrderCache()
    function quartile_func(value, N, quart, timetag, __formula)
        return quartile(value, orderBuf, N, quart, timetag, __formula)
    end
    return quartile_func
end
|
||||||
|
--large 数据集中第 k 个最大值
|
||||||
|
-- Factory for LARGE: k-th largest of the last N values.
function c_large()
    local orderBuf = FormulaOrderCache()
    function large_func(value, N, k, timetag, __formula)
        return large(value, orderBuf, N, k, timetag, __formula)
    end
    return large_func
end
|
||||||
|
--small数据集中第 k 个最小值
|
||||||
|
-- Factory for SMALL: k-th smallest of the last N values.
function c_small()
    local orderBuf = FormulaOrderCache()
    function small_func(value, N, k, timetag, __formula)
        return small(value, orderBuf, N, k, timetag, __formula)
    end
    return small_func
end
|
||||||
|
--skew分布的偏斜度
|
||||||
|
-- Factory for SKEW: skewness of the last N values.
function c_skew()
    local buf = FormulaDataCacheDouble(cacheDoubleNum)
    function skew_func(value, N, timetag, __formula)
        return skew(value, buf, N, timetag, __formula)
    end
    return skew_func
end
|
||||||
|
--ftest
|
||||||
|
-- Factory for FTEST: F-test over two N-length series.
function c_ftest()
    local bufA = FormulaDataCacheDouble(cacheDoubleNum)
    local bufB = FormulaDataCacheDouble(cacheDoubleNum)
    function ftest_func(value1, value2, N, timetag, __formula)
        return ftest(value1, value2, bufA, bufB, N, timetag, __formula)
    end
    return ftest_func
end
|
||||||
|
|
||||||
|
--数据集的峰值
|
||||||
|
-- Factory for KURT: kurtosis of the last N values.
function c_kurt()
    local buf = FormulaDataCacheDouble(cacheDoubleNum)
    function kurt_func(value, N, timetag, __formula)
        return kurt(value, buf, N, timetag, __formula)
    end
    return kurt_func
end
|
||||||
|
--几何平均值
|
||||||
|
-- Factory for GEOMEAN: geometric mean of the last N values.
function c_geomean()
    local buf = FormulaDataCacheDouble(cacheDoubleNum)
    function geomean_func(value, N, timetag, __formula)
        return geomean(value, buf, N, timetag, __formula)
    end
    return geomean_func
end
|
||||||
|
--调和平均值
|
||||||
|
-- Factory for HARMEAN: harmonic mean of the last N values.
function c_harmean()
    local buf = FormulaDataCacheDouble(cacheDoubleNum)
    function harmean_func(value, N, timetag, __formula)
        return harmean(value, buf, N, timetag, __formula)
    end
    return harmean_func
end
|
||||||
|
|
||||||
|
-- INTERCEPT(Y,X,N),求序列Y,X的线性回归线截距
|
||||||
|
-- Factory for INTERCEPT(Y, X, N): linear-regression intercept of two series.
function c_intercept()
    local bufA = FormulaDataCacheDouble(cacheDoubleNum)
    local bufB = FormulaDataCacheDouble(cacheDoubleNum)
    function intercept_func(value1, value2, N, timetag, __formula)
        return intercept(value1, value2, bufA, bufB, N, timetag, __formula)
    end
    return intercept_func
end
|
||||||
|
-- RSQ(A,B,N),计算A,B序列的N周期乘积矩相关系数的平方.
|
||||||
|
-- Factory for RSQ(A, B, N): square of the N-period product-moment
-- correlation of two series.
function c_rsq()
    local bufA = FormulaDataCacheDouble(cacheDoubleNum)
    local bufB = FormulaDataCacheDouble(cacheDoubleNum)
    function rsq_func(value1, value2, N, timetag, __formula)
        return rsq(value1, value2, bufA, bufB, N, timetag, __formula)
    end
    return rsq_func
end
|
||||||
|
|
||||||
|
-- Pearson(皮尔生)乘积矩相关系数
|
||||||
|
-- Factory for PEARSON: product-moment correlation coefficient.
function c_pearson()
    local bufA = FormulaDataCacheDouble(cacheDoubleNum)
    local bufB = FormulaDataCacheDouble(cacheDoubleNum)
    function pearson_func(value1, value2, N, timetag, __formula)
        return pearson(value1, value2, bufA, bufB, N, timetag, __formula)
    end
    return pearson_func
end
|
||||||
|
--通过线性回归法计算每个 x 的 y 预测值时所产生的标准误差
|
||||||
|
-- Factory for STEYX: standard error of the regression-predicted y per x.
function c_steyx()
    local bufA = FormulaDataCacheDouble(cacheDoubleNum)
    local bufB = FormulaDataCacheDouble(cacheDoubleNum)
    function steyx_func(value1, value2, N, timetag, __formula)
        return steyx(value1, value2, bufA, bufB, N, timetag, __formula)
    end
    return steyx_func
end
|
||||||
|
|
||||||
|
-- Factory for MODE: most frequent value of the last N; needs both a
-- value buffer and a frequency-count cache.
function c_mode()
    local valueBuf = FormulaDataCacheDouble(cacheDoubleNum)
    local countBuf = FormulaCountCache()
    function mode_func(value, N, timetag, __formula)
        return mode(value, valueBuf, countBuf, N, timetag, __formula)
    end
    return mode_func
end
|
||||||
|
|
||||||
|
-- Factory for COVAR: covariance of two N-length series.
function c_covar()
    local bufA = FormulaDataCacheDouble(cacheDoubleNum)
    local bufB = FormulaDataCacheDouble(cacheDoubleNum)
    function covar_func(value1, value2, N, timetag, __formula)
        return covar(value1, value2, bufA, bufB, N, timetag, __formula)
    end
    return covar_func
end
|
||||||
|
|
||||||
|
-- Factory for BETA2: regression beta of two series, three work buffers.
function c_beta2()
    local bufA = FormulaDataCacheDouble(cacheDoubleNum)
    local bufB = FormulaDataCacheDouble(cacheDoubleNum)
    local bufC = FormulaDataCacheDouble(cacheDoubleNum)
    function beta2_func(value1, value2, N, timetag, __formula)
        return beta2(value1, value2, bufA, bufB, bufC, N, timetag, __formula)
    end
    return beta2_func
end
|
||||||
|
|
||||||
|
-- Factory for AVEDEV: mean absolute deviation of the last N values.
function c_avedev()
    local buf = FormulaDataCacheDouble(cacheDoubleNum)
    function avedev_func(value, N, timetag, __formula)
        return avedev(value, buf, N, timetag, __formula)
    end
    return avedev_func
end
|
||||||
|
|
||||||
|
-- Factory for DEVSQ: sum of squared deviations of the last N values.
function c_devsq()
    local buf = FormulaDataCacheDouble(cacheDoubleNum)
    function devsq_func(value, N, timetag, __formula)
        return devsq(value, buf, N, timetag, __formula)
    end
    return devsq_func
end
|
||||||
|
|
||||||
|
-- Factory for RELATE: correlation of two N-length series.
function c_relate()
    local bufA = FormulaDataCacheDouble(cacheDoubleNum)
    local bufB = FormulaDataCacheDouble(cacheDoubleNum)
    function relate_func(value1, value2, N, timetag, __formula)
        return relate(value1, value2, bufA, bufB, N, timetag, __formula)
    end
    return relate_func
end
|
||||||
|
|
||||||
|
-- Factory for STD: sample standard deviation via the native std binding.
function c_std()
    local buf = FormulaDataCacheDouble(cacheDoubleNum)
    function std_func(value, N, timetag, __formula, type)
        return std(value, buf, N, timetag, __formula, type)
    end
    return std_func
end
|
||||||
|
|
||||||
|
-- Factory for VAR: sample variance via the native var binding.
function c_var()
    local buf = FormulaDataCacheDouble(cacheDoubleNum)
    function var_func(value, N, timetag, __formula)
        return var(value, buf, N, timetag, __formula)
    end
    return var_func
end
|
||||||
|
|
||||||
|
-- Factory for STDP: population standard deviation via native stdp.
function c_stdp()
    local buf = FormulaDataCacheDouble(cacheDoubleNum)
    function stdp_func(value, N, timetag, __formula)
        return stdp(value, buf, N, timetag, __formula)
    end
    return stdp_func
end
|
||||||
|
|
||||||
|
-- Factory for VARP: population variance via the native varp binding.
function c_varp()
    local buf = FormulaDataCacheDouble(cacheDoubleNum)
    function varp_func(value, N, timetag, __formula)
        return varp(value, buf, N, timetag, __formula)
    end
    return varp_func
end
|
||||||
|
|
||||||
|
-- Factory for a pure-Lua rolling sample standard deviation over the last
-- N values, windowed in a List.  Returns NaN until N values are present.
-- Fix: sum1/sum2/count/temp/ret were accidental globals leaking into _G
-- (and racing between instances); they are now locals.  The unused
-- `count` accumulator was removed.
function c_std3()
    local history = List.new()
    function std_func(value, N, timetag, __formula)
        List.pushright(history, value);
        local sz = history.last - history.first + 1;
        if (sz > N) then
            List.popleft(history); -- keep the window at N entries
            sz = sz - 1
        end
        if (sz < N) then
            return 0 / 0; -- NaN until the window is full
        end
        local sum1 = 0.0; -- running sum of x
        local sum2 = 0.0; -- running sum of x^2
        for i = history.first, history.last, 1 do
            local v = history[i];
            sum1 = sum1 + v;
            sum2 = sum2 + v * v;
        end
        -- sample variance: (sum(x^2) - (sum(x))^2 / N) / (N - 1)
        sum1 = sum1 * sum1 / N;
        return math.sqrt((sum2 - sum1) / (N - 1));
    end
    return std_func;
end
|
||||||
|
|
||||||
|
|
164
src/xtquant/config/user/root2/lua/FunString.lua
Normal file
164
src/xtquant/config/user/root2/lua/FunString.lua
Normal file
@ -0,0 +1,164 @@
|
|||||||
|
------------------------------------------------------------
|
||||||
|
-- 字符串函数
|
||||||
|
-- 由脚本引擎预先定义,如果有性能问题,可用C++改写
|
||||||
|
-- @author jiangchanghao
|
||||||
|
-- @since 2012-10-11
|
||||||
|
-----------------------------------------------------------
|
||||||
|
-- Lower-case copy of str.
function lowerstr(str)
    local lowered = string.lower(str)
    return lowered
end
|
||||||
|
|
||||||
|
-- Upper-case copy of str.
function upperstr(str)
    local uppered = string.upper(str)
    return uppered
end
|
||||||
|
|
||||||
|
-- Byte length of str.
function strlen(str)
    return #str
end
|
||||||
|
|
||||||
|
-- Leftmost n characters of str.
function strleft(str, n)
    local head = string.sub(str, 1, n)
    return head
end
|
||||||
|
|
||||||
|
-- Rightmost n characters of str; the whole string when n >= its length.
function strright(str, n)
    local len = string.len(str)
    if n >= len then
        return str
    end
    return string.sub(str, len - n + 1, -1)
end
|
||||||
|
|
||||||
|
-- Substring of length j starting at position i (1-based).
-- A non-positive start yields the empty string.
function strmid(str, i, j)
    if i <= 0 then
        return ""
    end
    return string.sub(str, i, j + i - 1)
end
|
||||||
|
|
||||||
|
-- Strip leading spaces from str.
-- Fix: the gsub result was assigned to an accidental global `s`; it is
-- now a local.
function ltrim(str)
    local s = string.gsub(str, '^ +', '')
    return s
end
|
||||||
|
|
||||||
|
-- Strip trailing spaces from str.
-- Fix: the gsub result was assigned to an accidental global `s`; it is
-- now a local.
function rtrim(str)
    local s = string.gsub(str, ' +$', '')
    return s
end
|
||||||
|
|
||||||
|
-- Format num with N decimal places (e.g. numtostr(1.5, 2) -> "1.50").
function numtostr(num, N)
    -- build the spec "%.<N>f" first, then apply it
    local spec = string.format('%%.%if', N)
    return string.format(spec, num)
end
|
||||||
|
|
||||||
|
-- Concatenate str onto des.
function strcat(des, str)
    return string.format('%s%s', des, str)
end
|
||||||
|
|
||||||
|
-- Numeric value of str (nil when not convertible, as per tonumber).
function strtonum(str)
    local num = tonumber(str)
    return num
end
|
||||||
|
|
||||||
|
-- Convert str to a number, falling back to `default` when conversion fails.
-- NOTE(review): relies on a global `isvalid` (lower-case) — elsewhere this
-- codebase uses isValid; confirm isvalid exists and treats nil as invalid.
function strtonumex(str, default)
    local num = tonumber(str)
    if isvalid(num) then
        return num
    else
        return default
    end
end
|
||||||
|
|
||||||
|
-- Insert str1 into str after position index (0 inserts at the front).
function strinsert(str, index, str1)
    local head = string.sub(str, 1, index)
    local tail = string.sub(str, index + 1, -1)
    return head .. str1 .. tail
end
|
||||||
|
|
||||||
|
-- Remove `cound` characters from str starting after position index.
function strremove(str, index, cound)
    local head = string.sub(str, 1, index)
    local tail = string.sub(str, index + cound + 1, -1)
    return head .. tail
end
|
||||||
|
|
||||||
|
-- Find pattern s1 in str starting at position n; returns the 1-based
-- index of the first match, or 0 when absent (s1 is a Lua pattern).
-- Fix: the find result was assigned to an accidental global `i`; it is
-- now a local.
function strfind(str, s1, n, timetag)
    local i = string.find(str, s1, n)
    if i == nil then
        return 0
    end
    return i
end
|
||||||
|
|
||||||
|
function strreplace(str, strold, strnew)
    -- Replace every match of pattern `strold` in `str` with `strnew`.
    -- NOTE: `strold` is a Lua pattern, not a plain substring.
    -- Fixed: result was assigned to globals `s`/`_`; now a local.
    local s = string.gsub(str, strold, strnew)
    return s
end
|
||||||
|
|
||||||
|
function strtrimleft(str, str1)
    -- Strip any leading run of `str1` from `str`.
    -- NOTE: `str1` is interpolated into a Lua pattern, so pattern magic
    -- characters in it are special.
    -- Fixed: result was assigned to globals `s`/`_`; now a local.
    local s = string.gsub(str, '^' .. str1 .. '+', '')
    return s
end
|
||||||
|
|
||||||
|
function strtrimright(str, str1)
    -- Strip any trailing run of `str1` from `str`.
    -- NOTE: `str1` is interpolated into a Lua pattern (magic chars are special).
    -- Fixed: result was assigned to globals `s`/`_`; now a local.
    local s = string.gsub(str, str1 .. '+$', '')
    return s
end
|
||||||
|
|
||||||
|
function strcmp(str1, str2)
    -- Three-way lexicographic compare, like C strcmp: -1 / 0 / 1.
    if str1 == str2 then
        return 0
    elseif str1 < str2 then
        return -1
    else
        return 1
    end
end
|
||||||
|
|
||||||
|
function stricmp(str1, str2)
    -- Case-insensitive three-way compare: -1 / 0 / 1.
    -- Fixed: the uppercased copies were previously stored in globals
    -- `s1`/`s2`; now locals.
    local s1 = string.upper(str1)
    local s2 = string.upper(str2)
    if s1 == s2 then
        return 0
    elseif s1 < s2 then
        return -1
    else
        return 1
    end
end
|
||||||
|
|
||||||
|
function strncmp(str1, str2, n)
    -- Three-way compare of the first `n` characters: -1 / 0 / 1.
    -- Fixed: the prefixes were previously stored in globals `s1`/`s2`; now locals.
    local s1 = string.sub(str1, 1, n)
    local s2 = string.sub(str2, 1, n)
    if s1 == s2 then
        return 0
    elseif s1 < s2 then
        return -1
    else
        return 1
    end
end
|
||||||
|
|
||||||
|
function fmt(...)
    -- Format helper backed by the C++ FmtCache: the first vararg is the
    -- format string, the remaining args are pushed by type, and the
    -- expanded string is returned. A single argument is just tostring'd.
    local tt = {...}
    local len = #tt
    if len == 1 then
        return tostring(tt[1])
    end

    -- Fixed: `fc` was previously a global, so concurrent/nested fmt()
    -- calls could clobber each other's cache; now a local.
    local fc = FmtCache()
    fc:setFmtString(tostring(tt[1]))
    for i = 2, len do
        local v = tt[i]
        if type(v) == "number" then
            fc:pushDouble(i, v)
        elseif type(v) == "string" then
            fc:pushString(i, v)
        elseif type(v) == "boolean" then
            fc:pushDouble(i, v and 1 or 0)  -- booleans are encoded as 1/0
        else
            fc:pushString(i, "nil")         -- nil and other types print as "nil"
        end
    end
    return fc:getFmtString()
end
|
||||||
|
|
75
src/xtquant/config/user/root2/lua/FunSystem.lua
Normal file
75
src/xtquant/config/user/root2/lua/FunSystem.lua
Normal file
@ -0,0 +1,75 @@
|
|||||||
|
------------------------------------------------------------
|
||||||
|
-- 系统函数
|
||||||
|
-- 由脚本引擎预先定义,如果有性能问题,可用C++改写
|
||||||
|
-- @author jiangchanghao
|
||||||
|
-- @since 2012-10-11
|
||||||
|
-----------------------------------------------------------
|
||||||
|
|
||||||
|
-- Factory for the script engine's print function. A PrintCache is captured
-- per formula instance; the returned closure forwards (value, format) pairs
-- to the C++ print helpers according to each value's type.
function c_print()
    local pc = PrintCache()
    function printout(...)
        local tt = {...}
        local len = #tt
        -- the engine appends timetag and formula as the last two arguments
        -- (assigned to globals, as in the original implementation)
        timetag = tt[len - 1]
        formula = tt[len]
        if len == 3 and type(tt[1]) == "nil" then
            -- bare print() with no printable value
            printOut(pc, formula)
            return
        end
        local i = 1
        repeat
            local v = tt[i]
            if type(v) == "string" then
                printStr(v, tt[i + 1], pc, timetag, formula)
            elseif type(v) == "boolean" then
                printBool(v, tt[i + 1], pc, timetag, formula)
            else
                printNum(v, tt[i + 1], pc, timetag, formula)
            end
            i = i + 2
        until i >= len - 1
    end
    return printout
end
|
||||||
|
|
||||||
|
-- Factory for the serialize() script function: serializes values through the
-- C++ serialize_c helper. Opcode -1 resets the cache, 0 pushes an entry,
-- and 1 finalizes and returns the serialized string.
function c_serialize()
    local cc = FormulaCacheContainer()
    function ff(...)
        local tt = {...}
        local len = #tt

        -- the engine appends timetag and formula as the last two arguments
        timetag = tt[len - 1]
        formula = tt[len]

        serialize_c(-1, 0, 0, cc, timetag, formula)  -- reset

        if len == 3 then
            -- single scalar value
            serialize_c(0, 0, tt[1], cc, timetag, formula)
            return serialize_c(1, 0, 0, cc, timetag, formula)
        end

        if tt[1] == 'list' then
            -- remaining args (up to the trailing timetag/formula) are items
            for i = 2, len - 2 do
                serialize_c(0, i - 2, tt[i], cc, timetag, formula)
            end
            return '[' .. serialize_c(1, 0, 0, cc, timetag, formula) .. ']'
        end

        if tt[1] == 'dict' then
            -- remaining args are alternating key, value pairs
            for i = 2, len - 2, 2 do
                if i + 1 <= len - 2 then
                    serialize_c(0, tt[i], tt[i + 1], cc, timetag, formula)
                end
            end
            return '{' .. serialize_c(1, 0, 0, cc, timetag, formula) .. '}'
        end

        -- default: treat args as a plain sequence
        for i = 2, len - 2 do
            serialize_c(0, i - 2, tt[i], cc, timetag, formula)
        end
        return serialize_c(1, 0, 0, cc, timetag, formula)
    end
    return ff
end
|
182
src/xtquant/config/user/root2/lua/FunTrader.lua
Normal file
182
src/xtquant/config/user/root2/lua/FunTrader.lua
Normal file
@ -0,0 +1,182 @@
|
|||||||
|
------------------------------------------------------------
|
||||||
|
-- 程序化交易函数
|
||||||
|
-- 由脚本引擎预先定义,如果有性能问题,可用C++改写
|
||||||
|
-- @author zhangjin
|
||||||
|
-- @since 2012-10-18
|
||||||
|
-----------------------------------------------------------
|
||||||
|
-- Factory for the order() trading function. The history cache and the last
-- basket name are captured per formula instance; a nil basket repeats the
-- previously used one.
function c_order()
    local history = FormulaDataCacheDouble(0, 0)
    local lastBasket = ''
    function order(opType, chanel, addr, basket, timetag, formula)
        if basket == nil then
            basket = lastBasket
        end
        return placeorder_c(opType, chanel, addr, basket, timetag, formula, history)
    end
    return order
end
|
||||||
|
|
||||||
|
-- Factory for passorder(): places an order through the C++ passorder_c.
function c_passorder()
    local history = FormulaDataCacheDouble(0, 0)
    -- orderCode: basket name or stock ID
    function passorder(opType, orderType, accountid, orderCode, prType, price, volume, quickTrade, strategyName, userOrderId, timetag, formula)
        -- luabind cannot handle more than 10 arguments, so accountid,
        -- strategyName, quickTrade and userOrderId are packed into one
        -- '#'-separated string here and split again on the C++ side.
        -- Fixed: the packed string was previously stored in a global
        -- (`accidAndstrName`); now a local.
        local accidAndstrName = accountid .. "#" .. strategyName .. "#" .. tostring(quickTrade) .. "#" .. userOrderId
        return passorder_c(opType, orderType, accidAndstrName, orderCode, prType, price, volume, timetag, formula, history)
    end
    return passorder
end
|
||||||
|
|
||||||
|
-- Place an order through passorder2_c. Auxiliary arguments are packed into
-- a table to stay within luabind's argument-count limit.
function passorder(
    optype, ordertype, accountid, accounttype
    , marketstock, pricetype, price, volume
    , strategyname, quicktrade, remark
    , timetag, formula
)
    -- Fixed: `ptable` was previously a global; now a local.
    local ptable = {
        quicktrade = quicktrade,
        strategyname = strategyname,
        remark = remark,
        barpos = timetag,
    }
    return passorder2_c(
        optype, ordertype, accountid, accounttype,
        marketstock, pricetype, price, volume,
        ptable, formula
    )
end
|
||||||
|
|
||||||
|
-- Factory for trade(): remembers the last trade parameters so that a call
-- without `param` repeats the previous configuration (second argument 1).
function c_trade()
    local history = FormulaDataCacheDouble(0, 0)
    local lastParam = tradeparam()
    function trade_c(param, address, timetag, formula)
        if param then
            lastParam = copyParam(param)
            trade(param, 0, timetag, formula, history, address)
        else
            trade(lastParam, 1, timetag, formula, history, address)
        end
    end
    return trade_c
end
|
||||||
|
|
||||||
|
-- Factory for hedgestocktrade(): same last-parameter caching scheme as
-- c_trade(), delegating to the hedgestocktrade C++ helper.
function c_hedgestocktrade()
    local history = FormulaDataCacheDouble(0, 0)
    local lastParam = tradeparam()
    function hedgestocktrade_c(param, address, timetag, formula)
        if param then
            lastParam = copyParam(param)
            hedgestocktrade(param, 0, timetag, formula, history, address)
        else
            hedgestocktrade(lastParam, 1, timetag, formula, history, address)
        end
    end
    return hedgestocktrade_c
end
|
||||||
|
|
||||||
|
-- Factory for cancel(): cancels an order by code number.
function c_cancel()
    local history = FormulaDataCacheDouble(0, 0)
    function cancel_c(codeNumber, timetag)
        -- `timetag` is accepted for signature compatibility but unused.
        cancel(codeNumber, history)
    end
    return cancel_c
end
|
||||||
|
|
||||||
|
-- Factory for writeorder(): writes order content to a file via the C++ helper.
function c_writeorder()
    local history = FormulaDataCacheDouble(0, 0)
    function writeorder_c(filepath, content, timetag, formula)
        return writeorder(filepath, content, timetag, formula, history)
    end
    return writeorder_c
end
|
||||||
|
|
||||||
|
function positionadjust(positions, weight, channel)
    -- Build a basket string: a "35,<channel>" header line followed by one
    -- "<instrument>\t<volume * weight>" line per position.
    -- Fixed: repeated `..` concatenation in the loop was quadratic in the
    -- number of positions; parts are now collected and joined once.
    local parts = { '35,' .. channel .. '\n' }
    for i = 0, positions:size() - 1 do
        local detail = positions:at(i)
        local adjustedVol = detail.m_nVolume * weight
        parts[#parts + 1] = detail.m_strInstrumentID .. '\t' .. adjustedVol .. '\n'
    end
    return table.concat(parts)
end
|
||||||
|
|
||||||
|
-- Factory for portfoliosell(). NOTE: the factory's own argument is unused;
-- the sell type is passed to the returned closure instead (whose `type`
-- parameter shadows the Lua builtin inside the closure body).
function c_portfoliosell(type)
    local history = FormulaDataCacheDouble(0, 0)
    function portfoliosell_c(type, timetag, formula)
        return portfoliosell(type, timetag, formula, history)
    end
    return portfoliosell_c
end
|
||||||
|
|
||||||
|
-- Factory for portfoliobuy(): same last-basket fallback as c_order().
function c_portfoliobuy()
    local history = FormulaDataCacheDouble(0, 0)
    local lastBasket = ''
    function portfoliobuy(opType, chanel, addr, basket, timetag, formula)
        if basket == nil then
            basket = lastBasket
        end
        return portfoliobuy_c(opType, chanel, addr, basket, timetag, formula, history)
    end
    return portfoliobuy
end
|
||||||
|
|
||||||
|
-- Place an algorithmic order: same packing trick as passorder(), plus the
-- algorithm name.
function algo_passorder(
    optype, ordertype, accountid, accounttype
    , marketstock, pricetype, price, volume
    , strategyname, quicktrade, remark
    , algoname
    , timetag, formula
)
    -- Fixed: `ptable` was previously a global; now a local.
    local ptable = {
        quicktrade = quicktrade,
        strategyname = strategyname,
        remark = remark,
        barpos = timetag,
        algoname = algoname,
    }
    return algo_passorder_c(
        optype, ordertype, accountid, accounttype,
        marketstock, pricetype, price, volume,
        ptable, formula
    )
end
|
||||||
|
|
||||||
|
-- Cancel a running trade task by ID.
-- NOTE(review): `timetag` and `formula` are not parameters here — they are
-- read as globals, presumably set by the engine before this call; verify.
function cancel_task(taskID, accountID, accountType)
    return cancel_task_c(taskID, accountID, accountType, timetag, formula)
end
|
||||||
|
|
||||||
|
-- Factory for readsignal(): reads an external signal file for a stock.
function c_readsignal()
    local cache = FormulaCacheContainer()
    function wrapper(filePath, stockCode, timetag, formula)
        return readsignal_c(filePath, stockCode, cache, timetag, formula)
    end
    return wrapper
end
|
||||||
|
|
||||||
|
-- Factory for drawsignal(): draws a signal marker when `cond` holds.
function c_drawsignal()
    local cache = FormulaCacheContainer()
    function wrapper(cond, signalType, drawPrice, timetag, formula)
        return drawsignal_c(cache, cond, signalType, drawPrice, timetag, formula)
    end
    return wrapper
end
|
||||||
|
|
||||||
|
-- Factory for cmdprogress(): queries the progress of a command by ID.
function c_cmdprogress()
    local cache = FormulaCacheContainer()
    function wrapper(cmdID, timetag, formula)
        return cmdprogress_c(cache, cmdID, timetag, formula)
    end
    return wrapper
end
|
||||||
|
|
||||||
|
-- Factory for cmdstatus(): queries the status of a command by ID.
function c_cmdstatus()
    local cache = FormulaCacheContainer()
    function wrapper(cmdID, timetag, formula)
        return cmdstatus_c(cache, cmdID, timetag, formula)
    end
    return wrapper
end
|
10
src/xtquant/config/user/root2/lua/MetaType.lua
Normal file
10
src/xtquant/config/user/root2/lua/MetaType.lua
Normal file
@ -0,0 +1,10 @@
|
|||||||
|
--------------------------------------------------------------
|
||||||
|
-- lua与C++兼容的常量定义,常量值目前为"MetaId_FieldId"
|
||||||
|
-- @author zhangjin
|
||||||
|
-- @since 2012-7-17
|
||||||
|
---------------------------------------------------------------
|
||||||
|
|
||||||
|
-- K-line (candlestick) fields; constants are "MetaId_FieldId" strings.
META_FIELD_KLINE_HIGH_PRICE = "1001_1" -- high price
META_FIELD_KLINE_LOW_PRICE = "1001_2" -- low price
META_FIELD_KLINE_CLOSE_PRICE = "1001_3" -- close price
|
1485
src/xtquant/config/user/root2/lua/config.lua
Normal file
1485
src/xtquant/config/user/root2/lua/config.lua
Normal file
File diff suppressed because it is too large
Load Diff
292
src/xtquant/config/user/root2/lua/util.lua
Normal file
292
src/xtquant/config/user/root2/lua/util.lua
Normal file
@ -0,0 +1,292 @@
|
|||||||
|
-------------------------------------------------------------
|
||||||
|
-- 脚本引擎工作期间用到的辅助函数
|
||||||
|
-- @author zhangjin
|
||||||
|
-- @since 2012-7-17
|
||||||
|
-------------------------------------------------------------
|
||||||
|
|
||||||
|
|
||||||
|
-- Unpack the values of a (possibly non-sequence) table as multiple returns.
-- NOTE(review): the print() below looks like leftover debug output — it
-- echoes every key/value pair to stdout on each call; confirm and remove.
function unpackEx(arglist)
    local arg = {}
    for k, v in pairs(arglist) do
        print(k, v)
        table.insert(arg, v)
    end
    return unpack(arg)
end
|
||||||
|
|
||||||
|
function pack(func, arglist)
    -- Call the global function named `func` with the entries of `arglist`
    -- as its arguments and return its result.
    -- Fixed: the function reference was previously stored in global `f`.
    local f = _G[func]
    return f(unpack(arglist))
end
|
||||||
|
|
||||||
|
-- Double-ended queue stored in a plain table with moving first/last indices.
List = {}

function List.new ()
    return {first = 0, last = -1}
end

-- Prepend `value` to the front of the list.
function List.pushleft (list, value)
    local first = list.first - 1
    list.first = first
    list[first] = value
end

-- Append `value` to the back of the list.
function List.pushright (list, value)
    local last = list.last + 1
    list.last = last
    list[last] = value
end

-- Remove and return the back element; errors on an empty list.
function List.popright (list)
    local last = list.last
    if list.first > last then error("list is empty") end
    local value = list[last]
    list[last] = nil -- to allow garbage collection
    list.last = last - 1
    return value
end

-- Remove and return the front element; errors on an empty list.
-- Fixed: popleft previously had no empty check (unlike popright), so an
-- empty pop silently returned nil and corrupted the first index.
function List.popleft(list)
    local first = list.first
    if first > list.last then error("list is empty") end
    local value = list[first]
    list[first] = nil
    list.first = first + 1
    return value
end
|
||||||
|
|
||||||
|
function isValid(v)
    -- A number is valid when it is not NaN and within double range;
    -- nil is invalid; every other type counts as valid.
    local t = type(v)
    if t == "nil" then
        return false
    end
    if t == "number" then
        return v == v and -1.7 * 10 ^ 308 < v and v < 1.7 * 10 ^ 308
    end
    return true
end
|
||||||
|
|
||||||
|
-- Lower-case alias of isValid(), kept for script compatibility.
function isvalid(v)
    return isValid(v)
end
|
||||||
|
|
||||||
|
-- Convert a double to a boolean: any non-zero value maps to true.
-- (Original author's note, translated: unclear whether "positive" or
-- "non-zero" was intended; the implementation uses non-zero.)
function d2b(v)
    return v ~= 0
end
|
||||||
|
|
||||||
|
function b2d(v)
    -- Convert a boolean to a double: truthy -> 1, false/nil -> 0.
    return v and 1 or 0
end
|
||||||
|
|
||||||
|
-- Tolerance used by all floating point comparisons below.
FLOAT_ERROR = 1e-6

function isZero(value)
    -- True when `value` is zero within FLOAT_ERROR.
    return math.abs(value) <= FLOAT_ERROR
end
|
||||||
|
|
||||||
|
-- True when `value` is positive beyond the FLOAT_ERROR tolerance.
function isGreaterThanZero(value)
    return value > FLOAT_ERROR
end
|
||||||
|
|
||||||
|
function isLessThanZero(value)
    -- True when `value` is negative beyond the FLOAT_ERROR tolerance.
    return value < -FLOAT_ERROR
end
|
||||||
|
|
||||||
|
function isequalv(left, right)
    -- Equality test: strings compare exactly, numbers within FLOAT_ERROR.
    if type(left) == 'string' and type(right) == 'string' then
        return left == right
    end
    return isZero(left - right)
end
|
||||||
|
|
||||||
|
-- True when left > right beyond the float tolerance.
function isgreater(left, right)
    return isGreaterThanZero(left - right)
end
|
||||||
|
|
||||||
|
-- True when left >= right within the float tolerance.
function isgreaterequal(left, right)
    return not(isLessThanZero(left - right))
end
|
||||||
|
|
||||||
|
-- True when left < right beyond the float tolerance.
function isless(left, right)
    return isLessThanZero(left - right)
end
|
||||||
|
|
||||||
|
-- True when left <= right within the float tolerance.
function islessequal(left, right)
    return not(isGreaterThanZero(left - right))
end
|
||||||
|
|
||||||
|
function isTrue(v)
    -- Booleans pass through unchanged; any other value is true when ~= 0.
    if type(v) == 'boolean' then
        return v
    end
    return v ~= 0
end
|
||||||
|
|
||||||
|
function sortedpairs(t, comparator)
    -- Stateful iterator over `t` in key order (optionally ordered by
    -- `comparator`), usable as: for k, v in sortedpairs(t) do ... end
    local sortedKeys = {}
    -- Fixed: table.foreach is deprecated (removed in Lua 5.2+); a plain
    -- pairs() loop collects the keys identically.
    for k in pairs(t) do
        table.insert(sortedKeys, k)
    end
    table.sort(sortedKeys, comparator)
    local i = 0
    local function _f(_s, _v)
        i = i + 1
        local k = sortedKeys[i]
        if k then
            return k, t[k]
        end
    end
    return _f, nil, nil
end
|
||||||
|
|
||||||
|
function getweight(stock)
    -- Weight for `stock` from the table populated by setweight(); 0 when absent.
    return __stock2Weight[stock] or 0
end
|
||||||
|
|
||||||
|
function setweight(weight)
    -- Replace the module-level stock -> weight table with a copy of `weight`.
    local copy = {}
    for stock, w in pairs(weight) do
        copy[stock] = w
    end
    __stock2Weight = copy
end
|
||||||
|
|
||||||
|
-- Sum all weights stored via setweight().
-- NOTE(review): the `k ~= 0` guard compares each key against the number 0;
-- keys appear to be stock codes (strings, which never equal 0), so this is
-- presumably meant to skip a numeric sentinel entry — verify against callers.
function gettotalweight()
    local total = 0
    for k, v in pairs(__stock2Weight) do
        if k ~= 0 then
            total = total + v;
        end
    end
    return total
end
|
||||||
|
|
||||||
|
-- True when `key` is present in table `t`.
function exist1(t, key)
    return t[key] ~= nil
end
|
||||||
|
|
||||||
|
-- True when numeric index `key` falls within the last N entries of array `t`.
-- NOTE(review): assumes `t` is a sequence and `key` is numeric — verify.
function existrange(t, N, key)
    local size = #t
    return size - N < key
end
|
||||||
|
|
||||||
|
-- Delete `key` from table `t`.
function removekey(t, key)
    t[key] = nil
end

-- Mark `key` as abandoned by removing it from the holding table.
function toabandon(t, key)
    t[key] = nil
end

-- Mark `key` as held in the holding table (value 1 is the "held" flag).
function tohold(t, key)
    t[key] = 1
end
|
||||||
|
|
||||||
|
function holdingornot(t, val)
    -- Linear scan: true when `val` appears among the values of `t`.
    local found = false
    for _, item in pairs(t) do
        if item == val then
            found = true
            break
        end
    end
    return found
end
|
||||||
|
|
||||||
|
function sortedByKey(test_table)
    -- Return the keys of `test_table` as a sorted array.
    -- Fixed: removed the unused local `tt` and the unused loop value.
    local key_table = {}
    for key in pairs(test_table) do
        table.insert(key_table, key)
    end
    table.sort(key_table)
    return key_table
end
|
||||||
|
|
||||||
|
|
||||||
|
|
||||||
|
-- Recursive helper for multisort(): orders the keys in `keytbl` by column
-- `num` of `tbl` (a table of key -> value maps), recursing into column
-- num+1 to break ties, up to `total` columns. Returns the ordered key list.
-- NOTE(review): `m` and `n` below are assigned as globals — presumably an
-- oversight (they leak between calls); verify before making them local.
function multi(tbl, keytbl, num, total)

    local localTbl=tbl[num]
    local t={}
    local tt={}
    local ttt={}

    -- group keys by their value in the current column
    for _,v in pairs(keytbl) do
        if t[localTbl[v]] == nil then
            t[localTbl[v]]={}
        end
        table.insert(t[localTbl[v]],v)
    end

    -- recurse into the next column for groups with more than one key
    for i,v in pairs(t) do
        if #(v) > 1 and num+1 <= total then
            m=multi(tbl,v,num+1,total)
            t[i]=m
        end
    end

    -- emit groups in ascending order of their grouping value
    tt=sortedByKey(t)

    for _,v in pairs(tt) do
        n=t[v]
        for ii,vv in pairs(n) do
            table.insert(ttt,vv)
        end
    end
    return ttt
end
|
||||||
|
|
||||||
|
function oneTable(tab)
    -- Flatten a nested table into a single array of its leaf values.
    -- Fixed: the recursive helper was previously defined as a GLOBAL named
    -- printTable (a misleading name that also risked clobbering other code);
    -- it is now a local closure.
    local tbl = {}
    local function flatten(node)
        for _, v in pairs(node) do
            if type(v) == "table" then
                flatten(v)
            else
                table.insert(tbl, v)
            end
        end
    end
    flatten(tab)
    return tbl
end
|
||||||
|
|
||||||
|
function getKeys(tbl)
    -- Collect the union of inner-table keys across all entries of `tbl`.
    -- Fixed: the working set and result were previously stored in globals
    -- `k` and `key`; both are now locals.
    local seen = {}
    for _, inner in pairs(tbl) do
        for innerKey in pairs(inner) do
            seen[innerKey] = 0
        end
    end
    local keys = {}
    for innerKey in pairs(seen) do
        table.insert(keys, innerKey)
    end
    return keys
end
|
||||||
|
|
||||||
|
function multisort(...)
    -- Multi-column sort: each vararg is a table mapping key -> column value;
    -- returns the keys ordered by column 1, ties broken by column 2, etc.
    -- Fixed: removed the unused local `tttt`; the key list and result were
    -- previously stored in globals `key` and `final` — now locals.
    local numArgs = select("#", ...)
    local columns = {}
    for i = 1, numArgs do
        table.insert(columns, (select(i, ...)))
    end
    local keys = getKeys(columns)
    return multi(columns, keys, 1, #columns)
end
|
19
src/xtquant/config/xtquantservice.log4cxx
Normal file
19
src/xtquant/config/xtquantservice.log4cxx
Normal file
@ -0,0 +1,19 @@
|
|||||||
|
log4j.logger.TTConsole=INFO,ca
|
||||||
|
log4j.logger.TTStdFile=INFO,fa1
|
||||||
|
log4j.logger.TTDbgFile=INFO,fa1
|
||||||
|
log4j.logger.datasource=INFO,fa1
|
||||||
|
log4j.logger.TTPerformanceFile=INFO,fa1
|
||||||
|
|
||||||
|
# 文件输出1
|
||||||
|
log4j.appender.fa1=org.apache.log4j.DailyRollingFileAppender
|
||||||
|
log4j.appender.fa1.MaxFileSize=500MB
|
||||||
|
log4j.appender.fa1.MaxBackupIndex=10
|
||||||
|
log4j.appender.fa1.DatePattern='{{XTQAUNT_LOG_DATEPATTERN}}'
|
||||||
|
log4j.appender.fa1.Append=true
|
||||||
|
log4j.appender.fa1.layout=org.apache.log4j.PatternLayout
|
||||||
|
log4j.appender.fa1.layout.ConversionPattern=%d [%p] [%t] %m%n
|
||||||
|
|
||||||
|
# 控制台输出
|
||||||
|
log4j.appender.ca=org.apache.log4j.ConsoleAppender
|
||||||
|
log4j.appender.ca.layout=org.apache.log4j.PatternLayout
|
||||||
|
log4j.appender.ca.layout.ConversionPattern=%d [%p] [%t] %m%n
|
116
src/xtquant/config/xtquantservice.lua
Normal file
116
src/xtquant/config/xtquantservice.lua
Normal file
@ -0,0 +1,116 @@
|
|||||||
|
-- 取Client配置
|
||||||
|
|
||||||
|
function()
    -- Build and return the XtQuantService client configuration table.
    local ret = {
        app = {
            appName = "XtQuantService",
            netThreadNum = 5,
            netProcessThreadNum = 1,
            dispatcherThreadNum = 5,
            reportSeconds = 60,
            logPath = "../config/xtquantservice.log4cxx",
            logWatch = 1,
            appendDate = 1,
            timeoutSec = 0,
            requestTimeoutSec = 150,
        },
        threadPools = {
            -- datasource
            datasource_callback = 10,
            datasource_req = 10,
            datasource_other = 5,
            datasource_tradedate_change = 1,
            whole_quote = 2,
            -- trade linkage
            linkage = 1,
            accounts = 5,
            persistmini = 1,
            subquote = 3,
            -- python strategy execution
            pystrategy_run = 3,
            -- msgservice
            msg_service = 1,
            uicontrol_hpf_data_model = 1,
            server_miniquote = 1,
            server_vbaservice = 1,
            -- full-push index quotes
            index_quote = 2,
        },
        client_xtservice = {
            tagTemplate = "xtservice",
            address = g_xtservice_address,
            isGetdAddressFromNameServer = 0,
            reconnectSecond = -1,
            timeoutSecond = 120,
            requestTimeoutSecond = 150,
            isUseSSL = 0,
            sslCaPath = "../data/server.crt",
        },
        client_xtmarketinfo = {
            tagTemplate = "xtmarketinfo",
            isGetdAddressFromNameServer = 0,
            proxyType = 0,
            requestTimeoutSecond = 150,
            proxyNeedCheck = 0,
            isUsedAloneIO = 0,
            isUseSSL = 0,
            address = g_defaultPorts["xtmarketinfo"],
            keepAliveCheckSecond = 5,
            proxyPort = 80,
            reconnectSecond = 3,
            timeoutSecond = 120,
        },
        server_xtquant = {
            tag = "server_miniquote",
            address = "0.0.0.0:58610",
            isGetdAddressFromNameServer = 0,
            timeoutSecond = 0,
            keepAliveCheckSecond = 0,
            maxConnectionNum = 10,
            isAutoBind = 0,
            isUseSSL = 0,
        },
        -- presumably metaId -> comma-separated supported periods in ms
        -- ("0" looks like tick-level) — verify against the C++ consumer.
        metaInfo = {
            ["2"] = "86400000",
            ["1008"] = "0",
            ["1009"] = "0",
            ["1010"] = "0",
            ["1011"] = "0",
            ["1801"] = "0",
            ["1802"] = "0",
            ["1803"] = "60000",
            ["1804"] = "0",
            ["1806"] = "0",
            ["1808"] = "60000,86400000",
            ["1820"] = "0",
            ["1830"] = "0",
            ["2000"] = "86400000",
            ["2001"] = "86400000",
            -- Fixed: ["2002"] appeared twice with the same value; in a Lua
            -- table constructor the second assignment silently wins, so the
            -- duplicate line was a no-op and has been removed.
            ["2002"] = "86400000",
            ["2003"] = "86400000",
            ["2004"] = "86400000",
            ["2006"] = "86400000",
            ["3000"] = "0",
            ["3001"] = "60000,300000,3600000,86400000",
            ["3002"] = "60000",
            ["3004"] = "60000",
            ["3013"] = "86400000",
            ["3030"] = "0",
            ["4000"] = "86400000",
            ["4002"] = "60000,300000,3600000,86400000",
            ["4011"] = "60000,86400000",
            ["4999"] = "86400000",
            ["5000"] = "0",
            ["5002"] = "86400000",
            ["5003"] = "0",
            ["9000"] = "0",
        },
        config = {
            configdir = "../config",
            datadir = "../userdata/xtquant",
            modeldir = "../config/user",
        }
    }
    return ret
end
|
23
src/xtquant/config/xtquoterconfig.xml
Normal file
23
src/xtquant/config/xtquoterconfig.xml
Normal file
@ -0,0 +1,23 @@
|
|||||||
|
<?xml version="1.0" encoding="utf-8"?>
|
||||||
|
<!DOCTYPE TTQuoterConfigFile>
|
||||||
|
<TTQuoterConfigFile>
|
||||||
|
<ConfigVersion version_id="1"/>
|
||||||
|
<QuoterServers autoupdate="0" current_trade_option="" current_trade_future="" current_shkstock="" autochangeserver="0" current_trade_hkstock="" current_trade_stock="" current_future="" reconnectonlost="1" current_future_option="" current_trade_future_option="" current_trade_shkstock="" current_trade_gold="" autohide="1" current_trade_neeq="" current_gold="" current_hkstock="" current_option="" nWholeQuoteGear="5" current_neeq="" showdlgonlost="0" current_stock="">
|
||||||
|
<QuoterServer username="" address="115.231.218.73" password="" servername="VIP迅投绍兴电信" platform="1" port="55310" type="2" quotertype="0" markets="" tradeusersynchro="0"/>
|
||||||
|
<QuoterServer username="" address="115.231.218.79" password="" servername="VIP迅投绍兴电信" platform="1" port="55310" type="2" quotertype="0" markets="" tradeusersynchro="0"/>
|
||||||
|
<QuoterServer username="" address="218.16.123.11" password="" servername="VIP迅投东莞电信" platform="1" port="55310" type="2" quotertype="0" markets="" tradeusersynchro="0"/>
|
||||||
|
<QuoterServer username="" address="218.16.123.27" password="" servername="VIP迅投东莞电信" platform="1" port="55310" type="2" quotertype="0" markets="" tradeusersynchro="0"/>
|
||||||
|
<QuoterServer username="" address="218.16.123.121" password="" servername="迅投东莞主站" platform="1" port="55300" type="2" quotertype="0" markets="" tradeusersynchro="0"/>
|
||||||
|
<QuoterServer username="" address="218.16.123.122" password="" servername="迅投东莞主站" platform="1" port="55300" type="2" quotertype="0" markets="" tradeusersynchro="0"/>
|
||||||
|
<QuoterServer username="" address="43.242.96.162" password="" servername="迅投上海主站" platform="1" port="55300" type="2" quotertype="0" markets="" tradeusersynchro="0"/>
|
||||||
|
<QuoterServer username="" address="43.242.96.164" password="" servername="迅投上海主站" platform="1" port="55300" type="2" quotertype="0" markets="" tradeusersynchro="0"/>
|
||||||
|
<QuoterServer username="" address="115.231.218.73" password="" servername="VIP迅投绍兴电信" platform="1" port="55310" type="2" quotertype="1" markets="SZO|ZF|DF|INE|IF|SZ|SH|HK|SF|GF|SHO" tradeusersynchro="0"/>
|
||||||
|
<QuoterServer username="" address="115.231.218.79" password="" servername="VIP迅投绍兴电信" platform="1" port="55310" type="2" quotertype="1" markets="SZO|ZF|DF|INE|IF|SZ|SH|HK|SF|GF|SHO" tradeusersynchro="0"/>
|
||||||
|
<QuoterServer username="" address="218.16.123.11" password="" servername="VIP迅投东莞电信" platform="1" port="55310" type="2" quotertype="1" markets="SZO|ZF|DF|INE|IF|SZ|SH|HK|SF|GF|SHO" tradeusersynchro="0"/>
|
||||||
|
<QuoterServer username="" address="218.16.123.27" password="" servername="VIP迅投东莞电信" platform="1" port="55310" type="2" quotertype="1" markets="SZO|ZF|DF|INE|IF|SZ|SH|HK|SF|GF|SHO" tradeusersynchro="0"/>
|
||||||
|
<QuoterServer username="" address="218.16.123.121" password="" servername="迅投东莞主站" platform="1" port="55300" type="2" quotertype="1" markets="SZO|ZF|DF|INE|IF|SZ|SH|HK|SF|GF|SHO" tradeusersynchro="0"/>
|
||||||
|
<QuoterServer username="" address="218.16.123.122" password="" servername="迅投东莞主站" platform="1" port="55300" type="2" quotertype="1" markets="SZO|ZF|DF|INE|IF|SZ|SH|HK|SF|GF|SHO" tradeusersynchro="0"/>
|
||||||
|
<QuoterServer username="" address="43.242.96.162" password="" servername="迅投浦东主站" platform="1" port="55300" type="2" quotertype="1" markets="SZO|ZF|DF|INE|IF|SZ|SH|HK|SF|GF|SHO" tradeusersynchro="0"/>
|
||||||
|
<QuoterServer username="" address="43.242.96.164" password="" servername="迅投浦东主站" platform="1" port="55300" type="2" quotertype="1" markets="SZO|ZF|DF|INE|IF|SZ|SH|HK|SF|GF|SHO" tradeusersynchro="0"/>
|
||||||
|
</QuoterServers>
|
||||||
|
</TTQuoterConfigFile>
|
420
src/xtquant/config/xtstocktype.lua
Normal file
420
src/xtquant/config/xtstocktype.lua
Normal file
@ -0,0 +1,420 @@
|
|||||||
|
-- 取分类配置
|
||||||
|
|
||||||
|
-- 全部配置都必须用英文半角标点,不支持中文标点,不支持全角
|
||||||
|
|
||||||
|
g_stocktype_info = {
|
||||||
|
-- 存放基础类型
|
||||||
|
-- 基础类型必须在同一个市场,跨市场的请用扩展类型,最后两位是市场和对应枚举值,前面随便扩展
|
||||||
|
-- 目前enum_EXTRcCategoryID里头已经有的,必须和其保持一致
|
||||||
|
-- 格式说明:每个类型有一个英文名,含义,市场,以及序号组成,每行写做: 英文名 = "含义,市场,序号",--注释
|
||||||
|
-- 含义是指类型对应的代码段,比如600代码段,就是600***,“|”符号表示或,比如510XXX|520XXX 就是510代码段或者520代码段 当然,也可以直接写死代码,不用号段掩码
|
||||||
|
-- 市场目前只有SH,SZ等,必须大写,和目前的MarketType.h定义保持一致
|
||||||
|
-- 除开紧急情况,序号和英文名应当保持一致性,一个是枚举的字符串,一个是枚举的值
|
||||||
|
baseTypes = {
|
||||||
|
XT_GE_MARKET_SH = "******,SH,1",--沪市
|
||||||
|
XT_GE_MARKET_SZ = "******,SZ,2",--深市
|
||||||
|
XT_GE_MARKET_ZJ = "******,IF,3",--中金
|
||||||
|
XT_GE_MARKET_SQ = "******,SF,4",--上期
|
||||||
|
XT_GE_MARKET_DS = "******,DF,5",--大商
|
||||||
|
XT_GE_MARKET_ZS = "******,ZF,6",--郑商
|
||||||
|
XT_GE_MARKET_OF = "******,OF,7",--开放基金
|
||||||
|
XT_GE_MARKET_OP = "******,SHO,8",--股票期权
|
||||||
|
XT_GE_MARKET_NEW3BOARD = "******,NEEQ,9",--新三板
|
||||||
|
|
||||||
|
XT_GE_SH_A = "60****|65****|688***|689***,SH,101",--沪市A股
|
||||||
|
XT_GE_SH_B = "90****,SH,102",--沪市B股
|
||||||
|
XT_GE_SH_FUND = "50****,SH,103",--沪市封基
|
||||||
|
XT_GE_SH_INDEX = "000***,SH,104", --沪市指数
|
||||||
|
XT_GE_SH_ETF = "510***|511***|512***|513***|515***|516***|517***|518***|560***|561***|562***|563***|588***,SH,105", --沪市ETF
|
||||||
|
XT_GE_SH_WARRANT = "000000,SH,106", --沪市权证
|
||||||
|
XT_GE_SH_SUBSCRIBE = "73****|78****|712***|715***|795***,SH,107", --沪市申购
|
||||||
|
XT_GE_SH_EXCHANGEABLE_LOAN = "132***|1370**|1371**|1372**|1373**|1374**,SH,108", --沪市可交换公司债券
|
||||||
|
XT_GE_SH_EXCHANGEABLE_LOAN_PLEDGE = "133***,SH,109", --沪市可交换公司债券质押券出入库
|
||||||
|
XT_GE_SH_EXCHANGEABLE_LOAN_SWAP = "192***,SH,110", --沪市可交换公司债券换股
|
||||||
|
XT_GE_SH_PRIVATELY_LOAN_TRANSFER = "1355**|1356**|1357**|1358**|1359**,SH,111", --沪市并购重组私募债券挂牌转让
|
||||||
|
XT_GE_SH_SHORTTERM_CORPORATE_LOAN_TRANSFER = "1350**|1351**|1352**|1353**|1354**|1175**|1176**|1177**|1178**|1179**,SH,112", --沪市证券公司短期债券挂牌转让
|
||||||
|
XT_GE_SH_ABS = "128***,SH,113", --信贷资产支持证券
|
||||||
|
XT_GE_SH_CORPORATE_LOAN_PLEDGE = "102***|134***|154***|164***,SH,114", --沪市公司债券质押券入库
|
||||||
|
XT_GE_SH_CORPORATE_BOND = "1230**|1231**|1232**|1233**|1234**|136***|143***|1220**|1221**|1222**|1223**|1224**|155***|163***|175***|185***|188***|1375**|1376**|1377**|1378**|1379**|1385**|1386**|1387**|1388**|1389**|115***|240***|241***,SH,115", --沪市公司债
|
||||||
|
XT_GE_SH_PUBLIC_PREFERED_SHARES = "330***,SH,116", --沪市公开发行优先股交易
|
||||||
|
XT_GE_SH_NON_PUBLIC_PREFERED_SHARES_TRANSFER = "360***,SH,117",--沪市非公开发行优先股转让
|
||||||
|
XT_GE_SH_PUBLIC_PREFERED_SHARES_SUBSCRIBE = "770***,SH,118", --沪市公开发行优先股申购
|
||||||
|
XT_GE_SH_PUBLIC_PREFERED_SHARES_PLACEMENTS = "771***,SH,119", --沪市公开发行优先股配股/配售
|
||||||
|
XT_GE_SH_PUBLIC_PREFERED_SHARES_SUBSCRIBE_PRICE = "772***,SH,120", --沪市公开发行优先股申购款分配
|
||||||
|
XT_GE_SH_PUBLIC_PREFERED_SHARES_SUBSCRIBE_DISTRIBUTION = "773***,SH,121", --沪市公开发行优先股申购配号
|
||||||
|
XT_GE_SH_GOVERNMENT_LOAN_REPURCHASE_TRUSTEE_SHIP = "201***,SH,122", --沪市国债回购(席位托管方式)
|
||||||
|
XT_GE_SH_GOVERNMENT_LOAN_REPURCHASE_ENTERPRISE = "202***,SH,123", --沪市企业债回购
|
||||||
|
XT_GE_SH_GOVERNMENT_LOAN_REPURCHASE_BUYOUT = "203***,SH,124", --沪市国债买断式回购
|
||||||
|
XT_GE_SH_GOVERNMENT_LOAN_REPURCHASE_IMPAWN = "204***,SH,125", --沪市新质押式国债回购
|
||||||
|
XT_GE_SH_GOVERNMENT_LOAN_REPURCHASE = "201***|202***|203***|204***,SH,126", --
|
||||||
|
XT_GE_SH_GOVERNMENT_LOAN_INTEREST_BEARING = "010***|019***,SH,127", --沪市附息国债
|
||||||
|
XT_GE_SH_FINANCIAL_BONDS = "018***,SH,128", --沪市金融债
|
||||||
|
XT_GE_SH_GOVERNMENT_LOAN_DISCOUNT = "020***,SH,129", --沪市贴现国债
|
||||||
|
XT_GE_SH_CENTRAL_GOVERNMENT_LOAN = "010***|019***|020***,SH,130", --沪市中央政府债(国债)
|
||||||
|
XT_GE_SH_SEPERATION_BOND = "126***,SH,131", --沪市分离债
|
||||||
|
XT_GE_SH_ASSET_SECURITIZATION = "121***,SH,132", --沪市资产证券化
|
||||||
|
XT_GE_SH_CREDIT_ASSET_SUPPORTED = "128***,SH,133", --信贷资产支持证券
|
||||||
|
XT_GE_SH_ENTERPRISE_BOND = "120***|124***|1270**|1271**|1272**|1273**|1274**|1275**|1276**|1277**|1278**|129***|139***|1225**|1226**|1227**|1228**|1229**|152***|1840**|1841**|1842**|1843**|1844**|1845**|1846**|1847**|270***|271***|272***,SH,134", --沪市企业债(席位托管方式)
|
||||||
|
XT_GE_SH_CONVERTIBALE_BOND = "1000**|1001**|1002**|1003**|1004**|1005**|1006**|1007**|1008**|110***|112***|113***|1110**|1111**|1112**|1113**|1114**|1180**|1181**|1182**|1183**|1184**,SH,135", --沪市可转债
|
||||||
|
XT_GE_SH_LOCAL_GOVERNMENT_LOAN = "130***|140***|147***|157***|160***|173***|171***|186***|101***|109***|198***|230***|231***,SH,136", --沪市地方债
|
||||||
|
XT_GE_SH_GOVERNMENT_LOAN = "010***|019***|020***|130***|160***|171***|009***|140***|147***|157***|173***|186***|101***|109***|198***|230***|231***,SH,137", --沪市政府债(国债+地方债)
|
||||||
|
XT_GE_SH_CPB = "1370**|1371**|1372**|1373**|1374**,SH,138",--上海可交换私募债
|
||||||
|
XT_GE_SH_STANDARD_BOND = "888880|SHRQ88,SH,139", --沪市标准券
|
||||||
|
XT_GE_SH_CLOSED_ENDED_FUNDS = "500***|5058**,SH,140", --沪市封闭式基金
|
||||||
|
XT_GE_SH_POLICY_JRZ = "018***|028***|038***,SH,141", --沪市政策性金融债
|
||||||
|
XT_GE_SH_PLEDGE = "09****|102***|103***|104***|105***|106***|107***|108***|133***|134***|141***|144***|148***|153***|154***|158***|161***|164***|172***|174***|176***|187***,SH,142", --沪市上海质押代码
|
||||||
|
XT_GE_SH_OLD_GOVERNMENT_LOAN = "009***,SH,143", --2000年前发行国债
|
||||||
|
XT_GE_SH_GOVERNMENT_LOAN_DISCOUNT_REPURCHASE = "107***,SH,144", --记账式贴现国债质押式回购标准券入库
|
||||||
|
XT_GE_SH_CORPORATE_BOND_REPURCHASE = "1040**|1041**|1042**|1043**|1044**,SH,145", --公司债质押式回购标准券入库
|
||||||
|
XT_GE_SH_GOVERNMENT_LOAN_DISTRIBUTE_SALE = "7510**|7511**|7516**|7517**,SH,146", --国债分销
|
||||||
|
XT_GE_SH_LOCAL_GOVERNMENT_LOAN_REPURCHASE = "106***|141***|148***|158***|161***|172***|174***,SH,147", --地方政府债质押式回购标准券入库
|
||||||
|
XT_GE_SH_LOCAL_GOVERNMENT_LOAN_DISTRIBUTE_SALE = "7514**|7515**,SH,148", --地方政府债分销
|
||||||
|
XT_GE_SH_SEPERATION_BOND_REPURCHASE = "1050**|1051**|1052**|1053**|1054**|1055**|1056**|1057*|1058**,SH,149", --分离债质押式回购标准券入库
|
||||||
|
XT_GE_SH_BOND_OFFER_REPURCHASE = "205***,SH,150", --债券质押式报价回购
|
||||||
|
XT_GE_SH_MS_PRIVATE_PLACEMENT_BOND = "125***|145***|150***|151***,SH,151", --中小企业私募债券在固定收益平台转让
|
||||||
|
XT_GE_SH_CROSS_BORDER_ETF = "513**0|5109*0,SH,152", --跨境ETF
|
||||||
|
XT_GE_SH_CROSS_BORDER_LOF = "501018|501021|501023|501025|50130*|501310|501311|501313,SH,153", --跨境LOF
|
||||||
|
XT_GE_SH_INNOVATION_CLOSED_ENDED_FUNDS = "5058**,SH,154", --上海创新型封闭式基金
|
||||||
|
XT_GE_SF_FIXED_INCOME_ETF = "511***,SH,155",--上海的固定收益类
|
||||||
|
XT_GE_SH_GOLD = "518**0,SH,156",--上海黄金
|
||||||
|
XT_GE_SH_RTMF = "5198**,SH,157",--上海实时申赎货币基金
|
||||||
|
XT_GE_SH_TMF = "5116**|5117**|5118**|5119**,SH,158",--上海交易型货币基金
|
||||||
|
XT_GE_SH_STOCK_IPO = "730***|732***|780***|712***|795***|787***|707***,SH,159",--上海股票申购代码
|
||||||
|
XT_GE_SH_LOAN_IPO = "733***|783***|754***|7590**|713***|718***,SH,160",--上海债券申购代码
|
||||||
|
XT_GE_SH_FUND_IPO = "735***,SH,161",--上海基金申购代码
|
||||||
|
XT_GE_SH_NEW_SHARES_DISTRIBUTION = "741***|791***|736***|713***|716***|789***|796***|708***,SH,162",--上海新股配号
|
||||||
|
XT_GE_SH_PLACING_FIRST_DISTRIBUTION = "747***|797***,SH,163",--上海配售首发配号
|
||||||
|
XT_GE_SH_CONVERTIBLE_BOUND_DISTRIBUTION = "744***|794***|756***|714***|719***,SH,164",--上海可转债资金申购配号
|
||||||
|
XT_GE_SH_SUBSCRIPTION_PRICE = "740***|790***|734***,SH,165",--上海申购款
|
||||||
|
XT_GE_SH_BONDS_FUNDS = "743***|793***|755***,SH,166",--上海发债款
|
||||||
|
XT_GE_SH_SHARES_ALLOTMEN = "700***|701***|702***|760***|742***|717***|762***|785***|797***,SH,167",--上海配股代码
|
||||||
|
XT_GE_SH_SHARES_CONVERTIBLE_BOND = "704***|764***|753***|715***|726***,SH,168",--上海配转债代码
|
||||||
|
XT_GE_SH_LOF = "501***|502***|506***,SH,169", --上海LOF
|
||||||
|
XT_GE_SH_GF = "502***,SH,170", --上海分级基金
|
||||||
|
XT_GE_SH_XGED = "SHXGED,SH,171", --沪新股额
|
||||||
|
XT_GE_SH_SEO = "730***|731***|780***|781***,SH,172",--沪增发股
|
||||||
|
XT_GE_SH_PR_CB_ETF = "513031|513501|513101|5109*1|513601|513661|513051,SH,173",--上海跨境ETF申赎代码
|
||||||
|
XT_GE_SH_LOAN_ETF = "5110**|5111**|5112**|5113**|5114**|5115**,SH,174",--上海债券ETF
|
||||||
|
XT_GE_SH_EPB_TRANSFER = "139***,SH,175",--上海企业债券挂牌转让
|
||||||
|
XT_GE_SH_CPB_LOAN = "1370**|1371**|1372**|1373**|1374**,SH,176",--上海可交换私募债
|
||||||
|
XT_GE_SH_LOAN_REPURCHASE_DAY_1 = "204001,SH,177", --沪市1天回购
|
||||||
|
XT_GE_SH_LOAN_REPURCHASE_DAY_2 = "204002,SH,178", --沪市2天回购
|
||||||
|
XT_GE_SH_LOAN_REPURCHASE_DAY_3 = "204003,SH,179", --沪市3天回购
|
||||||
|
XT_GE_SH_LOAN_REPURCHASE_DAY_4 = "204004,SH,180", --沪市4天回购
|
||||||
|
XT_GE_SH_LOAN_REPURCHASE_DAY_7 = "204007,SH,181", --沪市7天回购
|
||||||
|
XT_GE_SH_LOAN_REPURCHASE_DAY_14 = "204014,SH,182", --沪市14天回购
|
||||||
|
XT_GE_SH_LOAN_REPURCHASE_DAY_28 = "204028,SH,183", --沪市28天回购
|
||||||
|
XT_GE_SH_LOAN_REPURCHASE_DAY_28_UPPER = "204091|204182,SH,184", --沪市28天以上回购
|
||||||
|
XT_GE_SH_GSF = "502**1|502**2|502**4|502**5|502**7|502008|502018|502028|502038|502058|502049|502050,SH,185", --上海分级基金子基金
|
||||||
|
XT_GE_SH_ASS = "121***|1235**|1236**|1237**|1238**|1239**|128***|131***|142***|146***|149***|156***|159***|165***|168***|169***|179***|180***|183***|189***|1931**|1932**|1933**|1934**|1935**|1936**|1937**|1938**|1939**|112***|199***|260***|261***,SH,186",--上海资产支持证券
|
||||||
|
XT_GE_SH_LOAN_CBB_SCB_NEW = "733***|783***|754***|7590**|713***|718***,SH,187",--上海可转债可交换债新债申购代码
|
||||||
|
--XT_GE_SH_EXCHANGEABLE_BOND = "759***,SH,188",--上海可交换债券
|
||||||
|
XT_GE_SH_OPEN_END_FUND = "5190**|5191**|5192**|5193**|5194**|5195**|5196**|5197**|5199**,SH,189",--上海开放式基金申赎代码
|
||||||
|
XT_GE_SH_OTHER_MF = "5195**|5199**,SH,190",--上海除交易型和实时申赎型之外的货币市场基金
|
||||||
|
XT_GE_SH_ENTERPROSE_SUPPORT_BOND = "142***|131***|149***,SH,191",--企业支持债券(固收系统可交易)
|
||||||
|
--XT_GE_SH_INNOVATE = "605***,SH,192",--上海创新企业股票
|
||||||
|
XT_GE_SH_INNOVATE_DISTRIBUTION = "716***,SH,193",--上海创新企业配号
|
||||||
|
--XT_GE_SH_INNOVATE_SUBSCRIBE = "715***,SH,194", --上海创新企业申购
|
||||||
|
XT_GE_SH_INNOVATE_ALLOTMEN = "717***,SH,195", --上海创新企业配股
|
||||||
|
XT_GE_SH_CDR = "6091**|6092**|6093**|6094**|6095**|6096**|6097**|6098**|6099**|689***,SH,196",--上海CDR
|
||||||
|
XT_GE_SH_CDR_DISTRIBUTION = "713***|796***,SH,197",--上海CDR配号
|
||||||
|
XT_GE_SH_CDR_SUBSCRIBE = "712***|795***,SH,198", --上海CDR申购
|
||||||
|
XT_GE_SH_CDR_ALLOTMEN = "714***|797***,SH,199", --上海CDR配股
|
||||||
|
XT_GE_SH_LOAN_CR_ETF = "511011|511021|511031|511051|511061|511181|511221|511261|511271|511281|511311|511361|511381|5114*1|5115*1,SH,200", --上海债券etf申赎
|
||||||
|
XT_GE_SH_QUOTATION_REPURCHASE_DAY_1 = "205001,SH,201", --沪市1天报价回购
|
||||||
|
XT_GE_SH_QUOTATION_REPURCHASE_DAY_7 = "205007,SH,202", --沪市7天报价回购
|
||||||
|
XT_GE_SH_QUOTATION_REPURCHASE_DAY_14 = "205008,SH,203", --沪市14天报价回购
|
||||||
|
XT_GE_SH_QUOTATION_REPURCHASE_DAY_28 = "205010,SH,204", --沪市28天报价回购
|
||||||
|
XT_GE_SH_QUOTATION_REPURCHASE_DAY_42 = "205042,SH,205", --沪市42天报价回购
|
||||||
|
XT_GE_SH_QUOTATION_REPURCHASE_DAY_63 = "205063,SH,206", --沪市64天报价回购
|
||||||
|
XT_GE_SH_QUOTATION_REPURCHASE_DAY_91 = "205030,SH,207", --沪市91天报价回购
|
||||||
|
XT_GE_SH_QUOTATION_REPURCHASE_DAY_119 = "205119,SH,208", --沪市119天报价回购
|
||||||
|
XT_GE_SH_QUOTATION_REPURCHASE_DAY_154 = "205154,SH,209", --沪市154天报价回购
|
||||||
|
XT_GE_SH_QUOTATION_REPURCHASE_DAY_182 = "205182,SH,210", --沪市182天报价回购
|
||||||
|
XT_GE_SH_QUOTATION_REPURCHASE_DAY_273 = "205273,SH,211", --沪市273天报价回购
|
||||||
|
XT_GE_SH_PUBLIC_LOAN_PLEDGE = "144***|176***|187***,SH,212", --沪市公开发行公司债券出入库
|
||||||
|
XT_GE_SH_CONVERTIBLE_BOND_STOCK = "181***|190***|191***|1950**|1951**|1952**|1953**|1954**,SH,213", --上海可转债转股
|
||||||
|
XT_GE_SH_BOND_RESALE = "1820**|1821**|1822**|1009**,SH,214", --上海债券回售
|
||||||
|
XT_GE_SH_BOND_PROTOCOL_REPURCHASE = "206***,SH,215", --债券质押式协议回购
|
||||||
|
XT_GE_SH_BOND_TRIPARTITE_REPURCHASE = "207***,SH,216", --债券质押式三方回购
|
||||||
|
XT_GE_SH_GOVERNMENT_LOAN_FUTURES = "310***,SH,217", --上海国债期货
|
||||||
|
XT_GE_SH_OPEN_GF_LOF = "503***,SH,218", --上海多空分级LOF开放式基金
|
||||||
|
XT_GE_SH_OPEN_END_FUND_SUBSCRIPTION = "521***,SH,219", --上海开放式基金认购
|
||||||
|
XT_GE_SH_OPEN_END_FUND_CROSS_MARKET = "522***,SH,220", --上海开放式基金跨市场转托管
|
||||||
|
XT_GE_SH_OPEN_END_FUND_DIVIDEND = "523***,SH,221", --上海开放式基金分红
|
||||||
|
XT_GE_SH_OPEN_END_FUND_CONVERSION = "524***,SH,222", --上海开放式基金基金转换
|
||||||
|
XT_GE_SH_FUND_RAISING = "705***,SH,223", --上海基金扩募
|
||||||
|
XT_GE_SH_TENDER_OFFER = "706***,SH,224", --上海要约收购
|
||||||
|
XT_GE_SH_ONLINE_VOTING = "738***|752***|788***,SH,225", --上海网上投票
|
||||||
|
XT_GE_SH_ONLINE_VOTING_B = "938***,SH,226", --上海网上投票(B股)
|
||||||
|
XT_GE_SH_FUND_SUBSCRIPTION = "745***,SH,227", --上海基金申购款
|
||||||
|
XT_GE_SH_FUND_SUBSCRIPTION_DISTRIBUTION = "746***,SH,228", --上海基金申购配号
|
||||||
|
XT_GE_SH_INTEREST_RATE_GOVERNMENT_LOAN = "75180*,SH,229", --上海利率招标国债预发行
|
||||||
|
XT_GE_SH_PRICE_GOVERNMENT_LOAN = "75181*,SH,230", --上海价格招标国债预发行
|
||||||
|
XT_GE_SH_PUBLIC_LOAN_DISTRIBUTE_SALE = "75185*|75186*|75187*|75188*|75189*,SH,231", --上海公开发行公司债券网上分销
|
||||||
|
XT_GE_SH_LOAN_ISSUANCE_DISTRIBUTE_SALE = "75197*|75198*|75199*,SH,232", --上海公司债发行分销
|
||||||
|
XT_GE_SH_EXCHANGEABLE_BOND_DISTRIBUTION = "7580**,SH,233", --上海可交换债配号
|
||||||
|
XT_GE_SH_DESIGNATED_TRANSACTION = "799***,SH,234", --上海指定交易
|
||||||
|
XT_GE_SH_NON_PUBLIC_CONVERTIBLE_BOND_STOCK = "1930**,SH,235", --创新创业公司非公开发行可转换公司债券转股
|
||||||
|
XT_GE_SH_ONLINE_VOTING_PASSWORD_SERVICE_B = "939***,SH,236", --上海网上投票密码服务(B股)
|
||||||
|
XT_GE_SH_GOVERNMENT_LOAN_PLEDGE = "090***,SH,237", --上海新国债质押式回购质押券出入库
|
||||||
|
XT_GE_SH_COUPON_GOVERNMENT_LOAN_PLEDGE = "091***|099***,SH,238", --上海附息国债出入库
|
||||||
|
XT_GE_SH_CONVERTIBALE_BOND_RESALE = "1009**|1820**|1821**,SH,239", --上海可转债回售
|
||||||
|
XT_GE_SH_ENTERPRISE_LOAN_PLEDGE = "1059**,SH,240", --沪市企业债出入库
|
||||||
|
XT_GE_SH_LOW_CORPORATE_BOND = "1230**|1231**|1232**|1233**|1234**,SH,241", --沪市非担保交收公司债低等级公司债券和定向发行的次级债券等
|
||||||
|
XT_GE_SH_ASSET_BACKED_SECURITIES = "1235**|1236**|1237**|1238**|1239**|168***|169***|1931**|1932**|1933**|1934**|1935**|1936**|1937**|1938**|1939**,SH,242", --沪市资产支持证券
|
||||||
|
XT_GE_SH_TRANSACTION_ETF = "5100*0|5101*0|5102*0|5104*0|5106*0|5107*0|5108*0,SH,243", --上海ETF交易
|
||||||
|
XT_GE_SH_CR_ETF = "5100*1|5101*1|5102*1|5103*1|5104*1|5105*1|5106*1|5107*1|5108*1|5880*1|5881*1|5882*1|5883*1|5884*1|517**1,SH,244", --上海ETF申赎
|
||||||
|
XT_GE_SH_FUND_ETF = "5100*2|5101*2|5102*2|5103*2|5104*2|5105*2|5106*2|5107*2|5108*2|5880*2|5881*2|5882*2|5883*2|5884*2,SH,245", --上海ETF沪市资金
|
||||||
|
XT_GE_SH_SUBSCRIPTION_ETF = "5100*3|5101*3|5102*3|5103*3|5104*3|5105*3|5106*3|5107*3|5108*3,SH,246", --上海ETF认购
|
||||||
|
XT_GE_SH_SUBSCRIPTION_REPAYMENT_ETF = "5100*4|5101*4|5102*4|5103*4|5104*4|5105*4|5106*4|5107*4|5108*4,SH,247", --上海ETF认购扣款还款代码
|
||||||
|
XT_GE_SH_NON_FUND_ETF = "5100*5|5101*5|5102*5|5103*5|5104*5|5105*5|5106*5|5107*5|5108*5,SH,248", --上海ETF非沪市资金
|
||||||
|
XT_GE_SH_TRANSACTION_ETF_CROSS_MARKET = "5103*0|5105*0|512**0|515**0|516**0|560**0|561**0|562**0|563**0|517**0|5883*0|5884*0|5885**|5886**,SH,249", --上海跨市场ETF交易
|
||||||
|
XT_GE_SH_CR_ETF_CROSS_MARKET = "5103*1|5105*1|512**1,SH,250", --上海跨市场ETF申赎
|
||||||
|
XT_GE_SH_FUND_ETF_CROSS_MARKET = "5103*2|5105*2|512**2,SH,251", --上海跨市场ETF沪市资金
|
||||||
|
XT_GE_SH_SUBSCRIPTION_ETF_CROSS_MARKET = "5103*3|5105*3|512**3,SH,252", --上海跨市场ETF认购
|
||||||
|
XT_GE_SH_SUBSCRIPTION_REPAYMENT_ETF_CROSS_MARKET = "5103*4|5105*4|512**4,SH,253", --上海跨市场ETF认购扣款还款
|
||||||
|
XT_GE_SH_NON_FUND_ETF_CROSS_MARKET = "5103*5|5105*5|512**5,SH,254", --上海跨市场ETF非沪市资金
|
||||||
|
XT_GE_SH_FUND_ETF_CROSS_BORDER = "5109*2|513**2,SH,255", --上海跨境ETF沪市资金
|
||||||
|
XT_GE_SH_SUBSCRIPTION_ETF_CROSS_BORDER = "5109*3|513**3,SH,256", --上海跨境ETF认购
|
||||||
|
XT_GE_SH_SUBSCRIPTION_REPAYMENT_ETF_CROSS_BORDER = "5109*4|513**4,SH,257", --上海跨境ETF认购扣款还款
|
||||||
|
XT_GE_SH_NON_FUND_ETF_CROSS_BORDER = "5109*5|513**5,SH,258", --上海跨境ETF非沪市资金
|
||||||
|
XT_GE_SH_TRANSACTION_ETF_LOAN = "5110*0|5112*0,SH,259", --上海债券ETF交易
|
||||||
|
XT_GE_SH_SUBSCRIPTION_ETF_LOAN = "5110*3|5112*3,SH,260", --上海债券ETF认购
|
||||||
|
XT_GE_SH_SUBSCRIPTION_REPAYMENT_ETF_LOAN = "5110*4|5112*4,SH,261", --上海债券ETF认购扣款还款
|
||||||
|
XT_GE_SH_NON_FUND_ETF_LOAN = "5110*5|5112*5,SH,262", --上海债券ETF非沪市资金
|
||||||
|
XT_GE_SH_TRANSACTION_ETF_CR_LOAN = "5113*0|5114*0|5115*0,SH,263", --上海现金申赎债券ETF交易
|
||||||
|
XT_GE_SH_FUND_ETF_CR_LOAN = "5113*2|5114*2|5115*2,SH,264", --上海现金申赎债券ETF沪市资金
|
||||||
|
XT_GE_SH_SUBSCRIPTION_ETF_CR_LOAN = "5113*3|5114*3|5115*3,SH,265", --上海现金申赎债券ETF认购
|
||||||
|
XT_GE_SH_SUBSCRIPTION_REPAYMENT_ETF_CR_LOAN = "5113*4|5114*4|5115*4,SH,266", --上海现金申赎债券ETF认购扣款还款
|
||||||
|
XT_GE_SH_NON_FUND_ETF_CR_LOAN = "5113*5|5114*5|5115*5,SH,267", --上海现金申赎债券ETF非沪市资金
|
||||||
|
XT_GE_SH_TRANSACTION_ETF_MONETARY_FUND= "5116*0|5117*0|5118*0|5119*0,SH,268", --上海货币ETF交易
|
||||||
|
XT_GE_SH_SUBSCRIPTION_ETF_MONETARY_FUND= "5116*3|5117*3|5118*3|5119*3,SH,269", --上海货币ETF认购
|
||||||
|
XT_GE_SH_SUBSCRIPTION_REPAYMENT_ETF_MONETARY_FUND= "5116*4|5117*4|5118*4|5119*4,SH,270", --上海货币ETF认购扣款还款
|
||||||
|
XT_GE_SH_NON_FUND_ETF_MONETARY_FUND= "5116*5|5117*5|5118*5|5119*5,SH,271", --上海货币ETF非沪市资金
|
||||||
|
XT_GE_SH_SUBSCRIPTION_ETF_GOLD = "518**3,SH,272", --上海黄金ETF认购
|
||||||
|
XT_GE_SH_SUBSCRIPTION_FUND_ETF_GOLD = "518**4,SH,273", --上海黄金ETF基金认购资金
|
||||||
|
XT_GE_SH_CR_FUND_ETF_GOLD = "518**5,SH,274", --上海黄金ETF基金申赎资金
|
||||||
|
XT_GE_SH_MONETARY_FUND_SUBSCRIPTION = "5218**,SH,275", --上海新货币式基金认购
|
||||||
|
XT_GE_SH_WARRANT_CREATION_CANCEL = "581***,SH,276", --上海权证创设/注销
|
||||||
|
XT_GE_SH_TECH_BOARD = "688***|689***,SH,277", --上海科创板
|
||||||
|
XT_GE_SH_SUBSCRIPTION_TECH_BOARD = "787***,SH,278", --上海科创板新股申购
|
||||||
|
XT_PLEDGE_REPURCHASE_SH_DAY_1 = "206001,SH,279", --上海一天协议回购
|
||||||
|
XT_PLEDGE_REPURCHASE_SH_DAY_7 = "206007,SH,280", --上海七天协议回购
|
||||||
|
XT_PLEDGE_REPURCHASE_SH_DAY_14 = "206014,SH,281", --上海14天协议回购
|
||||||
|
XT_PLEDGE_REPURCHASE_SH_DAY_21 = "206021,SH,282", --上海21天协议回购
|
||||||
|
XT_PLEDGE_REPURCHASE_SH_DAY_30 = "206030,SH,283", --上海30天协议回购
|
||||||
|
XT_PLEDGE_REPURCHASE_SH_DAY_90 = "206090,SH,284", --上海90天协议回购
|
||||||
|
XT_PLEDGE_REPURCHASE_SH_DAY_180 = "206180,SH,285", --上海180天协议回购
|
||||||
|
XT_PLEDGE_REPURCHASE_SH_DAY_270 = "206270,SH,286", --上海270天协议回购
|
||||||
|
XT_PLEDGE_REPURCHASE_SH_DAY_365 = "206365,SH,287", --上海365天协议回购
|
||||||
|
XT_GE_SH_TECH_BOARD_CDR = "689***,SH,288",--上海科创板CDR
|
||||||
|
XT_GE_SH_TECH_BOARD_CDR_DISTRIBUTION = "796***,SH,289",--上海科创板CDR配号
|
||||||
|
XT_GE_SH_TECH_BOARD_CDR_SUBSCRIBE = "795***,SH,290", --上海科创板CDR申购
|
||||||
|
XT_GE_SH_TECH_BOARD_CDR_ALLOTMEN = "797***,SH,291", --上海科创板CDR配股
|
||||||
|
XT_GE_SH_QUOTATION_REPURCHASE_DAY_21 = "205021,SH,292", --沪市21天报价回购
|
||||||
|
XT_GE_SH_QUOTATION_REPURCHASE_DAY_35 = "205035,SH,293", --沪市35天报价回购
|
||||||
|
XT_GE_SH_QUOTATION_REPURCHASE_DAY_210 = "205210,SH,294", --沪市210天报价回购
|
||||||
|
XT_GE_SH_QUOTATION_REPURCHASE_DAY_245 = "205245,SH,295", --沪市245天报价回购
|
||||||
|
XT_GE_SH_QUOTATION_REPURCHASE_DAY_301 = "205301,SH,296", --沪市301天报价回购
|
||||||
|
XT_GE_SH_QUOTATION_REPURCHASE_DAY_357 = "205357,SH,297", --沪市357天报价回购
|
||||||
|
XT_GE_SH_NON_PUBLIC_CORPORATE_LOAN = "125***|135***|145***|150***|151***|162***|166***|167***|177***|178***|194***|196***|197***|1823**|1824**|1825**|1826**|1827**|1828**|1829**|114***|250***|251***|252***|253***|254***,SH,298", --上海非公开发行公司债券
|
||||||
|
XT_GE_SH_ENTERPROSE_SUPPORT_AUCTION_BOND = "142***|131***,SH,299",--企业支持债券(竞价系统可交易)
|
||||||
|
XT_GE_SH_ABS_TRANSFER = "149***,SH,300", --沪市资产支持证券挂牌转让
|
||||||
|
XT_GE_SH_TMFR = "5116*1|5117*1|5118*1|5119*1|5195*1|5199*1,SH,301",--上海交易型货币及其它货币基金申赎
|
||||||
|
XT_GE_SH_PUBLIC_INFRASTRUCTURE_FUND = "5080**,SH,302",--上海公募基础设施基金
|
||||||
|
XT_GE_SH_DIRECTIONAL_CONVERTIBALE_BOND = "1108**|1109**,SH,303", --沪市定向可转债
|
||||||
|
XT_GE_SH_50_ETF = "510050,SH,350",
|
||||||
|
XT_GE_SH_300_ETF = "510300,SH,351",
|
||||||
|
XT_GE_SH_PUBLIC_CORPORATE_TRADE_LOAN = "115***|136***|143***|163***|175***|185***|188***|240***|241***,SH,352", --上海公开发行公司债现券交易
|
||||||
|
XT_GE_SH_TECH_BOARD_ETF = "5880*1|5881*1|5882*1|5883*1|5884*1,SH,353", -- 上海科创板etf申赎
|
||||||
|
XT_GE_SH_NON_PUBLIC_CONVERTIBLE_CORPORATE_LOAN = "1108**|1109**,SH,354", -- 上海非公开发行可转换公司债券
|
||||||
|
XT_GE_SH_TECH_BOARD_CONVERTIBLE_BOND = "1180**|1181**|1182**|1183**|1184**,SH,355", -- 上海科创板可转债
|
||||||
|
XT_GE_SH_TECH_BOARD_CONVERTIBLE_BOND_IPO = "718***,SH,356", -- 上海科创板可转债申购
|
||||||
|
XT_GE_SH_TECH_BOARD_CONVERTIBLE_BOND_IPO_DISTRIBUTION = "719***,SH,357", -- 上海科创板可转债申购配号
|
||||||
|
XT_GE_SH_TECH_BOARD_SHARES_CONVERTIBLE_BOND = "726***,SH,358", -- 上海科创板可转债配债
|
||||||
|
XT_GE_SH_ALLOW_PLEDGE_BOND = "009***|010***|019***|020***|105***|110***|113***|120***|122***|123***|126***|127***|129***|130***|136***|137***|140***|143***|147***|152***|155***|157***|160***|163***|171***|173***|175***|188***,SH,359", --上海允许质押出入库债券
|
||||||
|
XT_GE_SH_CPB_SWAP = "1380**|1381**|1382**|1383**|1384**,SH,360",--上海可交换私募债换股
|
||||||
|
XT_GE_SH_GOVERNMENT_BANK_FINANCE_LOAN_DISTRIBUTE_SALE = "7512**|7513**,SH,361", --政策性银行金融债券分销
|
||||||
|
XT_GE_SH_LOCAL_GOVERNMENT_LOAN_ONLINE_DISTRIBUTE_SALE = "75190*|75191*|75192*|75193*|75194*|75195*|75196*,SH,362", --地方政府债券网上分销
|
||||||
|
XT_GE_SH_PLACING = "703***,SH,363", -- 上海配售
|
||||||
|
XT_GE_SH_TECH_BOARD_ETF_ETFCODE = "5880*0|5881*0|5882*0|5883*0|5884*0,SH,364", -- 上海科创板etf
|
||||||
|
XT_GE_SH_MAIN_BOARD = "60****,SH,365", -- 沪市主板
|
||||||
|
XT_GE_SH_500_ETF = "510500,SH,366", -- 上海500etf
|
||||||
|
XT_GE_SH_TECH_BOARD_50_ETF = "588080,SH,367", --科创板50ETF
|
||||||
|
XT_GE_SH_TECH_50_ETF = "588000,SH,368", --科创50ETF
|
||||||
|
XT_GE_SH_GOV_ALLOW = "1279**|1848**|1849**,SH,369",--上海政府支持债券
|
||||||
|
|
||||||
|
XT_GE_SZ_A = "00****|30****,SZ,10001",--深市A股
|
||||||
|
XT_GE_SZ_B = "20****,SZ,10002",--深市B股
|
||||||
|
XT_GE_SZ_FUND = "15****|16****|18****,SZ,10003",--深市封基
|
||||||
|
XT_GE_SZ_MAIN_BOARD = "000***|001***|002***|003***|004***,SZ,10004",--深市主板
|
||||||
|
XT_GE_SZ_SME_BOARD = "000000,SZ,10005",--深市中小板
|
||||||
|
XT_GE_SZ_GEM_BORAD = "30****,SZ,10006",--深市创业板
|
||||||
|
XT_GE_SZ_INDEX = "39****|98****,SZ,10007",--深市指数
|
||||||
|
XT_GE_SZ_ETF = "158***|159***,SZ,10008",--深市ETF
|
||||||
|
XT_GE_SZ_WARRANT = "03****,SZ,10009",--深市权证
|
||||||
|
XT_GE_SZ_GLR = "131990,SZ,10010",--深市国债回购(131990不是的,需要业务支持)
|
||||||
|
XT_GE_SZ_GLIB = "100***|101***|102***|103***|104***|105***|106***|107***,SZ,10011",--深市附息国债
|
||||||
|
XT_GE_SZ_GLD = "108***|110***,SZ,10012",--深市贴现国债
|
||||||
|
XT_GE_SZ_CB = "112***,SZ,10013",--深市公司债
|
||||||
|
XT_GE_SZ_EB = "111***,SZ,10014",--深市企业债
|
||||||
|
XT_GE_SZ_SB = "115***,SZ,10015",--深市分离债
|
||||||
|
XT_GE_SZ_MSP_PB = "118***|114***|133***|134***,SZ,10016",--深市私募债
|
||||||
|
XT_GE_SZ_SFMP = "119***,SZ,10017",--深市专项资金管理规划
|
||||||
|
XT_GE_SZ_LGL = "109***|104***|105***|19****|173***,SZ,10018",--深市地方政府债
|
||||||
|
XT_GE_SZ_CBB = "121***|122***|123***|124***|125***|126***|127***|128***|129***|10165*|10166*|10167*|10168*|10169*,SZ,10019",--深市可转债
|
||||||
|
XT_GE_SZ_STANDAR_B = "131990|131991|SZRQ88,SZ,10020",--深市标准券
|
||||||
|
XT_GE_SZ_CEF = "184***,SZ,10021",--深市封闭式基金
|
||||||
|
XT_GE_SZ_LOF = "16****,SZ,10022",--深市LOF
|
||||||
|
XT_GE_SZ_GF = "150***|151***|160516|161207|162509|161715|161816|161812|161819|160417|160718|502040|163406|163109|165310|164809|164808,SZ,10023",--深市分级基金
|
||||||
|
XT_GE_SZ_SCB_PB = "117***|1156**|1157**|1158**|1159**,SZ,10024",--深市 中小企业可交换私募债
|
||||||
|
XT_GE_SZ_SC_SB = "1189**|1151**|1152**|1153**|1154**|1155**,SZ,10025",--深市证券公司次级债
|
||||||
|
XT_GE_SZ_SPB = "1180**|1181**|1182**|1183**|1184**|1185**|1186**|1187**|1188**,SZ,10026",--深市其他中小企业私募债
|
||||||
|
XT_GE_SZ_ASS = "1161**|1162**|1163**|1164**|1191**|1192**|1193**|1194**|138***|139***|135***|136***|137***|143***|144***|146***,SZ,10027",--深市企业资产支持证券
|
||||||
|
XT_GE_SZ_GSF = "150***|151***,SZ,10028",--深市分级基金子基金
|
||||||
|
XT_GE_SZ_CB_ETF = "159920|159941|159954|159960|159963|159605|159607|159612|159615|159632|159636|159655|159711|159712|159718|159726|159735|159740|159741|159742|159747|159750|159751|159776|159788|159792|159822|159823|159850|159866|159892|159519|159696|159699|159509|159513|159506|159501|159659,SZ,10029",--深市跨境ETF
|
||||||
|
XT_GE_SZ_CB_LOF = "160125|160416|160717|160719|161116|161210|161714|161815|162411|164701|164705|164815|164824|165510|165513|164906|163208|162719|162416|162415|161831|161229|161130|161129|161128|161127|161126|161125|161124|160924|160923|160922|160723|160644|160322|160216|160140|160138|159691|159660|159688|159687,SZ,10030",--深市跨境LOF
|
||||||
|
XT_GE_SZ_ICEF = "150***,SZ,10031",--深市创新型封闭式基金
|
||||||
|
XT_GE_SZ_ZB_CCB = "127***,SZ,10032",--深市主板可转换公司债券
|
||||||
|
XT_GE_SZ_CYB_CCB = "123***,SZ,10033",--深市创业板可转换公司债券
|
||||||
|
XT_GE_SZ_ZXB_CCB = "128***,SZ,10034",--深市中小板可转换公司债券
|
||||||
|
XT_GE_SZ_GLRA = "131***,SZ,10035",--深市国债回购(131900不是的,需要业务支持)
|
||||||
|
XT_GE_SZ_GOLD = "159934|159937|159812|159830|159831|159832|159833|159834,SZ,10036",--深市黄金
|
||||||
|
XT_GE_SZ_RTMF = "1590**,SZ,10037",--深市实时申赎货币基金
|
||||||
|
XT_GE_SZ_XGED = "SZXGED,SZ,10038",--深新股额
|
||||||
|
XT_GE_SZ_SEO = "07****|37****,SZ,10039",--深增发股
|
||||||
|
XT_GE_SZ_SA = "08****|380***,SZ,10040",--深圳配股
|
||||||
|
XT_GE_SZ_LOAN_ETF = "159926|159972|159988|159816|159649|159650|159651,SZ,10041",--深圳债券ETF
|
||||||
|
XT_GE_SZ_LOAN_REPURCHASE_DAY_1 = "131810,SZ,10042", --深市1天回购
|
||||||
|
XT_GE_SZ_LOAN_REPURCHASE_DAY_2 = "131811,SZ,10043", --深市2天回购
|
||||||
|
XT_GE_SZ_LOAN_REPURCHASE_DAY_3 = "131800,SZ,10044", --深市3天回购
|
||||||
|
XT_GE_SZ_LOAN_REPURCHASE_DAY_4 = "131809,SZ,10045", --深市4天回购
|
||||||
|
XT_GE_SZ_LOAN_REPURCHASE_DAY_7 = "131801,SZ,10046", --深市7天回购
|
||||||
|
XT_GE_SZ_LOAN_REPURCHASE_DAY_14 = "131802,SZ,10047", --深市14天回购
|
||||||
|
XT_GE_SZ_LOAN_REPURCHASE_DAY_28 = "131803,SZ,10048", --深市28天回购
|
||||||
|
XT_GE_SZ_LOAN_REPURCHASE_DAY_28_UPPER = "131805|131806,SZ,10049", --深市28天以上回购
|
||||||
|
XT_GE_SZ_POB = "148***|149***,SZ,10050",--深圳公募公司债券
|
||||||
|
XT_GE_BANK_LOAN = "1086**|1087**|1088**|1089**,SZ,10051",--深圳政策性金融债
|
||||||
|
XT_GE_SZ_GOV_ALLOW = "1119**|130***,SZ,10052",--政府支持债券
|
||||||
|
XT_GE_SZ_INNOVATE_KZZ = "1210**|1211**|1212**|1213**|1214**,SZ,10053",--创新创业可转债
|
||||||
|
XT_GE_SZ_LOAN_IPO = "07****|37****|120***,SZ,10054",--深圳债券申购代码
|
||||||
|
XT_GE_SZ_ENTERPROSE_SUPPORT_BOND = "116***|119***|138***|139***|135***|136***|137***|143***|144***|146***,SZ,10055",--企业支持债券
|
||||||
|
XT_GE_SZ_CDR_ALLOTMEN = "08****,SZ,10056",--深圳CDR配股代码
|
||||||
|
XT_PLEDGE_REPURCHASE_SZ_DAY_1 = "131981,SZ,10057", --深圳1天协议回购
|
||||||
|
XT_PLEDGE_REPURCHASE_SZ_DAY_7 = "131982,SZ,10058", --深圳7天协议回购
|
||||||
|
XT_PLEDGE_REPURCHASE_SZ_DAY_14 = "131983,SZ,10059", --深圳14天协议回购
|
||||||
|
XT_PLEDGE_REPURCHASE_SZ_DAY_21 = "131984,SZ,10060", --深圳21天协议回购
|
||||||
|
XT_PLEDGE_REPURCHASE_SZ_DAY_30 = "131985,SZ,10061", --深圳30天协议回购
|
||||||
|
XT_PLEDGE_REPURCHASE_SZ_DAY_90 = "131986,SZ,10062", --深圳90天协议回购
|
||||||
|
XT_PLEDGE_REPURCHASE_SZ_DAY_180 = "131987,SZ,10063", --深圳180天协议回购
|
||||||
|
XT_PLEDGE_REPURCHASE_SZ_DAY_270 = "131988,SZ,10064", --深圳270天协议回购
|
||||||
|
XT_PLEDGE_REPURCHASE_SZ_DAY_365 = "131989,SZ,10065", --深圳365天协议回购
|
||||||
|
XT_GE_SZ_NON_PUBLIC_PREFERED_SHARES_TRANSFER = "140***,SZ,10066", --深市非公开优先股转让
|
||||||
|
XT_GE_SZ_LOAN_ISSUANCE_DISTRIBUTE_SALE = "10165*|10166*|10167*|10168*|10169*,SZ,10067", --深市债券分销
|
||||||
|
XT_GE_SZ_LOAN_REITS = "1215**|1216**|1217**|1218**|1219**,SZ,10068", --深市债券REITS代码
|
||||||
|
XT_GE_SZ_LOAN_DIRECTIONAL = "10165*|10166*|10167*|10168*|10169*|124***,SZ,10069", --深市定向可转债
|
||||||
|
XT_GE_SZ_300_ETF = "159919,SZ,10071",
|
||||||
|
XT_GE_SZ_OLDSHARES_PREFERRED_CONVERTIBLE_BOND = "38****,SZ,10072",--创业板上市公司可转债老股东优先配售代码
|
||||||
|
XT_GE_SZ_GEM_BORAD_DR = "3098**|3099**,SZ,10073", --创业板存托凭证代码区间309800-309999
|
||||||
|
XT_GE_SZ_MAIN_SME_BORAD_DR = "0010*1|0010*2|0010*3|0010*4|0010*5|0010*6|0010*7|0010*8|0010*9|0011**,SZ,10074", --主板中小板存托凭证代码区间 001001-001199
|
||||||
|
XT_GE_SZ_SHARES_CONVERTIBLE_BOND = "08****|38****,SZ,10075",--深市配转债代码
|
||||||
|
XT_GE_SZ_PUBLIC_INFRASTRUCTURE_FUND = "180***,SZ,10076",--深市公募基础设施基金
|
||||||
|
XT_GE_SZ_DIRECTIONAL_CONVERTIBALE_BOND = "124***,SZ,10077", --深市定向可转债
|
||||||
|
XT_GE_SZ_EXCHANGEABLE_LOAN = "120***|117***|1156**|1157**|1158**|1159**,SZ,10078",--深圳可交换公司债
|
||||||
|
XT_GE_SZ_ETF_CROSS_MARKET = "159602|159603|159606|159608|159609|159610|159611|159613|159616|159617|159618|159619|159620|159621|159623|159625|159628|159629|159630|159631|159633|159635|159637|159638|159639|159640|159641|159642|159643|159645|159646|159647|159658|159663|159667|159701|159702|159703|159707|159710|159713|159715|159717|159719|159720|159723|159725|159728|159729|159730|159731|159732|159733|159736|159738|159739|159743|159745|159748|159752|159755|159757|159758|159760|159761|159763|159766|159767|159768|159769|159770|159775|159778|159779|159780|159781|159782|159786|159787|159789|159791|159793|159795|159796|159797|159798|159813|159815|159819|159820|159824|159825|159827|159828|159835|159837|159838|159839|159840|159841|159842|159843|159845|159847|159848|159849|159851|159852|159853|159855|159856|159857|159858|159859|159861|159862|159863|159864|159865|159867|159870|159871|159872|159873|159875|159876|159877|159880|159881|159883|159885|159886|159887|159888|159889|159890|159891|159895|159896|159898|159899|159999|159980|159981|159985|159649|159650|159801|159805|159806|159807|159811|159919|159922|159923|159925|159928|159929|159930|159931|159933|159935|159936|159938|159939|159940|159944|159945|159951|159953|159959|159962|159965|159968|159973|159974|159982|159986|159987|159990|159992|159993|159994|159995|159996|159997|159998|159809|159983|159978|159979|159976|159984|159869|159790|159783|159601|159672|159676|159685|159653|159657|159689|159671|159678|159679|159666|159652|159656|159669|159683|159665|159677|159675|159662|159627|159680|159907|159515|159517|159698|159510|159511|159516|159512|159503|159686|159673|159508|159690|159507|159692|159695|159622|159670|159697,SZ,10079",--深圳跨市etf
|
||||||
|
XT_GE_SZ_100_ETF = "159901,SZ,10080",--深证100ETF股票期权
|
||||||
|
XT_GE_SZ_500_ETF = "159922,SZ,10081",--深圳500ETF
|
||||||
|
XT_GE_SZ_CYB_ETF = "159915,SZ,10082",--创业板ETF
|
||||||
|
|
||||||
|
XT_GE_MARKET_NEW3BOARD_DELISTED = "400***|420***,NEEQ,20000", --两网及退市公司股票 新三板
|
||||||
|
XT_GE_NEW3BOARD_PREFERED_SHARES_TRANSFER = "820***,NEEQ,20001", --全国股转非公开优先股转让
|
||||||
|
XT_GE_BJ = "43****|83****|87****,BJ,20002",--北交
|
||||||
|
XT_GE_BJ_SUBSCRIBE = "889***,BJ,20003",--北交所申购
|
||||||
|
},
|
||||||
|
--存放扩展类型,能用基础类型描述的,尽量用基础类型,基础类型执行效率高于扩展类型
|
||||||
|
--扩展类型可以是基础类型,也可以由基础类型通过简单的&(且)|(或)运算得出,允许用小括号调整运算优先级
|
||||||
|
--扩展类型的表达式里可以使用之前已经定义的扩展类型
|
||||||
|
--例句 xtf = "(xtd|xta&(xtb&xtc))|((xta&xtb))|xta,303",
|
||||||
|
extraTypes = {
|
||||||
|
XT_GE_EXTRA_STOCK_A = "XT_GE_SH_A|XT_GE_SZ_A|XT_GE_BJ,100001",--沪深A股
|
||||||
|
XT_GE_EXTRA_STOCK_B = "XT_GE_SH_B|XT_GE_SZ_B,100002",--沪深B股
|
||||||
|
XT_GE_EXTRA_STOCK = "XT_GE_EXTRA_STOCK_A|XT_GE_EXTRA_STOCK_B,100003",--沪深狭义股票
|
||||||
|
XT_GE_EXTRA_FUND = "XT_GE_SH_FUND|XT_GE_SZ_FUND,100004",--沪深封基
|
||||||
|
XT_GE_EXTRA_STOCK_INDEX = "XT_GE_SZ_INDEX|XT_GE_SH_INDEX,100005",--指数
|
||||||
|
XT_GE_EXTRA_MARKET_CF = "XT_GE_MARKET_SQ|XT_GE_MARKET_DS|XT_GE_MARKET_ZS,100006",--商品期货
|
||||||
|
XT_GE_EXTRA_MARKET_FU = "XT_GE_MARKET_ZJ|XT_GE_EXTRA_MARKET_CF,100007",--期货市场
|
||||||
|
XT_GE_EXTRA_MARKET_ST = "XT_GE_MARKET_SH|XT_GE_MARKET_SZ,100008",--股票
|
||||||
|
XT_GE_EXTRA_SZ_CGL = "XT_GE_SZ_GLIB|XT_GE_SZ_GLD,100009",--深市中央政府债(国债)
|
||||||
|
XT_GE_EXTRA_SZ_GL = "XT_GE_EXTRA_SZ_CGL|XT_GE_SZ_LGL,100010",--深市政府债
|
||||||
|
XT_GE_EXTRA_SZ_LOAN = "XT_GE_SZ_GLIB|XT_GE_SZ_GLD|XT_GE_SZ_CB|XT_GE_SZ_CBB|XT_GE_SZ_EB|XT_GE_SZ_SB|XT_GE_SZ_MSP_PB|XT_GE_SZ_SFMP|XT_GE_SZ_LGL|XT_GE_SZ_POB|XT_GE_SZ_SCB_PB|XT_GE_SZ_ZB_CCB|XT_GE_SZ_CYB_CCB|XT_GE_SZ_ZXB_CCB|XT_GE_SZ_LOAN_REITS|XT_GE_SZ_LOAN_DIRECTIONAL|XT_GE_SZ_EXCHANGEABLE_LOAN|XT_GE_SZ_ENTERPROSE_SUPPORT_BOND|XT_GE_SZ_GOV_ALLOW,100011",--深市所有债券
|
||||||
|
XT_GE_EXTRA_STOCK_EX = "!XT_GE_EXTRA_STOCK_INDEX,100012",--广义的股票
|
||||||
|
XT_GE_EXTRA_ETF = "XT_GE_SH_ETF|XT_GE_SZ_ETF,100013",--ETF
|
||||||
|
XT_GE_EXTRA_CLOSED_ENDED_FUNDS = "XT_GE_SH_CLOSED_ENDED_FUNDS|XT_GE_SZ_CEF,100014",--封闭式基金
|
||||||
|
XT_GE_EXTRA_WARRANT = "XT_GE_SH_WARRANT|XT_GE_SZ_WARRANT,100015",--权证
|
||||||
|
XT_GE_EXTRA_LOAN = "XT_GE_EXTRA_SH_LOAN|XT_GE_EXTRA_SZ_LOAN,100016",--债券
|
||||||
|
XT_GE_EXTRA_SZ_GLR = "XT_GE_SZ_GLRA&(!XT_GE_SZ_GLR),100017",--深市国债回购
|
||||||
|
XT_GE_EXTRA_STANDARD_BOND = "XT_GE_SH_STANDARD_BOND|XT_GE_SZ_STANDAR_B,100018",--标准券
|
||||||
|
XT_GE_EXTRA_POLICY_JRZ = "XT_GE_SH_POLICY_JRZ,100019",
|
||||||
|
XT_GE_EXTRA_GLR = "XT_GE_SH_GOVERNMENT_LOAN_REPURCHASE|XT_GE_EXTRA_SZ_GLR,100020",--债券回购
|
||||||
|
XT_GE_EXTRA_REPURCHASE_IMPAWN = "XT_GE_SH_GOVERNMENT_LOAN_REPURCHASE_IMPAWN|XT_GE_EXTRA_SZ_GLR,100021",--质押式回购
|
||||||
|
XT_GE_EXTRA_GOLD = "XT_GE_SH_GOLD|XT_GE_SZ_GOLD,100022",--黄金
|
||||||
|
XT_GE_EXTRA_RTMF = "XT_GE_SH_RTMF|XT_GE_SZ_RTMF,100023",--实时申赎货币基金
|
||||||
|
XT_GE_EXTRA_MONETARY_FUND = "XT_GE_EXTRA_RTMF|XT_GE_SH_TMF|XT_GE_SH_OTHER_MF,100024",--货币基金
|
||||||
|
XT_GE_EXTRA_SH_IPO = "XT_GE_SH_STOCK_IPO|XT_GE_SH_LOAN_IPO|XT_GE_SH_FUND_IPO,100025",--上海申购代码
|
||||||
|
XT_GE_EXTRA_CB_ETF = "XT_GE_SZ_CB_ETF|XT_GE_SH_CROSS_BORDER_ETF,100026",--跨境ETF
|
||||||
|
XT_GE_EXTRA_CB_LOF = "XT_GE_SH_CROSS_BORDER_LOF|XT_GE_SZ_CB_LOF,100027",--跨境LOF
|
||||||
|
XT_GE_EXTRA_STOCK_TRANABLE = "XT_GE_EXTRA_STOCK|XT_GE_EXTRA_FUND|XT_GE_EXTRA_ETF|XT_GE_EXTRA_WARRANT|XT_GE_SH_SUBSCRIBE|XT_GE_SZ_GEM_BORAD|XT_GE_EXTRA_GLR|XT_GE_EXTRA_LOAN|XT_GE_SF_FIXED_INCOME_ETF|XT_GE_EXTRA_GOLD|XT_GE_EXTRA_MONETARY_FUND|XT_GE_EXTRA_SZ_CGL|XT_GE_SH_CENTRAL_GOVERNMENT_LOAN|XT_GE_SH_LOCAL_GOVERNMENT_LOAN|XT_GE_SZ_LGL|XT_GE_EXTRA_SH_IPO|XT_GE_SH_PLEDGE|XT_GE_EXTRA_CB_ETF|XT_GE_EXTRA_CB_LOF|XT_GE_SH_SHARES_ALLOTMEN|XT_GE_SH_SHARES_CONVERTIBLE_BOND|XT_GE_SZ_CBB|XT_GE_SH_CONVERTIBALE_BOND|XT_GE_SH_SEO|XT_GE_SZ_SEO|XT_GE_SH_LOAN_CBB_SCB_NEW|XT_GE_SZ_LOAN_IPO|XT_GE_SZ_CDR_ALLOTMEN|XT_GE_SH_TECH_BOARD|XT_GE_SH_BOND_OFFER_REPURCHASE|XT_GE_SH_SUBSCRIPTION_TECH_BOARD|XT_GE_SH_TRANSACTION_ETF_CROSS_MARKET|XT_GE_BOND_DISTRIBUTION|XT_GE_SH_PUBLIC_PREFERED_SHARES|XT_GE_SH_NON_PUBLIC_PREFERED_SHARES_TRANSFER|XT_GE_SH_BOND_RESALE|XT_GE_SH_CONVERTIBALE_BOND_RESALE|XT_GE_SH_CONVERTIBLE_BOND_STOCK|XT_GE_SZ_NON_PUBLIC_PREFERED_SHARES_TRANSFER|XT_GE_SZ_SA|XT_GE_SZ_OLDSHARES_PREFERRED_CONVERTIBLE_BOND|XT_GE_SH_ENTERPROSE_SUPPORT_AUCTION_BOND|XT_GE_EXTRA_PUBLIC_INFRASTRUCTURE_FUND|XT_GE_BJ_SUBSCRIBE,100028",--可交易的
|
||||||
|
XT_GE_EXTRA_MAIN_BOARD = "XT_GE_SH_MAIN_BOARD|XT_GE_SZ_MAIN_BOARD,100029", --主板
|
||||||
|
XT_GE_EXTRA_INTRA_DAY = "XT_GE_EXTRA_LOAN|XT_GE_EXTRA_GOLD|XT_GE_SF_FIXED_INCOME_ETF|XT_GE_EXTRA_WARRANT|XT_GE_EXTRA_CB_ETF|XT_GE_EXTRA_CB_LOF|XT_GE_SH_TMF|XT_GE_SZ_RTMF|XT_GE_EXTRA_LOAN_ETF|XT_GE_EXTRA_MARKET_FU|XT_GE_MARKET_OP,100030", --回转交易
|
||||||
|
XT_GE_EXTRA_SH_DISTRIBUTION = "XT_GE_SH_NEW_SHARES_DISTRIBUTION|XT_GE_SH_PLACING_FIRST_DISTRIBUTION|XT_GE_SH_CONVERTIBLE_BOUND_DISTRIBUTION,100031", --上海配号
|
||||||
|
XT_GE_EXTRA_XGED = "XT_GE_SH_XGED|XT_GE_SZ_XGED,100032", --沪深新股申购额度
|
||||||
|
XT_GE_SHARES_ALLOTMEN = "XT_GE_SH_SHARES_ALLOTMEN|XT_GE_SZ_SA,100033", --沪深配股代码
|
||||||
|
XT_GE_EXTRA_FI_ETF = "XT_GE_SF_FIXED_INCOME_ETF,100034", --固定收益:跟踪债券指数的交易型开放式指数基金、交易型货币市场基金
|
||||||
|
XT_GE_EXTRA_ST_FIX = "XT_GE_SH_GOVERNMENT_LOAN_INTEREST_BEARING|XT_GE_SZ_GLIB|XT_GE_SH_GOVERNMENT_LOAN_DISCOUNT|XT_GE_SZ_GLD|XT_GE_SH_GOVERNMENT_LOAN|XT_GE_EXTRA_SZ_GL|XT_GE_SZ_EB|XT_GE_SH_ENTERPRISE_BOND|XT_GE_SZ_MSP_PB|XT_GE_SH_CONVERTIBALE_BOND|XT_GE_SZ_CBB|XT_GE_SH_SEPERATION_BOND|XT_GE_SZ_SB|XT_GE_EXTRA_GLR|XT_GE_EXTRA_STANDARD_BOND|XT_GE_EXTRA_MONETARY_FUND|XT_GE_SF_FIXED_INCOME_ETF|XT_GE_SZ_LOAN_REITS|XT_GE_SZ_LOAN_DIRECTIONAL|XT_GE_EXTRA_FICC|XT_GE_EXTRA_LOAN,100035", --固定收益类
|
||||||
|
XT_GE_EXTRA_GF = "XT_GE_SH_GF|XT_GE_SZ_GF,100036", --分级基金
|
||||||
|
XT_GE_EXTRA_LOF = "XT_GE_SH_LOF|XT_GE_SZ_LOF,100037", --LOF
|
||||||
|
XT_GE_EXTRA_LOAN_ETF = "XT_GE_SH_LOAN_ETF|XT_GE_SZ_LOAN_ETF,100038", --债券ETF
|
||||||
|
XT_GE_EXTRA_SH_LOAN = "XT_GE_SH_GOVERNMENT_LOAN_INTEREST_BEARING|XT_GE_SH_GOVERNMENT_LOAN_DISCOUNT|XT_GE_SH_LOCAL_GOVERNMENT_LOAN|XT_GE_SH_CONVERTIBALE_BOND|XT_GE_SH_CORPORATE_BOND|XT_GE_SH_ENTERPRISE_BOND|XT_GE_SH_ASSET_SECURITIZATION|XT_GE_SH_SEPERATION_BOND_REPURCHASE|XT_GE_SH_FINANCIAL_BONDS|XT_GE_SH_CREDIT_ASSET_SUPPORTED|XT_GE_SH_EXCHANGEABLE_LOAN|XT_GE_SH_PRIVATELY_LOAN_TRANSFER|XT_GE_SH_SHORTTERM_CORPORATE_LOAN_TRANSFER|XT_GE_SH_EPB_TRANSFER|XT_GE_SH_CPB|XT_GE_SH_CPB_LOAN|XT_GE_SH_GOVERNMENT_LOAN|XT_GE_SH_SEPERATION_BOND|XT_GE_SH_LOAN_CBB_SCB_NEW|XT_GE_SH_MS_PRIVATE_PLACEMENT_BOND|XT_GE_SH_ENTERPROSE_SUPPORT_BOND|XT_GE_SH_PUBLIC_CORPORATE_TRADE_LOAN|XT_GE_SH_NON_PUBLIC_CORPORATE_LOAN|XT_GE_SH_ASS|XT_GE_SH_GOV_ALLOW,100039", --上海债券
|
||||||
|
XT_GE_EXTRA_REPURCHASE_DAY_1 = "XT_GE_SH_LOAN_REPURCHASE_DAY_1|XT_GE_SZ_LOAN_REPURCHASE_DAY_1,100040", --1天逆回购
|
||||||
|
XT_GE_EXTRA_REPURCHASE_DAY_2 = "XT_GE_SH_LOAN_REPURCHASE_DAY_2|XT_GE_SZ_LOAN_REPURCHASE_DAY_2,100041", --2天逆回购
|
||||||
|
XT_GE_EXTRA_REPURCHASE_DAY_3 = "XT_GE_SH_LOAN_REPURCHASE_DAY_3|XT_GE_SZ_LOAN_REPURCHASE_DAY_3,100042", --3天逆回购
|
||||||
|
XT_GE_EXTRA_REPURCHASE_DAY_4 = "XT_GE_SH_LOAN_REPURCHASE_DAY_4|XT_GE_SZ_LOAN_REPURCHASE_DAY_4,100043", --4天逆回购
|
||||||
|
XT_GE_EXTRA_REPURCHASE_DAY_7 = "XT_GE_SH_LOAN_REPURCHASE_DAY_7|XT_GE_SZ_LOAN_REPURCHASE_DAY_7,100044", --7天逆回购
|
||||||
|
XT_GE_EXTRA_REPURCHASE_DAY_14 = "XT_GE_SH_LOAN_REPURCHASE_DAY_14|XT_GE_SZ_LOAN_REPURCHASE_DAY_14,100045", --14天逆回购
|
||||||
|
XT_GE_EXTRA_REPURCHASE_DAY_28 = "XT_GE_SH_LOAN_REPURCHASE_DAY_28|XT_GE_SZ_LOAN_REPURCHASE_DAY_28,100046", --28天逆回购
|
||||||
|
XT_GE_EXTRA_REPURCHASE_DAY_28_UPPER = "XT_GE_SH_LOAN_REPURCHASE_DAY_28_UPPER|XT_GE_SZ_LOAN_REPURCHASE_DAY_28_UPPER,100047", --28天以上逆回购
|
||||||
|
XT_GE_EXTRA_NOT_CLOSING_AUCTION_MATCH = "XT_GE_EXTRA_SH_LOAN|XT_GE_SH_FUND|XT_GE_SH_ETF|XT_GE_SH_BOND_OFFER_REPURCHASE|XT_GE_SH_GOVERNMENT_LOAN_REPURCHASE|XT_GE_SH_PLEDGE,100070", --上交所不执行收盘集合竞价的品种
|
||||||
|
XT_GE_EXTRA_RATE_BOND = "XT_GE_SH_GOVERNMENT_LOAN|XT_GE_EXTRA_SZ_GL|XT_GE_SH_POLICY_JRZ|XT_GE_BANK_LOAN,100080", --利率类债券
|
||||||
|
XT_GE_EXTRA_FICC = "XT_GE_SH_CORPORATE_BOND|XT_GE_SH_OLD_GOVERNMENT_LOAN|XT_GE_SH_CENTRAL_GOVERNMENT_LOAN|XT_GE_SH_GOVERNMENT_LOAN_INTEREST_BEARING|XT_GE_SH_CONVERTIBALE_BOND|XT_GE_SH_ENTERPRISE_BOND|XT_GE_SH_MS_PRIVATE_PLACEMENT_BOND|XT_GE_SH_SEPERATION_BOND|XT_GE_SH_CREDIT_ASSET_SUPPORTED|XT_GE_SH_GOVERNMENT_LOAN|XT_GE_SH_ENTERPROSE_SUPPORT_BOND|XT_GE_SH_EXCHANGEABLE_LOAN|XT_GE_SH_PRIVATELY_LOAN_TRANSFER|XT_GE_SH_SHORTTERM_CORPORATE_LOAN_TRANSFER|XT_GE_SH_CPB_LOAN|XT_GE_SH_NON_PUBLIC_CONVERTIBLE_BOND_STOCK|XT_GE_SH_BOND_TRIPARTITE_REPURCHASE|XT_GE_SH_LOW_CORPORATE_BOND|XT_GE_SH_ASSET_BACKED_SECURITIES|XT_GE_SH_LOCAL_GOVERNMENT_LOAN|XT_GE_SH_NON_PUBLIC_CORPORATE_LOAN|XT_GE_SH_ASS|XT_GE_SH_FINANCIAL_BONDS|XT_GE_SH_BOND_PROTOCOL_REPURCHASE|XT_GE_SH_BOND_TRIPARTITE_REPURCHASE|XT_GE_SZ_LOAN_REITS|XT_GE_SZ_LOAN_DIRECTIONAL|XT_GE_SH_PUBLIC_CORPORATE_TRADE_LOAN|XT_GE_SH_PUBLIC_INFRASTRUCTURE_FUND|XT_GE_SH_GOV_ALLOW,100090", --固收
|
||||||
|
XT_GE_BOND_DISTRIBUTION = "XT_GE_SH_GOVERNMENT_LOAN_DISTRIBUTE_SALE|XT_GE_SH_LOCAL_GOVERNMENT_LOAN_DISTRIBUTE_SALE|XT_GE_SH_PUBLIC_LOAN_DISTRIBUTE_SALE|XT_GE_SH_LOAN_ISSUANCE_DISTRIBUTE_SALE|XT_GE_SZ_LOAN_ISSUANCE_DISTRIBUTE_SALE|XT_GE_SH_GOVERNMENT_BANK_FINANCE_LOAN_DISTRIBUTE_SALE|XT_GE_SH_LOCAL_GOVERNMENT_LOAN_ONLINE_DISTRIBUTE_SALE,100200", --债券分销
|
||||||
|
XT_GE_EXTRA_50_ETF = "XT_GE_SH_50_ETF,100100",
|
||||||
|
XT_GE_EXTRA_300_ETF = "XT_GE_SH_300_ETF|XT_GE_SZ_300_ETF,100101",
|
||||||
|
XT_GE_EXTRA_BLOCK_TRADING = "XT_GE_EXTRA_STOCK_TRANABLE|XT_GE_SH_ASS|XT_GE_SZ_ASS,100102", --大宗平台可交易
|
||||||
|
XT_GE_EXTRA_PUBLIC_INFRASTRUCTURE_FUND = "XT_GE_SH_PUBLIC_INFRASTRUCTURE_FUND|XT_GE_SZ_PUBLIC_INFRASTRUCTURE_FUND,100103",--公募基础设施基金
|
||||||
|
XT_GE_EXTRA_DIRECTIONAL_CONVERTIBALE_BOND = "XT_GE_SH_DIRECTIONAL_CONVERTIBALE_BOND|XT_GE_SZ_DIRECTIONAL_CONVERTIBALE_BOND,100104",--定向可转债
|
||||||
|
XT_GE_EXTRA_ALLOW_PLEDGE = "XT_GE_SH_ALLOW_PLEDGE_BOND|XT_GE_EXTRA_SZ_LOAN,100105",--允许质押出入库债券
|
||||||
|
XT_GE_EXTRA_SH_CORPORATE_LOAN = "XT_GE_SH_NON_PUBLIC_CONVERTIBLE_CORPORATE_LOAN|XT_GE_SH_CPB|XT_GE_SH_NON_PUBLIC_CORPORATE_LOAN|XT_GE_SH_MS_PRIVATE_PLACEMENT_BOND,100106",--上海私募债
|
||||||
|
XT_GE_EXTRA_SZ_CORPORATE_LOAN = "XT_GE_SZ_LOAN_DIRECTIONAL|XT_GE_SZ_SCB_PB|XT_GE_SZ_MSP_PB|XT_GE_SZ_SPB,100107",--深圳私募债
|
||||||
|
XT_GE_EXTRA_100_ETF = "XT_GE_SZ_100_ETF,100108",--100ETF股票期权
|
||||||
|
XT_GE_EXTRA_500_ETF = "XT_GE_SH_500_ETF|XT_GE_SZ_500_ETF,100109",--500ETF
|
||||||
|
XT_GE_EXTRA_CYB_ETF = "XT_GE_SZ_CYB_ETF,100110",--创业板ETF
|
||||||
|
},
|
||||||
|
optionTypes
|
||||||
|
= {
|
||||||
|
XT_GE_SF_FTOPTION = "au*****?|cu*****?|al*****?|ru*****?|zn*****?|ag*****?|rb*****?|br*****?,SFO,100050", --上期所期权 四位到期数字,一位C/P
|
||||||
|
XT_GE_ZF_FTOPTION = "SR****?|CF****?|TA****?|MA****?|RM****?|ZC****?|OI****?|PK****?|PX****?|SH****?,ZFO,100051", --郑商所期权 三位到期数字,一位C/P
|
||||||
|
XT_GE_DF_FTOPTION = "m****-*-?|c****-*-?|i****-*-?|y****-*-?|p****-*-?|j****-*-?|jm****-*-?|pg****-*-?|v****-*-?|l****-*-?|pp****-*-?|a****-*-?|b****-*-?|eg****-*-?|eb****-*-?,DFO,100052", --大商所期权 四位到期数字,一位C/P
|
||||||
|
XT_GE_IF_FTOPTION = "HO?|IO?|MO?|ZO?|IF?^&&IO?,IFO,100053", --中金所期权,HO\IO期权专用,有IF套利期权合约
|
||||||
|
XT_GE_SF_ARBITAGE_FTOPTION = ",SFO,100054",--上期所套利期权
|
||||||
|
XT_GE_ZF_ARBITAGE_FTOPTION = ",ZFO,100055",--郑商所套利期权
|
||||||
|
XT_GE_DF_ARBITAGE_FTOPTION = ",DFO,100056",--大商所套利期权
|
||||||
|
XT_GE_IF_ARBITAGE_FTOPTION = "IF?^&&IO?|HO?^&&?|IO?^&&?,IFO,100057",--中金所套利期权
|
||||||
|
XT_GE_INE_FTOPTION = "bc*****?|lu*****?|nr*****?|sc*****?,INE,100058", --能源中心期权 四位到期数字,一位C/P
|
||||||
|
XT_GE_INE_ARBITAGE_FTOPTION = ",INE,100059",--能源中心套利期权
|
||||||
|
XT_GE_GF_FTOPTION = "si*****?|lc*****?,GFO,100060",--广期所期权
|
||||||
|
},
|
||||||
|
abroadMarkets = {
|
||||||
|
abroadFutureMarkets = "OSE,LPPM,CBF,LBMA,NYB,EUREX,COMEX,CME,ICE,CBOT,SGX,LME,NYMEX,MX,LIFFE,ASX,HKFE",
|
||||||
|
},
|
||||||
|
}
|
1885
src/xtquant/doc/xtdata.md
Normal file
1885
src/xtquant/doc/xtdata.md
Normal file
File diff suppressed because it is too large
Load Diff
1912
src/xtquant/doc/xttrader.md
Normal file
1912
src/xtquant/doc/xttrader.md
Normal file
File diff suppressed because it is too large
Load Diff
BIN
src/xtquant/libeay32.dll
Normal file
BIN
src/xtquant/libeay32.dll
Normal file
Binary file not shown.
BIN
src/xtquant/log4cxx.dll
Normal file
BIN
src/xtquant/log4cxx.dll
Normal file
Binary file not shown.
13
src/xtquant/metatable/__init__.py
Normal file
13
src/xtquant/metatable/__init__.py
Normal file
@ -0,0 +1,13 @@
|
|||||||
|
# coding:utf-8
|
||||||
|
|
||||||
|
from .meta_config import (
|
||||||
|
get_metatable_config,
|
||||||
|
get_metatable_list,
|
||||||
|
get_metatable_info,
|
||||||
|
get_metatable_fields,
|
||||||
|
)
|
||||||
|
|
||||||
|
from . import get_arrow
|
||||||
|
|
||||||
|
get_tabular_data = get_arrow.get_tabular_fe_data
|
||||||
|
get_tabular_bson = get_arrow.get_tabular_fe_bson
|
373
src/xtquant/metatable/get_arrow.py
Normal file
373
src/xtquant/metatable/get_arrow.py
Normal file
@ -0,0 +1,373 @@
|
|||||||
|
from collections import OrderedDict
|
||||||
|
|
||||||
|
from .meta_config import (
|
||||||
|
__TABULAR_PERIODS__,
|
||||||
|
__META_FIELDS__,
|
||||||
|
__META_TABLES__,
|
||||||
|
__META_INFO__,
|
||||||
|
_init_metainfos,
|
||||||
|
)
|
||||||
|
from .get_bson import get_tabular_bson_head
|
||||||
|
|
||||||
|
def _get_tabular_feather_single_ori(
|
||||||
|
codes: list,
|
||||||
|
table: str,
|
||||||
|
int_period: int,
|
||||||
|
start_timetag: int,
|
||||||
|
end_timetag: int,
|
||||||
|
count: int = -1,
|
||||||
|
**kwargs
|
||||||
|
):
|
||||||
|
from .. import xtdata
|
||||||
|
from pyarrow import feather as fe
|
||||||
|
import os
|
||||||
|
|
||||||
|
CONSTFIELD_TIME = '_time'
|
||||||
|
CONSTFIELD_CODE = '_stock'
|
||||||
|
|
||||||
|
file_path = os.path.join(xtdata.get_data_dir(), "EP", f"{table}_Xdat2", "data.fe")
|
||||||
|
if not os.path.exists(file_path):
|
||||||
|
return
|
||||||
|
|
||||||
|
fe_table = fe.read_table(file_path)
|
||||||
|
|
||||||
|
schema = fe_table.schema
|
||||||
|
fe_fields = [f.name for f in schema]
|
||||||
|
def _old_arrow_filter():
|
||||||
|
from pyarrow import dataset as ds
|
||||||
|
nonlocal fe_table, fe_fields
|
||||||
|
|
||||||
|
expressions = []
|
||||||
|
if CONSTFIELD_TIME in fe_fields:
|
||||||
|
if start_timetag > 0:
|
||||||
|
expressions.append(ds.field(CONSTFIELD_TIME) >= start_timetag)
|
||||||
|
|
||||||
|
if end_timetag > 0:
|
||||||
|
expressions.append(ds.field(CONSTFIELD_TIME) <= end_timetag)
|
||||||
|
|
||||||
|
if CONSTFIELD_CODE in fe_fields and len(codes) > 0:
|
||||||
|
expressions.append(ds.field(CONSTFIELD_CODE).isin(codes))
|
||||||
|
|
||||||
|
if len(expressions) > 0:
|
||||||
|
expr = expressions[0]
|
||||||
|
for e in expressions[1:]:
|
||||||
|
expr = expr & e
|
||||||
|
return ds.dataset(fe_table).to_table(filter=expr)
|
||||||
|
else:
|
||||||
|
return fe_table
|
||||||
|
|
||||||
|
|
||||||
|
def _new_arrow_filter():
|
||||||
|
from pyarrow import compute as pc
|
||||||
|
nonlocal fe_table, fe_fields
|
||||||
|
|
||||||
|
expressions = []
|
||||||
|
if CONSTFIELD_TIME in fe_fields:
|
||||||
|
if start_timetag > 0:
|
||||||
|
expressions.append(pc.field(CONSTFIELD_TIME) >= start_timetag)
|
||||||
|
if end_timetag > 0:
|
||||||
|
expressions.append(pc.field(CONSTFIELD_TIME) <= end_timetag)
|
||||||
|
|
||||||
|
if CONSTFIELD_CODE in fe_fields and len(codes) > 0:
|
||||||
|
expressions.append(pc.field(CONSTFIELD_CODE).isin(codes))
|
||||||
|
|
||||||
|
if len(expressions) > 0:
|
||||||
|
expr = expressions[0]
|
||||||
|
for e in expressions[1:]:
|
||||||
|
expr = expr & e
|
||||||
|
return fe_table.filter(expr)
|
||||||
|
else:
|
||||||
|
return fe_table
|
||||||
|
|
||||||
|
def do_filter():
|
||||||
|
import pyarrow as pa
|
||||||
|
from distutils import version
|
||||||
|
nonlocal count
|
||||||
|
# python3.6 pyarrow-6.0.1
|
||||||
|
# python3.7 pyarrow-12.0.1
|
||||||
|
# python3.8~12 pyarrow-17.0.0
|
||||||
|
paver = version.LooseVersion(pa.__version__)
|
||||||
|
if paver <= version.LooseVersion('9.0.0'):
|
||||||
|
_table = _old_arrow_filter()
|
||||||
|
else:
|
||||||
|
_table = _new_arrow_filter()
|
||||||
|
|
||||||
|
if count > 0:
|
||||||
|
start_index = max(0, _table.num_rows - count)
|
||||||
|
_table = _table.slice(start_index, count)
|
||||||
|
|
||||||
|
return _table
|
||||||
|
|
||||||
|
return do_filter(), fe_fields
|
||||||
|
|
||||||
|
|
||||||
|
def _parse_fields(fields):
|
||||||
|
if not __META_FIELDS__:
|
||||||
|
_init_metainfos()
|
||||||
|
|
||||||
|
tmp = OrderedDict() # { table: { show_fields: list(), fe_fields: list() } }
|
||||||
|
for field in fields:
|
||||||
|
if field.find('.') == -1:
|
||||||
|
table = field
|
||||||
|
|
||||||
|
if table not in __META_TABLES__:
|
||||||
|
continue
|
||||||
|
|
||||||
|
if table not in tmp:
|
||||||
|
tmp[table] = {'show': list(), 'fe': list()}
|
||||||
|
|
||||||
|
metaid = __META_TABLES__[table]
|
||||||
|
for key, f in __META_INFO__[metaid]['fields'].items():
|
||||||
|
if 'G' == key:
|
||||||
|
tmp[table]['fe'].append('_time')
|
||||||
|
elif 'S' == key:
|
||||||
|
tmp[table]['fe'].append('_stock')
|
||||||
|
else:
|
||||||
|
tmp[table]['fe'].append(f['modelName'])
|
||||||
|
|
||||||
|
tmp[table]['show'].append(f['modelName'])
|
||||||
|
|
||||||
|
else:
|
||||||
|
table = field.split('.')[0]
|
||||||
|
ifield = field.split('.')[1]
|
||||||
|
|
||||||
|
if field not in __META_FIELDS__:
|
||||||
|
continue
|
||||||
|
|
||||||
|
metaid, key = __META_FIELDS__[field]
|
||||||
|
|
||||||
|
if table not in tmp:
|
||||||
|
tmp[table] = {'show': list(), 'fe': list()}
|
||||||
|
|
||||||
|
if 'G' == key:
|
||||||
|
tmp[table]['fe'].append('_time')
|
||||||
|
elif 'S' == key:
|
||||||
|
tmp[table]['fe'].append('_stock')
|
||||||
|
else:
|
||||||
|
tmp[table]['fe'].append(ifield)
|
||||||
|
|
||||||
|
tmp[table]['show'].append(ifield)
|
||||||
|
|
||||||
|
return [(tb, sd['show'], sd['fe']) for tb, sd in tmp.items()]
|
||||||
|
|
||||||
|
def _parse_keys(fields):
|
||||||
|
if not __META_FIELDS__:
|
||||||
|
_init_metainfos()
|
||||||
|
|
||||||
|
tmp = OrderedDict() # { table: { show_keys: list(), fe_fields: list() } }
|
||||||
|
for field in fields:
|
||||||
|
if field.find('.') == -1:
|
||||||
|
table = field
|
||||||
|
|
||||||
|
if table not in __META_TABLES__:
|
||||||
|
continue
|
||||||
|
|
||||||
|
if table not in tmp:
|
||||||
|
tmp[table] = {'show': list(), 'fe': list()}
|
||||||
|
|
||||||
|
metaid = __META_TABLES__[table]
|
||||||
|
for key, f in __META_INFO__[metaid]['fields'].items():
|
||||||
|
if 'G' == key:
|
||||||
|
tmp[table]['fe'].append('_time')
|
||||||
|
elif 'S' == key:
|
||||||
|
tmp[table]['fe'].append('_stock')
|
||||||
|
else:
|
||||||
|
tmp[table]['fe'].append(f['modelName'])
|
||||||
|
|
||||||
|
tmp[table]['show'].append(key)
|
||||||
|
|
||||||
|
else:
|
||||||
|
table = field.split('.')[0]
|
||||||
|
ifield = field.split('.')[1]
|
||||||
|
|
||||||
|
if field not in __META_FIELDS__:
|
||||||
|
continue
|
||||||
|
|
||||||
|
metaid, key = __META_FIELDS__[field]
|
||||||
|
|
||||||
|
if table not in tmp:
|
||||||
|
tmp[table] = {'show': list(), 'fe': list()}
|
||||||
|
|
||||||
|
if 'G' == key:
|
||||||
|
tmp[table]['fe'].append('_time')
|
||||||
|
elif 'S' == key:
|
||||||
|
tmp[table]['fe'].append('_stock')
|
||||||
|
else:
|
||||||
|
tmp[table]['fe'].append(ifield)
|
||||||
|
|
||||||
|
tmp[table]['show'].append(key)
|
||||||
|
|
||||||
|
return [(tb, sd['show'], sd['fe']) for tb, sd in tmp.items()]
|
||||||
|
|
||||||
|
|
||||||
|
def get_tabular_fe_data(
|
||||||
|
codes: list,
|
||||||
|
fields: list,
|
||||||
|
period: str,
|
||||||
|
start_time: str,
|
||||||
|
end_time: str,
|
||||||
|
count: int = -1,
|
||||||
|
**kwargs
|
||||||
|
):
|
||||||
|
import pandas as pd
|
||||||
|
|
||||||
|
time_format = None
|
||||||
|
if period in ('1m', '5m', '15m', '30m', '60m', '1h'):
|
||||||
|
time_format = '%Y-%m-%d %H:%M:%S'
|
||||||
|
elif period in ('1d', '1w', '1mon', '1q', '1hy', '1y'):
|
||||||
|
time_format = '%Y-%m-%d'
|
||||||
|
elif period == '':
|
||||||
|
time_format = '%Y-%m-%d %H:%M:%S.%f'
|
||||||
|
|
||||||
|
if not time_format:
|
||||||
|
raise Exception('Unsupported period')
|
||||||
|
|
||||||
|
int_period = __TABULAR_PERIODS__[period]
|
||||||
|
|
||||||
|
if not isinstance(count, int) or count == 0:
|
||||||
|
count = -1
|
||||||
|
|
||||||
|
table_fields = _parse_fields(fields)
|
||||||
|
|
||||||
|
def datetime_to_timetag(timelabel, format=''):
|
||||||
|
'''
|
||||||
|
timelabel: str '20221231' '20221231235959'
|
||||||
|
format: str '%Y%m%d' '%Y%m%d%H%M%S'
|
||||||
|
return: int 1672502399000
|
||||||
|
'''
|
||||||
|
import datetime as dt
|
||||||
|
if not format:
|
||||||
|
format = '%Y%m%d' if len(timelabel) == 8 else '%Y%m%d%H%M%S'
|
||||||
|
try:
|
||||||
|
return dt.datetime.strptime(timelabel, format).timestamp() * 1000
|
||||||
|
except:
|
||||||
|
return 0
|
||||||
|
|
||||||
|
start_timetag = datetime_to_timetag(start_time)
|
||||||
|
end_timetag = datetime_to_timetag(end_time)
|
||||||
|
|
||||||
|
dfs = []
|
||||||
|
ordered_fields = []
|
||||||
|
for table, show_fields, fe_fields in table_fields:
|
||||||
|
fe_table, fe_table_fields = _get_tabular_feather_single_ori(codes, table, int_period, start_timetag, end_timetag, count)
|
||||||
|
if not fe_table:
|
||||||
|
continue
|
||||||
|
|
||||||
|
ifields = list(set(fe_table_fields) & set(fe_fields))
|
||||||
|
if not ifields:
|
||||||
|
continue
|
||||||
|
|
||||||
|
fe_table = fe_table.select(ifields)
|
||||||
|
fe_df = fe_table.to_pandas()
|
||||||
|
# 补充请求的字段
|
||||||
|
default_null_columns = [f for f in fe_fields if f not in fe_table_fields]
|
||||||
|
for c in default_null_columns:
|
||||||
|
fe_df.loc[:, c] = pd.NA
|
||||||
|
|
||||||
|
rename_fields = {}
|
||||||
|
|
||||||
|
for i in range(min(len(show_fields), len(fe_fields))):
|
||||||
|
show_field = f'{table}.{show_fields[i]}'
|
||||||
|
rename_fields[fe_fields[i]] = show_field
|
||||||
|
ordered_fields.append(show_field)
|
||||||
|
|
||||||
|
fe_df.rename(columns=rename_fields, inplace=True)
|
||||||
|
dfs.append(fe_df)
|
||||||
|
|
||||||
|
if not dfs:
|
||||||
|
return pd.DataFrame()
|
||||||
|
|
||||||
|
result = pd.concat(dfs, ignore_index=True)
|
||||||
|
return result[ordered_fields]
|
||||||
|
|
||||||
|
|
||||||
|
def get_tabular_fe_bson(
|
||||||
|
codes: list,
|
||||||
|
fields: list,
|
||||||
|
period: str,
|
||||||
|
start_time: str,
|
||||||
|
end_time: str,
|
||||||
|
count: int = -1,
|
||||||
|
**kwargs
|
||||||
|
):
|
||||||
|
from .. import xtbson
|
||||||
|
time_format = None
|
||||||
|
if period in ('1m', '5m', '15m', '30m', '60m', '1h'):
|
||||||
|
time_format = '%Y-%m-%d %H:%M:%S'
|
||||||
|
elif period in ('1d', '1w', '1mon', '1q', '1hy', '1y'):
|
||||||
|
time_format = '%Y-%m-%d'
|
||||||
|
elif period == '':
|
||||||
|
time_format = '%Y-%m-%d %H:%M:%S.%f'
|
||||||
|
|
||||||
|
if not time_format:
|
||||||
|
raise Exception('Unsupported period')
|
||||||
|
|
||||||
|
int_period = __TABULAR_PERIODS__[period]
|
||||||
|
|
||||||
|
if not isinstance(count, int) or count == 0:
|
||||||
|
count = -1
|
||||||
|
|
||||||
|
table_fields = _parse_keys(fields)
|
||||||
|
|
||||||
|
def datetime_to_timetag(timelabel, format=''):
|
||||||
|
'''
|
||||||
|
timelabel: str '20221231' '20221231235959'
|
||||||
|
format: str '%Y%m%d' '%Y%m%d%H%M%S'
|
||||||
|
return: int 1672502399000
|
||||||
|
'''
|
||||||
|
import datetime as dt
|
||||||
|
if not format:
|
||||||
|
format = '%Y%m%d' if len(timelabel) == 8 else '%Y%m%d%H%M%S'
|
||||||
|
try:
|
||||||
|
return dt.datetime.strptime(timelabel, format).timestamp() * 1000
|
||||||
|
except:
|
||||||
|
return 0
|
||||||
|
|
||||||
|
start_timetag = datetime_to_timetag(start_time)
|
||||||
|
end_timetag = datetime_to_timetag(end_time)
|
||||||
|
|
||||||
|
def _get_convert():
|
||||||
|
import pyarrow as pa
|
||||||
|
from distutils import version
|
||||||
|
# python3.6 pyarrow-6.0.1
|
||||||
|
# python3.7 pyarrow-12.0.1
|
||||||
|
# python3.8~12 pyarrow-17.0.0
|
||||||
|
def _old_arrow_convert(table):
|
||||||
|
return table.to_pandas().to_dict(orient='records')
|
||||||
|
|
||||||
|
def _new_arrow_convert(table):
|
||||||
|
return table.to_pylist()
|
||||||
|
|
||||||
|
paver = version.LooseVersion(pa.__version__)
|
||||||
|
if paver < version.LooseVersion('7.0.0'):
|
||||||
|
return _old_arrow_convert
|
||||||
|
else:
|
||||||
|
return _new_arrow_convert
|
||||||
|
|
||||||
|
convert = _get_convert()
|
||||||
|
ret_bsons = []
|
||||||
|
for table, show_fields, fe_fields in table_fields:
|
||||||
|
table_head = get_tabular_bson_head(fields)
|
||||||
|
ret_bsons.append(xtbson.encode(table_head))
|
||||||
|
|
||||||
|
fe_table, fe_table_fields = _get_tabular_feather_single_ori(codes, table, int_period, start_timetag, end_timetag, count)
|
||||||
|
|
||||||
|
ifields = list()
|
||||||
|
new_columns = list()
|
||||||
|
for i in range(len(fe_fields)):
|
||||||
|
if fe_fields[i] in fe_table_fields:
|
||||||
|
ifields.append(fe_fields[i])
|
||||||
|
new_columns.append(show_fields[i])
|
||||||
|
|
||||||
|
if not ifields:
|
||||||
|
continue
|
||||||
|
|
||||||
|
fe_table = fe_table.select(ifields)
|
||||||
|
fe_table = fe_table.rename_columns(new_columns) # key_column
|
||||||
|
|
||||||
|
fe_datas = convert(fe_table)
|
||||||
|
for data in fe_datas:
|
||||||
|
ret_bsons.append(xtbson.encode(data))
|
||||||
|
|
||||||
|
return ret_bsons
|
||||||
|
|
295
src/xtquant/metatable/get_bson.py
Normal file
295
src/xtquant/metatable/get_bson.py
Normal file
@ -0,0 +1,295 @@
|
|||||||
|
# coding:utf-8
|
||||||
|
from collections import OrderedDict
|
||||||
|
|
||||||
|
from .meta_config import (
|
||||||
|
__TABULAR_PERIODS__,
|
||||||
|
__META_FIELDS__,
|
||||||
|
__META_TABLES__,
|
||||||
|
__META_INFO__,
|
||||||
|
_init_metainfos,
|
||||||
|
_meta_type,
|
||||||
|
_check_metatable_key,
|
||||||
|
)
|
||||||
|
|
||||||
|
|
||||||
|
def parse_request_from_fields(fields):
|
||||||
|
'''
|
||||||
|
根据字段解析metaid和field
|
||||||
|
'''
|
||||||
|
table_field = OrderedDict() # {metaid: {key}}
|
||||||
|
key2field = OrderedDict() # {metaid: {key: field}}
|
||||||
|
columns = [] # table.field
|
||||||
|
if not __META_FIELDS__:
|
||||||
|
_init_metainfos()
|
||||||
|
|
||||||
|
for field in fields:
|
||||||
|
if field.find('.') == -1: # 获取整个table的数据
|
||||||
|
metaid = __META_TABLES__[field]
|
||||||
|
if metaid in __META_INFO__:
|
||||||
|
metainfo = __META_INFO__[metaid]
|
||||||
|
table = metainfo['modelName']
|
||||||
|
meta_table_fields = metainfo.get('fields', {})
|
||||||
|
if not meta_table_fields:
|
||||||
|
continue
|
||||||
|
|
||||||
|
table_field[metaid] = {k: _meta_type(v['type']) for k, v in meta_table_fields.items()}
|
||||||
|
key2field[metaid] = {
|
||||||
|
key: f'{table}.{field_info["modelName"]}' for key, field_info in meta_table_fields.items()
|
||||||
|
}
|
||||||
|
columns.extend(key2field[metaid].values())
|
||||||
|
|
||||||
|
elif field in __META_FIELDS__:
|
||||||
|
metaid, key = __META_FIELDS__[field]
|
||||||
|
metainfo = __META_INFO__[metaid]
|
||||||
|
|
||||||
|
if metaid not in table_field:
|
||||||
|
table_field[metaid] = {}
|
||||||
|
table_field[metaid][key] = _meta_type(metainfo['fields'][key]['type'])
|
||||||
|
|
||||||
|
if metaid not in key2field:
|
||||||
|
key2field[metaid] = {}
|
||||||
|
key2field[metaid][key] = field
|
||||||
|
|
||||||
|
columns.append(field)
|
||||||
|
|
||||||
|
return table_field, key2field, columns
|
||||||
|
|
||||||
|
|
||||||
|
|
||||||
|
|
||||||
|
def _get_tabular_data_single_ori(
|
||||||
|
codes: list,
|
||||||
|
metaid: int,
|
||||||
|
keys: list,
|
||||||
|
int_period: int,
|
||||||
|
start_time: str,
|
||||||
|
end_time: str,
|
||||||
|
count: int = -1,
|
||||||
|
**kwargs
|
||||||
|
):
|
||||||
|
from .. import xtbson, xtdata
|
||||||
|
import os
|
||||||
|
CONSTKEY_CODE = 'S'
|
||||||
|
|
||||||
|
ret_datas = []
|
||||||
|
|
||||||
|
scan_whole = False
|
||||||
|
scan_whole_filters = dict() # 额外对全市场数据的查询 { field : [codes] }
|
||||||
|
client = xtdata.get_client()
|
||||||
|
def read_single():
|
||||||
|
nonlocal codes, metaid, int_period, scan_whole, scan_whole_filters, client, keys, ret_datas
|
||||||
|
if not codes:
|
||||||
|
scan_whole = True
|
||||||
|
return
|
||||||
|
|
||||||
|
data_path_dict = xtdata._get_data_file_path(codes, (metaid, int_period))
|
||||||
|
print(data_path_dict)
|
||||||
|
for code, file_path in data_path_dict.items():
|
||||||
|
if not file_path:
|
||||||
|
continue
|
||||||
|
|
||||||
|
if not os.path.exists(file_path): # 如果file_path不存在
|
||||||
|
if code == 'XXXXXX.XX': # 不处理代码为XXXXXX.XX的情况
|
||||||
|
continue
|
||||||
|
|
||||||
|
if not _check_metatable_key(metaid, CONSTKEY_CODE): # 不处理不含S字段的表
|
||||||
|
continue
|
||||||
|
|
||||||
|
if CONSTKEY_CODE not in scan_whole_filters:
|
||||||
|
scan_whole_filters[CONSTKEY_CODE] = []
|
||||||
|
scan_whole = True
|
||||||
|
scan_whole_filters[CONSTKEY_CODE].append(code)
|
||||||
|
continue
|
||||||
|
|
||||||
|
bson_datas = client.read_local_data(file_path, start_time, end_time, count)
|
||||||
|
|
||||||
|
for data in bson_datas:
|
||||||
|
idata = xtbson.decode(data)
|
||||||
|
ndata = {k: idata[k] for k in keys if k in idata}
|
||||||
|
ret_datas.append(ndata)
|
||||||
|
|
||||||
|
def read_whole():
|
||||||
|
nonlocal scan_whole, scan_whole_filters, metaid, int_period, client, keys, ret_datas
|
||||||
|
if not scan_whole:
|
||||||
|
return
|
||||||
|
|
||||||
|
data_path_dict = xtdata._get_data_file_path(['XXXXXX.XX'], (metaid, int_period))
|
||||||
|
if 'XXXXXX.XX' not in data_path_dict:
|
||||||
|
return
|
||||||
|
file_path = data_path_dict['XXXXXX.XX']
|
||||||
|
if not os.path.exists(file_path):
|
||||||
|
return
|
||||||
|
|
||||||
|
bson_datas = client.read_local_data(file_path, start_time, end_time, -1)
|
||||||
|
data_c = count
|
||||||
|
for data in bson_datas:
|
||||||
|
idata = xtbson.decode(data)
|
||||||
|
|
||||||
|
valid = True
|
||||||
|
for k, v in scan_whole_filters.items():
|
||||||
|
if idata.get(k, None) not in v:
|
||||||
|
valid = False
|
||||||
|
break
|
||||||
|
|
||||||
|
if not valid:
|
||||||
|
continue
|
||||||
|
|
||||||
|
ndata = {k: idata[k] for k in keys if k in idata}
|
||||||
|
ret_datas.append(ndata)
|
||||||
|
|
||||||
|
data_c -= 1
|
||||||
|
if data_c == 0:
|
||||||
|
break
|
||||||
|
|
||||||
|
read_single()
|
||||||
|
read_whole()
|
||||||
|
|
||||||
|
return ret_datas
|
||||||
|
|
||||||
|
|
||||||
|
def get_tabular_data(
|
||||||
|
codes: list,
|
||||||
|
fields: list,
|
||||||
|
period: str,
|
||||||
|
start_time: str,
|
||||||
|
end_time: str,
|
||||||
|
count: int = -1,
|
||||||
|
**kwargs
|
||||||
|
):
|
||||||
|
import pandas as pd
|
||||||
|
|
||||||
|
time_format = None
|
||||||
|
if period in ('1m', '5m', '15m', '30m', '60m', '1h'):
|
||||||
|
time_format = '%Y-%m-%d %H:%M:%S'
|
||||||
|
elif period in ('1d', '1w', '1mon', '1q', '1hy', '1y'):
|
||||||
|
time_format = '%Y-%m-%d'
|
||||||
|
elif period == '':
|
||||||
|
time_format = '%Y-%m-%d %H:%M:%S.%f'
|
||||||
|
|
||||||
|
if not time_format:
|
||||||
|
raise Exception('Unsupported period')
|
||||||
|
|
||||||
|
int_period = __TABULAR_PERIODS__[period]
|
||||||
|
|
||||||
|
if not isinstance(count, int) or count == 0:
|
||||||
|
count = -1
|
||||||
|
|
||||||
|
table_field, key2field, ori_columns = parse_request_from_fields(fields)
|
||||||
|
|
||||||
|
dfs = []
|
||||||
|
|
||||||
|
# 额外查询 { metaid : [codes] }
|
||||||
|
for metaid, keys in table_field.items():
|
||||||
|
datas = _get_tabular_data_single_ori(codes, metaid, list(keys.keys()), int_period, start_time, end_time, count)
|
||||||
|
df = pd.DataFrame(datas)
|
||||||
|
if df.empty:
|
||||||
|
continue
|
||||||
|
|
||||||
|
# 补充请求的字段
|
||||||
|
default_null_columns = [c for c in keys if c not in df.columns]
|
||||||
|
for c in default_null_columns:
|
||||||
|
df.loc[:, c] = keys[c]
|
||||||
|
|
||||||
|
df.rename(columns=key2field[metaid], inplace=True)
|
||||||
|
dfs.append(df)
|
||||||
|
|
||||||
|
if not dfs:
|
||||||
|
return pd.DataFrame()
|
||||||
|
|
||||||
|
result = pd.concat(dfs, ignore_index=True)
|
||||||
|
result = result[ori_columns]
|
||||||
|
|
||||||
|
return result
|
||||||
|
|
||||||
|
|
||||||
|
def get_tabular_bson_head(
|
||||||
|
fields: list
|
||||||
|
):
|
||||||
|
'''
|
||||||
|
根据字段解析表头
|
||||||
|
'''
|
||||||
|
ret = {'modelName': '', 'tableNameCn': '', 'fields': []}
|
||||||
|
|
||||||
|
if not __META_FIELDS__:
|
||||||
|
_init_metainfos()
|
||||||
|
|
||||||
|
for field in fields:
|
||||||
|
if field.find('.') == -1: # 获取整个table的数据
|
||||||
|
metaid = __META_TABLES__[field]
|
||||||
|
if metaid not in __META_INFO__:
|
||||||
|
continue
|
||||||
|
|
||||||
|
metainfo = __META_INFO__[metaid]
|
||||||
|
meta_table_fields = metainfo.get('fields', {})
|
||||||
|
ret['metaId'] = metaid
|
||||||
|
ret['modelName'] = metainfo['modelName']
|
||||||
|
ret['tableNameCn'] = metainfo['tableNameCn']
|
||||||
|
if not meta_table_fields:
|
||||||
|
continue
|
||||||
|
|
||||||
|
for k, v in meta_table_fields.items():
|
||||||
|
ret['fields'].append({
|
||||||
|
'key': k,
|
||||||
|
'fieldNameCn': v['fieldNameCn'],
|
||||||
|
'modelName': v['modelName'],
|
||||||
|
'type': v['type'],
|
||||||
|
'unit': v['unit'],
|
||||||
|
})
|
||||||
|
|
||||||
|
|
||||||
|
elif field in __META_FIELDS__:
|
||||||
|
metaid, key = __META_FIELDS__[field]
|
||||||
|
metainfo = __META_INFO__[metaid]
|
||||||
|
ret['metaId'] = metaid
|
||||||
|
ret['modelName'] = metainfo['modelName']
|
||||||
|
ret['tableNameCn'] = metainfo['tableNameCn']
|
||||||
|
field_metainfo = metainfo['fields'][key]
|
||||||
|
ret['fields'].append({
|
||||||
|
'key': key,
|
||||||
|
'fieldNameCn': field_metainfo['fieldNameCn'],
|
||||||
|
'modelName': field_metainfo['modelName'],
|
||||||
|
'type': field_metainfo['type'],
|
||||||
|
'unit': field_metainfo['unit'],
|
||||||
|
})
|
||||||
|
|
||||||
|
return ret
|
||||||
|
|
||||||
|
|
||||||
|
def get_tabular_bson(
|
||||||
|
codes: list,
|
||||||
|
fields: list,
|
||||||
|
period: str,
|
||||||
|
start_time: str,
|
||||||
|
end_time: str,
|
||||||
|
count: int = -1,
|
||||||
|
**kwargs
|
||||||
|
):
|
||||||
|
from .. import xtbson
|
||||||
|
time_format = None
|
||||||
|
if period in ('1m', '5m', '15m', '30m', '60m', '1h'):
|
||||||
|
time_format = '%Y-%m-%d %H:%M:%S'
|
||||||
|
elif period in ('1d', '1w', '1mon', '1q', '1hy', '1y'):
|
||||||
|
time_format = '%Y-%m-%d'
|
||||||
|
elif period == '':
|
||||||
|
time_format = '%Y-%m-%d %H:%M:%S.%f'
|
||||||
|
|
||||||
|
if not time_format:
|
||||||
|
raise Exception('Unsupported period')
|
||||||
|
|
||||||
|
int_period = __TABULAR_PERIODS__[period]
|
||||||
|
|
||||||
|
if not isinstance(count, int) or count == 0:
|
||||||
|
count = -1
|
||||||
|
|
||||||
|
table_field, key2field, ori_columns = parse_request_from_fields(fields)
|
||||||
|
|
||||||
|
ret_bsons = []
|
||||||
|
for metaid, keysinfo in table_field.items():
|
||||||
|
table_head = get_tabular_bson_head(fields)
|
||||||
|
ret_bsons.append(xtbson.encode(table_head))
|
||||||
|
datas = _get_tabular_data_single_ori(codes, metaid, list(keysinfo.keys()), int_period, start_time, end_time, count)
|
||||||
|
for d in datas:
|
||||||
|
ret_bsons.append(xtbson.encode(d))
|
||||||
|
|
||||||
|
return ret_bsons
|
||||||
|
|
198
src/xtquant/metatable/meta_config.py
Normal file
198
src/xtquant/metatable/meta_config.py
Normal file
@ -0,0 +1,198 @@
|
|||||||
|
#coding:utf8
|
||||||
|
|
||||||
|
__TABULAR_PERIODS__ = {
|
||||||
|
'': 0,
|
||||||
|
'1m': 60000,
|
||||||
|
'5m': 300000,
|
||||||
|
'15m': 900000,
|
||||||
|
'30m': 1800000,
|
||||||
|
'60m': 3600000,
|
||||||
|
'1h': 3600000,
|
||||||
|
'1d': 86400000,
|
||||||
|
'1w': 604800000,
|
||||||
|
'1mon': 2592000000,
|
||||||
|
'1q': 7776000000,
|
||||||
|
'1hy': 15552000000,
|
||||||
|
'1y': 31536000000,
|
||||||
|
}
|
||||||
|
|
||||||
|
__META_INFO__ = {}
|
||||||
|
__META_FIELDS__ = {}
|
||||||
|
__META_TABLES__ = {}
|
||||||
|
|
||||||
|
def download_metatable_data():
|
||||||
|
'''
|
||||||
|
下载metatable信息
|
||||||
|
通常在客户端启动时自动获取,不需要手工调用
|
||||||
|
'''
|
||||||
|
from .. import xtdata
|
||||||
|
cl = xtdata.get_client()
|
||||||
|
|
||||||
|
ret = xtdata._BSON_call_common(
|
||||||
|
cl.commonControl, 'downloadmetatabledata', {}
|
||||||
|
)
|
||||||
|
return ret
|
||||||
|
|
||||||
|
def _init_metainfos():
|
||||||
|
'''
|
||||||
|
初始化metatable
|
||||||
|
'''
|
||||||
|
import traceback
|
||||||
|
from .. import xtdata, xtbson
|
||||||
|
|
||||||
|
global __META_INFO__
|
||||||
|
global __META_FIELDS__
|
||||||
|
global __META_TABLES__
|
||||||
|
|
||||||
|
cl = xtdata.get_client()
|
||||||
|
result = xtbson.BSON.decode(cl.commonControl('getmetatabledatas', xtbson.BSON.encode({})))
|
||||||
|
all_metainfos = result['result']
|
||||||
|
|
||||||
|
for metainfo in all_metainfos:
|
||||||
|
if not isinstance(metainfo, dict):
|
||||||
|
continue
|
||||||
|
|
||||||
|
try:
|
||||||
|
metaid = metainfo['I']
|
||||||
|
__META_INFO__[metaid] = metainfo
|
||||||
|
|
||||||
|
table_name = metainfo.get('modelName', metaid)
|
||||||
|
table_name_cn = metainfo.get('tableNameCn', '')
|
||||||
|
|
||||||
|
__META_TABLES__[table_name] = metaid
|
||||||
|
__META_TABLES__[table_name_cn] = metaid
|
||||||
|
|
||||||
|
metainfo_fields = metainfo.get('fields', {})
|
||||||
|
# metainfo_fields.pop('G', None) # G公共时间字段特殊处理,跳过
|
||||||
|
for key, info in metainfo_fields.items():
|
||||||
|
field_name = info['modelName']
|
||||||
|
__META_FIELDS__[f'{table_name}.{field_name}'] = (metaid, key)
|
||||||
|
except:
|
||||||
|
traceback.print_exc()
|
||||||
|
continue
|
||||||
|
return
|
||||||
|
|
||||||
|
def _check_metatable_key(metaid, key):
|
||||||
|
metainfo = __META_INFO__.get(metaid, None)
|
||||||
|
if not metainfo:
|
||||||
|
return False
|
||||||
|
|
||||||
|
fields = metainfo.get('fields', {})
|
||||||
|
return key in fields
|
||||||
|
|
||||||
|
|
||||||
|
def get_metatable_list():
|
||||||
|
'''
|
||||||
|
获取metatable列表
|
||||||
|
|
||||||
|
return:
|
||||||
|
{ table_code1: table_name1, table_code2: table_name2, ... }
|
||||||
|
|
||||||
|
table_code: str
|
||||||
|
数据表代码
|
||||||
|
table_name: str
|
||||||
|
数据表名称
|
||||||
|
'''
|
||||||
|
if not __META_INFO__:
|
||||||
|
_init_metainfos()
|
||||||
|
|
||||||
|
ret = {}
|
||||||
|
for metaid, metainfo in __META_INFO__.items():
|
||||||
|
model_name = metainfo.get('modelName', f'{metaid}')
|
||||||
|
table_name_desc = metainfo.get('tableNameCn', '')
|
||||||
|
ret[model_name] = table_name_desc
|
||||||
|
|
||||||
|
return ret
|
||||||
|
|
||||||
|
|
||||||
|
def get_metatable_config(table):
|
||||||
|
'''
|
||||||
|
获取metatable列表原始配置信息
|
||||||
|
'''
|
||||||
|
if not __META_INFO__:
|
||||||
|
_init_metainfos()
|
||||||
|
|
||||||
|
if table not in __META_TABLES__:
|
||||||
|
print(f'[ERROR] Unknown table {table}')
|
||||||
|
|
||||||
|
metaid = __META_TABLES__[table]
|
||||||
|
return __META_INFO__[metaid]
|
||||||
|
|
||||||
|
|
||||||
|
__META_TYPECONV__ = {
|
||||||
|
'int': int(),
|
||||||
|
'long': int(),
|
||||||
|
'double': float(),
|
||||||
|
'string': str(),
|
||||||
|
'binary': bytes(),
|
||||||
|
}
|
||||||
|
|
||||||
|
|
||||||
|
def _meta_type(t):
|
||||||
|
try:
|
||||||
|
return __META_TYPECONV__[t]
|
||||||
|
except:
|
||||||
|
raise Exception(f'Unsupported type:{t}')
|
||||||
|
|
||||||
|
|
||||||
|
def get_metatable_info(table):
|
||||||
|
'''
|
||||||
|
获取metatable数据表信息
|
||||||
|
|
||||||
|
table: str
|
||||||
|
数据表代码 table_code 或 数据表名称 table_name
|
||||||
|
return: dict
|
||||||
|
{
|
||||||
|
'code': table_code
|
||||||
|
, 'name': table_name
|
||||||
|
, 'desc': desc
|
||||||
|
, 'fields': fields
|
||||||
|
}
|
||||||
|
|
||||||
|
table_code: str
|
||||||
|
数据表代码
|
||||||
|
table_name: str
|
||||||
|
数据表名称
|
||||||
|
desc: str
|
||||||
|
描述
|
||||||
|
fields: dict
|
||||||
|
{ 'code': field_code, 'name': field_name, 'type': field_type }
|
||||||
|
'''
|
||||||
|
info = get_metatable_config(table)
|
||||||
|
|
||||||
|
fields = info.get('fields', {})
|
||||||
|
ret = {
|
||||||
|
'code': info.get('modelName', ''),
|
||||||
|
'name': info.get('tableNameCn', ''),
|
||||||
|
'desc': info.get('desc', ''),
|
||||||
|
'fields': [
|
||||||
|
{
|
||||||
|
'code': field_info.get('modelName', ''),
|
||||||
|
'name': field_info.get('fieldNameCn', ''),
|
||||||
|
'type': type(_meta_type(field_info.get('type', ''))),
|
||||||
|
} for key, field_info in fields.items()
|
||||||
|
],
|
||||||
|
}
|
||||||
|
return ret
|
||||||
|
|
||||||
|
|
||||||
|
def get_metatable_fields(table):
|
||||||
|
'''
|
||||||
|
获取metatable数据表字段信息
|
||||||
|
|
||||||
|
table: str
|
||||||
|
数据表代码 table_code 或 数据表名称 table_name
|
||||||
|
return: pd.DataFrame
|
||||||
|
columns = ['code', 'name', 'type']
|
||||||
|
'''
|
||||||
|
import pandas as pd
|
||||||
|
info = get_metatable_config(table)
|
||||||
|
|
||||||
|
fields = info.get('fields', {})
|
||||||
|
ret = pd.DataFrame([{
|
||||||
|
'code': field_info.get('modelName', ''),
|
||||||
|
'name': field_info.get('fieldNameCn', ''),
|
||||||
|
'type': type(_meta_type(field_info.get('type', ''))),
|
||||||
|
} for key, field_info in fields.items()])
|
||||||
|
return ret
|
||||||
|
|
BIN
src/xtquant/msvcp140.dll
Normal file
BIN
src/xtquant/msvcp140.dll
Normal file
Binary file not shown.
6
src/xtquant/qmttools/__init__.py
Normal file
6
src/xtquant/qmttools/__init__.py
Normal file
@ -0,0 +1,6 @@
|
|||||||
|
|
||||||
|
from . import functions
|
||||||
|
from . import contextinfo
|
||||||
|
from . import stgframe
|
||||||
|
|
||||||
|
from .stgentry import run_file as run_strategy_file
|
353
src/xtquant/qmttools/contextinfo.py
Normal file
353
src/xtquant/qmttools/contextinfo.py
Normal file
@ -0,0 +1,353 @@
|
|||||||
|
#coding:utf-8
|
||||||
|
|
||||||
|
from . import functions as _FUNCS_
|
||||||
|
|
||||||
|
class ContextInfo:
|
||||||
|
def __init__(this):
|
||||||
|
#base
|
||||||
|
this.request_id = ''
|
||||||
|
this.quote_mode = '' #'realtime' 'history' 'all'
|
||||||
|
this.trade_mode = '' #'simulation' 'trading' 'backtest'
|
||||||
|
this.title = ''
|
||||||
|
this.user_script = ''
|
||||||
|
|
||||||
|
#quote
|
||||||
|
this.stock_code = ''
|
||||||
|
this.stockcode = ''
|
||||||
|
this.market = ''
|
||||||
|
this.period = ''
|
||||||
|
this.start_time = ''
|
||||||
|
this.end_time = ''
|
||||||
|
this.dividend_type = ''
|
||||||
|
this.start_time_num = None
|
||||||
|
this.end_time_num = None
|
||||||
|
|
||||||
|
#bar frame
|
||||||
|
this.timelist = []
|
||||||
|
this.barpos = -1
|
||||||
|
this.lastrunbarpos = -1
|
||||||
|
this.result = {}
|
||||||
|
this.push_result = {}
|
||||||
|
|
||||||
|
#backtest
|
||||||
|
this.asset = 1000000.0 # 初始资金
|
||||||
|
this.margin_ratio = 0.05 # 保证金比例
|
||||||
|
this.slippage_type = 2 # 滑点类型
|
||||||
|
this.slippage = 0.0 # 滑点值
|
||||||
|
this.max_vol_rate = 0.0 # 最大成交比例
|
||||||
|
this.comsisson_type = 0 # 手续费类型
|
||||||
|
this.open_tax = 0.0 # 买入印花税
|
||||||
|
this.close_tax = 0.0 # 卖出印花税
|
||||||
|
this.min_commission = 0.0 # 最低佣金
|
||||||
|
this.open_commission = 0.0 # 买入佣金
|
||||||
|
this.close_commission = 0.0 # 平昨佣金
|
||||||
|
this.close_today_commission = 0.0 # 平今佣金
|
||||||
|
this.benchmark = '000300.SH' # 业绩基准
|
||||||
|
|
||||||
|
this.do_back_test = None
|
||||||
|
|
||||||
|
#reserved
|
||||||
|
this.refresh_rate = None
|
||||||
|
this.fund_name = None
|
||||||
|
this.link_fund_name = None
|
||||||
|
this.data_info_level = None
|
||||||
|
this.time_tick_size = None
|
||||||
|
this.subscribe_once = False
|
||||||
|
return
|
||||||
|
|
||||||
|
@property
|
||||||
|
def start(this):
|
||||||
|
return this.start_time
|
||||||
|
|
||||||
|
@start.setter
|
||||||
|
def start(this, value):
|
||||||
|
this.start_time = value
|
||||||
|
|
||||||
|
@property
|
||||||
|
def end(this):
|
||||||
|
return this.end_time
|
||||||
|
|
||||||
|
@end.setter
|
||||||
|
def end(this, value):
|
||||||
|
this.end_time = value
|
||||||
|
|
||||||
|
@property
|
||||||
|
def capital(this):
|
||||||
|
return this.asset
|
||||||
|
|
||||||
|
@capital.setter
|
||||||
|
def capital(this, value):
|
||||||
|
this.asset = value
|
||||||
|
|
||||||
|
### qmt strategy frame ###
|
||||||
|
|
||||||
|
def init(this):
|
||||||
|
return
|
||||||
|
|
||||||
|
def after_init(this):
|
||||||
|
return
|
||||||
|
|
||||||
|
def handlebar(this):
|
||||||
|
return
|
||||||
|
|
||||||
|
def on_backtest_finished(this):
|
||||||
|
return
|
||||||
|
|
||||||
|
def stop(this):
|
||||||
|
return
|
||||||
|
|
||||||
|
def account_callback(this, account_info):
|
||||||
|
return
|
||||||
|
|
||||||
|
def order_callback(this, order_info):
|
||||||
|
return
|
||||||
|
|
||||||
|
def deal_callback(this, deal_info):
|
||||||
|
return
|
||||||
|
|
||||||
|
def position_callback(this, position_info):
|
||||||
|
return
|
||||||
|
|
||||||
|
def orderError_callback(this, passorder_info, msg):
|
||||||
|
return
|
||||||
|
|
||||||
|
### qmt functions - bar ###
|
||||||
|
|
||||||
|
def is_last_bar(this):
|
||||||
|
return this.barpos >= len(this.timelist) - 1
|
||||||
|
|
||||||
|
def is_new_bar(this):
|
||||||
|
return this.barpos > this.lastbarpos
|
||||||
|
|
||||||
|
def get_bar_timetag(this, barpos = None):
|
||||||
|
try:
|
||||||
|
return this.timelist[barpos] if barpos is not None else this.timelist[this.barpos]
|
||||||
|
except Exception as e:
|
||||||
|
return None
|
||||||
|
|
||||||
|
### qmt functions - graph ###
|
||||||
|
|
||||||
|
def paint(this, name, value, index = -1, drawstyle = 0, color = '', limit = ''):
|
||||||
|
vp = {str(this.get_bar_timetag()): value}
|
||||||
|
|
||||||
|
if name not in this.result:
|
||||||
|
this.result[name] = {}
|
||||||
|
this.result[name].update(vp)
|
||||||
|
|
||||||
|
if name not in this.push_result:
|
||||||
|
this.push_result[name] = {}
|
||||||
|
this.push_result[name].update(vp)
|
||||||
|
return
|
||||||
|
|
||||||
|
### qmt functions - quote ###
|
||||||
|
|
||||||
|
def subscribe_quote(this, stock_code = '', period = '', dividend_type = '', result_type = '', callback = None):
|
||||||
|
if not stock_code:
|
||||||
|
stock_code = this.stock_code
|
||||||
|
if not period or period == 'follow':
|
||||||
|
period = this.period
|
||||||
|
if not dividend_type or dividend_type == 'follow':
|
||||||
|
dividend_type = this.dividend_type
|
||||||
|
return _FUNCS_.subscribe_quote(stock_code, period, dividend_type, 0, result_type, callback)
|
||||||
|
|
||||||
|
def subscribe_whole_quote(this, code_list, callback = None):
|
||||||
|
return _FUNCS_.subscribe_whole_quote(code_list, callback)
|
||||||
|
|
||||||
|
def unsubscribe_quote(this, subscribe_id):
|
||||||
|
return _FUNCS_.unsubscribe_quote(subscribe_id)
|
||||||
|
|
||||||
|
def get_market_data(
|
||||||
|
this, fields = [], stock_code = [], start_time = '', end_time = ''
|
||||||
|
, skip_paused = True, period = '', dividend_type = '', count = -1
|
||||||
|
):
|
||||||
|
if not stock_code:
|
||||||
|
stock_code = [this.stock_code]
|
||||||
|
if not period or period == 'follow':
|
||||||
|
period = this.period
|
||||||
|
if not dividend_type or dividend_type == 'follow':
|
||||||
|
dividend_type = this.dividend_type
|
||||||
|
if period != 'tick' and count == -1 and len(fields) == 1:
|
||||||
|
if not end_time or end_time == 'follow':
|
||||||
|
if this.barpos >= 0:
|
||||||
|
end_time = _FUNCS_.timetag_to_datetime(this.get_bar_timetag(this.barpos))
|
||||||
|
count = -2
|
||||||
|
if period == 'tick' and count == -1 and len(fields) == 1 and start_time == '' and end_time == '':
|
||||||
|
count = -2
|
||||||
|
|
||||||
|
return _FUNCS_.get_market_data(
|
||||||
|
fields, stock_code, start_time, end_time
|
||||||
|
, skip_paused, period, dividend_type, count
|
||||||
|
)
|
||||||
|
|
||||||
|
def get_market_data_ex(
|
||||||
|
this, fields = [], stock_code = [], period = ''
|
||||||
|
, start_time = '', end_time = '', count = -1
|
||||||
|
, dividend_type = '', fill_data = True, subscribe = True
|
||||||
|
):
|
||||||
|
if not stock_code:
|
||||||
|
stock_code = [this.stock_code]
|
||||||
|
if not period or period == 'follow':
|
||||||
|
period = this.period
|
||||||
|
if not dividend_type or dividend_type == 'follow':
|
||||||
|
dividend_type = this.dividend_type
|
||||||
|
|
||||||
|
if not this.subscribe_once and subscribe:
|
||||||
|
this.subscribe_once = True
|
||||||
|
if period != "tick":
|
||||||
|
for stk in stock_code:
|
||||||
|
_FUNCS_.subscribe_quote(stk, period, dividend_type, -1)
|
||||||
|
else:
|
||||||
|
for stk in stock_code:
|
||||||
|
this.subscribe_whole_quote(stk)
|
||||||
|
|
||||||
|
return _FUNCS_.get_market_data_ex(
|
||||||
|
fields, stock_code, period
|
||||||
|
, start_time, end_time, count
|
||||||
|
, dividend_type, fill_data, subscribe
|
||||||
|
)
|
||||||
|
|
||||||
|
def get_full_tick(this, stock_code = []):
|
||||||
|
if not stock_code:
|
||||||
|
stock_code = [this.stock_code]
|
||||||
|
return _FUNCS_.get_full_tick(stock_code)
|
||||||
|
|
||||||
|
def get_divid_factors(this, stock_code = '', date = None):
|
||||||
|
if not stock_code:
|
||||||
|
stock_code = this.stock_code
|
||||||
|
return _FUNCS_.get_divid_factors(stock_code, date)
|
||||||
|
|
||||||
|
### qmt functions - finance ###
|
||||||
|
|
||||||
|
def get_financial_data(this, field_list, stock_list, start_date, end_date, report_type = 'announce_time'):
|
||||||
|
raise 'not implemented, use get_raw_financial_data instead'
|
||||||
|
return
|
||||||
|
|
||||||
|
def get_raw_financial_data(this, field_list, stock_list, start_date, end_date, report_type = 'announce_time'):
|
||||||
|
return _FUNCS_.get_raw_financial_data(field_list, stock_list, start_date, end_date, report_type)
|
||||||
|
|
||||||
|
### qmt functions - option ###
|
||||||
|
|
||||||
|
def get_option_detail_data(this, optioncode):
|
||||||
|
return _FUNCS_.get_option_detail_data(optioncode)
|
||||||
|
|
||||||
|
def get_option_undl_data(this, undl_code_ref):
|
||||||
|
return _FUNCS_.get_option_undl_data(undl_code_ref)
|
||||||
|
|
||||||
|
def get_option_list(this, undl_code,dedate,opttype = "",isavailavle = False):
|
||||||
|
return _FUNCS_.get_option_list(undl_code, dedate, opttype, isavailavle)
|
||||||
|
|
||||||
|
def get_option_iv(this, opt_code):
|
||||||
|
return _FUNCS_.get_opt_iv(opt_code, this.request_id)
|
||||||
|
|
||||||
|
def bsm_price(this, optType, targetPrice, strikePrice, riskFree, sigma, days, dividend = 0):
|
||||||
|
optionType = ""
|
||||||
|
if(optType.upper() == "C"):
|
||||||
|
optionType = "CALL"
|
||||||
|
if(optType.upper() == "P"):
|
||||||
|
optionType = "PUT"
|
||||||
|
if(type(targetPrice) == list):
|
||||||
|
result = []
|
||||||
|
for price in targetPrice:
|
||||||
|
bsmPrice= _FUNCS_.calc_bsm_price(optionType,strikePrice,float(price),riskFree,sigma,days,dividend, this.request_id)
|
||||||
|
bsmPrice = round(bsmPrice,4)
|
||||||
|
result.append(bsmPrice)
|
||||||
|
return result
|
||||||
|
else:
|
||||||
|
bsmPrice = _FUNCS_.calc_bsm_price(optionType,strikePrice,targetPrice,riskFree,sigma,days,dividend, this.request_id)
|
||||||
|
result = round(bsmPrice,4)
|
||||||
|
return result
|
||||||
|
|
||||||
|
def bsm_iv(this, optType, targetPrice, strikePrice, optionPrice, riskFree, days, dividend = 0):
|
||||||
|
if(optType.upper() == "C"):
|
||||||
|
optionType = "CALL"
|
||||||
|
if(optType.upper() == "P"):
|
||||||
|
optionType = "PUT"
|
||||||
|
result = _FUNCS_.calc_bsm_iv(optionType, strikePrice, targetPrice, optionPrice, riskFree, days, dividend, this.request_id)
|
||||||
|
result = round(result,4)
|
||||||
|
return result
|
||||||
|
|
||||||
|
### qmt functions - static ###
|
||||||
|
|
||||||
|
def get_instrument_detail(this, stock_code = '', iscomplete = False):
|
||||||
|
if not stock_code:
|
||||||
|
stock_code = this.stock_code
|
||||||
|
return _FUNCS_.get_instrument_detail(stock_code, iscomplete)
|
||||||
|
|
||||||
|
get_instrumentdetail = get_instrument_detail # compat
|
||||||
|
|
||||||
|
def get_trading_dates(this, stock_code, start_date, end_date, count, period = '1d'):
|
||||||
|
return _FUNCS_.get_trading_dates(stock_code, start_date, end_date, count, period)
|
||||||
|
|
||||||
|
def get_stock_list_in_sector(this, sector_name):
|
||||||
|
return _FUNCS_.get_stock_list_in_sector(sector_name)
|
||||||
|
|
||||||
|
def passorder(
|
||||||
|
this
|
||||||
|
, opType, orderType, accountid
|
||||||
|
, orderCode, prType, modelprice, volume
|
||||||
|
, strategyName, quickTrade, userOrderId
|
||||||
|
):
|
||||||
|
return _FUNCS_._passorder_impl(
|
||||||
|
opType, orderType, accountid
|
||||||
|
, orderCode, prType, modelprice, volume
|
||||||
|
, strategyName, quickTrade, userOrderId
|
||||||
|
, this.barpos, this.get_bar_timetag()
|
||||||
|
, "passorder", ""
|
||||||
|
, this.request_id
|
||||||
|
)
|
||||||
|
|
||||||
|
def set_auto_trade_callback(this, enable):
|
||||||
|
return _FUNCS_._set_auto_trade_callback_impl(enable, this.request_id)
|
||||||
|
|
||||||
|
def set_account(this, accountid):
|
||||||
|
return _FUNCS_.set_account(accountid, this.request_id)
|
||||||
|
|
||||||
|
def get_his_st_data(this, stock_code):
|
||||||
|
return _FUNCS_.get_his_st_data(stock_code)
|
||||||
|
|
||||||
|
### private ###
|
||||||
|
|
||||||
|
def trade_callback(this, type, result, error):
|
||||||
|
class DetailData(object):
|
||||||
|
def __init__(self, _obj):
|
||||||
|
if _obj:
|
||||||
|
self.__dict__.update(_obj)
|
||||||
|
|
||||||
|
if type == 'accountcallback':
|
||||||
|
this.account_callback(DetailData(result))
|
||||||
|
elif type == 'ordercallback':
|
||||||
|
this.order_callback(DetailData(result))
|
||||||
|
elif type == 'dealcallback':
|
||||||
|
this.deal_callback(DetailData(result))
|
||||||
|
elif type == 'positioncallback':
|
||||||
|
this.position_callback(DetailData(result))
|
||||||
|
elif type == 'ordererrorcallback':
|
||||||
|
this.orderError_callback(DetailData(result.get('passorderArg')), result.get('strMsg'))
|
||||||
|
|
||||||
|
return
|
||||||
|
|
||||||
|
def register_callback(this, reqid):
|
||||||
|
_FUNCS_.register_external_resp_callback(reqid, this.trade_callback)
|
||||||
|
return
|
||||||
|
|
||||||
|
def get_callback_cache(this, type):
|
||||||
|
return _FUNCS_._get_callback_cache_impl(type, this.request_id)
|
||||||
|
|
||||||
|
def get_ipo_info(this, start_time = '', end_time = ''):
|
||||||
|
return _FUNCS_.get_ipo_info(start_time, end_time)
|
||||||
|
|
||||||
|
def get_backtest_index(this, path):
|
||||||
|
_FUNCS_.get_backtest_index(this.request_id, path)
|
||||||
|
|
||||||
|
def get_group_result(this, path, fields):
|
||||||
|
_FUNCS_.get_group_result(this.request_id, path, fields)
|
||||||
|
|
||||||
|
def is_suspended_stock(this, stock_code, type):
|
||||||
|
if this.barpos > len(this.timelist):
|
||||||
|
return False
|
||||||
|
|
||||||
|
if type == 1 or len(this.timelist) == 0:
|
||||||
|
inst = this.get_instrument_detail(stock_code)
|
||||||
|
return inst.get('InstrumentStatus', 0) >= 1
|
||||||
|
|
||||||
|
return _FUNCS_.is_suspended_stock(stock_code, this.period, this.timelist[this.barpos])
|
537
src/xtquant/qmttools/functions.py
Normal file
537
src/xtquant/qmttools/functions.py
Normal file
@ -0,0 +1,537 @@
|
|||||||
|
#coding:utf-8
|
||||||
|
|
||||||
|
import datetime as _DT_
|
||||||
|
|
||||||
|
from xtquant import xtdata
|
||||||
|
from xtquant import xtbson as _BSON_
|
||||||
|
|
||||||
|
def datetime_to_timetag(timelabel, format = ''):
|
||||||
|
'''
|
||||||
|
timelabel: str '20221231' '20221231235959'
|
||||||
|
format: str '%Y%m%d' '%Y%m%d%H%M%S'
|
||||||
|
return: int 1672502399000
|
||||||
|
'''
|
||||||
|
if not format:
|
||||||
|
format = '%Y%m%d' if len(timelabel) == 8 else '%Y%m%d%H%M%S'
|
||||||
|
return _DT_.datetime.strptime(timelabel, format).timestamp() * 1000
|
||||||
|
|
||||||
|
def timetag_to_datetime(timetag, format = ''):
|
||||||
|
'''
|
||||||
|
timetag: int 1672502399000
|
||||||
|
format: str '%Y%m%d' '%Y%m%d%H%M%S'
|
||||||
|
return: str '20221231' '20221231235959'
|
||||||
|
'''
|
||||||
|
if not format:
|
||||||
|
format = '%Y%m%d' if timetag % 86400000 == 57600000 else '%Y%m%d%H%M%S'
|
||||||
|
return _DT_.datetime.fromtimestamp(timetag / 1000).strftime(format)
|
||||||
|
|
||||||
|
def fetch_ContextInfo():
|
||||||
|
import sys
|
||||||
|
frame = sys._getframe()
|
||||||
|
while (frame):
|
||||||
|
loc = list(frame.f_locals.values())
|
||||||
|
for val in loc:
|
||||||
|
if type(val).__name__ == "ContextInfo":
|
||||||
|
return val
|
||||||
|
frame = frame.f_back
|
||||||
|
return None
|
||||||
|
|
||||||
|
def subscribe_quote(stock_code, period, dividend_type, count = 0, result_type = '', callback = None):
|
||||||
|
return xtdata.subscribe_quote(stock_code, period, '', '', count, callback)
|
||||||
|
|
||||||
|
def subscribe_whole_quote(code_list, callback = None):
|
||||||
|
return xtdata.subscribe_whole_quote(code_list, callback)
|
||||||
|
|
||||||
|
def unsubscribe_quote(subscribe_id):
|
||||||
|
return xtdata.unsubscribe_quote(subscribe_id)
|
||||||
|
|
||||||
|
def get_market_data(
|
||||||
|
fields = [], stock_code = [], start_time = '', end_time = ''
|
||||||
|
, skip_paused = True, period = '', dividend_type = '', count = -1
|
||||||
|
):
|
||||||
|
res = {}
|
||||||
|
if period == 'tick':
|
||||||
|
refixed = False
|
||||||
|
if count == -2:
|
||||||
|
refixed = True
|
||||||
|
count = 1
|
||||||
|
if 'quoter' not in fields:
|
||||||
|
return xtdata.get_market_data_ori(
|
||||||
|
field_list=fields, stock_list=stock_code, period=period
|
||||||
|
, start_time=start_time, end_time=end_time, count=count
|
||||||
|
, dividend_type=dividend_type, fill_data=skip_paused
|
||||||
|
)
|
||||||
|
|
||||||
|
fields = []
|
||||||
|
data = xtdata.get_market_data_ori(
|
||||||
|
field_list=fields, stock_list=stock_code, period=period
|
||||||
|
, start_time=start_time, end_time=end_time, count=count
|
||||||
|
, dividend_type=dividend_type, fill_data=skip_paused
|
||||||
|
)
|
||||||
|
fields = ['quoter']
|
||||||
|
|
||||||
|
import pandas as pd
|
||||||
|
|
||||||
|
stime_fmt = '%Y%m%d' if period == '1d' else '%Y%m%d%H%M%S'
|
||||||
|
for stock in data:
|
||||||
|
pd_data = pd.DataFrame(data[stock])
|
||||||
|
pd_data['stime'] = [timetag_to_datetime(t, stime_fmt) for t in pd_data['time']]
|
||||||
|
pd_data.index = pd.to_datetime((pd_data['time'] + 28800000) * 1000000)
|
||||||
|
ans = {}
|
||||||
|
for j, timetag in enumerate(pd_data['time']):
|
||||||
|
d_map = {}
|
||||||
|
for key in pd_data:
|
||||||
|
d_map[key] = pd_data[key][j]
|
||||||
|
ans[str(pd_data.index[j])] = {}
|
||||||
|
ans[str(pd_data.index[j])]['quoter'] = d_map
|
||||||
|
res[stock] = ans
|
||||||
|
|
||||||
|
oriData = res
|
||||||
|
# if not pd_data.empty:
|
||||||
|
# if count > 0:
|
||||||
|
# return list(pd_data.T.to_dict().values())
|
||||||
|
# return pd_data.iloc[-1].to_dict()
|
||||||
|
# return {}
|
||||||
|
if refixed:
|
||||||
|
count = -2
|
||||||
|
else:
|
||||||
|
refixed = False
|
||||||
|
if count == -2:
|
||||||
|
refixed = True
|
||||||
|
count = 1
|
||||||
|
index, data = xtdata.get_market_data_ori(
|
||||||
|
field_list=fields, stock_list=stock_code, period=period
|
||||||
|
, start_time=start_time, end_time=end_time, count=count
|
||||||
|
, dividend_type=dividend_type, fill_data=skip_paused
|
||||||
|
)
|
||||||
|
if refixed:
|
||||||
|
end_time = ''
|
||||||
|
count = -1
|
||||||
|
for i, stock in enumerate(index[0]):
|
||||||
|
ans = {}
|
||||||
|
for j, timetag in enumerate(index[1]):
|
||||||
|
d_map = {}
|
||||||
|
for key in data:
|
||||||
|
d_map[key] = data[key][i][j]
|
||||||
|
ans[timetag] = d_map
|
||||||
|
res[stock] = ans
|
||||||
|
oriData = res
|
||||||
|
|
||||||
|
resultDict = {}
|
||||||
|
for code in oriData:
|
||||||
|
for timenode in oriData[code]:
|
||||||
|
values = []
|
||||||
|
for field in fields:
|
||||||
|
values.append(oriData[code][timenode][field])
|
||||||
|
key = code + timenode
|
||||||
|
resultDict[key] = values
|
||||||
|
|
||||||
|
if len(fields) == 1 and len(stock_code) <= 1 and (
|
||||||
|
(start_time == '' and end_time == '') or start_time == end_time) and (count == -1 or count == -2):
|
||||||
|
# if resultDict:
|
||||||
|
# keys = list(resultDict.keys())
|
||||||
|
# if resultDict[keys[-1]]:
|
||||||
|
# return resultDict[keys[-1]]
|
||||||
|
for key in resultDict:
|
||||||
|
return resultDict[key][0]
|
||||||
|
return -1
|
||||||
|
import numpy as np
|
||||||
|
import pandas as pd
|
||||||
|
if len(stock_code) <= 1 and start_time == '' and end_time == '' and (count == -1 or count == -2):
|
||||||
|
for key in resultDict:
|
||||||
|
result = pd.Series(resultDict[key], index=fields)
|
||||||
|
return result
|
||||||
|
if len(stock_code) > 1 and start_time == '' and end_time == '' and (count == -1 or count == -2):
|
||||||
|
values = []
|
||||||
|
for code in stock_code:
|
||||||
|
if code in oriData:
|
||||||
|
if not oriData[code]:
|
||||||
|
values.append([np.nan])
|
||||||
|
for timenode in oriData[code]:
|
||||||
|
key = code + timenode
|
||||||
|
values.append(resultDict[key])
|
||||||
|
else:
|
||||||
|
values.append([np.nan])
|
||||||
|
result = pd.DataFrame(values, index=stock_code, columns=fields)
|
||||||
|
return result
|
||||||
|
if len(stock_code) <= 1 and ((start_time != '' or end_time != '') or count >= 0):
|
||||||
|
values = []
|
||||||
|
times = []
|
||||||
|
for code in oriData:
|
||||||
|
for timenode in oriData[code]:
|
||||||
|
key = code + timenode
|
||||||
|
times.append(timenode)
|
||||||
|
values.append(resultDict[key])
|
||||||
|
result = pd.DataFrame(values, index=times, columns=fields)
|
||||||
|
return result
|
||||||
|
if len(stock_code) > 1 and ((start_time != '' or end_time != '') or count >= 0):
|
||||||
|
values = {}
|
||||||
|
for code in stock_code:
|
||||||
|
times = []
|
||||||
|
value = []
|
||||||
|
if code in oriData:
|
||||||
|
for timenode in oriData[code]:
|
||||||
|
key = code + timenode
|
||||||
|
times.append(timenode)
|
||||||
|
value.append(resultDict[key])
|
||||||
|
values[code] = pd.DataFrame(value, index=times, columns=fields)
|
||||||
|
try:
|
||||||
|
result = pd.Panel(values)
|
||||||
|
return result
|
||||||
|
except:
|
||||||
|
return oriData
|
||||||
|
return
|
||||||
|
|
||||||
|
def get_market_data_ex(
|
||||||
|
fields = [], stock_code = [], period = ''
|
||||||
|
, start_time = '', end_time = '', count = -1
|
||||||
|
, dividend_type = '', fill_data = True, subscribe = True
|
||||||
|
):
|
||||||
|
res = xtdata.get_market_data_ex(
|
||||||
|
field_list = fields, stock_list = stock_code, period = period
|
||||||
|
, start_time = start_time, end_time = end_time, count = count
|
||||||
|
, dividend_type = dividend_type, fill_data = fill_data
|
||||||
|
)
|
||||||
|
for stock in res:
|
||||||
|
res[stock].index.name = "stime"
|
||||||
|
return res
|
||||||
|
|
||||||
|
def get_full_tick(stock_code):
|
||||||
|
return xtdata.get_full_tick(stock_code)
|
||||||
|
|
||||||
|
def get_divid_factors(stock_code, date = None):
|
||||||
|
client = xtdata.get_client()
|
||||||
|
if date:
|
||||||
|
data = client.get_divid_factors(stock_code, date, date)
|
||||||
|
else:
|
||||||
|
data = client.get_divid_factors(stock_code, '19700101', '20380119')
|
||||||
|
|
||||||
|
res = {}
|
||||||
|
for value in data.values():
|
||||||
|
res[value['time']] = list(value.values())[1:]
|
||||||
|
return res
|
||||||
|
|
||||||
|
def download_history_data(stockcode, period, startTime, endTime):
|
||||||
|
return xtdata.download_history_data(stockcode, period, startTime, endTime)
|
||||||
|
|
||||||
|
def get_raw_financial_data(field_list, stock_list, start_date, end_date, report_type = 'announce_time'):
|
||||||
|
client = xtdata.get_client()
|
||||||
|
data = client.get_financial_data(stock_list, field_list, start_date, end_date, report_type)
|
||||||
|
|
||||||
|
import time
|
||||||
|
res = {}
|
||||||
|
for stock in data:
|
||||||
|
stock_data = data[stock]
|
||||||
|
res[stock] = {}
|
||||||
|
|
||||||
|
for field in field_list:
|
||||||
|
fs = field.split('.')
|
||||||
|
table_data = stock_data.get(fs[0])
|
||||||
|
|
||||||
|
if not table_data:
|
||||||
|
continue
|
||||||
|
|
||||||
|
ans = {}
|
||||||
|
for row_data in table_data:
|
||||||
|
if row_data.get(report_type, None) == None:
|
||||||
|
continue
|
||||||
|
date = time.strftime('%Y%m%d', time.localtime(row_data[report_type] / 1000))
|
||||||
|
if start_date == '' or start_date <= date:
|
||||||
|
if end_date == '' or date <= end_date:
|
||||||
|
ans[int(row_data[report_type])] = row_data[fs[1]]
|
||||||
|
res[stock][field] = ans
|
||||||
|
return res
|
||||||
|
|
||||||
|
#def download_financial_data(stock_list, table_list): #暂不提供
|
||||||
|
# return xtdata.download_financial_data(stock_list, table_list)
|
||||||
|
|
||||||
|
def get_instrument_detail(stock_code, iscomplete = False):
|
||||||
|
return xtdata.get_instrument_detail(stock_code, iscomplete)
|
||||||
|
|
||||||
|
#def get_instrument_type(stock_code): #暂不提供
|
||||||
|
# return xtdata.get_instrument_type(stock_code)
|
||||||
|
|
||||||
|
def get_trading_dates(stock_code, start_date, end_date, count = -1, period = '1d'):
|
||||||
|
if period != '1d':
|
||||||
|
return []
|
||||||
|
market = stock_code.split('.')[0]
|
||||||
|
trade_dates = xtdata.get_trading_dates(market, start_date, end_date)
|
||||||
|
if count == -1:
|
||||||
|
return trade_dates
|
||||||
|
if count > 0:
|
||||||
|
return trade_dates[-count:]
|
||||||
|
return []
|
||||||
|
|
||||||
|
def get_stock_list_in_sector(sector_name):
|
||||||
|
return xtdata.get_stock_list_in_sector(sector_name)
|
||||||
|
|
||||||
|
def download_sector_data():
|
||||||
|
return xtdata.download_sector_data()
|
||||||
|
|
||||||
|
download_sector_weight = download_sector_data #compat
|
||||||
|
|
||||||
|
def get_his_st_data(stock_code):
|
||||||
|
return xtdata.get_his_st_data(stock_code)
|
||||||
|
|
||||||
|
|
||||||
|
def _passorder_impl(
|
||||||
|
optype, ordertype, accountid
|
||||||
|
, ordercode, prtype, modelprice, volume
|
||||||
|
, strategyName, quickTrade, userOrderId
|
||||||
|
, barpos, bartime, func, algoName
|
||||||
|
, requestid
|
||||||
|
):
|
||||||
|
data = {}
|
||||||
|
|
||||||
|
data['optype'] = optype
|
||||||
|
data['ordertype'] = ordertype
|
||||||
|
data['accountid'] = accountid
|
||||||
|
data['ordercode'] = ordercode
|
||||||
|
data['prtype'] = prtype
|
||||||
|
data['modelprice'] = modelprice
|
||||||
|
data['volume'] = volume
|
||||||
|
data['strategyname'] = strategyName
|
||||||
|
data['remark'] = userOrderId
|
||||||
|
data['quicktrade'] = quickTrade
|
||||||
|
data['func'] = func
|
||||||
|
data['algoname'] = algoName
|
||||||
|
data['barpos'] = barpos
|
||||||
|
data['bartime'] = bartime
|
||||||
|
|
||||||
|
client = xtdata.get_client()
|
||||||
|
client.callFormula(requestid, 'passorder', _BSON_.BSON.encode(data))
|
||||||
|
return
|
||||||
|
|
||||||
|
|
||||||
|
def passorder(
|
||||||
|
opType, orderType, accountid
|
||||||
|
, orderCode, prType, modelprice, volume
|
||||||
|
, strategyName, quickTrade, userOrderId
|
||||||
|
, C
|
||||||
|
):
|
||||||
|
return C.passorder(
|
||||||
|
opType, orderType, accountid
|
||||||
|
, orderCode, prType, modelprice, volume
|
||||||
|
, strategyName, quickTrade, userOrderId
|
||||||
|
)
|
||||||
|
|
||||||
|
|
||||||
|
def get_trade_detail_data(accountid, accounttype, datatype, strategyname = ''):
|
||||||
|
data = {}
|
||||||
|
|
||||||
|
C = fetch_ContextInfo()
|
||||||
|
if C is None:
|
||||||
|
raise Exception("contextinfo could not be found in the stack")
|
||||||
|
request_id = C.request_id
|
||||||
|
|
||||||
|
data['accountid'] = accountid
|
||||||
|
data['accounttype'] = accounttype
|
||||||
|
data['datatype'] = datatype
|
||||||
|
data['strategyname'] = strategyname
|
||||||
|
|
||||||
|
client = xtdata.get_client()
|
||||||
|
result_bson = client.callFormula(request_id, 'gettradedetail', _BSON_.BSON.encode(data))
|
||||||
|
result = _BSON_.BSON.decode(result_bson)
|
||||||
|
|
||||||
|
class DetailData(object):
|
||||||
|
def __init__(self, _obj):
|
||||||
|
if _obj:
|
||||||
|
self.__dict__.update(_obj)
|
||||||
|
|
||||||
|
out = []
|
||||||
|
if not result:
|
||||||
|
return out
|
||||||
|
|
||||||
|
for item in result.get('result'):
|
||||||
|
out.append(DetailData(item))
|
||||||
|
return out
|
||||||
|
|
||||||
|
def register_external_resp_callback(reqid, callback):
|
||||||
|
client = xtdata.get_client()
|
||||||
|
|
||||||
|
status = [False, 0, 1, '']
|
||||||
|
|
||||||
|
def on_callback(type, data, error):
|
||||||
|
try:
|
||||||
|
result = _BSON_.BSON.decode(data)
|
||||||
|
callback(type, result, error)
|
||||||
|
return True
|
||||||
|
except:
|
||||||
|
status[0] = True
|
||||||
|
status[3] = 'exception'
|
||||||
|
return True
|
||||||
|
|
||||||
|
client.register_external_resp_callback(reqid, on_callback)
|
||||||
|
|
||||||
|
def _set_auto_trade_callback_impl(enable, requestid):
|
||||||
|
data = {}
|
||||||
|
data['enable'] = enable
|
||||||
|
|
||||||
|
client = xtdata.get_client()
|
||||||
|
client.callFormula(requestid, 'setautotradecallback', _BSON_.BSON.encode(data))
|
||||||
|
return
|
||||||
|
|
||||||
|
def set_auto_trade_callback(C,enable):
|
||||||
|
return C.set_auto_trade_callback(enable)
|
||||||
|
|
||||||
|
def set_account(accountid, requestid):
|
||||||
|
data = {}
|
||||||
|
data['accountid'] = accountid
|
||||||
|
|
||||||
|
client = xtdata.get_client()
|
||||||
|
client.callFormula(requestid, 'setaccount', _BSON_.BSON.encode(data))
|
||||||
|
return
|
||||||
|
|
||||||
|
def _get_callback_cache_impl(type, requestid):
|
||||||
|
data = {}
|
||||||
|
|
||||||
|
data['type'] = type
|
||||||
|
|
||||||
|
client = xtdata.get_client()
|
||||||
|
result_bson = client.callFormula(requestid, 'getcallbackcache', _BSON_.BSON.encode(data))
|
||||||
|
return _BSON_.BSON.decode(result_bson)
|
||||||
|
|
||||||
|
def get_account_callback_cache(data, C):
|
||||||
|
data = C.get_callback_cache("account").get('')
|
||||||
|
return
|
||||||
|
|
||||||
|
def get_order_callback_cache(data, C):
|
||||||
|
data = C.get_callback_cache("order")
|
||||||
|
return
|
||||||
|
|
||||||
|
def get_deal_callback_cache(data, C):
|
||||||
|
data = C.get_callback_cache("deal")
|
||||||
|
return
|
||||||
|
|
||||||
|
def get_position_callback_cache(data, C):
|
||||||
|
data = C.get_callback_cache("position")
|
||||||
|
return
|
||||||
|
|
||||||
|
def get_ordererror_callback_cache(data, C):
|
||||||
|
data = C.get_callback_cache("ordererror")
|
||||||
|
return
|
||||||
|
|
||||||
|
def get_option_detail_data(stock_code):
|
||||||
|
return xtdata.get_option_detail_data(stock_code)
|
||||||
|
|
||||||
|
def get_option_undl_data(undl_code_ref):
|
||||||
|
return xtdata.get_option_undl_data(undl_code_ref)
|
||||||
|
|
||||||
|
def get_option_list(undl_code,dedate,opttype = "",isavailavle = False):
|
||||||
|
return xtdata.get_option_list(undl_code, dedate, opttype, isavailavle)
|
||||||
|
|
||||||
|
def get_opt_iv(opt_code, requestid):
|
||||||
|
data = {}
|
||||||
|
data['code'] = opt_code
|
||||||
|
|
||||||
|
client = xtdata.get_client()
|
||||||
|
result_bson = client.callFormula(requestid, 'getoptiv', _BSON_.BSON.encode(data))
|
||||||
|
result = _BSON_.BSON.decode(result_bson)
|
||||||
|
|
||||||
|
out = result.get('result', 0)
|
||||||
|
return out
|
||||||
|
|
||||||
|
def calc_bsm_price(optionType,strikePrice, targetPrice, riskFree, sigma, days, dividend, requestid):
|
||||||
|
data = {}
|
||||||
|
data['optiontype'] = optionType
|
||||||
|
data['strikeprice'] = strikePrice
|
||||||
|
data['targetprice'] = targetPrice
|
||||||
|
data['riskfree'] = riskFree
|
||||||
|
data['sigma'] = sigma
|
||||||
|
data['days'] = days
|
||||||
|
data['dividend'] = dividend
|
||||||
|
|
||||||
|
client = xtdata.get_client()
|
||||||
|
result_bson = client.callFormula(requestid, 'calcbsmprice', _BSON_.BSON.encode(data))
|
||||||
|
result = _BSON_.BSON.decode(result_bson)
|
||||||
|
|
||||||
|
out = result.get('result', 0)
|
||||||
|
return out
|
||||||
|
|
||||||
|
def calc_bsm_iv(optionType, strikePrice, targetPrice, optionPrice, riskFree, days, dividend, requestid):
|
||||||
|
data = {}
|
||||||
|
data['optiontype'] = optionType
|
||||||
|
data['strikeprice'] = strikePrice
|
||||||
|
data['targetprice'] = targetPrice
|
||||||
|
data['optionprice'] = optionPrice
|
||||||
|
data['riskfree'] = riskFree
|
||||||
|
data['days'] = days
|
||||||
|
data['dividend'] = dividend
|
||||||
|
|
||||||
|
client = xtdata.get_client()
|
||||||
|
result_bson = client.callFormula(requestid, 'calcbsmiv', _BSON_.BSON.encode(data))
|
||||||
|
result = _BSON_.BSON.decode(result_bson)
|
||||||
|
|
||||||
|
out = result.get('result', 0)
|
||||||
|
return out
|
||||||
|
|
||||||
|
def get_ipo_info(start_time, end_time):
|
||||||
|
return xtdata.get_ipo_info(start_time, end_time)
|
||||||
|
|
||||||
|
def get_backtest_index(requestid, path):
|
||||||
|
import os
|
||||||
|
path = os.path.abspath(path)
|
||||||
|
if not os.path.exists(path):
|
||||||
|
os.makedirs(path, exist_ok = True)
|
||||||
|
|
||||||
|
data = {'savePath': path}
|
||||||
|
client = xtdata.get_client()
|
||||||
|
bresult = client.callFormula(requestid, 'backtestresult', _BSON_.BSON.encode(data))
|
||||||
|
return _BSON_.BSON.decode(bresult)
|
||||||
|
|
||||||
|
def get_group_result(requestid, path, fields):
|
||||||
|
import os
|
||||||
|
path = os.path.abspath(path)
|
||||||
|
if not os.path.exists(path):
|
||||||
|
os.makedirs(path, exist_ok = True)
|
||||||
|
|
||||||
|
data = {'savePath': path, 'fields': fields}
|
||||||
|
client = xtdata.get_client()
|
||||||
|
bresult = client.callFormula(requestid, 'groupresult', _BSON_.BSON.encode(data))
|
||||||
|
return _BSON_.BSON.decode(bresult)
|
||||||
|
|
||||||
|
def subscribe_formula(formula_name, stock_code, period, start_time = "", end_time = "", count=-1, dividend_type = "none", extend_params = {}, callback = None):
|
||||||
|
return xtdata.subscribe_formula(formula_name, stock_code, period, start_time, end_time, count, dividend_type, extend_params, callback)
|
||||||
|
|
||||||
|
def call_formula_batch(formula_names, stock_codes, period, start_time = "", end_time = "", count=-1, dividend_type = "none", extend_params = []):
|
||||||
|
import copy
|
||||||
|
params = []
|
||||||
|
for name in formula_names:
|
||||||
|
for stock in stock_codes:
|
||||||
|
param = {
|
||||||
|
'formulaname': name, 'stockcode': stock, 'period': period
|
||||||
|
, 'starttime': start_time, 'endtime': end_time, 'count': count
|
||||||
|
, 'dividendtype': dividend_type, 'extendparam': {}
|
||||||
|
, 'create': True, 'datademand': 0
|
||||||
|
}
|
||||||
|
|
||||||
|
if extend_params:
|
||||||
|
for extend in extend_params:
|
||||||
|
param['extendparam'] = extend
|
||||||
|
params.append(copy.deepcopy(param))
|
||||||
|
else:
|
||||||
|
params.append(param)
|
||||||
|
|
||||||
|
client = xtdata.get_client()
|
||||||
|
result = client.commonControl(
|
||||||
|
'callformulabatch'
|
||||||
|
, _BSON_.BSON.encode(
|
||||||
|
{"params": params}
|
||||||
|
)
|
||||||
|
)
|
||||||
|
result = _BSON_.BSON.decode(result)
|
||||||
|
return result.get("result", {})
|
||||||
|
|
||||||
|
def is_suspended_stock(stock_code, period, timetag):
|
||||||
|
client = xtdata.get_client()
|
||||||
|
|
||||||
|
result = client.commonControl(
|
||||||
|
'issuspendedstock'
|
||||||
|
, _BSON_.BSON.encode({
|
||||||
|
"stockcode": stock_code
|
||||||
|
, "period": period
|
||||||
|
, "timetag": timetag
|
||||||
|
})
|
||||||
|
)
|
||||||
|
result = _BSON_.BSON.decode(result)
|
||||||
|
return result.get('result', True)
|
73
src/xtquant/qmttools/stgentry.py
Normal file
73
src/xtquant/qmttools/stgentry.py
Normal file
@ -0,0 +1,73 @@
|
|||||||
|
#coding:utf-8
|
||||||
|
|
||||||
|
from .functions import *
|
||||||
|
|
||||||
|
|
||||||
|
def run_file(user_script, param = {}):
|
||||||
|
import os, sys, time, types
|
||||||
|
from .contextinfo import ContextInfo
|
||||||
|
from .stgframe import StrategyLoader
|
||||||
|
|
||||||
|
pypath = param.get('pythonpath')
|
||||||
|
if pypath:
|
||||||
|
lib_search = [os.path.abspath(p) for p in pypath.split(';')]
|
||||||
|
sys.path = lib_search + [p for p in sys.path if p not in lib_search]
|
||||||
|
|
||||||
|
user_module = compile(open(user_script, 'rb').read(), user_script, 'exec', optimize = 2)
|
||||||
|
#print({'user_module': user_module})
|
||||||
|
|
||||||
|
try:
|
||||||
|
pywentrance = param.get('pywentrance', '')
|
||||||
|
user_variable = compile(open(os.path.join(pywentrance, "..", "user_config.py"), "rb").read(),
|
||||||
|
"user_config.py", 'exec', optimize=2)
|
||||||
|
exec(user_variable, globals())
|
||||||
|
except Exception as e:
|
||||||
|
pass
|
||||||
|
|
||||||
|
exec(user_module, globals())
|
||||||
|
|
||||||
|
_C = ContextInfo()
|
||||||
|
_C._param = param
|
||||||
|
_C.user_script = user_script
|
||||||
|
|
||||||
|
def try_set_func(C, func_name):
|
||||||
|
func = globals().get(func_name)
|
||||||
|
if func:
|
||||||
|
C.__setattr__(func_name, types.MethodType(func, C))
|
||||||
|
return
|
||||||
|
|
||||||
|
try_set_func(_C, 'init')
|
||||||
|
try_set_func(_C, 'after_init')
|
||||||
|
try_set_func(_C, 'handlebar')
|
||||||
|
try_set_func(_C, 'on_backtest_finished')
|
||||||
|
try_set_func(_C, 'stop')
|
||||||
|
|
||||||
|
try_set_func(_C, 'account_callback')
|
||||||
|
try_set_func(_C, 'order_callback')
|
||||||
|
try_set_func(_C, 'deal_callback')
|
||||||
|
try_set_func(_C, 'position_callback')
|
||||||
|
try_set_func(_C, 'orderError_callback')
|
||||||
|
|
||||||
|
loader = StrategyLoader()
|
||||||
|
|
||||||
|
loader.C = _C
|
||||||
|
|
||||||
|
loader.init()
|
||||||
|
loader.start()
|
||||||
|
loader.run()
|
||||||
|
loader.stop()
|
||||||
|
loader.shutdown()
|
||||||
|
|
||||||
|
mode = _C.trade_mode
|
||||||
|
if mode == 'backtest':
|
||||||
|
from .stgframe import BackTestResult
|
||||||
|
return BackTestResult(_C.request_id)
|
||||||
|
|
||||||
|
if mode in ['simulation', 'trading']:
|
||||||
|
while True:
|
||||||
|
time.sleep(2)
|
||||||
|
from .stgframe import Result
|
||||||
|
return Result(_C.request_id)
|
||||||
|
|
||||||
|
return None
|
||||||
|
|
312
src/xtquant/qmttools/stgframe.py
Normal file
312
src/xtquant/qmttools/stgframe.py
Normal file
@ -0,0 +1,312 @@
|
|||||||
|
#coding:utf-8
|
||||||
|
|
||||||
|
from xtquant import xtdata
|
||||||
|
from xtquant import xtbson as _BSON_
|
||||||
|
|
||||||
|
|
||||||
|
class StrategyLoader:
|
||||||
|
def __init__(this):
|
||||||
|
this.C = None
|
||||||
|
this.main_quote_subid = 0
|
||||||
|
return
|
||||||
|
|
||||||
|
def init(this):
|
||||||
|
import os, uuid
|
||||||
|
from xtquant import xtdata_config
|
||||||
|
|
||||||
|
C = this.C
|
||||||
|
|
||||||
|
C.guid = C._param.get('guid', str(uuid.uuid4()))
|
||||||
|
C.request_id = C._param.get('requestid', '') + "_" + C.guid
|
||||||
|
C.quote_mode = C._param.get('quote_mode', 'history') #'realtime' 'history' 'all'
|
||||||
|
C.trade_mode = C._param.get('trade_mode', 'backtest') #'simulation' 'trading' 'backtest'
|
||||||
|
C.do_back_test = 1 if C.trade_mode == 'backtest' else 0
|
||||||
|
|
||||||
|
C.title = C._param.get('title', '')
|
||||||
|
if not C.title:
|
||||||
|
C.title = os.path.basename(os.path.abspath(C.user_script).replace('.py', ''))
|
||||||
|
|
||||||
|
C.stock_code = C._param.get('stock_code', '')
|
||||||
|
C.period = C._param.get('period', '')
|
||||||
|
C.start_time = C._param.get('start_time', '')
|
||||||
|
C.end_time = C._param.get('end_time', '')
|
||||||
|
C.start_time_str = ''
|
||||||
|
C.end_time_str = ''
|
||||||
|
if type(C.period) == int:
|
||||||
|
C.period = {
|
||||||
|
0 :'tick'
|
||||||
|
, 60000 :'1m'
|
||||||
|
, 180000 :'3m'
|
||||||
|
, 300000 :'5m'
|
||||||
|
, 600000 :'10m'
|
||||||
|
, 900000 :'15m'
|
||||||
|
, 1800000 :'30m'
|
||||||
|
, 3600000 :'1h'
|
||||||
|
, 86400000 :'1d'
|
||||||
|
, 604800000 :'1w'
|
||||||
|
, 2592000000 :'1mon'
|
||||||
|
, 7776000000 :'1q'
|
||||||
|
, 15552000000 :'1hy'
|
||||||
|
, 31536000000 :'1y'
|
||||||
|
}.get(C.period, '')
|
||||||
|
C.dividend_type = C._param.get('dividend_type', 'none')
|
||||||
|
|
||||||
|
backtest = C._param.get('backtest', {})
|
||||||
|
if backtest:
|
||||||
|
C.asset = backtest.get('asset', 1000000.0)
|
||||||
|
C.margin_ratio = backtest.get('margin_ratio', 0.05)
|
||||||
|
C.slippage_type = backtest.get('slippage_type', 2)
|
||||||
|
C.slippage = backtest.get('slippage', 0.0)
|
||||||
|
C.max_vol_rate = backtest.get('max_vol_rate', 0.0)
|
||||||
|
C.comsisson_type = backtest.get('comsisson_type', 0)
|
||||||
|
C.open_tax = backtest.get('open_tax', 0.0)
|
||||||
|
C.close_tax = backtest.get('close_tax', 0.0)
|
||||||
|
C.min_commission = backtest.get('min_commission', 0.0)
|
||||||
|
C.open_commission = backtest.get('open_commission', 0.0)
|
||||||
|
C.close_commission = backtest.get('close_commission', 0.0)
|
||||||
|
C.close_today_commission = backtest.get('close_today_commission', 0.0)
|
||||||
|
C.benchmark = backtest.get('benchmark', '000300.SH')
|
||||||
|
|
||||||
|
xtdata_config.client_guid = C._param.get('clientguid')
|
||||||
|
|
||||||
|
from .functions import datetime_to_timetag
|
||||||
|
|
||||||
|
if C.start_time:
|
||||||
|
C.start_time_str = C.start_time.replace('-', '').replace(' ', '').replace(':', '')
|
||||||
|
C.start_time_num = int(datetime_to_timetag(C.start_time_str))
|
||||||
|
if C.end_time:
|
||||||
|
C.end_time_str = C.end_time.replace('-', '').replace(' ', '').replace(':', '')
|
||||||
|
C.end_time_num = int(datetime_to_timetag(C.end_time_str))
|
||||||
|
|
||||||
|
if 1: #register
|
||||||
|
this.create_formula()
|
||||||
|
|
||||||
|
C.init()
|
||||||
|
|
||||||
|
if 1: #fix param
|
||||||
|
if '.' in C.stock_code:
|
||||||
|
pos = C.stock_code.rfind('.')
|
||||||
|
C.stockcode = C.stock_code[0:pos]
|
||||||
|
C.market = C.stock_code[pos + 1:].upper()
|
||||||
|
|
||||||
|
if C.stockcode and C.market:
|
||||||
|
C.stock_code = C.stockcode + '.' + C.market
|
||||||
|
C.period = C.period.lower()
|
||||||
|
|
||||||
|
if C.stockcode == "" or C.market == "":
|
||||||
|
raise Exception("股票代码为空")
|
||||||
|
|
||||||
|
if 1: #create view
|
||||||
|
if not C._param.get('requestid'):
|
||||||
|
this.create_view(C.title)
|
||||||
|
|
||||||
|
if 1: #post initcomplete
|
||||||
|
init_result = {}
|
||||||
|
|
||||||
|
config_ar = ['request_id', 'quote_mode', 'trade_mode']
|
||||||
|
init_result['config'] = {ar: C.__getattribute__(ar) for ar in config_ar}
|
||||||
|
|
||||||
|
quote_ar = [
|
||||||
|
'stock_code', 'stockcode', 'market', 'period'
|
||||||
|
, 'start_time', 'end_time', 'dividend_type'
|
||||||
|
]
|
||||||
|
init_result['quote'] = {ar: C.__getattribute__(ar) for ar in quote_ar}
|
||||||
|
|
||||||
|
trade_ar = []
|
||||||
|
init_result['trade'] = {ar: C.__getattribute__(ar) for ar in trade_ar}
|
||||||
|
|
||||||
|
backtest_ar = [
|
||||||
|
'start_time', 'end_time', 'asset', 'margin_ratio', 'slippage_type', 'slippage'
|
||||||
|
, 'max_vol_rate', 'comsisson_type', 'open_tax', 'close_tax'
|
||||||
|
, 'min_commission', 'open_commission', 'close_commission'
|
||||||
|
, 'close_today_commission', 'benchmark'
|
||||||
|
]
|
||||||
|
init_result['backtest'] = {ar: C.__getattribute__(ar) for ar in backtest_ar}
|
||||||
|
|
||||||
|
import datetime as dt
|
||||||
|
if C.start_time:
|
||||||
|
C.start_time_str = C.start_time.replace('-', '').replace(' ', '').replace(':', '')
|
||||||
|
C.start_time_num = int(datetime_to_timetag(C.start_time_str))
|
||||||
|
init_result['backtest']['start_time'] = dt.datetime.fromtimestamp(C.start_time_num / 1000).strftime('%Y-%m-%d %H:%M:%S')
|
||||||
|
if C.end_time:
|
||||||
|
C.end_time_str = C.end_time.replace('-', '').replace(' ', '').replace(':', '')
|
||||||
|
C.end_time_num = int(datetime_to_timetag(C.end_time_str))
|
||||||
|
init_result['backtest']['end_time'] = dt.datetime.fromtimestamp(C.end_time_num / 1000).strftime('%Y-%m-%d %H:%M:%S')
|
||||||
|
|
||||||
|
this.call_formula('initcomplete', init_result)
|
||||||
|
|
||||||
|
if 1:
|
||||||
|
this.C.register_callback(0)
|
||||||
|
return
|
||||||
|
|
||||||
|
def shutdown(this):
|
||||||
|
return
|
||||||
|
|
||||||
|
def start(this):
|
||||||
|
import time
|
||||||
|
C = this.C
|
||||||
|
|
||||||
|
if C.quote_mode in ['history', 'all']:
|
||||||
|
this.load_main_history()
|
||||||
|
|
||||||
|
C.after_init()
|
||||||
|
this.run_bar()
|
||||||
|
|
||||||
|
if C.quote_mode in ['realtime', 'all']:
|
||||||
|
this.load_main_realtime()
|
||||||
|
|
||||||
|
if C.trade_mode == 'backtest':
|
||||||
|
time.sleep(0.4)
|
||||||
|
C.on_backtest_finished()
|
||||||
|
return
|
||||||
|
|
||||||
|
def stop(this):
|
||||||
|
if this.main_quote_subid:
|
||||||
|
xtdata.unsubscribe_quote(this.main_quote_subid)
|
||||||
|
|
||||||
|
this.C.stop()
|
||||||
|
return
|
||||||
|
|
||||||
|
def run(this):
|
||||||
|
C = this.C
|
||||||
|
|
||||||
|
if C.quote_mode in ['realtime', 'all']:
|
||||||
|
xtdata.run()
|
||||||
|
return
|
||||||
|
|
||||||
|
def load_main_history(this):
|
||||||
|
C = this.C
|
||||||
|
|
||||||
|
data = xtdata.get_market_data_ex(
|
||||||
|
field_list = ['time'], stock_list = [C.stock_code], period = C.period
|
||||||
|
, start_time = '', end_time = '', count = -1
|
||||||
|
, fill_data = False
|
||||||
|
)
|
||||||
|
|
||||||
|
C.timelist = list(data[C.stock_code]['time'])
|
||||||
|
return
|
||||||
|
|
||||||
|
def load_main_realtime(this):
|
||||||
|
C = this.C
|
||||||
|
|
||||||
|
def on_data(data):
|
||||||
|
data = data.get(C.stock_code, [])
|
||||||
|
if data:
|
||||||
|
tt = data[-1]['time']
|
||||||
|
this.on_main_quote(tt)
|
||||||
|
return
|
||||||
|
|
||||||
|
this.main_quote_subid = xtdata.subscribe_quote(
|
||||||
|
stock_code = C.stock_code, period = C.period
|
||||||
|
, start_time = '', end_time = '', count = 0
|
||||||
|
, callback = on_data
|
||||||
|
)
|
||||||
|
return
|
||||||
|
|
||||||
|
def on_main_quote(this, timetag):
|
||||||
|
if not this.C.timelist or this.C.timelist[-1] < timetag:
|
||||||
|
this.C.timelist.append(timetag)
|
||||||
|
this.run_bar()
|
||||||
|
return
|
||||||
|
|
||||||
|
def run_bar(this):
|
||||||
|
C = this.C
|
||||||
|
|
||||||
|
push_timelist = []
|
||||||
|
bar_timelist = []
|
||||||
|
|
||||||
|
for i in range(max(C.lastrunbarpos, 0), len(C.timelist)):
|
||||||
|
C.barpos = i
|
||||||
|
bartime = C.timelist[i]
|
||||||
|
|
||||||
|
push_timelist.append(bartime)
|
||||||
|
bar_timelist.append(bartime)
|
||||||
|
|
||||||
|
if (
|
||||||
|
not C.start_time_num or C.start_time_num <= bartime
|
||||||
|
) and (
|
||||||
|
not C.end_time_num or bartime <= C.end_time_num
|
||||||
|
):
|
||||||
|
this.call_formula('runbar', {'timelist': bar_timelist})
|
||||||
|
bar_timelist = []
|
||||||
|
|
||||||
|
C.handlebar()
|
||||||
|
|
||||||
|
C.lastrunbarpos = i
|
||||||
|
|
||||||
|
if bar_timelist:
|
||||||
|
this.call_formula('runbar', {'timelist': bar_timelist})
|
||||||
|
bar_timelist = []
|
||||||
|
|
||||||
|
if 1:
|
||||||
|
push_result = {}
|
||||||
|
push_result['timelist'] = push_timelist
|
||||||
|
push_result['outputs'] = C.push_result
|
||||||
|
C.push_result = {}
|
||||||
|
this.call_formula('index', push_result)
|
||||||
|
return
|
||||||
|
|
||||||
|
def create_formula(this, callback = None):
|
||||||
|
C = this.C
|
||||||
|
client = xtdata.get_client()
|
||||||
|
|
||||||
|
data = {
|
||||||
|
'formulaname': '', 'stockcode': C.stock_code, 'period': C.period
|
||||||
|
, 'starttime': C.start_time_str, 'endtime': C.end_time_str, 'count': 1
|
||||||
|
, 'dividendtype': C.dividend_type, 'create': True, 'pyrunmode': 1
|
||||||
|
, 'title': C.title
|
||||||
|
, 'historycallback': 1 if callback else 0
|
||||||
|
, 'realtimecallback': 1 if callback else 0
|
||||||
|
}
|
||||||
|
|
||||||
|
client.subscribeFormula(C.request_id, _BSON_.BSON.encode(data), callback)
|
||||||
|
|
||||||
|
def call_formula(this, func, data):
|
||||||
|
C = this.C
|
||||||
|
client = xtdata.get_client()
|
||||||
|
bresult = client.callFormula(C.request_id, func, _BSON_.BSON.encode(data))
|
||||||
|
return _BSON_.BSON.decode(bresult)
|
||||||
|
|
||||||
|
def create_view(this, title):
|
||||||
|
C = this.C
|
||||||
|
client = xtdata.get_client()
|
||||||
|
data = {'viewtype': 0,'title':title, 'groupid':-1,'stockcode':C.market + C.stockcode,'period':C.period,'dividendtype':C.dividend_type}
|
||||||
|
client.createView(C.request_id, _BSON_.BSON.encode(data))
|
||||||
|
return
|
||||||
|
|
||||||
|
|
||||||
|
class BackTestResult:
|
||||||
|
def __init__(self, request_id):
|
||||||
|
self.request_id = request_id
|
||||||
|
|
||||||
|
def get_backtest_index(self):
|
||||||
|
import os, pandas as pd, uuid
|
||||||
|
from .functions import get_backtest_index
|
||||||
|
|
||||||
|
path = f'{os.getenv("TEMP")}/backtest_{uuid.uuid4()}'
|
||||||
|
get_backtest_index(self.request_id, path)
|
||||||
|
|
||||||
|
ret = pd.read_csv(f'{path}/backtestindex.csv', encoding = 'utf-8')
|
||||||
|
import shutil
|
||||||
|
shutil.rmtree(f'{path}')
|
||||||
|
return ret
|
||||||
|
|
||||||
|
def get_group_result(self, fields = []):
|
||||||
|
import os, pandas as pd, uuid
|
||||||
|
from .functions import get_group_result
|
||||||
|
|
||||||
|
path = f'{os.getenv("TEMP")}/backtest_{uuid.uuid4()}'
|
||||||
|
get_group_result(self.request_id, path, fields)
|
||||||
|
if not fields:
|
||||||
|
fields = ['order', 'deal', 'position']
|
||||||
|
res = {}
|
||||||
|
for f in fields:
|
||||||
|
res[f] = pd.read_csv(f'{path}/{f}.csv', encoding = 'utf-8')
|
||||||
|
import shutil
|
||||||
|
shutil.rmtree(path)
|
||||||
|
return res
|
||||||
|
|
||||||
|
class RealTimeResult:
|
||||||
|
def __init__(self, request_id):
|
||||||
|
self.request_id = request_id
|
||||||
|
|
BIN
src/xtquant/ssleay32.dll
Normal file
BIN
src/xtquant/ssleay32.dll
Normal file
Binary file not shown.
BIN
src/xtquant/vcruntime140.dll
Normal file
BIN
src/xtquant/vcruntime140.dll
Normal file
Binary file not shown.
7
src/xtquant/xtbson/__init__.py
Normal file
7
src/xtquant/xtbson/__init__.py
Normal file
@ -0,0 +1,7 @@
|
|||||||
|
|
||||||
|
import sys
|
||||||
|
|
||||||
|
if sys.version_info.major == 3 and sys.version_info.minor == 6:
|
||||||
|
from .bson36 import *
|
||||||
|
else:
|
||||||
|
from .bson37 import *
|
1182
src/xtquant/xtbson/bson36/__init__.py
Normal file
1182
src/xtquant/xtbson/bson36/__init__.py
Normal file
File diff suppressed because it is too large
Load Diff
40
src/xtquant/xtbson/bson36/_helpers.py
Normal file
40
src/xtquant/xtbson/bson36/_helpers.py
Normal file
@ -0,0 +1,40 @@
|
|||||||
|
# Copyright 2021-present MongoDB, Inc.
|
||||||
|
#
|
||||||
|
# Licensed under the Apache License, Version 2.0 (the "License");
|
||||||
|
# you may not use this file except in compliance with the License.
|
||||||
|
# You may obtain a copy of the License at
|
||||||
|
#
|
||||||
|
# http://www.apache.org/licenses/LICENSE-2.0
|
||||||
|
#
|
||||||
|
# Unless required by applicable law or agreed to in writing, software
|
||||||
|
# distributed under the License is distributed on an "AS IS" BASIS,
|
||||||
|
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
|
||||||
|
# See the License for the specific language governing permissions and
|
||||||
|
# limitations under the License.
|
||||||
|
|
||||||
|
"""Setstate and getstate functions for objects with __slots__, allowing
|
||||||
|
compatibility with default pickling protocol
|
||||||
|
"""
|
||||||
|
|
||||||
|
|
||||||
|
def _setstate_slots(self, state):
|
||||||
|
for slot, value in state.items():
|
||||||
|
setattr(self, slot, value)
|
||||||
|
|
||||||
|
|
||||||
|
def _mangle_name(name, prefix):
|
||||||
|
if name.startswith("__"):
|
||||||
|
prefix = "_" + prefix
|
||||||
|
else:
|
||||||
|
prefix = ""
|
||||||
|
return prefix + name
|
||||||
|
|
||||||
|
|
||||||
|
def _getstate_slots(self):
|
||||||
|
prefix = self.__class__.__name__
|
||||||
|
ret = dict()
|
||||||
|
for name in self.__slots__:
|
||||||
|
mangled_name = _mangle_name(name, prefix)
|
||||||
|
if hasattr(self, mangled_name):
|
||||||
|
ret[mangled_name] = getattr(self, mangled_name)
|
||||||
|
return ret
|
352
src/xtquant/xtbson/bson36/binary.py
Normal file
352
src/xtquant/xtbson/bson36/binary.py
Normal file
@ -0,0 +1,352 @@
|
|||||||
|
# Copyright 2009-present MongoDB, Inc.
|
||||||
|
#
|
||||||
|
# Licensed under the Apache License, Version 2.0 (the "License");
|
||||||
|
# you may not use this file except in compliance with the License.
|
||||||
|
# You may obtain a copy of the License at
|
||||||
|
#
|
||||||
|
# http://www.apache.org/licenses/LICENSE-2.0
|
||||||
|
#
|
||||||
|
# Unless required by applicable law or agreed to in writing, software
|
||||||
|
# distributed under the License is distributed on an "AS IS" BASIS,
|
||||||
|
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
|
||||||
|
# See the License for the specific language governing permissions and
|
||||||
|
# limitations under the License.
|
||||||
|
|
||||||
|
from uuid import UUID
|
||||||
|
from warnings import warn
|
||||||
|
|
||||||
|
"""Tools for representing BSON binary data.
|
||||||
|
"""
|
||||||
|
|
||||||
|
BINARY_SUBTYPE = 0
|
||||||
|
"""BSON binary subtype for binary data.
|
||||||
|
|
||||||
|
This is the default subtype for binary data.
|
||||||
|
"""
|
||||||
|
|
||||||
|
FUNCTION_SUBTYPE = 1
|
||||||
|
"""BSON binary subtype for functions.
|
||||||
|
"""
|
||||||
|
|
||||||
|
OLD_BINARY_SUBTYPE = 2
|
||||||
|
"""Old BSON binary subtype for binary data.
|
||||||
|
|
||||||
|
This is the old default subtype, the current
|
||||||
|
default is :data:`BINARY_SUBTYPE`.
|
||||||
|
"""
|
||||||
|
|
||||||
|
OLD_UUID_SUBTYPE = 3
|
||||||
|
"""Old BSON binary subtype for a UUID.
|
||||||
|
|
||||||
|
:class:`uuid.UUID` instances will automatically be encoded
|
||||||
|
by :mod:`bson` using this subtype when using
|
||||||
|
:data:`UuidRepresentation.PYTHON_LEGACY`,
|
||||||
|
:data:`UuidRepresentation.JAVA_LEGACY`, or
|
||||||
|
:data:`UuidRepresentation.CSHARP_LEGACY`.
|
||||||
|
|
||||||
|
.. versionadded:: 2.1
|
||||||
|
"""
|
||||||
|
|
||||||
|
UUID_SUBTYPE = 4
|
||||||
|
"""BSON binary subtype for a UUID.
|
||||||
|
|
||||||
|
This is the standard BSON binary subtype for UUIDs.
|
||||||
|
:class:`uuid.UUID` instances will automatically be encoded
|
||||||
|
by :mod:`bson` using this subtype when using
|
||||||
|
:data:`UuidRepresentation.STANDARD`.
|
||||||
|
"""
|
||||||
|
|
||||||
|
|
||||||
|
class UuidRepresentation:
|
||||||
|
UNSPECIFIED = 0
|
||||||
|
"""An unspecified UUID representation.
|
||||||
|
|
||||||
|
When configured, :class:`uuid.UUID` instances will **not** be
|
||||||
|
automatically encoded to or decoded from :class:`~bson.binary.Binary`.
|
||||||
|
When encoding a :class:`uuid.UUID` instance, an error will be raised.
|
||||||
|
To encode a :class:`uuid.UUID` instance with this configuration, it must
|
||||||
|
be wrapped in the :class:`~bson.binary.Binary` class by the application
|
||||||
|
code. When decoding a BSON binary field with a UUID subtype, a
|
||||||
|
:class:`~bson.binary.Binary` instance will be returned instead of a
|
||||||
|
:class:`uuid.UUID` instance.
|
||||||
|
|
||||||
|
See :ref:`unspecified-representation-details` for details.
|
||||||
|
|
||||||
|
.. versionadded:: 3.11
|
||||||
|
"""
|
||||||
|
|
||||||
|
STANDARD = UUID_SUBTYPE
|
||||||
|
"""The standard UUID representation.
|
||||||
|
|
||||||
|
:class:`uuid.UUID` instances will automatically be encoded to
|
||||||
|
and decoded from . binary, using RFC-4122 byte order with
|
||||||
|
binary subtype :data:`UUID_SUBTYPE`.
|
||||||
|
|
||||||
|
See :ref:`standard-representation-details` for details.
|
||||||
|
|
||||||
|
.. versionadded:: 3.11
|
||||||
|
"""
|
||||||
|
|
||||||
|
PYTHON_LEGACY = OLD_UUID_SUBTYPE
|
||||||
|
"""The Python legacy UUID representation.
|
||||||
|
|
||||||
|
:class:`uuid.UUID` instances will automatically be encoded to
|
||||||
|
and decoded from . binary, using RFC-4122 byte order with
|
||||||
|
binary subtype :data:`OLD_UUID_SUBTYPE`.
|
||||||
|
|
||||||
|
See :ref:`python-legacy-representation-details` for details.
|
||||||
|
|
||||||
|
.. versionadded:: 3.11
|
||||||
|
"""
|
||||||
|
|
||||||
|
JAVA_LEGACY = 5
|
||||||
|
"""The Java legacy UUID representation.
|
||||||
|
|
||||||
|
:class:`uuid.UUID` instances will automatically be encoded to
|
||||||
|
and decoded from . binary subtype :data:`OLD_UUID_SUBTYPE`,
|
||||||
|
using the Java driver's legacy byte order.
|
||||||
|
|
||||||
|
See :ref:`java-legacy-representation-details` for details.
|
||||||
|
|
||||||
|
.. versionadded:: 3.11
|
||||||
|
"""
|
||||||
|
|
||||||
|
CSHARP_LEGACY = 6
|
||||||
|
"""The C#/.net legacy UUID representation.
|
||||||
|
|
||||||
|
:class:`uuid.UUID` instances will automatically be encoded to
|
||||||
|
and decoded from . binary subtype :data:`OLD_UUID_SUBTYPE`,
|
||||||
|
using the C# driver's legacy byte order.
|
||||||
|
|
||||||
|
See :ref:`csharp-legacy-representation-details` for details.
|
||||||
|
|
||||||
|
.. versionadded:: 3.11
|
||||||
|
"""
|
||||||
|
|
||||||
|
|
||||||
|
STANDARD = UuidRepresentation.STANDARD
|
||||||
|
"""An alias for :data:`UuidRepresentation.STANDARD`.
|
||||||
|
|
||||||
|
.. versionadded:: 3.0
|
||||||
|
"""
|
||||||
|
|
||||||
|
PYTHON_LEGACY = UuidRepresentation.PYTHON_LEGACY
|
||||||
|
"""An alias for :data:`UuidRepresentation.PYTHON_LEGACY`.
|
||||||
|
|
||||||
|
.. versionadded:: 3.0
|
||||||
|
"""
|
||||||
|
|
||||||
|
JAVA_LEGACY = UuidRepresentation.JAVA_LEGACY
|
||||||
|
"""An alias for :data:`UuidRepresentation.JAVA_LEGACY`.
|
||||||
|
|
||||||
|
.. versionchanged:: 3.6
|
||||||
|
BSON binary subtype 4 is decoded using RFC-4122 byte order.
|
||||||
|
.. versionadded:: 2.3
|
||||||
|
"""
|
||||||
|
|
||||||
|
CSHARP_LEGACY = UuidRepresentation.CSHARP_LEGACY
|
||||||
|
"""An alias for :data:`UuidRepresentation.CSHARP_LEGACY`.
|
||||||
|
|
||||||
|
.. versionchanged:: 3.6
|
||||||
|
BSON binary subtype 4 is decoded using RFC-4122 byte order.
|
||||||
|
.. versionadded:: 2.3
|
||||||
|
"""
|
||||||
|
|
||||||
|
ALL_UUID_SUBTYPES = (OLD_UUID_SUBTYPE, UUID_SUBTYPE)
|
||||||
|
ALL_UUID_REPRESENTATIONS = (
|
||||||
|
UuidRepresentation.UNSPECIFIED,
|
||||||
|
UuidRepresentation.STANDARD,
|
||||||
|
UuidRepresentation.PYTHON_LEGACY,
|
||||||
|
UuidRepresentation.JAVA_LEGACY,
|
||||||
|
UuidRepresentation.CSHARP_LEGACY,
|
||||||
|
)
|
||||||
|
UUID_REPRESENTATION_NAMES = {
|
||||||
|
UuidRepresentation.UNSPECIFIED: "UuidRepresentation.UNSPECIFIED",
|
||||||
|
UuidRepresentation.STANDARD: "UuidRepresentation.STANDARD",
|
||||||
|
UuidRepresentation.PYTHON_LEGACY: "UuidRepresentation.PYTHON_LEGACY",
|
||||||
|
UuidRepresentation.JAVA_LEGACY: "UuidRepresentation.JAVA_LEGACY",
|
||||||
|
UuidRepresentation.CSHARP_LEGACY: "UuidRepresentation.CSHARP_LEGACY",
|
||||||
|
}
|
||||||
|
|
||||||
|
MD5_SUBTYPE = 5
|
||||||
|
"""BSON binary subtype for an MD5 hash.
|
||||||
|
"""
|
||||||
|
|
||||||
|
COLUMN_SUBTYPE = 7
|
||||||
|
"""BSON binary subtype for columns.
|
||||||
|
|
||||||
|
.. versionadded:: 4.0
|
||||||
|
"""
|
||||||
|
|
||||||
|
USER_DEFINED_SUBTYPE = 128
|
||||||
|
"""BSON binary subtype for any user defined structure.
|
||||||
|
"""
|
||||||
|
|
||||||
|
|
||||||
|
class Binary(bytes):
    """Representation of BSON binary data.

    BSON must distinguish binary payloads from strings: Python strings are
    encoded as the BSON string type, so raw bytes are wrapped in this class
    to mark them as binary when encoding, and to carry the binary subtype.

    Raises TypeError if `subtype` is not an instance of :class:`int`.
    Raises ValueError if `subtype` is not in [0, 256).

    .. note::
      In python 3 instances of Binary with subtype 0 will be decoded
      directly to :class:`bytes`.

    :Parameters:
      - `data`: the binary data to represent. Can be any bytes-like type
        that implements the buffer protocol.
      - `subtype` (optional): the `binary subtype
        <http://bsonspec.org/#/specification>`_ to use

    .. versionchanged:: 3.9
       Support any bytes-like type that implements the buffer protocol.
    """

    # BSON type marker for binary data.
    _type_marker = 5

    def __new__(cls, data, subtype=BINARY_SUBTYPE):
        if not isinstance(subtype, int):
            raise TypeError("subtype must be an instance of int")
        if not 0 <= subtype < 256:
            raise ValueError("subtype must be contained in [0, 256)")
        # memoryview(...).tobytes() accepts anything exposing the buffer
        # protocol, not just bytes.
        instance = bytes.__new__(cls, memoryview(data).tobytes())
        instance.__subtype = subtype
        return instance

    @classmethod
    def from_uuid(cls, uuid, uuid_representation=UuidRepresentation.STANDARD):
        """Create a BSON Binary object from a Python UUID.

        The byte order of the produced payload is determined by the
        provided ``uuid_representation``.

        Raises :exc:`TypeError` if `uuid` is not an instance of
        :class:`~uuid.UUID`; raises :exc:`ValueError` for an invalid
        representation or for ``UuidRepresentation.UNSPECIFIED``.

        :Parameters:
          - `uuid`: A :class:`uuid.UUID` instance.
          - `uuid_representation`: A member of
            :class:`~bson.binary.UuidRepresentation`. Default:
            :const:`~bson.binary.UuidRepresentation.STANDARD`.
            See :ref:`handling-uuid-data-example` for details.

        .. versionadded:: 3.11
        """
        if not isinstance(uuid, UUID):
            raise TypeError("uuid must be an instance of uuid.UUID")

        if uuid_representation not in ALL_UUID_REPRESENTATIONS:
            raise ValueError(
                "uuid_representation must be a value " "from .binary.UuidRepresentation"
            )

        if uuid_representation == UuidRepresentation.UNSPECIFIED:
            raise ValueError(
                "cannot encode native uuid.UUID with "
                "UuidRepresentation.UNSPECIFIED. UUIDs can be manually "
                "converted to bson.Binary instances using "
                "bson.Binary.from_uuid() or a different UuidRepresentation "
                "can be configured. See the documentation for "
                "UuidRepresentation for more information."
            )

        if uuid_representation == UuidRepresentation.STANDARD:
            return cls(uuid.bytes, UUID_SUBTYPE)
        if uuid_representation == UuidRepresentation.PYTHON_LEGACY:
            return cls(uuid.bytes, OLD_UUID_SUBTYPE)
        if uuid_representation == UuidRepresentation.JAVA_LEGACY:
            # Java's legacy format reverses each 8-byte half of the UUID.
            raw = uuid.bytes
            return cls(raw[0:8][::-1] + raw[8:16][::-1], OLD_UUID_SUBTYPE)
        # Only CSHARP_LEGACY remains: .net's little-endian field layout.
        return cls(uuid.bytes_le, OLD_UUID_SUBTYPE)

    def as_uuid(self, uuid_representation=UuidRepresentation.STANDARD):
        """Create a Python UUID from this BSON Binary object.

        Decodes this binary object as a native :class:`uuid.UUID` instance
        using the byte order implied by ``uuid_representation``.

        Raises :exc:`ValueError` if this instance does not hold a UUID
        subtype, if the representation is invalid or UNSPECIFIED, or if the
        stored subtype does not match the requested representation.

        :Parameters:
          - `uuid_representation`: A member of
            :class:`~bson.binary.UuidRepresentation`. Default:
            :const:`~bson.binary.UuidRepresentation.STANDARD`.
            See :ref:`handling-uuid-data-example` for details.

        .. versionadded:: 3.11
        """
        if self.subtype not in ALL_UUID_SUBTYPES:
            raise ValueError("cannot decode subtype %s as a uuid" % (self.subtype,))

        if uuid_representation not in ALL_UUID_REPRESENTATIONS:
            raise ValueError(
                "uuid_representation must be a value from " "bson.binary.UuidRepresentation"
            )

        if uuid_representation == UuidRepresentation.UNSPECIFIED:
            raise ValueError("uuid_representation cannot be UNSPECIFIED")

        # The three legacy representations all require the legacy subtype;
        # STANDARD requires the RFC-4122 subtype.
        legacy = self.subtype == OLD_UUID_SUBTYPE
        if uuid_representation == UuidRepresentation.PYTHON_LEGACY and legacy:
            return UUID(bytes=self)
        if uuid_representation == UuidRepresentation.JAVA_LEGACY and legacy:
            return UUID(bytes=self[0:8][::-1] + self[8:16][::-1])
        if uuid_representation == UuidRepresentation.CSHARP_LEGACY and legacy:
            return UUID(bytes_le=self)
        if uuid_representation == UuidRepresentation.STANDARD and self.subtype == UUID_SUBTYPE:
            return UUID(bytes=self)

        raise ValueError(
            "cannot decode subtype %s to %s"
            % (self.subtype, UUID_REPRESENTATION_NAMES[uuid_representation])
        )

    @property
    def subtype(self):
        """Subtype of this binary data."""
        return self.__subtype

    def __getnewargs__(self):
        # Work around http://bugs.python.org/issue7382
        data = super().__getnewargs__()[0]
        if not isinstance(data, bytes):
            data = data.encode("latin-1")
        return data, self.__subtype

    def __eq__(self, other):
        if not isinstance(other, Binary):
            # Deliberately False rather than NotImplemented: Binary is a
            # bytes subclass, so falling back to the bytes comparison would
            # make a Binary compare equal to a plain bytes object.
            return False
        return (self.__subtype, bytes(self)) == (other.subtype, bytes(other))

    def __hash__(self):
        return super().__hash__() ^ hash(self.__subtype)

    def __ne__(self, other):
        return not self == other

    def __repr__(self):
        return "Binary(%s, %s)" % (bytes.__repr__(self), self.__subtype)
|
94
src/xtquant/xtbson/bson36/code.py
Normal file
94
src/xtquant/xtbson/bson36/code.py
Normal file
@ -0,0 +1,94 @@
|
|||||||
|
# Copyright 2009-present MongoDB, Inc.
|
||||||
|
#
|
||||||
|
# Licensed under the Apache License, Version 2.0 (the "License");
|
||||||
|
# you may not use this file except in compliance with the License.
|
||||||
|
# You may obtain a copy of the License at
|
||||||
|
#
|
||||||
|
# http://www.apache.org/licenses/LICENSE-2.0
|
||||||
|
#
|
||||||
|
# Unless required by applicable law or agreed to in writing, software
|
||||||
|
# distributed under the License is distributed on an "AS IS" BASIS,
|
||||||
|
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
|
||||||
|
# See the License for the specific language governing permissions and
|
||||||
|
# limitations under the License.
|
||||||
|
|
||||||
|
"""Tools for representing JavaScript code in BSON.
|
||||||
|
"""
|
||||||
|
|
||||||
|
from collections.abc import Mapping as _Mapping
|
||||||
|
|
||||||
|
|
||||||
|
class Code(str):
    """BSON's JavaScript code type.

    A ``Code`` is a :class:`str` subclass carrying an optional *scope*: a
    mapping of variable bindings under which the JavaScript should be
    evaluated.

    Raises :class:`TypeError` if `code` is not an instance of :class:`str`
    or `scope` is not ``None`` or an instance of :class:`dict`.

    Scope variables can be set by passing a dictionary as the `scope`
    argument or by using keyword arguments.  A variable given as a keyword
    argument overrides the same variable in the `scope` dictionary.

    :Parameters:
      - `code`: A string containing JavaScript code to be evaluated, or
        another instance of Code.  In the latter case the scope of `code`
        becomes this Code's :attr:`scope`.
      - `scope` (optional): mapping from identifier strings to values in
        which `code` should be evaluated, applied on top of any scope
        inherited from `code`.  Defaults to ``None``.
      - `**kwargs` (optional): additional scope variables, applied after
        `scope` and `code`.

    .. versionchanged:: 3.4
       The default value for :attr:`scope` is ``None`` instead of ``{}``.
    """

    # BSON type marker used by the encoder to recognize this class.
    _type_marker = 13

    def __new__(cls, code, scope=None, **kwargs):
        if not isinstance(code, str):
            raise TypeError("code must be an instance of str")

        self = str.__new__(cls, code)
        # Inherit the scope when `code` is itself a Code instance; a plain
        # str has no `scope` attribute.
        self.__scope = getattr(code, "scope", None)

        if scope is not None:
            if not isinstance(scope, _Mapping):
                raise TypeError("scope must be an instance of dict")
            if self.__scope is None:
                self.__scope = scope
            else:
                self.__scope.update(scope)

        if kwargs:
            if self.__scope is None:
                self.__scope = kwargs
            else:
                self.__scope.update(kwargs)

        return self

    @property
    def scope(self):
        """Scope dictionary for this instance or ``None``."""
        return self.__scope

    def __repr__(self):
        return "Code(%s, %r)" % (str.__repr__(self), self.__scope)

    def __eq__(self, other):
        if not isinstance(other, Code):
            return False
        return (self.__scope, str(self)) == (other.__scope, str(other))

    # A Code's scope dictionary is mutable, so instances are unhashable.
    __hash__ = None

    def __ne__(self, other):
        return not self == other
|
389
src/xtquant/xtbson/bson36/codec_options.py
Normal file
389
src/xtquant/xtbson/bson36/codec_options.py
Normal file
@ -0,0 +1,389 @@
|
|||||||
|
# Copyright 2014-present MongoDB, Inc.
|
||||||
|
#
|
||||||
|
# Licensed under the Apache License, Version 2.0 (the "License");
|
||||||
|
# you may not use this file except in compliance with the License.
|
||||||
|
# You may obtain a copy of the License at
|
||||||
|
#
|
||||||
|
# http://www.apache.org/licenses/LICENSE-2.0
|
||||||
|
#
|
||||||
|
# Unless required by applicable law or agreed to in writing, software
|
||||||
|
# distributed under the License is distributed on an "AS IS" BASIS,
|
||||||
|
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
|
||||||
|
# See the License for the specific language governing permissions and
|
||||||
|
# limitations under the License.
|
||||||
|
|
||||||
|
"""Tools for specifying BSON codec options."""
|
||||||
|
|
||||||
|
import abc
|
||||||
|
import datetime
|
||||||
|
import warnings
|
||||||
|
from collections import namedtuple
|
||||||
|
from collections.abc import MutableMapping as _MutableMapping
|
||||||
|
|
||||||
|
from .binary import (
|
||||||
|
ALL_UUID_REPRESENTATIONS,
|
||||||
|
UUID_REPRESENTATION_NAMES,
|
||||||
|
UuidRepresentation,
|
||||||
|
)
|
||||||
|
|
||||||
|
|
||||||
|
def _abstractproperty(func):
    """Wrap *func* as a read-only property whose getter is abstract."""
    getter = abc.abstractmethod(func)
    return property(getter)


# Sentinel value of ``_type_marker`` identifying RawBSONDocument classes.
_RAW_BSON_DOCUMENT_MARKER = 101


def _raw_document_class(document_class):
    """Return True if *document_class* is a RawBSONDocument class."""
    return getattr(document_class, "_type_marker", None) == _RAW_BSON_DOCUMENT_MARKER


class TypeEncoder(abc.ABC):
    """Base class for type codecs describing how a custom type is turned
    into one of the types BSON understands.

    Subclasses must provide the ``python_type`` attribute and implement the
    ``transform_python`` method to support encoding.

    See :ref:`custom-type-type-codec` documentation for an example.
    """

    @_abstractproperty
    def python_type(self):
        """The Python type to be converted into something serializable."""

    @abc.abstractmethod
    def transform_python(self, value):
        """Convert the given Python object into something serializable."""


class TypeDecoder(abc.ABC):
    """Base class for type codecs describing how a BSON type is turned
    into a custom type.

    Subclasses must provide the ``bson_type`` attribute and implement the
    ``transform_bson`` method to support decoding.

    See :ref:`custom-type-type-codec` documentation for an example.
    """

    @_abstractproperty
    def bson_type(self):
        """The BSON type to be converted into our own type."""

    @abc.abstractmethod
    def transform_bson(self, value):
        """Convert the given BSON value into our own type."""


class TypeCodec(TypeEncoder, TypeDecoder):
    """Base class for bidirectional type codecs: a custom type that can be
    transformed to *and* from one of the types :mod:`bson` can already
    encode/decode.

    Subclasses must provide ``python_type`` and ``transform_python`` for
    encoding, plus ``bson_type`` and ``transform_bson`` for decoding.

    See :ref:`custom-type-type-codec` documentation for an example.
    """
|
||||||
|
|
||||||
|
|
||||||
|
class TypeRegistry(object):
|
||||||
|
"""Encapsulates type codecs used in encoding and / or decoding BSON, as
|
||||||
|
well as the fallback encoder. Type registries cannot be modified after
|
||||||
|
instantiation.
|
||||||
|
|
||||||
|
``TypeRegistry`` can be initialized with an iterable of type codecs, and
|
||||||
|
a callable for the fallback encoder::
|
||||||
|
|
||||||
|
>>> from .codec_options import TypeRegistry
|
||||||
|
>>> type_registry = TypeRegistry([Codec1, Codec2, Codec3, ...],
|
||||||
|
... fallback_encoder)
|
||||||
|
|
||||||
|
See :ref:`custom-type-type-registry` documentation for an example.
|
||||||
|
|
||||||
|
:Parameters:
|
||||||
|
- `type_codecs` (optional): iterable of type codec instances. If
|
||||||
|
``type_codecs`` contains multiple codecs that transform a single
|
||||||
|
python or BSON type, the transformation specified by the type codec
|
||||||
|
occurring last prevails. A TypeError will be raised if one or more
|
||||||
|
type codecs modify the encoding behavior of a built-in :mod:`bson`
|
||||||
|
type.
|
||||||
|
- `fallback_encoder` (optional): callable that accepts a single,
|
||||||
|
unencodable python value and transforms it into a type that
|
||||||
|
:mod:`bson` can encode. See :ref:`fallback-encoder-callable`
|
||||||
|
documentation for an example.
|
||||||
|
"""
|
||||||
|
|
||||||
|
def __init__(self, type_codecs=None, fallback_encoder=None):
|
||||||
|
self.__type_codecs = list(type_codecs or [])
|
||||||
|
self._fallback_encoder = fallback_encoder
|
||||||
|
self._encoder_map = {}
|
||||||
|
self._decoder_map = {}
|
||||||
|
|
||||||
|
if self._fallback_encoder is not None:
|
||||||
|
if not callable(fallback_encoder):
|
||||||
|
raise TypeError("fallback_encoder %r is not a callable" % (fallback_encoder))
|
||||||
|
|
||||||
|
for codec in self.__type_codecs:
|
||||||
|
is_valid_codec = False
|
||||||
|
if isinstance(codec, TypeEncoder):
|
||||||
|
self._validate_type_encoder(codec)
|
||||||
|
is_valid_codec = True
|
||||||
|
self._encoder_map[codec.python_type] = codec.transform_python
|
||||||
|
if isinstance(codec, TypeDecoder):
|
||||||
|
is_valid_codec = True
|
||||||
|
self._decoder_map[codec.bson_type] = codec.transform_bson
|
||||||
|
if not is_valid_codec:
|
||||||
|
raise TypeError(
|
||||||
|
"Expected an instance of %s, %s, or %s, got %r instead"
|
||||||
|
% (TypeEncoder.__name__, TypeDecoder.__name__, TypeCodec.__name__, codec)
|
||||||
|
)
|
||||||
|
|
||||||
|
def _validate_type_encoder(self, codec):
|
||||||
|
from . import _BUILT_IN_TYPES
|
||||||
|
|
||||||
|
for pytype in _BUILT_IN_TYPES:
|
||||||
|
if issubclass(codec.python_type, pytype):
|
||||||
|
err_msg = (
|
||||||
|
"TypeEncoders cannot change how built-in types are "
|
||||||
|
"encoded (encoder %s transforms type %s)" % (codec, pytype)
|
||||||
|
)
|
||||||
|
raise TypeError(err_msg)
|
||||||
|
|
||||||
|
def __repr__(self):
|
||||||
|
return "%s(type_codecs=%r, fallback_encoder=%r)" % (
|
||||||
|
self.__class__.__name__,
|
||||||
|
self.__type_codecs,
|
||||||
|
self._fallback_encoder,
|
||||||
|
)
|
||||||
|
|
||||||
|
def __eq__(self, other):
|
||||||
|
if not isinstance(other, type(self)):
|
||||||
|
return NotImplemented
|
||||||
|
return (
|
||||||
|
(self._decoder_map == other._decoder_map)
|
||||||
|
and (self._encoder_map == other._encoder_map)
|
||||||
|
and (self._fallback_encoder == other._fallback_encoder)
|
||||||
|
)
|
||||||
|
|
||||||
|
|
||||||
|
# Field layout shared by CodecOptions.  The tuple subclass is deliberately
# named "CodecOptions" so reprs and pickling show the public class name.
_options_base = namedtuple(
    "CodecOptions",
    [
        "document_class",
        "tz_aware",
        "uuid_representation",
        "unicode_decode_error_handler",
        "tzinfo",
        "type_registry",
    ],
)
|
||||||
|
|
||||||
|
|
||||||
|
class CodecOptions(_options_base):
    """Encapsulates options used encoding and / or decoding BSON.

    The `document_class` option is used to define a custom type for use
    decoding BSON documents.  Access to the underlying raw BSON bytes for
    a document is available using the :class:`~bson.raw_bson.RawBSONDocument`
    type::

        >>> from .raw_bson import RawBSONDocument
        >>> from .codec_options import CodecOptions
        >>> codec_options = CodecOptions(document_class=RawBSONDocument)
        >>> coll = db.get_collection('test', codec_options=codec_options)
        >>> doc = coll.find_one()
        >>> doc.raw
        '\\x16\\x00\\x00\\x00\\x07_id\\x00[0\\x165\\x91\\x10\\xea\\x14\\xe8\\xc5\\x8b\\x93\\x00'

    The document class can be any type that inherits from
    :class:`~collections.abc.MutableMapping`.

    See :doc:`/examples/datetimes` for examples using the `tz_aware` and
    `tzinfo` options, and :doc:`examples/uuid` for examples using the
    `uuid_representation` option.

    :Parameters:
      - `document_class`: BSON documents returned in queries will be decoded
        to an instance of this class.  Must be a subclass of
        :class:`~collections.abc.MutableMapping`.  Defaults to :class:`dict`.
      - `tz_aware`: If ``True``, BSON datetimes will be decoded to timezone
        aware instances of :class:`~datetime.datetime`.  Otherwise they will
        be naive.  Defaults to ``False``.
      - `uuid_representation`: The BSON representation to use when encoding
        and decoding instances of :class:`~uuid.UUID`.  Defaults to
        :data:`~bson.binary.UuidRepresentation.UNSPECIFIED`.  New
        applications should consider setting this to
        :data:`~bson.binary.UuidRepresentation.STANDARD` for cross language
        compatibility.  See :ref:`handling-uuid-data-example` for details.
      - `unicode_decode_error_handler`: The error handler to apply when
        a Unicode-related error occurs during BSON decoding that would
        otherwise raise :exc:`UnicodeDecodeError`.  Valid options include
        'strict', 'replace', 'backslashreplace', 'surrogateescape', and
        'ignore'.  Defaults to 'strict'.  May also be ``None``.
      - `tzinfo`: A :class:`~datetime.tzinfo` subclass that specifies the
        timezone to/from which :class:`~datetime.datetime` objects should be
        encoded/decoded.  Requires ``tz_aware=True``.
      - `type_registry`: Instance of :class:`TypeRegistry` used to customize
        encoding and decoding behavior.

    .. versionchanged:: 4.0
       The default for `uuid_representation` was changed from
       :const:`~bson.binary.UuidRepresentation.PYTHON_LEGACY` to
       :const:`~bson.binary.UuidRepresentation.UNSPECIFIED`.

    .. versionadded:: 3.8
       `type_registry` attribute.

    .. warning:: Care must be taken when changing
       `unicode_decode_error_handler` from its default value ('strict').
       The 'replace' and 'ignore' modes should not be used when documents
       retrieved from the server will be modified in the client application
       and stored back to the server.
    """

    def __new__(
        cls,
        document_class=dict,
        tz_aware=False,
        uuid_representation=UuidRepresentation.UNSPECIFIED,
        unicode_decode_error_handler="strict",
        tzinfo=None,
        type_registry=None,
    ):
        if not (issubclass(document_class, _MutableMapping) or _raw_document_class(document_class)):
            raise TypeError(
                "document_class must be dict, bson.son.SON, "
                "bson.raw_bson.RawBSONDocument, or a "
                "sublass of collections.abc.MutableMapping"
            )
        if not isinstance(tz_aware, bool):
            raise TypeError("tz_aware must be True or False")
        if uuid_representation not in ALL_UUID_REPRESENTATIONS:
            raise ValueError(
                "uuid_representation must be a value " "from .binary.UuidRepresentation"
            )
        # BUGFIX: this check used to be ``isinstance(..., (str, None))``,
        # which raises ``TypeError: isinstance() arg 2 must be a type ...``
        # for any non-str handler (including the documented ``None``)
        # because ``None`` is not a type.  Test for ``None`` explicitly so
        # the intended ValueError is actually raised.
        if unicode_decode_error_handler is not None and not isinstance(
            unicode_decode_error_handler, str
        ):
            raise ValueError("unicode_decode_error_handler must be a string " "or None")
        if tzinfo is not None:
            if not isinstance(tzinfo, datetime.tzinfo):
                raise TypeError("tzinfo must be an instance of datetime.tzinfo")
            if not tz_aware:
                raise ValueError("cannot specify tzinfo without also setting tz_aware=True")

        # A fresh (empty) registry is the default; it is only constructed
        # when the caller did not supply one.
        type_registry = type_registry or TypeRegistry()

        if not isinstance(type_registry, TypeRegistry):
            raise TypeError("type_registry must be an instance of TypeRegistry")

        return tuple.__new__(
            cls,
            (
                document_class,
                tz_aware,
                uuid_representation,
                unicode_decode_error_handler,
                tzinfo,
                type_registry,
            ),
        )

    def _arguments_repr(self):
        """Representation of the arguments used to create this object."""
        document_class_repr = "dict" if self.document_class is dict else repr(self.document_class)

        # Fall back to the raw value when the representation is unknown.
        uuid_rep_repr = UUID_REPRESENTATION_NAMES.get(
            self.uuid_representation, self.uuid_representation
        )

        return (
            "document_class=%s, tz_aware=%r, uuid_representation=%s, "
            "unicode_decode_error_handler=%r, tzinfo=%r, "
            "type_registry=%r"
            % (
                document_class_repr,
                self.tz_aware,
                uuid_rep_repr,
                self.unicode_decode_error_handler,
                self.tzinfo,
                self.type_registry,
            )
        )

    def _options_dict(self):
        """Dictionary of the arguments used to create this object."""
        # TODO: PYTHON-2442 use _asdict() instead
        return {
            "document_class": self.document_class,
            "tz_aware": self.tz_aware,
            "uuid_representation": self.uuid_representation,
            "unicode_decode_error_handler": self.unicode_decode_error_handler,
            "tzinfo": self.tzinfo,
            "type_registry": self.type_registry,
        }

    def __repr__(self):
        return "%s(%s)" % (self.__class__.__name__, self._arguments_repr())

    def with_options(self, **kwargs):
        """Make a copy of this CodecOptions, overriding some options::

            >>> from .codec_options import DEFAULT_CODEC_OPTIONS
            >>> DEFAULT_CODEC_OPTIONS.tz_aware
            False
            >>> options = DEFAULT_CODEC_OPTIONS.with_options(tz_aware=True)
            >>> options.tz_aware
            True

        .. versionadded:: 3.5
        """
        opts = self._options_dict()
        opts.update(kwargs)
        return CodecOptions(**opts)
|
||||||
|
|
||||||
|
|
||||||
|
DEFAULT_CODEC_OPTIONS = CodecOptions()


def _parse_codec_options(options):
    """Parse BSON codec options out of a generic options mapping."""
    # Only these keys are forwarded to CodecOptions; everything else in
    # `options` is ignored.
    recognized = {
        "document_class",
        "tz_aware",
        "uuidrepresentation",
        "unicode_decode_error_handler",
        "tzinfo",
        "type_registry",
    }
    kwargs = {}
    for key in recognized & set(options):
        # The external spelling "uuidrepresentation" maps to the
        # CodecOptions keyword "uuid_representation".
        target = "uuid_representation" if key == "uuidrepresentation" else key
        kwargs[target] = options[key]
    return CodecOptions(**kwargs)
|
125
src/xtquant/xtbson/bson36/dbref.py
Normal file
125
src/xtquant/xtbson/bson36/dbref.py
Normal file
@ -0,0 +1,125 @@
|
|||||||
|
# Copyright 2009-2015 MongoDB, Inc.
|
||||||
|
#
|
||||||
|
# Licensed under the Apache License, Version 2.0 (the "License");
|
||||||
|
# you may not use this file except in compliance with the License.
|
||||||
|
# You may obtain a copy of the License at
|
||||||
|
#
|
||||||
|
# http://www.apache.org/licenses/LICENSE-2.0
|
||||||
|
#
|
||||||
|
# Unless required by applicable law or agreed to in writing, software
|
||||||
|
# distributed under the License is distributed on an "AS IS" BASIS,
|
||||||
|
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
|
||||||
|
# See the License for the specific language governing permissions and
|
||||||
|
# limitations under the License.
|
||||||
|
|
||||||
|
"""Tools for manipulating DBRefs (references to MongoDB documents)."""
|
||||||
|
|
||||||
|
from copy import deepcopy
|
||||||
|
|
||||||
|
from ._helpers import _getstate_slots, _setstate_slots
|
||||||
|
from .son import SON
|
||||||
|
|
||||||
|
|
||||||
|
class DBRef(object):
    """A reference (collection name, ``_id`` value, optional database) to a
    document stored in MongoDB."""

    __slots__ = "__collection", "__id", "__database", "__kwargs"
    __getstate__ = _getstate_slots
    __setstate__ = _setstate_slots

    # DBRef isn't actually a BSON "type" so this number was arbitrarily chosen.
    _type_marker = 100

    def __init__(self, collection, id, database=None, _extra={}, **kwargs):
        """Create a new :class:`DBRef`.

        `database` is optional and allows references to documents to work
        across databases. Any additional keyword arguments become extra
        fields of the resultant embedded document.

        :Parameters:
          - `collection`: name of the collection the document is stored in
          - `id`: the value of the document's ``"_id"`` field
          - `database` (optional): name of the database to reference
          - `**kwargs` (optional): additional keyword arguments will
            create additional, custom fields

        Raises :class:`TypeError` if `collection` or `database` is not an
        instance of :class:`str`.

        .. seealso:: The MongoDB documentation on `dbrefs <https://dochub.mongodb.org/core/dbrefs>`_.
        """
        if not isinstance(collection, str):
            raise TypeError("collection must be an instance of str")
        if database is not None and not isinstance(database, str):
            raise TypeError("database must be an instance of str")

        self.__collection = collection
        self.__id = id
        self.__database = database
        kwargs.update(_extra)
        self.__kwargs = kwargs

    @property
    def collection(self):
        """The name of this DBRef's collection."""
        return self.__collection

    @property
    def id(self):
        """This DBRef's ``_id`` value."""
        return self.__id

    @property
    def database(self):
        """The name of this DBRef's database, or ``None`` if unspecified."""
        return self.__database

    def __getattr__(self, key):
        # Expose the extra custom fields as attributes.
        try:
            return self.__kwargs[key]
        except KeyError:
            raise AttributeError(key)

    def as_doc(self):
        """Return the SON document representation of this DBRef.

        Generally not needed by application developers.
        """
        fields = [("$ref", self.collection), ("$id", self.id)]
        doc = SON(fields)
        if self.database is not None:
            doc["$db"] = self.database
        doc.update(self.__kwargs)
        return doc

    def __repr__(self):
        extra = "".join(f", {k}={v!r}" for k, v in self.__kwargs.items())
        if self.database is None:
            return f"DBRef({self.collection!r}, {self.id!r}{extra})"
        return f"DBRef({self.collection!r}, {self.id!r}, {self.database!r}{extra})"

    def __eq__(self, other):
        if not isinstance(other, DBRef):
            return NotImplemented
        mine = (self.__database, self.__collection, self.__id, self.__kwargs)
        theirs = (other.__database, other.__collection, other.__id, other.__kwargs)
        return mine == theirs

    def __ne__(self, other):
        return not self == other

    def __hash__(self):
        """Get a hash value for this :class:`DBRef`."""
        extras = tuple(sorted(self.__kwargs.items()))
        return hash((self.__collection, self.__id, self.__database, extras))

    def __deepcopy__(self, memo):
        """Support function for `copy.deepcopy()`."""
        return DBRef(
            deepcopy(self.__collection, memo),
            deepcopy(self.__id, memo),
            deepcopy(self.__database, memo),
            deepcopy(self.__kwargs, memo),
        )
|
315
src/xtquant/xtbson/bson36/decimal128.py
Normal file
315
src/xtquant/xtbson/bson36/decimal128.py
Normal file
@ -0,0 +1,315 @@
|
|||||||
|
# Copyright 2016-present MongoDB, Inc.
|
||||||
|
#
|
||||||
|
# Licensed under the Apache License, Version 2.0 (the "License");
|
||||||
|
# you may not use this file except in compliance with the License.
|
||||||
|
# You may obtain a copy of the License at
|
||||||
|
#
|
||||||
|
# http://www.apache.org/licenses/LICENSE-2.0
|
||||||
|
#
|
||||||
|
# Unless required by applicable law or agreed to in writing, software
|
||||||
|
# distributed under the License is distributed on an "AS IS" BASIS,
|
||||||
|
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
|
||||||
|
# See the License for the specific language governing permissions and
|
||||||
|
# limitations under the License.
|
||||||
|
|
||||||
|
"""Tools for working with the BSON decimal128 type.
|
||||||
|
|
||||||
|
.. versionadded:: 3.4
|
||||||
|
|
||||||
|
.. note:: The Decimal128 BSON type requires MongoDB 3.4+.
|
||||||
|
"""
|
||||||
|
|
||||||
|
import decimal
|
||||||
|
import struct
|
||||||
|
import sys
|
||||||
|
|
||||||
|
# Pre-compiled little-endian unsigned 64-bit (un)packers for BID halves.
_PACK_64 = struct.Struct("<Q").pack
_UNPACK_64 = struct.Struct("<Q").unpack

# IEEE 754-2008 decimal128 layout constants (Binary Integer Decimal form).
_EXPONENT_MASK = 3 << 61      # marker bits for the "large combination" form
_EXPONENT_BIAS = 6176         # bias added to the stored exponent
_EXPONENT_MAX = 6144
_EXPONENT_MIN = -6143
_MAX_DIGITS = 34              # max coefficient digits for decimal128

# Special-value bit patterns in the high 64 bits.
_INF = 0x7800000000000000
_NAN = 0x7C00000000000000
_SNAN = 0x7E00000000000000
_SIGN = 0x8000000000000000

# (high, low) pairs for the six special values.
_NINF = (_INF + _SIGN, 0)
_PINF = (_INF, 0)
_NNAN = (_NAN + _SIGN, 0)
_PNAN = (_NAN, 0)
_NSNAN = (_SNAN + _SIGN, 0)
_PSNAN = (_SNAN, 0)

# decimal.Context settings matching IEEE-754 decimal128, with the listed
# signals trapped so out-of-range values raise instead of rounding silently.
_CTX_OPTIONS = {
    "prec": _MAX_DIGITS,
    "rounding": decimal.ROUND_HALF_EVEN,
    "Emin": _EXPONENT_MIN,
    "Emax": _EXPONENT_MAX,
    "capitals": 1,
    "flags": [],
    "traps": [decimal.InvalidOperation, decimal.Overflow, decimal.Inexact],
    "clamp": 1,
}

# Shared context used internally for strict decimal128 conversions.
_DEC128_CTX = decimal.Context(**_CTX_OPTIONS.copy())
|
||||||
|
|
||||||
|
|
||||||
|
def create_decimal128_context():
    """Return a :class:`decimal.Context` suitable for working with
    IEEE-754 128-bit decimal floating point values.

    Identical to the internal strict context except that no signals are
    trapped, so invalid operations yield NaN/Infinity instead of raising.
    """
    relaxed = dict(_CTX_OPTIONS, traps=[])
    return decimal.Context(**relaxed)
|
||||||
|
|
||||||
|
|
||||||
|
def _decimal_to_128(value):
    """Convert a :class:`decimal.Decimal` to BID ``(high bits, low bits)``.

    :Parameters:
      - `value`: An instance of decimal.Decimal

    Raises :class:`ValueError` for NaNs carrying a diagnostic payload, and
    whatever the strict decimal128 context traps (InvalidOperation,
    Overflow, Inexact) for out-of-range inputs.
    """
    # Normalize through the strict decimal128 context so precision and
    # exponent range violations raise here rather than corrupt the encoding.
    with decimal.localcontext(_DEC128_CTX) as ctx:
        value = ctx.create_decimal(value)

    if value.is_infinite():
        return _NINF if value.is_signed() else _PINF

    sign, digits, exponent = value.as_tuple()

    if value.is_nan():
        if digits:
            raise ValueError("NaN with debug payload is not supported")
        if value.is_snan():
            return _NSNAN if value.is_signed() else _PSNAN
        return _NNAN if value.is_signed() else _PNAN

    significand = int("".join([str(digit) for digit in digits]))

    # Split the (at most 113-bit) significand across the two 64-bit halves
    # of the BID layout. Equivalent to — but much cheaper than — copying
    # the significand bit by bit in a Python loop.
    low = significand & 0xFFFFFFFFFFFFFFFF
    high = significand >> 64

    biased_exponent = exponent + _EXPONENT_BIAS

    if high >> 49 == 1:
        # Large significand: use the "11" combination form — the top
        # significand bits are implicit and the exponent shifts to bit 47.
        high = high & 0x7FFFFFFFFFFF
        high |= _EXPONENT_MASK
        high |= (biased_exponent & 0x3FFF) << 47
    else:
        high |= biased_exponent << 49

    if sign:
        high |= _SIGN

    return high, low
|
||||||
|
|
||||||
|
|
||||||
|
class Decimal128(object):
    """BSON Decimal128 type::

      >>> Decimal128(Decimal("0.0005"))
      Decimal128('0.0005')
      >>> Decimal128("0.0005")
      Decimal128('0.0005')
      >>> Decimal128((3474527112516337664, 5))
      Decimal128('0.0005')

    :Parameters:
      - `value`: An instance of :class:`decimal.Decimal`, string, or tuple of
        (high bits, low bits) from Binary Integer Decimal (BID) format.

    .. note:: :class:`~Decimal128` uses an instance of :class:`decimal.Context`
      configured for IEEE-754 Decimal128 when validating parameters.
      Signals like :class:`decimal.InvalidOperation`, :class:`decimal.Inexact`,
      and :class:`decimal.Overflow` are trapped and raised as exceptions::

        >>> Decimal128(".13.1")
        Traceback (most recent call last):
          File "<stdin>", line 1, in <module>
          ...
        decimal.InvalidOperation: [<class 'decimal.ConversionSyntax'>]
        >>>
        >>> Decimal128("1E-6177")
        Traceback (most recent call last):
          File "<stdin>", line 1, in <module>
          ...
        decimal.Inexact: [<class 'decimal.Inexact'>]
        >>>
        >>> Decimal128("1E6145")
        Traceback (most recent call last):
          File "<stdin>", line 1, in <module>
          ...
        decimal.Overflow: [<class 'decimal.Overflow'>, <class 'decimal.Rounded'>]

      To ensure the result of a calculation can always be stored as BSON
      Decimal128 use the context returned by
      :func:`create_decimal128_context`::

        >>> import decimal
        >>> decimal128_ctx = create_decimal128_context()
        >>> with decimal.localcontext(decimal128_ctx) as ctx:
        ...     Decimal128(ctx.create_decimal(".13.3"))
        ...
        Decimal128('NaN')
        >>>
        >>> with decimal.localcontext(decimal128_ctx) as ctx:
        ...     Decimal128(ctx.create_decimal("1E-6177"))
        ...
        Decimal128('0E-6176')
        >>>
        >>> with decimal.localcontext(decimal128_ctx) as ctx:
        ...     Decimal128(ctx.create_decimal("1E6145"))
        ...
        Decimal128('Infinity')

    To match the behavior of MongoDB's Decimal128 implementation
    str(Decimal(value)) may not match str(Decimal128(value)) for NaN values::

      >>> Decimal128(Decimal('NaN'))
      Decimal128('NaN')
      >>> Decimal128(Decimal('-NaN'))
      Decimal128('NaN')
      >>> Decimal128(Decimal('sNaN'))
      Decimal128('NaN')
      >>> Decimal128(Decimal('-sNaN'))
      Decimal128('NaN')

    However, :meth:`~Decimal128.to_decimal` will return the exact value::

      >>> Decimal128(Decimal('NaN')).to_decimal()
      Decimal('NaN')
      >>> Decimal128(Decimal('-NaN')).to_decimal()
      Decimal('-NaN')
      >>> Decimal128(Decimal('sNaN')).to_decimal()
      Decimal('sNaN')
      >>> Decimal128(Decimal('-sNaN')).to_decimal()
      Decimal('-sNaN')

    Two instances of :class:`Decimal128` compare equal if their Binary
    Integer Decimal encodings are equal::

      >>> Decimal128('NaN') == Decimal128('NaN')
      True
      >>> Decimal128('NaN').bid == Decimal128('NaN').bid
      True

    This differs from :class:`decimal.Decimal` comparisons for NaN::

      >>> Decimal('NaN') == Decimal('NaN')
      False
    """

    # The two 64-bit halves of the BID encoding.
    __slots__ = ("__high", "__low")

    # BSON type number for decimal128.
    _type_marker = 19

    def __init__(self, value):
        if isinstance(value, (str, decimal.Decimal)):
            self.__high, self.__low = _decimal_to_128(value)
        elif isinstance(value, (list, tuple)):
            if len(value) != 2:
                raise ValueError(
                    "Invalid size for creation of Decimal128 "
                    "from list or tuple. Must have exactly 2 "
                    "elements."
                )
            self.__high, self.__low = value
        else:
            raise TypeError("Cannot convert %r to Decimal128" % (value,))

    def to_decimal(self):
        """Returns an instance of :class:`decimal.Decimal` for this
        :class:`Decimal128`.
        """
        high = self.__high
        low = self.__low
        sign = 1 if (high & _SIGN) else 0

        # Special values are encoded entirely in the high bits.
        if (high & _SNAN) == _SNAN:
            return decimal.Decimal((sign, (), "N"))
        elif (high & _NAN) == _NAN:
            return decimal.Decimal((sign, (), "n"))
        elif (high & _INF) == _INF:
            return decimal.Decimal((sign, (), "F"))

        if (high & _EXPONENT_MASK) == _EXPONENT_MASK:
            # "11" combination form: exponent lives at bit 47; the implied
            # significand is non-canonical, so the coefficient is zero.
            exponent = ((high & 0x1FFFE00000000000) >> 47) - _EXPONENT_BIAS
            return decimal.Decimal((sign, (0,), exponent))
        else:
            exponent = ((high & 0x7FFF800000000000) >> 49) - _EXPONENT_BIAS

            # Assemble the 113-bit significand big-endian into 15 bytes:
            # bytes 7..14 from `low`, bytes 1..6 from `high`, byte 0 from
            # the single remaining significand bit of `high`.
            arr = bytearray(15)
            mask = 0x00000000000000FF
            for i in range(14, 6, -1):
                arr[i] = (low & mask) >> ((14 - i) << 3)
                mask = mask << 8

            mask = 0x00000000000000FF
            for i in range(6, 0, -1):
                arr[i] = (high & mask) >> ((6 - i) << 3)
                mask = mask << 8

            mask = 0x0001000000000000
            arr[0] = (high & mask) >> 48

            # cdecimal only accepts a tuple for digits.
            digits = tuple(int(digit) for digit in str(int.from_bytes(arr, "big")))

            with decimal.localcontext(_DEC128_CTX) as ctx:
                return ctx.create_decimal((sign, digits, exponent))

    @classmethod
    def from_bid(cls, value):
        """Create an instance of :class:`Decimal128` from Binary Integer
        Decimal string.

        :Parameters:
          - `value`: 16 byte string (128-bit IEEE 754-2008 decimal floating
            point in Binary Integer Decimal (BID) format).
        """
        if not isinstance(value, bytes):
            raise TypeError("value must be an instance of bytes")
        if len(value) != 16:
            raise ValueError("value must be exactly 16 bytes")
        # Stored little-endian: low half first, high half second.
        return cls((_UNPACK_64(value[8:])[0], _UNPACK_64(value[:8])[0]))

    @property
    def bid(self):
        """The Binary Integer Decimal (BID) encoding of this instance."""
        return _PACK_64(self.__low) + _PACK_64(self.__high)

    def __str__(self):
        dec = self.to_decimal()
        if dec.is_nan():
            # Required by the drivers spec to match MongoDB behavior.
            return "NaN"
        return str(dec)

    def __repr__(self):
        return "Decimal128('%s')" % (str(self),)

    def __setstate__(self, value):
        # Pickle support: state is the (high, low) pair.
        self.__high, self.__low = value

    def __getstate__(self):
        return self.__high, self.__low

    def __eq__(self, other):
        # Equality compares BID encodings, so NaN == NaN is True here.
        if isinstance(other, Decimal128):
            return self.bid == other.bid
        return NotImplemented

    def __ne__(self, other):
        return not self == other
|
35
src/xtquant/xtbson/bson36/errors.py
Normal file
35
src/xtquant/xtbson/bson36/errors.py
Normal file
@ -0,0 +1,35 @@
|
|||||||
|
# Copyright 2009-present MongoDB, Inc.
|
||||||
|
#
|
||||||
|
# Licensed under the Apache License, Version 2.0 (the "License");
|
||||||
|
# you may not use this file except in compliance with the License.
|
||||||
|
# You may obtain a copy of the License at
|
||||||
|
#
|
||||||
|
# http://www.apache.org/licenses/LICENSE-2.0
|
||||||
|
#
|
||||||
|
# Unless required by applicable law or agreed to in writing, software
|
||||||
|
# distributed under the License is distributed on an "AS IS" BASIS,
|
||||||
|
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
|
||||||
|
# See the License for the specific language governing permissions and
|
||||||
|
# limitations under the License.
|
||||||
|
|
||||||
|
"""Exceptions raised by the BSON package."""
|
||||||
|
|
||||||
|
|
||||||
|
class BSONError(Exception):
    """Root of the BSON exception hierarchy; all BSON errors derive from it."""
|
||||||
|
|
||||||
|
|
||||||
|
class InvalidBSON(BSONError):
    """Raised when BSON bytes cannot be decoded into a valid object."""
|
||||||
|
|
||||||
|
|
||||||
|
class InvalidStringData(BSONError):
    """Raised when encoding a string that contains non-UTF8 data."""
|
||||||
|
|
||||||
|
|
||||||
|
class InvalidDocument(BSONError):
    """Raised when a document cannot be converted to a BSON object."""
|
||||||
|
|
||||||
|
|
||||||
|
class InvalidId(BSONError):
    """Raised when an ObjectId cannot be built from the given data."""
|
37
src/xtquant/xtbson/bson36/int64.py
Normal file
37
src/xtquant/xtbson/bson36/int64.py
Normal file
@ -0,0 +1,37 @@
|
|||||||
|
# Copyright 2014-2015 MongoDB, Inc.
|
||||||
|
#
|
||||||
|
# Licensed under the Apache License, Version 2.0 (the "License");
|
||||||
|
# you may not use this file except in compliance with the License.
|
||||||
|
# You may obtain a copy of the License at
|
||||||
|
#
|
||||||
|
# http://www.apache.org/licenses/LICENSE-2.0
|
||||||
|
#
|
||||||
|
# Unless required by applicable law or agreed to in writing, software
|
||||||
|
# distributed under the License is distributed on an "AS IS" BASIS,
|
||||||
|
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
|
||||||
|
# See the License for the specific language governing permissions and
|
||||||
|
# limitations under the License.
|
||||||
|
|
||||||
|
"""A BSON wrapper for long (int in python3)"""
|
||||||
|
|
||||||
|
|
||||||
|
class Int64(int):
    """Representation of the BSON int64 type.

    Every integral number is a plain :class:`int` in Python 3, and small
    values are encoded as BSON int32 by default — wrapping a value in
    ``Int64`` forces it to always be encoded as BSON int64.

    :Parameters:
      - `value`: the numeric value to represent
    """

    # No per-instance __dict__; the int value is the only state.
    __slots__ = ()

    # BSON type number for int64.
    _type_marker = 18

    def __getstate__(self):
        # Nothing beyond the int value itself to pickle.
        return {}

    def __setstate__(self, state):
        # Nothing to restore; int's machinery handles the value.
        pass
|
861
src/xtquant/xtbson/bson36/json_util.py
Normal file
861
src/xtquant/xtbson/bson36/json_util.py
Normal file
@ -0,0 +1,861 @@
|
|||||||
|
# Copyright 2009-present MongoDB, Inc.
|
||||||
|
#
|
||||||
|
# Licensed under the Apache License, Version 2.0 (the "License");
|
||||||
|
# you may not use this file except in compliance with the License.
|
||||||
|
# You may obtain a copy of the License at
|
||||||
|
#
|
||||||
|
# http://www.apache.org/licenses/LICENSE-2.0
|
||||||
|
#
|
||||||
|
# Unless required by applicable law or agreed to in writing, software
|
||||||
|
# distributed under the License is distributed on an "AS IS" BASIS,
|
||||||
|
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
|
||||||
|
# See the License for the specific language governing permissions and
|
||||||
|
# limitations under the License.
|
||||||
|
|
||||||
|
"""Tools for using Python's :mod:`json` module with BSON documents.
|
||||||
|
|
||||||
|
This module provides two helper methods `dumps` and `loads` that wrap the
|
||||||
|
native :mod:`json` methods and provide explicit BSON conversion to and from
|
||||||
|
JSON. :class:`~bson.json_util.JSONOptions` provides a way to control how JSON
|
||||||
|
is emitted and parsed, with the default being the Relaxed Extended JSON format.
|
||||||
|
:mod:`~bson.json_util` can also generate Canonical or legacy `Extended JSON`_
|
||||||
|
when :const:`CANONICAL_JSON_OPTIONS` or :const:`LEGACY_JSON_OPTIONS` is
|
||||||
|
provided, respectively.
|
||||||
|
|
||||||
|
.. _Extended JSON: https://github.com/mongodb/specifications/blob/master/source/extended-json.rst
|
||||||
|
|
||||||
|
Example usage (deserialization):
|
||||||
|
|
||||||
|
.. doctest::
|
||||||
|
|
||||||
|
>>> from .json_util import loads
|
||||||
|
>>> loads('[{"foo": [1, 2]}, {"bar": {"hello": "world"}}, {"code": {"$scope": {}, "$code": "function x() { return 1; }"}}, {"bin": {"$type": "80", "$binary": "AQIDBA=="}}]')
|
||||||
|
[{'foo': [1, 2]}, {'bar': {'hello': 'world'}}, {'code': Code('function x() { return 1; }', {})}, {'bin': Binary(b'...', 128)}]
|
||||||
|
|
||||||
|
Example usage with :const:`RELAXED_JSON_OPTIONS` (the default):
|
||||||
|
|
||||||
|
.. doctest::
|
||||||
|
|
||||||
|
>>> from . import Binary, Code
|
||||||
|
>>> from .json_util import dumps
|
||||||
|
>>> dumps([{'foo': [1, 2]},
|
||||||
|
... {'bar': {'hello': 'world'}},
|
||||||
|
... {'code': Code("function x() { return 1; }")},
|
||||||
|
... {'bin': Binary(b"\x01\x02\x03\x04")}])
|
||||||
|
'[{"foo": [1, 2]}, {"bar": {"hello": "world"}}, {"code": {"$code": "function x() { return 1; }"}}, {"bin": {"$binary": {"base64": "AQIDBA==", "subType": "00"}}}]'
|
||||||
|
|
||||||
|
Example usage (with :const:`CANONICAL_JSON_OPTIONS`):
|
||||||
|
|
||||||
|
.. doctest::
|
||||||
|
|
||||||
|
>>> from . import Binary, Code
|
||||||
|
>>> from .json_util import dumps, CANONICAL_JSON_OPTIONS
|
||||||
|
>>> dumps([{'foo': [1, 2]},
|
||||||
|
... {'bar': {'hello': 'world'}},
|
||||||
|
... {'code': Code("function x() { return 1; }")},
|
||||||
|
... {'bin': Binary(b"\x01\x02\x03\x04")}],
|
||||||
|
... json_options=CANONICAL_JSON_OPTIONS)
|
||||||
|
'[{"foo": [{"$numberInt": "1"}, {"$numberInt": "2"}]}, {"bar": {"hello": "world"}}, {"code": {"$code": "function x() { return 1; }"}}, {"bin": {"$binary": {"base64": "AQIDBA==", "subType": "00"}}}]'
|
||||||
|
|
||||||
|
Example usage (with :const:`LEGACY_JSON_OPTIONS`):
|
||||||
|
|
||||||
|
.. doctest::
|
||||||
|
|
||||||
|
>>> from . import Binary, Code
|
||||||
|
>>> from .json_util import dumps, LEGACY_JSON_OPTIONS
|
||||||
|
>>> dumps([{'foo': [1, 2]},
|
||||||
|
... {'bar': {'hello': 'world'}},
|
||||||
|
... {'code': Code("function x() { return 1; }", {})},
|
||||||
|
... {'bin': Binary(b"\x01\x02\x03\x04")}],
|
||||||
|
... json_options=LEGACY_JSON_OPTIONS)
|
||||||
|
'[{"foo": [1, 2]}, {"bar": {"hello": "world"}}, {"code": {"$code": "function x() { return 1; }", "$scope": {}}}, {"bin": {"$binary": "AQIDBA==", "$type": "00"}}]'
|
||||||
|
|
||||||
|
Alternatively, you can manually pass the `default` to :func:`json.dumps`.
|
||||||
|
It won't handle :class:`~bson.binary.Binary` and :class:`~bson.code.Code`
|
||||||
|
instances (as they are extended strings you can't provide custom defaults),
|
||||||
|
but it will be faster as there is less recursion.
|
||||||
|
|
||||||
|
.. note::
|
||||||
|
If your application does not need the flexibility offered by
|
||||||
|
:class:`JSONOptions` and spends a large amount of time in the `json_util`
|
||||||
|
module, look to
|
||||||
|
`python-bsonjs <https://pypi.python.org/pypi/python-bsonjs>`_ for a nice
|
||||||
|
performance improvement. `python-bsonjs` is a fast BSON to MongoDB
|
||||||
|
Extended JSON converter for Python built on top of
|
||||||
|
`libbson <https://github.com/mongodb/libbson>`_. `python-bsonjs` works best
|
||||||
|
with PyMongo when using :class:`~bson.raw_bson.RawBSONDocument`.
|
||||||
|
"""
|
||||||
|
|
||||||
|
import base64
|
||||||
|
import datetime
|
||||||
|
import json
|
||||||
|
import math
|
||||||
|
import re
|
||||||
|
import uuid
|
||||||
|
|
||||||
|
import bson
|
||||||
|
from . import EPOCH_AWARE, RE_TYPE, SON
|
||||||
|
from .binary import ALL_UUID_SUBTYPES, UUID_SUBTYPE, Binary, UuidRepresentation
|
||||||
|
from .code import Code
|
||||||
|
from .codec_options import CodecOptions
|
||||||
|
from .dbref import DBRef
|
||||||
|
from .decimal128 import Decimal128
|
||||||
|
from .int64 import Int64
|
||||||
|
from .max_key import MaxKey
|
||||||
|
from .min_key import MinKey
|
||||||
|
from .objectid import ObjectId
|
||||||
|
from .regex import Regex
|
||||||
|
from .timestamp import Timestamp
|
||||||
|
from .tz_util import utc
|
||||||
|
|
||||||
|
# Maps MongoDB regular-expression option characters to Python re flags.
_RE_OPT_TABLE = {
    "i": re.I,
    "l": re.L,
    "m": re.M,
    "s": re.S,
    "u": re.U,
    "x": re.X,
}
|
||||||
|
|
||||||
|
|
||||||
|
class DatetimeRepresentation:
    """Encodings available for :class:`datetime.datetime` in Extended JSON."""

    # Legacy MongoDB Extended JSON representation:
    # {"$date": <dateAsMilliseconds>} with a 64-bit signed integer count of
    # milliseconds since the Unix epoch UTC. Default before PyMongo 3.4.
    # .. versionadded:: 3.4
    LEGACY = 0

    # NumberLong representation:
    # {"$date": {"$numberLong": "<dateAsMilliseconds>"}} where the count of
    # milliseconds since the Unix epoch UTC is a string-wrapped 64-bit int.
    # .. versionadded:: 3.4
    NUMBERLONG = 1

    # ISO-8601 representation:
    # {"$date": "<ISO-8601>"} for datetimes at or after the Unix epoch UTC;
    # earlier datetimes fall back to the NUMBERLONG encoding.
    # .. versionadded:: 3.4
    ISO8601 = 2
|
||||||
|
|
||||||
|
|
||||||
|
class JSONMode:
    """Extended JSON flavors selectable for :func:`dumps`/:func:`loads`."""

    # Legacy Extended JSON: PyMongo's historical non-standard output.
    # Prefer RELAXED or CANONICAL instead.
    # .. versionadded:: 3.5
    LEGACY = 0

    # Relaxed Extended JSON: a mostly JSON-like format where int, Int64 and
    # float use the native JSON number form. The most human-readable mode —
    # good for web APIs, debugging and documentation.
    # .. seealso:: The specification for Relaxed `Extended JSON`_.
    # .. versionadded:: 3.5
    RELAXED = 1

    # Canonical Extended JSON: type-preserving; int, Int64 and float are
    # emitted with explicit type wrappers. Good for tests that must pin
    # exact types.
    # .. seealso:: The specification for Canonical `Extended JSON`_.
    # .. versionadded:: 3.5
    CANONICAL = 2
|
||||||
|
|
||||||
|
|
||||||
|
class JSONOptions(CodecOptions):
|
||||||
|
"""Encapsulates JSON options for :func:`dumps` and :func:`loads`.
|
||||||
|
|
||||||
|
:Parameters:
|
||||||
|
- `strict_number_long`: If ``True``, :class:`~bson.int64.Int64` objects
|
||||||
|
are encoded to MongoDB Extended JSON's *Strict mode* type
|
||||||
|
`NumberLong`, ie ``'{"$numberLong": "<number>" }'``. Otherwise they
|
||||||
|
will be encoded as an `int`. Defaults to ``False``.
|
||||||
|
- `datetime_representation`: The representation to use when encoding
|
||||||
|
instances of :class:`datetime.datetime`. Defaults to
|
||||||
|
:const:`~DatetimeRepresentation.LEGACY`.
|
||||||
|
- `strict_uuid`: If ``True``, :class:`uuid.UUID` object are encoded to
|
||||||
|
MongoDB Extended JSON's *Strict mode* type `Binary`. Otherwise it
|
||||||
|
will be encoded as ``'{"$uuid": "<hex>" }'``. Defaults to ``False``.
|
||||||
|
- `json_mode`: The :class:`JSONMode` to use when encoding BSON types to
|
||||||
|
Extended JSON. Defaults to :const:`~JSONMode.LEGACY`.
|
||||||
|
- `document_class`: BSON documents returned by :func:`loads` will be
|
||||||
|
decoded to an instance of this class. Must be a subclass of
|
||||||
|
:class:`collections.MutableMapping`. Defaults to :class:`dict`.
|
||||||
|
- `uuid_representation`: The :class:`~bson.binary.UuidRepresentation`
|
||||||
|
to use when encoding and decoding instances of :class:`uuid.UUID`.
|
||||||
|
Defaults to :const:`~bson.binary.UuidRepresentation.UNSPECIFIED`.
|
||||||
|
- `tz_aware`: If ``True``, MongoDB Extended JSON's *Strict mode* type
|
||||||
|
`Date` will be decoded to timezone aware instances of
|
||||||
|
:class:`datetime.datetime`. Otherwise they will be naive. Defaults
|
||||||
|
to ``False``.
|
||||||
|
- `tzinfo`: A :class:`datetime.tzinfo` subclass that specifies the
|
||||||
|
timezone from which :class:`~datetime.datetime` objects should be
|
||||||
|
decoded. Defaults to :const:`~bson.tz_util.utc`.
|
||||||
|
- `args`: arguments to :class:`~bson.codec_options.CodecOptions`
|
||||||
|
- `kwargs`: arguments to :class:`~bson.codec_options.CodecOptions`
|
||||||
|
|
||||||
|
.. seealso:: The specification for Relaxed and Canonical `Extended JSON`_.
|
||||||
|
|
||||||
|
.. versionchanged:: 4.0
|
||||||
|
The default for `json_mode` was changed from :const:`JSONMode.LEGACY`
|
||||||
|
to :const:`JSONMode.RELAXED`.
|
||||||
|
The default for `uuid_representation` was changed from
|
||||||
|
:const:`~bson.binary.UuidRepresentation.PYTHON_LEGACY` to
|
||||||
|
:const:`~bson.binary.UuidRepresentation.UNSPECIFIED`.
|
||||||
|
|
||||||
|
.. versionchanged:: 3.5
|
||||||
|
Accepts the optional parameter `json_mode`.
|
||||||
|
|
||||||
|
.. versionchanged:: 4.0
|
||||||
|
Changed default value of `tz_aware` to False.
|
||||||
|
"""
|
||||||
|
|
||||||
|
def __new__(
|
||||||
|
cls,
|
||||||
|
strict_number_long=None,
|
||||||
|
datetime_representation=None,
|
||||||
|
strict_uuid=None,
|
||||||
|
json_mode=JSONMode.RELAXED,
|
||||||
|
*args,
|
||||||
|
**kwargs
|
||||||
|
):
|
||||||
|
kwargs["tz_aware"] = kwargs.get("tz_aware", False)
|
||||||
|
if kwargs["tz_aware"]:
|
||||||
|
kwargs["tzinfo"] = kwargs.get("tzinfo", utc)
|
||||||
|
if datetime_representation not in (
|
||||||
|
DatetimeRepresentation.LEGACY,
|
||||||
|
DatetimeRepresentation.NUMBERLONG,
|
||||||
|
DatetimeRepresentation.ISO8601,
|
||||||
|
None,
|
||||||
|
):
|
||||||
|
raise ValueError(
|
||||||
|
"JSONOptions.datetime_representation must be one of LEGACY, "
|
||||||
|
"NUMBERLONG, or ISO8601 from DatetimeRepresentation."
|
||||||
|
)
|
||||||
|
self = super(JSONOptions, cls).__new__(cls, *args, **kwargs)
|
||||||
|
if json_mode not in (JSONMode.LEGACY, JSONMode.RELAXED, JSONMode.CANONICAL):
|
||||||
|
raise ValueError(
|
||||||
|
"JSONOptions.json_mode must be one of LEGACY, RELAXED, "
|
||||||
|
"or CANONICAL from JSONMode."
|
||||||
|
)
|
||||||
|
self.json_mode = json_mode
|
||||||
|
if self.json_mode == JSONMode.RELAXED:
|
||||||
|
if strict_number_long:
|
||||||
|
raise ValueError("Cannot specify strict_number_long=True with" " JSONMode.RELAXED")
|
||||||
|
if datetime_representation not in (None, DatetimeRepresentation.ISO8601):
|
||||||
|
raise ValueError(
|
||||||
|
"datetime_representation must be DatetimeRepresentation."
|
||||||
|
"ISO8601 or omitted with JSONMode.RELAXED"
|
||||||
|
)
|
||||||
|
if strict_uuid not in (None, True):
|
||||||
|
raise ValueError("Cannot specify strict_uuid=False with JSONMode.RELAXED")
|
||||||
|
self.strict_number_long = False
|
||||||
|
self.datetime_representation = DatetimeRepresentation.ISO8601
|
||||||
|
self.strict_uuid = True
|
||||||
|
elif self.json_mode == JSONMode.CANONICAL:
|
||||||
|
if strict_number_long not in (None, True):
|
||||||
|
raise ValueError("Cannot specify strict_number_long=False with" " JSONMode.RELAXED")
|
||||||
|
if datetime_representation not in (None, DatetimeRepresentation.NUMBERLONG):
|
||||||
|
raise ValueError(
|
||||||
|
"datetime_representation must be DatetimeRepresentation."
|
||||||
|
"NUMBERLONG or omitted with JSONMode.RELAXED"
|
||||||
|
)
|
||||||
|
if strict_uuid not in (None, True):
|
||||||
|
raise ValueError("Cannot specify strict_uuid=False with JSONMode.RELAXED")
|
||||||
|
self.strict_number_long = True
|
||||||
|
self.datetime_representation = DatetimeRepresentation.NUMBERLONG
|
||||||
|
self.strict_uuid = True
|
||||||
|
else: # JSONMode.LEGACY
|
||||||
|
self.strict_number_long = False
|
||||||
|
self.datetime_representation = DatetimeRepresentation.LEGACY
|
||||||
|
self.strict_uuid = False
|
||||||
|
if strict_number_long is not None:
|
||||||
|
self.strict_number_long = strict_number_long
|
||||||
|
if datetime_representation is not None:
|
||||||
|
self.datetime_representation = datetime_representation
|
||||||
|
if strict_uuid is not None:
|
||||||
|
self.strict_uuid = strict_uuid
|
||||||
|
return self
|
||||||
|
|
||||||
|
def _arguments_repr(self):
|
||||||
|
return (
|
||||||
|
"strict_number_long=%r, "
|
||||||
|
"datetime_representation=%r, "
|
||||||
|
"strict_uuid=%r, json_mode=%r, %s"
|
||||||
|
% (
|
||||||
|
self.strict_number_long,
|
||||||
|
self.datetime_representation,
|
||||||
|
self.strict_uuid,
|
||||||
|
self.json_mode,
|
||||||
|
super(JSONOptions, self)._arguments_repr(),
|
||||||
|
)
|
||||||
|
)
|
||||||
|
|
||||||
|
def _options_dict(self):
|
||||||
|
# TODO: PYTHON-2442 use _asdict() instead
|
||||||
|
options_dict = super(JSONOptions, self)._options_dict()
|
||||||
|
options_dict.update(
|
||||||
|
{
|
||||||
|
"strict_number_long": self.strict_number_long,
|
||||||
|
"datetime_representation": self.datetime_representation,
|
||||||
|
"strict_uuid": self.strict_uuid,
|
||||||
|
"json_mode": self.json_mode,
|
||||||
|
}
|
||||||
|
)
|
||||||
|
return options_dict
|
||||||
|
|
||||||
|
def with_options(self, **kwargs):
|
||||||
|
"""
|
||||||
|
Make a copy of this JSONOptions, overriding some options::
|
||||||
|
|
||||||
|
>>> from .json_util import CANONICAL_JSON_OPTIONS
|
||||||
|
>>> CANONICAL_JSON_OPTIONS.tz_aware
|
||||||
|
True
|
||||||
|
>>> json_options = CANONICAL_JSON_OPTIONS.with_options(tz_aware=False, tzinfo=None)
|
||||||
|
>>> json_options.tz_aware
|
||||||
|
False
|
||||||
|
|
||||||
|
.. versionadded:: 3.12
|
||||||
|
"""
|
||||||
|
opts = self._options_dict()
|
||||||
|
for opt in ("strict_number_long", "datetime_representation", "strict_uuid", "json_mode"):
|
||||||
|
opts[opt] = kwargs.get(opt, getattr(self, opt))
|
||||||
|
opts.update(kwargs)
|
||||||
|
return JSONOptions(**opts)
|
||||||
|
|
||||||
|
|
||||||
|
LEGACY_JSON_OPTIONS = JSONOptions(json_mode=JSONMode.LEGACY)
|
||||||
|
""":class:`JSONOptions` for encoding to PyMongo's legacy JSON format.
|
||||||
|
|
||||||
|
.. seealso:: The documentation for :const:`bson.json_util.JSONMode.LEGACY`.
|
||||||
|
|
||||||
|
.. versionadded:: 3.5
|
||||||
|
"""
|
||||||
|
|
||||||
|
CANONICAL_JSON_OPTIONS = JSONOptions(json_mode=JSONMode.CANONICAL)
|
||||||
|
""":class:`JSONOptions` for Canonical Extended JSON.
|
||||||
|
|
||||||
|
.. seealso:: The documentation for :const:`bson.json_util.JSONMode.CANONICAL`.
|
||||||
|
|
||||||
|
.. versionadded:: 3.5
|
||||||
|
"""
|
||||||
|
|
||||||
|
RELAXED_JSON_OPTIONS = JSONOptions(json_mode=JSONMode.RELAXED)
|
||||||
|
""":class:`JSONOptions` for Relaxed Extended JSON.
|
||||||
|
|
||||||
|
.. seealso:: The documentation for :const:`bson.json_util.JSONMode.RELAXED`.
|
||||||
|
|
||||||
|
.. versionadded:: 3.5
|
||||||
|
"""
|
||||||
|
|
||||||
|
DEFAULT_JSON_OPTIONS = RELAXED_JSON_OPTIONS
|
||||||
|
"""The default :class:`JSONOptions` for JSON encoding/decoding.
|
||||||
|
|
||||||
|
The same as :const:`RELAXED_JSON_OPTIONS`.
|
||||||
|
|
||||||
|
.. versionchanged:: 4.0
|
||||||
|
Changed from :const:`LEGACY_JSON_OPTIONS` to
|
||||||
|
:const:`RELAXED_JSON_OPTIONS`.
|
||||||
|
|
||||||
|
.. versionadded:: 3.4
|
||||||
|
"""
|
||||||
|
|
||||||
|
|
||||||
|
def dumps(obj, *args, **kwargs):
|
||||||
|
"""Helper function that wraps :func:`json.dumps`.
|
||||||
|
|
||||||
|
Recursive function that handles all BSON types including
|
||||||
|
:class:`~bson.binary.Binary` and :class:`~bson.code.Code`.
|
||||||
|
|
||||||
|
:Parameters:
|
||||||
|
- `json_options`: A :class:`JSONOptions` instance used to modify the
|
||||||
|
encoding of MongoDB Extended JSON types. Defaults to
|
||||||
|
:const:`DEFAULT_JSON_OPTIONS`.
|
||||||
|
|
||||||
|
.. versionchanged:: 4.0
|
||||||
|
Now outputs MongoDB Relaxed Extended JSON by default (using
|
||||||
|
:const:`DEFAULT_JSON_OPTIONS`).
|
||||||
|
|
||||||
|
.. versionchanged:: 3.4
|
||||||
|
Accepts optional parameter `json_options`. See :class:`JSONOptions`.
|
||||||
|
"""
|
||||||
|
json_options = kwargs.pop("json_options", DEFAULT_JSON_OPTIONS)
|
||||||
|
return json.dumps(_json_convert(obj, json_options), *args, **kwargs)
|
||||||
|
|
||||||
|
|
||||||
|
def loads(s, *args, **kwargs):
|
||||||
|
"""Helper function that wraps :func:`json.loads`.
|
||||||
|
|
||||||
|
Automatically passes the object_hook for BSON type conversion.
|
||||||
|
|
||||||
|
Raises ``TypeError``, ``ValueError``, ``KeyError``, or
|
||||||
|
:exc:`~bson.errors.InvalidId` on invalid MongoDB Extended JSON.
|
||||||
|
|
||||||
|
:Parameters:
|
||||||
|
- `json_options`: A :class:`JSONOptions` instance used to modify the
|
||||||
|
decoding of MongoDB Extended JSON types. Defaults to
|
||||||
|
:const:`DEFAULT_JSON_OPTIONS`.
|
||||||
|
|
||||||
|
.. versionchanged:: 3.5
|
||||||
|
Parses Relaxed and Canonical Extended JSON as well as PyMongo's legacy
|
||||||
|
format. Now raises ``TypeError`` or ``ValueError`` when parsing JSON
|
||||||
|
type wrappers with values of the wrong type or any extra keys.
|
||||||
|
|
||||||
|
.. versionchanged:: 3.4
|
||||||
|
Accepts optional parameter `json_options`. See :class:`JSONOptions`.
|
||||||
|
"""
|
||||||
|
json_options = kwargs.pop("json_options", DEFAULT_JSON_OPTIONS)
|
||||||
|
kwargs["object_pairs_hook"] = lambda pairs: object_pairs_hook(pairs, json_options)
|
||||||
|
return json.loads(s, *args, **kwargs)
|
||||||
|
|
||||||
|
|
||||||
|
def _json_convert(obj, json_options=DEFAULT_JSON_OPTIONS):
|
||||||
|
"""Recursive helper method that converts BSON types so they can be
|
||||||
|
converted into json.
|
||||||
|
"""
|
||||||
|
if hasattr(obj, "items"):
|
||||||
|
return SON(((k, _json_convert(v, json_options)) for k, v in obj.items()))
|
||||||
|
elif hasattr(obj, "__iter__") and not isinstance(obj, (str, bytes)):
|
||||||
|
return list((_json_convert(v, json_options) for v in obj))
|
||||||
|
try:
|
||||||
|
return default(obj, json_options)
|
||||||
|
except TypeError:
|
||||||
|
return obj
|
||||||
|
|
||||||
|
|
||||||
|
def object_pairs_hook(pairs, json_options=DEFAULT_JSON_OPTIONS):
|
||||||
|
return object_hook(json_options.document_class(pairs), json_options)
|
||||||
|
|
||||||
|
|
||||||
|
def object_hook(dct, json_options=DEFAULT_JSON_OPTIONS):
|
||||||
|
if "$oid" in dct:
|
||||||
|
return _parse_canonical_oid(dct)
|
||||||
|
if (
|
||||||
|
isinstance(dct.get("$ref"), str)
|
||||||
|
and "$id" in dct
|
||||||
|
and isinstance(dct.get("$db"), (str, type(None)))
|
||||||
|
):
|
||||||
|
return _parse_canonical_dbref(dct)
|
||||||
|
if "$date" in dct:
|
||||||
|
return _parse_canonical_datetime(dct, json_options)
|
||||||
|
if "$regex" in dct:
|
||||||
|
return _parse_legacy_regex(dct)
|
||||||
|
if "$minKey" in dct:
|
||||||
|
return _parse_canonical_minkey(dct)
|
||||||
|
if "$maxKey" in dct:
|
||||||
|
return _parse_canonical_maxkey(dct)
|
||||||
|
if "$binary" in dct:
|
||||||
|
if "$type" in dct:
|
||||||
|
return _parse_legacy_binary(dct, json_options)
|
||||||
|
else:
|
||||||
|
return _parse_canonical_binary(dct, json_options)
|
||||||
|
if "$code" in dct:
|
||||||
|
return _parse_canonical_code(dct)
|
||||||
|
if "$uuid" in dct:
|
||||||
|
return _parse_legacy_uuid(dct, json_options)
|
||||||
|
if "$undefined" in dct:
|
||||||
|
return None
|
||||||
|
if "$numberLong" in dct:
|
||||||
|
return _parse_canonical_int64(dct)
|
||||||
|
if "$timestamp" in dct:
|
||||||
|
tsp = dct["$timestamp"]
|
||||||
|
return Timestamp(tsp["t"], tsp["i"])
|
||||||
|
if "$numberDecimal" in dct:
|
||||||
|
return _parse_canonical_decimal128(dct)
|
||||||
|
if "$dbPointer" in dct:
|
||||||
|
return _parse_canonical_dbpointer(dct)
|
||||||
|
if "$regularExpression" in dct:
|
||||||
|
return _parse_canonical_regex(dct)
|
||||||
|
if "$symbol" in dct:
|
||||||
|
return _parse_canonical_symbol(dct)
|
||||||
|
if "$numberInt" in dct:
|
||||||
|
return _parse_canonical_int32(dct)
|
||||||
|
if "$numberDouble" in dct:
|
||||||
|
return _parse_canonical_double(dct)
|
||||||
|
return dct
|
||||||
|
|
||||||
|
|
||||||
|
def _parse_legacy_regex(doc):
|
||||||
|
pattern = doc["$regex"]
|
||||||
|
# Check if this is the $regex query operator.
|
||||||
|
if not isinstance(pattern, (str, bytes)):
|
||||||
|
return doc
|
||||||
|
flags = 0
|
||||||
|
# PyMongo always adds $options but some other tools may not.
|
||||||
|
for opt in doc.get("$options", ""):
|
||||||
|
flags |= _RE_OPT_TABLE.get(opt, 0)
|
||||||
|
return Regex(pattern, flags)
|
||||||
|
|
||||||
|
|
||||||
|
def _parse_legacy_uuid(doc, json_options):
|
||||||
|
"""Decode a JSON legacy $uuid to Python UUID."""
|
||||||
|
if len(doc) != 1:
|
||||||
|
raise TypeError("Bad $uuid, extra field(s): %s" % (doc,))
|
||||||
|
if not isinstance(doc["$uuid"], str):
|
||||||
|
raise TypeError("$uuid must be a string: %s" % (doc,))
|
||||||
|
if json_options.uuid_representation == UuidRepresentation.UNSPECIFIED:
|
||||||
|
return Binary.from_uuid(uuid.UUID(doc["$uuid"]))
|
||||||
|
else:
|
||||||
|
return uuid.UUID(doc["$uuid"])
|
||||||
|
|
||||||
|
|
||||||
|
def _binary_or_uuid(data, subtype, json_options):
|
||||||
|
# special handling for UUID
|
||||||
|
if subtype in ALL_UUID_SUBTYPES:
|
||||||
|
uuid_representation = json_options.uuid_representation
|
||||||
|
binary_value = Binary(data, subtype)
|
||||||
|
if uuid_representation == UuidRepresentation.UNSPECIFIED:
|
||||||
|
return binary_value
|
||||||
|
if subtype == UUID_SUBTYPE:
|
||||||
|
# Legacy behavior: use STANDARD with binary subtype 4.
|
||||||
|
uuid_representation = UuidRepresentation.STANDARD
|
||||||
|
elif uuid_representation == UuidRepresentation.STANDARD:
|
||||||
|
# subtype == OLD_UUID_SUBTYPE
|
||||||
|
# Legacy behavior: STANDARD is the same as PYTHON_LEGACY.
|
||||||
|
uuid_representation = UuidRepresentation.PYTHON_LEGACY
|
||||||
|
return binary_value.as_uuid(uuid_representation)
|
||||||
|
|
||||||
|
if subtype == 0:
|
||||||
|
return data
|
||||||
|
return Binary(data, subtype)
|
||||||
|
|
||||||
|
|
||||||
|
def _parse_legacy_binary(doc, json_options):
|
||||||
|
if isinstance(doc["$type"], int):
|
||||||
|
doc["$type"] = "%02x" % doc["$type"]
|
||||||
|
subtype = int(doc["$type"], 16)
|
||||||
|
if subtype >= 0xFFFFFF80: # Handle mongoexport values
|
||||||
|
subtype = int(doc["$type"][6:], 16)
|
||||||
|
data = base64.b64decode(doc["$binary"].encode())
|
||||||
|
return _binary_or_uuid(data, subtype, json_options)
|
||||||
|
|
||||||
|
|
||||||
|
def _parse_canonical_binary(doc, json_options):
|
||||||
|
binary = doc["$binary"]
|
||||||
|
b64 = binary["base64"]
|
||||||
|
subtype = binary["subType"]
|
||||||
|
if not isinstance(b64, str):
|
||||||
|
raise TypeError("$binary base64 must be a string: %s" % (doc,))
|
||||||
|
if not isinstance(subtype, str) or len(subtype) > 2:
|
||||||
|
raise TypeError("$binary subType must be a string at most 2 " "characters: %s" % (doc,))
|
||||||
|
if len(binary) != 2:
|
||||||
|
raise TypeError(
|
||||||
|
'$binary must include only "base64" and "subType" ' "components: %s" % (doc,)
|
||||||
|
)
|
||||||
|
|
||||||
|
data = base64.b64decode(b64.encode())
|
||||||
|
return _binary_or_uuid(data, int(subtype, 16), json_options)
|
||||||
|
|
||||||
|
|
||||||
|
def _parse_canonical_datetime(doc, json_options):
|
||||||
|
"""Decode a JSON datetime to python datetime.datetime."""
|
||||||
|
dtm = doc["$date"]
|
||||||
|
if len(doc) != 1:
|
||||||
|
raise TypeError("Bad $date, extra field(s): %s" % (doc,))
|
||||||
|
# mongoexport 2.6 and newer
|
||||||
|
if isinstance(dtm, str):
|
||||||
|
# Parse offset
|
||||||
|
if dtm[-1] == "Z":
|
||||||
|
dt = dtm[:-1]
|
||||||
|
offset = "Z"
|
||||||
|
elif dtm[-6] in ("+", "-") and dtm[-3] == ":":
|
||||||
|
# (+|-)HH:MM
|
||||||
|
dt = dtm[:-6]
|
||||||
|
offset = dtm[-6:]
|
||||||
|
elif dtm[-5] in ("+", "-"):
|
||||||
|
# (+|-)HHMM
|
||||||
|
dt = dtm[:-5]
|
||||||
|
offset = dtm[-5:]
|
||||||
|
elif dtm[-3] in ("+", "-"):
|
||||||
|
# (+|-)HH
|
||||||
|
dt = dtm[:-3]
|
||||||
|
offset = dtm[-3:]
|
||||||
|
else:
|
||||||
|
dt = dtm
|
||||||
|
offset = ""
|
||||||
|
|
||||||
|
# Parse the optional factional seconds portion.
|
||||||
|
dot_index = dt.rfind(".")
|
||||||
|
microsecond = 0
|
||||||
|
if dot_index != -1:
|
||||||
|
microsecond = int(float(dt[dot_index:]) * 1000000)
|
||||||
|
dt = dt[:dot_index]
|
||||||
|
|
||||||
|
aware = datetime.datetime.strptime(dt, "%Y-%m-%dT%H:%M:%S").replace(
|
||||||
|
microsecond=microsecond, tzinfo=utc
|
||||||
|
)
|
||||||
|
|
||||||
|
if offset and offset != "Z":
|
||||||
|
if len(offset) == 6:
|
||||||
|
hours, minutes = offset[1:].split(":")
|
||||||
|
secs = int(hours) * 3600 + int(minutes) * 60
|
||||||
|
elif len(offset) == 5:
|
||||||
|
secs = int(offset[1:3]) * 3600 + int(offset[3:]) * 60
|
||||||
|
elif len(offset) == 3:
|
||||||
|
secs = int(offset[1:3]) * 3600
|
||||||
|
if offset[0] == "-":
|
||||||
|
secs *= -1
|
||||||
|
aware = aware - datetime.timedelta(seconds=secs)
|
||||||
|
|
||||||
|
if json_options.tz_aware:
|
||||||
|
if json_options.tzinfo:
|
||||||
|
aware = aware.astimezone(json_options.tzinfo)
|
||||||
|
return aware
|
||||||
|
else:
|
||||||
|
return aware.replace(tzinfo=None)
|
||||||
|
return bson._millis_to_datetime(int(dtm), json_options)
|
||||||
|
|
||||||
|
|
||||||
|
def _parse_canonical_oid(doc):
|
||||||
|
"""Decode a JSON ObjectId to bson.objectid.ObjectId."""
|
||||||
|
if len(doc) != 1:
|
||||||
|
raise TypeError("Bad $oid, extra field(s): %s" % (doc,))
|
||||||
|
return ObjectId(doc["$oid"])
|
||||||
|
|
||||||
|
|
||||||
|
def _parse_canonical_symbol(doc):
|
||||||
|
"""Decode a JSON symbol to Python string."""
|
||||||
|
symbol = doc["$symbol"]
|
||||||
|
if len(doc) != 1:
|
||||||
|
raise TypeError("Bad $symbol, extra field(s): %s" % (doc,))
|
||||||
|
return str(symbol)
|
||||||
|
|
||||||
|
|
||||||
|
def _parse_canonical_code(doc):
|
||||||
|
"""Decode a JSON code to bson.code.Code."""
|
||||||
|
for key in doc:
|
||||||
|
if key not in ("$code", "$scope"):
|
||||||
|
raise TypeError("Bad $code, extra field(s): %s" % (doc,))
|
||||||
|
return Code(doc["$code"], scope=doc.get("$scope"))
|
||||||
|
|
||||||
|
|
||||||
|
def _parse_canonical_regex(doc):
|
||||||
|
"""Decode a JSON regex to bson.regex.Regex."""
|
||||||
|
regex = doc["$regularExpression"]
|
||||||
|
if len(doc) != 1:
|
||||||
|
raise TypeError("Bad $regularExpression, extra field(s): %s" % (doc,))
|
||||||
|
if len(regex) != 2:
|
||||||
|
raise TypeError(
|
||||||
|
'Bad $regularExpression must include only "pattern"'
|
||||||
|
'and "options" components: %s' % (doc,)
|
||||||
|
)
|
||||||
|
opts = regex["options"]
|
||||||
|
if not isinstance(opts, str):
|
||||||
|
raise TypeError(
|
||||||
|
"Bad $regularExpression options, options must be " "string, was type %s" % (type(opts))
|
||||||
|
)
|
||||||
|
return Regex(regex["pattern"], opts)
|
||||||
|
|
||||||
|
|
||||||
|
def _parse_canonical_dbref(doc):
|
||||||
|
"""Decode a JSON DBRef to bson.dbref.DBRef."""
|
||||||
|
return DBRef(doc.pop("$ref"), doc.pop("$id"), database=doc.pop("$db", None), **doc)
|
||||||
|
|
||||||
|
|
||||||
|
def _parse_canonical_dbpointer(doc):
|
||||||
|
"""Decode a JSON (deprecated) DBPointer to bson.dbref.DBRef."""
|
||||||
|
dbref = doc["$dbPointer"]
|
||||||
|
if len(doc) != 1:
|
||||||
|
raise TypeError("Bad $dbPointer, extra field(s): %s" % (doc,))
|
||||||
|
if isinstance(dbref, DBRef):
|
||||||
|
dbref_doc = dbref.as_doc()
|
||||||
|
# DBPointer must not contain $db in its value.
|
||||||
|
if dbref.database is not None:
|
||||||
|
raise TypeError("Bad $dbPointer, extra field $db: %s" % (dbref_doc,))
|
||||||
|
if not isinstance(dbref.id, ObjectId):
|
||||||
|
raise TypeError("Bad $dbPointer, $id must be an ObjectId: %s" % (dbref_doc,))
|
||||||
|
if len(dbref_doc) != 2:
|
||||||
|
raise TypeError("Bad $dbPointer, extra field(s) in DBRef: %s" % (dbref_doc,))
|
||||||
|
return dbref
|
||||||
|
else:
|
||||||
|
raise TypeError("Bad $dbPointer, expected a DBRef: %s" % (doc,))
|
||||||
|
|
||||||
|
|
||||||
|
def _parse_canonical_int32(doc):
|
||||||
|
"""Decode a JSON int32 to python int."""
|
||||||
|
i_str = doc["$numberInt"]
|
||||||
|
if len(doc) != 1:
|
||||||
|
raise TypeError("Bad $numberInt, extra field(s): %s" % (doc,))
|
||||||
|
if not isinstance(i_str, str):
|
||||||
|
raise TypeError("$numberInt must be string: %s" % (doc,))
|
||||||
|
return int(i_str)
|
||||||
|
|
||||||
|
|
||||||
|
def _parse_canonical_int64(doc):
|
||||||
|
"""Decode a JSON int64 to bson.int64.Int64."""
|
||||||
|
l_str = doc["$numberLong"]
|
||||||
|
if len(doc) != 1:
|
||||||
|
raise TypeError("Bad $numberLong, extra field(s): %s" % (doc,))
|
||||||
|
return Int64(l_str)
|
||||||
|
|
||||||
|
|
||||||
|
def _parse_canonical_double(doc):
|
||||||
|
"""Decode a JSON double to python float."""
|
||||||
|
d_str = doc["$numberDouble"]
|
||||||
|
if len(doc) != 1:
|
||||||
|
raise TypeError("Bad $numberDouble, extra field(s): %s" % (doc,))
|
||||||
|
if not isinstance(d_str, str):
|
||||||
|
raise TypeError("$numberDouble must be string: %s" % (doc,))
|
||||||
|
return float(d_str)
|
||||||
|
|
||||||
|
|
||||||
|
def _parse_canonical_decimal128(doc):
|
||||||
|
"""Decode a JSON decimal128 to bson.decimal128.Decimal128."""
|
||||||
|
d_str = doc["$numberDecimal"]
|
||||||
|
if len(doc) != 1:
|
||||||
|
raise TypeError("Bad $numberDecimal, extra field(s): %s" % (doc,))
|
||||||
|
if not isinstance(d_str, str):
|
||||||
|
raise TypeError("$numberDecimal must be string: %s" % (doc,))
|
||||||
|
return Decimal128(d_str)
|
||||||
|
|
||||||
|
|
||||||
|
def _parse_canonical_minkey(doc):
|
||||||
|
"""Decode a JSON MinKey to bson.min_key.MinKey."""
|
||||||
|
if type(doc["$minKey"]) is not int or doc["$minKey"] != 1:
|
||||||
|
raise TypeError("$minKey value must be 1: %s" % (doc,))
|
||||||
|
if len(doc) != 1:
|
||||||
|
raise TypeError("Bad $minKey, extra field(s): %s" % (doc,))
|
||||||
|
return MinKey()
|
||||||
|
|
||||||
|
|
||||||
|
def _parse_canonical_maxkey(doc):
|
||||||
|
"""Decode a JSON MaxKey to bson.max_key.MaxKey."""
|
||||||
|
if type(doc["$maxKey"]) is not int or doc["$maxKey"] != 1:
|
||||||
|
raise TypeError("$maxKey value must be 1: %s", (doc,))
|
||||||
|
if len(doc) != 1:
|
||||||
|
raise TypeError("Bad $minKey, extra field(s): %s" % (doc,))
|
||||||
|
return MaxKey()
|
||||||
|
|
||||||
|
|
||||||
|
def _encode_binary(data, subtype, json_options):
|
||||||
|
if json_options.json_mode == JSONMode.LEGACY:
|
||||||
|
return SON([("$binary", base64.b64encode(data).decode()), ("$type", "%02x" % subtype)])
|
||||||
|
return {
|
||||||
|
"$binary": SON([("base64", base64.b64encode(data).decode()), ("subType", "%02x" % subtype)])
|
||||||
|
}
|
||||||
|
|
||||||
|
|
||||||
|
def default(obj, json_options=DEFAULT_JSON_OPTIONS):
|
||||||
|
# We preserve key order when rendering SON, DBRef, etc. as JSON by
|
||||||
|
# returning a SON for those types instead of a dict.
|
||||||
|
if isinstance(obj, ObjectId):
|
||||||
|
return {"$oid": str(obj)}
|
||||||
|
if isinstance(obj, DBRef):
|
||||||
|
return _json_convert(obj.as_doc(), json_options=json_options)
|
||||||
|
if isinstance(obj, datetime.datetime):
|
||||||
|
if json_options.datetime_representation == DatetimeRepresentation.ISO8601:
|
||||||
|
if not obj.tzinfo:
|
||||||
|
obj = obj.replace(tzinfo=utc)
|
||||||
|
if obj >= EPOCH_AWARE:
|
||||||
|
off = obj.tzinfo.utcoffset(obj)
|
||||||
|
if (off.days, off.seconds, off.microseconds) == (0, 0, 0):
|
||||||
|
tz_string = "Z"
|
||||||
|
else:
|
||||||
|
tz_string = obj.strftime("%z")
|
||||||
|
millis = int(obj.microsecond / 1000)
|
||||||
|
fracsecs = ".%03d" % (millis,) if millis else ""
|
||||||
|
return {
|
||||||
|
"$date": "%s%s%s" % (obj.strftime("%Y-%m-%dT%H:%M:%S"), fracsecs, tz_string)
|
||||||
|
}
|
||||||
|
|
||||||
|
millis = bson._datetime_to_millis(obj)
|
||||||
|
if json_options.datetime_representation == DatetimeRepresentation.LEGACY:
|
||||||
|
return {"$date": millis}
|
||||||
|
return {"$date": {"$numberLong": str(millis)}}
|
||||||
|
if json_options.strict_number_long and isinstance(obj, Int64):
|
||||||
|
return {"$numberLong": str(obj)}
|
||||||
|
if isinstance(obj, (RE_TYPE, Regex)):
|
||||||
|
flags = ""
|
||||||
|
if obj.flags & re.IGNORECASE:
|
||||||
|
flags += "i"
|
||||||
|
if obj.flags & re.LOCALE:
|
||||||
|
flags += "l"
|
||||||
|
if obj.flags & re.MULTILINE:
|
||||||
|
flags += "m"
|
||||||
|
if obj.flags & re.DOTALL:
|
||||||
|
flags += "s"
|
||||||
|
if obj.flags & re.UNICODE:
|
||||||
|
flags += "u"
|
||||||
|
if obj.flags & re.VERBOSE:
|
||||||
|
flags += "x"
|
||||||
|
if isinstance(obj.pattern, str):
|
||||||
|
pattern = obj.pattern
|
||||||
|
else:
|
||||||
|
pattern = obj.pattern.decode("utf-8")
|
||||||
|
if json_options.json_mode == JSONMode.LEGACY:
|
||||||
|
return SON([("$regex", pattern), ("$options", flags)])
|
||||||
|
return {"$regularExpression": SON([("pattern", pattern), ("options", flags)])}
|
||||||
|
if isinstance(obj, MinKey):
|
||||||
|
return {"$minKey": 1}
|
||||||
|
if isinstance(obj, MaxKey):
|
||||||
|
return {"$maxKey": 1}
|
||||||
|
if isinstance(obj, Timestamp):
|
||||||
|
return {"$timestamp": SON([("t", obj.time), ("i", obj.inc)])}
|
||||||
|
if isinstance(obj, Code):
|
||||||
|
if obj.scope is None:
|
||||||
|
return {"$code": str(obj)}
|
||||||
|
return SON([("$code", str(obj)), ("$scope", _json_convert(obj.scope, json_options))])
|
||||||
|
if isinstance(obj, Binary):
|
||||||
|
return _encode_binary(obj, obj.subtype, json_options)
|
||||||
|
if isinstance(obj, bytes):
|
||||||
|
return _encode_binary(obj, 0, json_options)
|
||||||
|
if isinstance(obj, uuid.UUID):
|
||||||
|
if json_options.strict_uuid:
|
||||||
|
binval = Binary.from_uuid(obj, uuid_representation=json_options.uuid_representation)
|
||||||
|
return _encode_binary(binval, binval.subtype, json_options)
|
||||||
|
else:
|
||||||
|
return {"$uuid": obj.hex}
|
||||||
|
if isinstance(obj, Decimal128):
|
||||||
|
return {"$numberDecimal": str(obj)}
|
||||||
|
if isinstance(obj, bool):
|
||||||
|
return obj
|
||||||
|
if json_options.json_mode == JSONMode.CANONICAL and isinstance(obj, int):
|
||||||
|
if -(2**31) <= obj < 2**31:
|
||||||
|
return {"$numberInt": str(obj)}
|
||||||
|
return {"$numberLong": str(obj)}
|
||||||
|
if json_options.json_mode != JSONMode.LEGACY and isinstance(obj, float):
|
||||||
|
if math.isnan(obj):
|
||||||
|
return {"$numberDouble": "NaN"}
|
||||||
|
elif math.isinf(obj):
|
||||||
|
representation = "Infinity" if obj > 0 else "-Infinity"
|
||||||
|
return {"$numberDouble": representation}
|
||||||
|
elif json_options.json_mode == JSONMode.CANONICAL:
|
||||||
|
# repr() will return the shortest string guaranteed to produce the
|
||||||
|
# original value, when float() is called on it.
|
||||||
|
return {"$numberDouble": str(repr(obj))}
|
||||||
|
raise TypeError("%r is not JSON serializable" % obj)
|
54
src/xtquant/xtbson/bson36/max_key.py
Normal file
54
src/xtquant/xtbson/bson36/max_key.py
Normal file
@ -0,0 +1,54 @@
|
|||||||
|
# Copyright 2010-present MongoDB, Inc.
|
||||||
|
#
|
||||||
|
# Licensed under the Apache License, Version 2.0 (the "License");
|
||||||
|
# you may not use this file except in compliance with the License.
|
||||||
|
# You may obtain a copy of the License at
|
||||||
|
#
|
||||||
|
# http://www.apache.org/licenses/LICENSE-2.0
|
||||||
|
#
|
||||||
|
# Unless required by applicable law or agreed to in writing, software
|
||||||
|
# distributed under the License is distributed on an "AS IS" BASIS,
|
||||||
|
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
|
||||||
|
# See the License for the specific language governing permissions and
|
||||||
|
# limitations under the License.
|
||||||
|
|
||||||
|
"""Representation for the MongoDB internal MaxKey type.
|
||||||
|
"""
|
||||||
|
|
||||||
|
|
||||||
|
class MaxKey(object):
|
||||||
|
"""MongoDB internal MaxKey type."""
|
||||||
|
|
||||||
|
__slots__ = ()
|
||||||
|
|
||||||
|
_type_marker = 127
|
||||||
|
|
||||||
|
def __getstate__(self):
|
||||||
|
return {}
|
||||||
|
|
||||||
|
def __setstate__(self, state):
|
||||||
|
pass
|
||||||
|
|
||||||
|
def __eq__(self, other):
|
||||||
|
return isinstance(other, MaxKey)
|
||||||
|
|
||||||
|
def __hash__(self):
|
||||||
|
return hash(self._type_marker)
|
||||||
|
|
||||||
|
def __ne__(self, other):
|
||||||
|
return not self == other
|
||||||
|
|
||||||
|
def __le__(self, other):
|
||||||
|
return isinstance(other, MaxKey)
|
||||||
|
|
||||||
|
def __lt__(self, dummy):
|
||||||
|
return False
|
||||||
|
|
||||||
|
def __ge__(self, dummy):
|
||||||
|
return True
|
||||||
|
|
||||||
|
def __gt__(self, other):
|
||||||
|
return not isinstance(other, MaxKey)
|
||||||
|
|
||||||
|
def __repr__(self):
|
||||||
|
return "MaxKey()"
|
54
src/xtquant/xtbson/bson36/min_key.py
Normal file
54
src/xtquant/xtbson/bson36/min_key.py
Normal file
@ -0,0 +1,54 @@
|
|||||||
|
# Copyright 2010-present MongoDB, Inc.
|
||||||
|
#
|
||||||
|
# Licensed under the Apache License, Version 2.0 (the "License");
|
||||||
|
# you may not use this file except in compliance with the License.
|
||||||
|
# You may obtain a copy of the License at
|
||||||
|
#
|
||||||
|
# http://www.apache.org/licenses/LICENSE-2.0
|
||||||
|
#
|
||||||
|
# Unless required by applicable law or agreed to in writing, software
|
||||||
|
# distributed under the License is distributed on an "AS IS" BASIS,
|
||||||
|
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
|
||||||
|
# See the License for the specific language governing permissions and
|
||||||
|
# limitations under the License.
|
||||||
|
|
||||||
|
"""Representation for the MongoDB internal MinKey type.
|
||||||
|
"""
|
||||||
|
|
||||||
|
|
||||||
|
class MinKey(object):
|
||||||
|
"""MongoDB internal MinKey type."""
|
||||||
|
|
||||||
|
__slots__ = ()
|
||||||
|
|
||||||
|
_type_marker = 255
|
||||||
|
|
||||||
|
def __getstate__(self):
|
||||||
|
return {}
|
||||||
|
|
||||||
|
def __setstate__(self, state):
|
||||||
|
pass
|
||||||
|
|
||||||
|
def __eq__(self, other):
|
||||||
|
return isinstance(other, MinKey)
|
||||||
|
|
||||||
|
def __hash__(self):
|
||||||
|
return hash(self._type_marker)
|
||||||
|
|
||||||
|
def __ne__(self, other):
|
||||||
|
return not self == other
|
||||||
|
|
||||||
|
def __le__(self, dummy):
|
||||||
|
return True
|
||||||
|
|
||||||
|
def __lt__(self, other):
|
||||||
|
return not isinstance(other, MinKey)
|
||||||
|
|
||||||
|
def __ge__(self, other):
|
||||||
|
return isinstance(other, MinKey)
|
||||||
|
|
||||||
|
def __gt__(self, dummy):
|
||||||
|
return False
|
||||||
|
|
||||||
|
def __repr__(self):
|
||||||
|
return "MinKey()"
|
285
src/xtquant/xtbson/bson36/objectid.py
Normal file
285
src/xtquant/xtbson/bson36/objectid.py
Normal file
@ -0,0 +1,285 @@
|
|||||||
|
# Copyright 2009-2015 MongoDB, Inc.
|
||||||
|
#
|
||||||
|
# Licensed under the Apache License, Version 2.0 (the "License");
|
||||||
|
# you may not use this file except in compliance with the License.
|
||||||
|
# You may obtain a copy of the License at
|
||||||
|
#
|
||||||
|
# http://www.apache.org/licenses/LICENSE-2.0
|
||||||
|
#
|
||||||
|
# Unless required by applicable law or agreed to in writing, software
|
||||||
|
# distributed under the License is distributed on an "AS IS" BASIS,
|
||||||
|
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
|
||||||
|
# See the License for the specific language governing permissions and
|
||||||
|
# limitations under the License.
|
||||||
|
|
||||||
|
"""Tools for working with MongoDB `ObjectIds
|
||||||
|
<http://dochub.mongodb.org/core/objectids>`_.
|
||||||
|
"""
|
||||||
|
|
||||||
|
import binascii
|
||||||
|
import calendar
|
||||||
|
import datetime
|
||||||
|
import os
|
||||||
|
import struct
|
||||||
|
import threading
|
||||||
|
import time
|
||||||
|
from random import SystemRandom
|
||||||
|
|
||||||
|
from .errors import InvalidId
|
||||||
|
from .tz_util import utc
|
||||||
|
|
||||||
|
_MAX_COUNTER_VALUE = 0xFFFFFF
|
||||||
|
|
||||||
|
|
||||||
|
def _raise_invalid_id(oid):
|
||||||
|
raise InvalidId(
|
||||||
|
"%r is not a valid ObjectId, it must be a 12-byte input"
|
||||||
|
" or a 24-character hex string" % oid
|
||||||
|
)
|
||||||
|
|
||||||
|
|
||||||
|
def _random_bytes():
|
||||||
|
"""Get the 5-byte random field of an ObjectId."""
|
||||||
|
return os.urandom(5)
|
||||||
|
|
||||||
|
|
||||||
|
class ObjectId(object):
    """A MongoDB ObjectId."""

    # Process id captured at import time; _random() compares against it to
    # detect forks and regenerate the random field in child processes.
    _pid = os.getpid()

    # 3-byte counter shared by every instance in this process; starts at a
    # random value and wraps at _MAX_COUNTER_VALUE.
    _inc = SystemRandom().randint(0, _MAX_COUNTER_VALUE)
    _inc_lock = threading.Lock()

    # 5-byte per-process random field (bytes 5-9 of every generated id).
    __random = _random_bytes()

    __slots__ = ("__id",)

    # BSON type number for ObjectId.
    _type_marker = 7

    def __init__(self, oid=None):
        """Initialize a new ObjectId.

        An ObjectId is a 12-byte unique identifier consisting of:

          - a 4-byte value representing the seconds since the Unix epoch,
          - a 5-byte random value,
          - a 3-byte counter, starting with a random value.

        By default, ``ObjectId()`` creates a new unique identifier. The
        optional parameter `oid` can be an :class:`ObjectId`, or any 12
        :class:`bytes`.

        For example, the 12 bytes b'foo-bar-quux' do not follow the ObjectId
        specification but they are acceptable input::

          >>> ObjectId(b'foo-bar-quux')
          ObjectId('666f6f2d6261722d71757578')

        `oid` can also be a :class:`str` of 24 hex digits::

          >>> ObjectId('0123456789ab0123456789ab')
          ObjectId('0123456789ab0123456789ab')

        Raises :class:`~bson.errors.InvalidId` if `oid` is not 12 bytes nor
        24 hex digits, or :class:`TypeError` if `oid` is not an accepted type.

        :Parameters:
          - `oid` (optional): a valid ObjectId.

        .. seealso:: The MongoDB documentation on `ObjectIds`_.

        .. versionchanged:: 3.8
           :class:`~bson.objectid.ObjectId` now implements the `ObjectID
           specification version 0.2
           <https://github.com/mongodb/specifications/blob/master/source/
           objectid.rst>`_.
        """
        if oid is None:
            self.__generate()
        elif isinstance(oid, bytes) and len(oid) == 12:
            # Any 12 bytes are accepted verbatim, valid-looking or not.
            self.__id = oid
        else:
            self.__validate(oid)

    @classmethod
    def from_datetime(cls, generation_time):
        """Create a dummy ObjectId instance with a specific generation time.

        This method is useful for doing range queries on a field
        containing :class:`ObjectId` instances.

        .. warning::
           It is not safe to insert a document containing an ObjectId
           generated using this method. This method deliberately
           eliminates the uniqueness guarantee that ObjectIds
           generally provide. ObjectIds generated with this method
           should be used exclusively in queries.

        `generation_time` will be converted to UTC. Naive datetime
        instances will be treated as though they already contain UTC.

        An example using this helper to get documents where ``"_id"``
        was generated before January 1, 2010 would be:

        >>> gen_time = datetime.datetime(2010, 1, 1)
        >>> dummy_id = ObjectId.from_datetime(gen_time)
        >>> result = collection.find({"_id": {"$lt": dummy_id}})

        :Parameters:
          - `generation_time`: :class:`~datetime.datetime` to be used
            as the generation time for the resulting ObjectId.
        """
        if generation_time.utcoffset() is not None:
            # Convert an aware datetime to UTC before extracting the epoch.
            generation_time = generation_time - generation_time.utcoffset()
        timestamp = calendar.timegm(generation_time.timetuple())
        # Timestamp fills the first 4 bytes; the remaining 8 are zeroed.
        oid = struct.pack(">I", int(timestamp)) + b"\x00\x00\x00\x00\x00\x00\x00\x00"
        return cls(oid)

    @classmethod
    def is_valid(cls, oid):
        """Checks if a `oid` string is valid or not.

        :Parameters:
          - `oid`: the object id to validate

        .. versionadded:: 2.3
        """
        if not oid:
            return False

        try:
            ObjectId(oid)
            return True
        except (InvalidId, TypeError):
            return False

    @classmethod
    def _random(cls):
        """Generate a 5-byte random number once per process."""
        pid = os.getpid()
        if pid != cls._pid:
            # Process forked since the field was generated: refresh it so
            # parent and child do not share the same random bytes.
            cls._pid = pid
            cls.__random = _random_bytes()
        return cls.__random

    def __generate(self):
        """Generate a new value for this ObjectId."""

        # 4 bytes current time
        oid = struct.pack(">I", int(time.time()))

        # 5 bytes random
        oid += ObjectId._random()

        # 3 bytes inc (low 3 bytes of the shared counter, under the lock)
        with ObjectId._inc_lock:
            oid += struct.pack(">I", ObjectId._inc)[1:4]
            ObjectId._inc = (ObjectId._inc + 1) % (_MAX_COUNTER_VALUE + 1)

        self.__id = oid

    def __validate(self, oid):
        """Validate and use the given id for this ObjectId.

        Raises TypeError if id is not an instance of
        (:class:`basestring` (:class:`str` or :class:`bytes`
        in python 3), ObjectId) and InvalidId if it is not a
        valid ObjectId.

        :Parameters:
          - `oid`: a valid ObjectId
        """
        if isinstance(oid, ObjectId):
            self.__id = oid.binary
        elif isinstance(oid, str):
            if len(oid) == 24:
                try:
                    self.__id = bytes.fromhex(oid)
                except (TypeError, ValueError):
                    _raise_invalid_id(oid)
            else:
                _raise_invalid_id(oid)
        else:
            raise TypeError(
                "id must be an instance of (bytes, str, ObjectId), " "not %s" % (type(oid),)
            )

    @property
    def binary(self):
        """12-byte binary representation of this ObjectId."""
        return self.__id

    @property
    def generation_time(self):
        """A :class:`datetime.datetime` instance representing the time of
        generation for this :class:`ObjectId`.

        The :class:`datetime.datetime` is timezone aware, and
        represents the generation time in UTC. It is precise to the
        second.
        """
        # First 4 bytes are the big-endian epoch seconds.
        timestamp = struct.unpack(">I", self.__id[0:4])[0]
        return datetime.datetime.fromtimestamp(timestamp, utc)

    def __getstate__(self):
        """return value of object for pickling.
        needed explicitly because __slots__() defined.
        """
        return self.__id

    def __setstate__(self, value):
        """explicit state set from pickling"""
        # Provide backwards compatability with OIDs
        # pickled with pymongo-1.9 or older.
        if isinstance(value, dict):
            oid = value["_ObjectId__id"]
        else:
            oid = value
        # ObjectIds pickled in python 2.x used `str` for __id.
        # In python 3.x this has to be converted to `bytes`
        # by encoding latin-1.
        if isinstance(oid, str):
            self.__id = oid.encode("latin-1")
        else:
            self.__id = oid

    def __str__(self):
        # 24-character lowercase hex form.
        return binascii.hexlify(self.__id).decode()

    def __repr__(self):
        return "ObjectId('%s')" % (str(self),)

    def __eq__(self, other):
        if isinstance(other, ObjectId):
            return self.__id == other.binary
        return NotImplemented

    def __ne__(self, other):
        if isinstance(other, ObjectId):
            return self.__id != other.binary
        return NotImplemented

    def __lt__(self, other):
        # Orderings compare the raw 12 bytes, so ids sort by creation time
        # first (big-endian timestamp prefix).
        if isinstance(other, ObjectId):
            return self.__id < other.binary
        return NotImplemented

    def __le__(self, other):
        if isinstance(other, ObjectId):
            return self.__id <= other.binary
        return NotImplemented

    def __gt__(self, other):
        if isinstance(other, ObjectId):
            return self.__id > other.binary
        return NotImplemented

    def __ge__(self, other):
        if isinstance(other, ObjectId):
            return self.__id >= other.binary
        return NotImplemented

    def __hash__(self):
        """Get a hash value for this :class:`ObjectId`."""
        return hash(self.__id)
|
174
src/xtquant/xtbson/bson36/raw_bson.py
Normal file
174
src/xtquant/xtbson/bson36/raw_bson.py
Normal file
@ -0,0 +1,174 @@
|
|||||||
|
# Copyright 2015-present MongoDB, Inc.
|
||||||
|
#
|
||||||
|
# Licensed under the Apache License, Version 2.0 (the "License");
|
||||||
|
# you may not use this file except in compliance with the License.
|
||||||
|
# You may obtain a copy of the License at
|
||||||
|
#
|
||||||
|
# http://www.apache.org/licenses/LICENSE-2.0
|
||||||
|
#
|
||||||
|
# Unless required by applicable law or agreed to in writing, software
|
||||||
|
# distributed under the License is distributed on an "AS IS" BASIS,
|
||||||
|
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
|
||||||
|
# See the License for the specific language governing permissions and
|
||||||
|
# limitations under the License.
|
||||||
|
|
||||||
|
"""Tools for representing raw BSON documents.
|
||||||
|
|
||||||
|
Inserting and Retrieving RawBSONDocuments
|
||||||
|
=========================================
|
||||||
|
|
||||||
|
Example: Moving a document between different databases/collections
|
||||||
|
|
||||||
|
.. doctest::
|
||||||
|
|
||||||
|
>>> import bson
|
||||||
|
>>> from pymongo import MongoClient
|
||||||
|
>>> from .raw_bson import RawBSONDocument
|
||||||
|
>>> client = MongoClient(document_class=RawBSONDocument)
|
||||||
|
>>> client.drop_database('db')
|
||||||
|
>>> client.drop_database('replica_db')
|
||||||
|
>>> db = client.db
|
||||||
|
>>> result = db.test.insert_many([{'_id': 1, 'a': 1},
|
||||||
|
... {'_id': 2, 'b': 1},
|
||||||
|
... {'_id': 3, 'c': 1},
|
||||||
|
... {'_id': 4, 'd': 1}])
|
||||||
|
>>> replica_db = client.replica_db
|
||||||
|
>>> for doc in db.test.find():
|
||||||
|
... print(f"raw document: {doc.raw}")
|
||||||
|
... print(f"decoded document: {bson.decode(doc.raw)}")
|
||||||
|
... result = replica_db.test.insert_one(doc)
|
||||||
|
raw document: b'...'
|
||||||
|
decoded document: {'_id': 1, 'a': 1}
|
||||||
|
raw document: b'...'
|
||||||
|
decoded document: {'_id': 2, 'b': 1}
|
||||||
|
raw document: b'...'
|
||||||
|
decoded document: {'_id': 3, 'c': 1}
|
||||||
|
raw document: b'...'
|
||||||
|
decoded document: {'_id': 4, 'd': 1}
|
||||||
|
|
||||||
|
For use cases like moving documents across different databases or writing binary
|
||||||
|
blobs to disk, using raw BSON documents provides better speed and avoids the
|
||||||
|
overhead of decoding or encoding BSON.
|
||||||
|
"""
|
||||||
|
|
||||||
|
from collections.abc import Mapping as _Mapping
|
||||||
|
|
||||||
|
from . import _get_object_size, _raw_to_dict
|
||||||
|
from .codec_options import _RAW_BSON_DOCUMENT_MARKER
|
||||||
|
from .codec_options import DEFAULT_CODEC_OPTIONS as DEFAULT
|
||||||
|
from .son import SON
|
||||||
|
|
||||||
|
|
||||||
|
class RawBSONDocument(_Mapping):
    """Representation for a MongoDB document that provides access to the raw
    BSON bytes that compose it.

    Only when a field is accessed or modified within the document does
    RawBSONDocument decode its bytes.
    """

    __slots__ = ("__raw", "__inflated_doc", "__codec_options")
    # Marker consulted by the BSON encoder so raw bytes are copied verbatim.
    _type_marker = _RAW_BSON_DOCUMENT_MARKER

    def __init__(self, bson_bytes, codec_options=None):
        """Create a new :class:`RawBSONDocument`

        :class:`RawBSONDocument` is a representation of a BSON document that
        provides access to the underlying raw BSON bytes. Only when a field is
        accessed or modified within the document does RawBSONDocument decode
        its bytes.

        :class:`RawBSONDocument` implements the ``Mapping`` abstract base
        class from the standard library so it can be used like a read-only
        ``dict``::

            >>> from . import encode
            >>> raw_doc = RawBSONDocument(encode({'_id': 'my_doc'}))
            >>> raw_doc.raw
            b'...'
            >>> raw_doc['_id']
            'my_doc'

        :Parameters:
          - `bson_bytes`: the BSON bytes that compose this document
          - `codec_options` (optional): An instance of
            :class:`~bson.codec_options.CodecOptions` whose ``document_class``
            must be :class:`RawBSONDocument`. The default is
            :attr:`DEFAULT_RAW_BSON_OPTIONS`.

        .. versionchanged:: 3.8
          :class:`RawBSONDocument` now validates that the ``bson_bytes``
          passed in represent a single bson document.

        .. versionchanged:: 3.5
          If a :class:`~bson.codec_options.CodecOptions` is passed in, its
          `document_class` must be :class:`RawBSONDocument`.
        """
        self.__raw = bson_bytes
        # Cache for the decoded mapping; populated lazily by __inflated.
        self.__inflated_doc = None
        # Can't default codec_options to DEFAULT_RAW_BSON_OPTIONS in signature,
        # it refers to this class RawBSONDocument.
        if codec_options is None:
            codec_options = DEFAULT_RAW_BSON_OPTIONS
        elif codec_options.document_class is not RawBSONDocument:
            raise TypeError(
                "RawBSONDocument cannot use CodecOptions with document "
                "class %s" % (codec_options.document_class,)
            )
        self.__codec_options = codec_options
        # Validate the bson object size.
        _get_object_size(bson_bytes, 0, len(bson_bytes))

    @property
    def raw(self):
        """The raw BSON bytes composing this document."""
        return self.__raw

    def items(self):
        """Lazily decode and iterate elements in this document."""
        return self.__inflated.items()

    @property
    def __inflated(self):
        # Decode on first access, then reuse the cached mapping.
        if self.__inflated_doc is None:
            # We already validated the object's size when this document was
            # created, so no need to do that again.
            # Use SON to preserve ordering of elements.
            self.__inflated_doc = _inflate_bson(self.__raw, self.__codec_options)
        return self.__inflated_doc

    def __getitem__(self, item):
        return self.__inflated[item]

    def __iter__(self):
        return iter(self.__inflated)

    def __len__(self):
        return len(self.__inflated)

    def __eq__(self, other):
        # Equality compares the raw bytes, not the decoded mapping.
        if isinstance(other, RawBSONDocument):
            return self.__raw == other.raw
        return NotImplemented

    def __repr__(self):
        return "RawBSONDocument(%r, codec_options=%r)" % (self.raw, self.__codec_options)
|
||||||
|
|
||||||
|
|
||||||
|
def _inflate_bson(bson_bytes, codec_options):
    """Inflates the top level fields of a BSON document.

    :Parameters:
      - `bson_bytes`: the BSON bytes that compose this document
      - `codec_options`: An instance of
        :class:`~bson.codec_options.CodecOptions` whose ``document_class``
        must be :class:`RawBSONDocument`.
    """
    # Skip the 4-byte length prefix and the trailing NUL terminator, and
    # decode into a SON instance so element ordering is preserved.
    accumulator = SON()
    payload_end = len(bson_bytes) - 1
    return _raw_to_dict(bson_bytes, 4, payload_end, codec_options, accumulator)
|
||||||
|
|
||||||
|
|
||||||
|
# Built after the class definition because CodecOptions needs
# RawBSONDocument as its document_class.
DEFAULT_RAW_BSON_OPTIONS = DEFAULT.with_options(document_class=RawBSONDocument)
"""The default :class:`~bson.codec_options.CodecOptions` for
:class:`RawBSONDocument`.
"""
|
131
src/xtquant/xtbson/bson36/regex.py
Normal file
131
src/xtquant/xtbson/bson36/regex.py
Normal file
@ -0,0 +1,131 @@
|
|||||||
|
# Copyright 2013-present MongoDB, Inc.
|
||||||
|
#
|
||||||
|
# Licensed under the Apache License, Version 2.0 (the "License");
|
||||||
|
# you may not use this file except in compliance with the License.
|
||||||
|
# You may obtain a copy of the License at
|
||||||
|
#
|
||||||
|
# http://www.apache.org/licenses/LICENSE-2.0
|
||||||
|
#
|
||||||
|
# Unless required by applicable law or agreed to in writing, software
|
||||||
|
# distributed under the License is distributed on an "AS IS" BASIS,
|
||||||
|
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
|
||||||
|
# See the License for the specific language governing permissions and
|
||||||
|
# limitations under the License.
|
||||||
|
|
||||||
|
"""Tools for representing MongoDB regular expressions.
|
||||||
|
"""
|
||||||
|
|
||||||
|
import re
|
||||||
|
|
||||||
|
from ._helpers import _getstate_slots, _setstate_slots
|
||||||
|
from .son import RE_TYPE
|
||||||
|
|
||||||
|
|
||||||
|
def str_flags_to_int(str_flags):
    """Translate a MongoDB flag string (e.g. ``"im"``) into a Python
    :mod:`re` flag bitmask.

    Unrecognized characters are ignored.
    """
    char_to_flag = {
        "i": re.IGNORECASE,
        "l": re.LOCALE,
        "m": re.MULTILINE,
        "s": re.DOTALL,
        "u": re.UNICODE,
        "x": re.VERBOSE,
    }
    flags = 0
    for char, flag in char_to_flag.items():
        if char in str_flags:
            flags |= flag
    return flags
|
||||||
|
|
||||||
|
|
||||||
|
class Regex(object):
    """BSON regular expression data."""

    __slots__ = ("pattern", "flags")

    __getstate__ = _getstate_slots
    __setstate__ = _setstate_slots

    _type_marker = 11

    @classmethod
    def from_native(cls, regex):
        """Convert a Python regular expression into a ``Regex`` instance.

        Note that in Python 3, a regular expression compiled from a
        :class:`str` has the ``re.UNICODE`` flag set. If it is undesirable
        to store this flag in a BSON regular expression, unset it first::

          >>> pattern = re.compile('.*')
          >>> regex = Regex.from_native(pattern)
          >>> regex.flags ^= re.UNICODE
          >>> db.collection.insert_one({'pattern': regex})

        :Parameters:
          - `regex`: A regular expression object from ``re.compile()``.

        .. warning::
           Python regular expressions use a different syntax and different
           set of flags than MongoDB, which uses `PCRE`_. A regular
           expression retrieved from the server may not compile in
           Python, or may match a different set of strings in Python than
           when used in a MongoDB query.

        .. _PCRE: http://www.pcre.org/
        """
        if not isinstance(regex, RE_TYPE):
            raise TypeError("regex must be a compiled regular expression, not %s" % type(regex))

        return Regex(regex.pattern, regex.flags)

    def __init__(self, pattern, flags=0):
        """BSON regular expression data.

        This class is useful to store and retrieve regular expressions that are
        incompatible with Python's regular expression dialect.

        :Parameters:
          - `pattern`: string
          - `flags`: (optional) an integer bitmask, or a string of flag
            characters like "im" for IGNORECASE and MULTILINE
        """
        if not isinstance(pattern, (str, bytes)):
            raise TypeError("pattern must be a string, not %s" % type(pattern))
        self.pattern = pattern

        # Flags may arrive either as an int bitmask or as a MongoDB flag
        # string; normalize the latter via str_flags_to_int.
        if isinstance(flags, int):
            self.flags = flags
        elif isinstance(flags, str):
            self.flags = str_flags_to_int(flags)
        else:
            raise TypeError("flags must be a string or int, not %s" % type(flags))

    def __eq__(self, other):
        if not isinstance(other, Regex):
            return NotImplemented
        return self.pattern == other.pattern and self.flags == other.flags

    # Mutable attribute-bearing value type: explicitly unhashable.
    __hash__ = None

    def __ne__(self, other):
        return not self == other

    def __repr__(self):
        return "Regex(%r, %r)" % (self.pattern, self.flags)

    def try_compile(self):
        """Compile this :class:`Regex` as a Python regular expression.

        .. warning::
           Python regular expressions use a different syntax and different
           set of flags than MongoDB, which uses `PCRE`_. A regular
           expression retrieved from the server may not compile in
           Python, or may match a different set of strings in Python than
           when used in a MongoDB query. :meth:`try_compile()` may raise
           :exc:`re.error`.

        .. _PCRE: http://www.pcre.org/
        """
        return re.compile(self.pattern, self.flags)
|
184
src/xtquant/xtbson/bson36/son.py
Normal file
184
src/xtquant/xtbson/bson36/son.py
Normal file
@ -0,0 +1,184 @@
|
|||||||
|
# Copyright 2009-present MongoDB, Inc.
|
||||||
|
#
|
||||||
|
# Licensed under the Apache License, Version 2.0 (the "License");
|
||||||
|
# you may not use this file except in compliance with the License.
|
||||||
|
# You may obtain a copy of the License at
|
||||||
|
#
|
||||||
|
# http://www.apache.org/licenses/LICENSE-2.0
|
||||||
|
#
|
||||||
|
# Unless required by applicable law or agreed to in writing, software
|
||||||
|
# distributed under the License is distributed on an "AS IS" BASIS,
|
||||||
|
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
|
||||||
|
# See the License for the specific language governing permissions and
|
||||||
|
# limitations under the License.
|
||||||
|
|
||||||
|
"""Tools for creating and manipulating SON, the Serialized Ocument Notation.
|
||||||
|
|
||||||
|
Regular dictionaries can be used instead of SON objects, but not when the order
|
||||||
|
of keys is important. A SON object can be used just like a normal Python
|
||||||
|
dictionary."""
|
||||||
|
|
||||||
|
import copy
|
||||||
|
import re
|
||||||
|
from collections.abc import Mapping as _Mapping
|
||||||
|
|
||||||
|
# This sort of sucks, but seems to be as good as it gets...
|
||||||
|
# This is essentially the same as re._pattern_type
|
||||||
|
RE_TYPE = type(re.compile(""))
|
||||||
|
|
||||||
|
|
||||||
|
class SON(dict):
    """SON data.

    A subclass of dict that maintains ordering of keys and provides a
    few extra niceties for dealing with SON. SON provides an API
    similar to collections.OrderedDict.
    """

    def __init__(self, data=None, **kwargs):
        # Parallel key list tracks insertion order for iteration and len().
        self.__keys = []
        dict.__init__(self)
        self.update(data)
        self.update(kwargs)

    def __new__(cls, *args, **kwargs):
        instance = super(SON, cls).__new__(cls, *args, **kwargs)
        instance.__keys = []
        return instance

    def __repr__(self):
        pairs = ", ".join("(%r, %r)" % (key, self[key]) for key in self.__keys)
        return "SON([%s])" % pairs

    def __setitem__(self, key, value):
        if key not in self.__keys:
            self.__keys.append(key)
        dict.__setitem__(self, key, value)

    def __delitem__(self, key):
        self.__keys.remove(key)
        dict.__delitem__(self, key)

    def copy(self):
        """Return a shallow :class:`SON` copy preserving key order."""
        duplicate = SON()
        duplicate.update(self)
        return duplicate

    # The helpers below mirror UserDict.DictMixin: lower-level definitions
    # (iteration, item access) support the higher-level conveniences.
    def __iter__(self):
        return iter(self.__keys)

    def has_key(self, key):
        """Legacy membership test; prefer ``key in son``."""
        return key in self.__keys

    def iterkeys(self):
        return self.__iter__()

    def itervalues(self):
        for _, value in self.items():
            yield value

    def values(self):
        return [value for _, value in self.items()]

    def clear(self):
        self.__keys = []
        super(SON, self).clear()

    def setdefault(self, key, default=None):
        try:
            return self[key]
        except KeyError:
            self[key] = default
            return default

    def pop(self, key, *args):
        if len(args) > 1:
            raise TypeError("pop expected at most 2 arguments, got " + repr(1 + len(args)))
        try:
            removed = self[key]
        except KeyError:
            if args:
                return args[0]
            raise
        del self[key]
        return removed

    def popitem(self):
        try:
            key, value = next(iter(self.items()))
        except StopIteration:
            raise KeyError("container is empty")
        del self[key]
        return (key, value)

    def update(self, other=None, **kwargs):
        # Accept, in order of preference: nothing, a mapping with items(),
        # an object exposing keys(), or an iterable of (key, value) pairs.
        if other is None:
            pass
        elif hasattr(other, "items"):
            for key, value in other.items():
                self[key] = value
        elif hasattr(other, "keys"):
            for key in other.keys():
                self[key] = other[key]
        else:
            for key, value in other:
                self[key] = value
        if kwargs:
            self.update(kwargs)

    def get(self, key, default=None):
        try:
            return self[key]
        except KeyError:
            return default

    def __eq__(self, other):
        """Comparison to another SON is order-sensitive while comparison to a
        regular dictionary is order-insensitive.
        """
        if isinstance(other, SON):
            return len(self) == len(other) and list(self.items()) == list(other.items())
        return self.to_dict() == other

    def __ne__(self, other):
        return not self == other

    def __len__(self):
        return len(self.__keys)

    def to_dict(self):
        """Convert a SON document to a normal Python dictionary instance.

        This is trickier than just *dict(...)* because it needs to be
        recursive.
        """

        def _convert(value):
            if isinstance(value, list):
                return [_convert(item) for item in value]
            if isinstance(value, _Mapping):
                return {key: _convert(item) for key, item in value.items()}
            return value

        return _convert(dict(self))

    def __deepcopy__(self, memo):
        clone = SON()
        self_id = id(self)
        if self_id in memo:
            return memo.get(self_id)
        memo[self_id] = clone
        for key, value in self.items():
            # Compiled regex objects are shared rather than deep-copied.
            if not isinstance(value, RE_TYPE):
                value = copy.deepcopy(value, memo)
            clone[key] = value
        return clone
|
122
src/xtquant/xtbson/bson36/timestamp.py
Normal file
122
src/xtquant/xtbson/bson36/timestamp.py
Normal file
@ -0,0 +1,122 @@
|
|||||||
|
# Copyright 2010-2015 MongoDB, Inc.
|
||||||
|
#
|
||||||
|
# Licensed under the Apache License, Version 2.0 (the "License");
|
||||||
|
# you may not use this file except in compliance with the License.
|
||||||
|
# You may obtain a copy of the License at
|
||||||
|
#
|
||||||
|
# http://www.apache.org/licenses/LICENSE-2.0
|
||||||
|
#
|
||||||
|
# Unless required by applicable law or agreed to in writing, software
|
||||||
|
# distributed under the License is distributed on an "AS IS" BASIS,
|
||||||
|
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
|
||||||
|
# See the License for the specific language governing permissions and
|
||||||
|
# limitations under the License.
|
||||||
|
|
||||||
|
"""Tools for representing MongoDB internal Timestamps.
|
||||||
|
"""
|
||||||
|
|
||||||
|
import calendar
|
||||||
|
import datetime
|
||||||
|
|
||||||
|
from ._helpers import _getstate_slots, _setstate_slots
|
||||||
|
from .tz_util import utc
|
||||||
|
|
||||||
|
UPPERBOUND = 4294967296
|
||||||
|
|
||||||
|
|
||||||
|
class Timestamp(object):
    """MongoDB internal timestamps used in the opLog."""

    __slots__ = ("__time", "__inc")

    __getstate__ = _getstate_slots
    __setstate__ = _setstate_slots

    _type_marker = 17

    def __init__(self, time, inc):
        """Create a new :class:`Timestamp`.

        This class is only for use with the MongoDB opLog. If you need
        to store a regular timestamp, please use a
        :class:`~datetime.datetime`.

        Raises :class:`TypeError` if `time` is not an instance of
        :class: `int` or :class:`~datetime.datetime`, or `inc` is not
        an instance of :class:`int`. Raises :class:`ValueError` if
        `time` or `inc` is not in [0, 2**32).

        :Parameters:
          - `time`: time in seconds since epoch UTC, or a naive UTC
            :class:`~datetime.datetime`, or an aware
            :class:`~datetime.datetime`
          - `inc`: the incrementing counter
        """
        # A datetime is normalized to UTC epoch seconds first.
        if isinstance(time, datetime.datetime):
            offset = time.utcoffset()
            if offset is not None:
                time = time - offset
            time = int(calendar.timegm(time.timetuple()))
        if not isinstance(time, int):
            raise TypeError("time must be an instance of int")
        if not isinstance(inc, int):
            raise TypeError("inc must be an instance of int")
        if not 0 <= time < UPPERBOUND:
            raise ValueError("time must be contained in [0, 2**32)")
        if not 0 <= inc < UPPERBOUND:
            raise ValueError("inc must be contained in [0, 2**32)")

        self.__time = time
        self.__inc = inc

    @property
    def time(self):
        """Get the time portion of this :class:`Timestamp`."""
        return self.__time

    @property
    def inc(self):
        """Get the inc portion of this :class:`Timestamp`."""
        return self.__inc

    def __eq__(self, other):
        if not isinstance(other, Timestamp):
            return NotImplemented
        return self.__time == other.time and self.__inc == other.inc

    def __hash__(self):
        return hash(self.time) ^ hash(self.inc)

    def __ne__(self, other):
        return not self == other

    # Ordering is lexicographic on the (time, inc) pair.
    def __lt__(self, other):
        if not isinstance(other, Timestamp):
            return NotImplemented
        return (self.time, self.inc) < (other.time, other.inc)

    def __le__(self, other):
        if not isinstance(other, Timestamp):
            return NotImplemented
        return (self.time, self.inc) <= (other.time, other.inc)

    def __gt__(self, other):
        if not isinstance(other, Timestamp):
            return NotImplemented
        return (self.time, self.inc) > (other.time, other.inc)

    def __ge__(self, other):
        if not isinstance(other, Timestamp):
            return NotImplemented
        return (self.time, self.inc) >= (other.time, other.inc)

    def __repr__(self):
        return "Timestamp(%s, %s)" % (self.__time, self.__inc)

    def as_datetime(self):
        """Return a :class:`~datetime.datetime` instance corresponding
        to the time portion of this :class:`Timestamp`.

        The returned datetime's timezone is UTC.
        """
        return datetime.datetime.fromtimestamp(self.__time, utc)
|
51
src/xtquant/xtbson/bson36/tz_util.py
Normal file
51
src/xtquant/xtbson/bson36/tz_util.py
Normal file
@ -0,0 +1,51 @@
|
|||||||
|
# Copyright 2010-2015 MongoDB, Inc.
|
||||||
|
#
|
||||||
|
# Licensed under the Apache License, Version 2.0 (the "License");
|
||||||
|
# you may not use this file except in compliance with the License.
|
||||||
|
# You may obtain a copy of the License at
|
||||||
|
#
|
||||||
|
# http://www.apache.org/licenses/LICENSE-2.0
|
||||||
|
#
|
||||||
|
# Unless required by applicable law or agreed to in writing, software
|
||||||
|
# distributed under the License is distributed on an "AS IS" BASIS,
|
||||||
|
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
|
||||||
|
# See the License for the specific language governing permissions and
|
||||||
|
# limitations under the License.
|
||||||
|
|
||||||
|
"""Timezone related utilities for BSON."""
|
||||||
|
|
||||||
|
from datetime import timedelta, tzinfo
|
||||||
|
|
||||||
|
ZERO = timedelta(0)
|
||||||
|
|
||||||
|
|
||||||
|
class FixedOffset(tzinfo):
|
||||||
|
"""Fixed offset timezone, in minutes east from UTC.
|
||||||
|
|
||||||
|
Implementation based from the Python `standard library documentation
|
||||||
|
<http://docs.python.org/library/datetime.html#tzinfo-objects>`_.
|
||||||
|
Defining __getinitargs__ enables pickling / copying.
|
||||||
|
"""
|
||||||
|
|
||||||
|
def __init__(self, offset, name):
|
||||||
|
if isinstance(offset, timedelta):
|
||||||
|
self.__offset = offset
|
||||||
|
else:
|
||||||
|
self.__offset = timedelta(minutes=offset)
|
||||||
|
self.__name = name
|
||||||
|
|
||||||
|
def __getinitargs__(self):
|
||||||
|
return self.__offset, self.__name
|
||||||
|
|
||||||
|
def utcoffset(self, dt):
|
||||||
|
return self.__offset
|
||||||
|
|
||||||
|
def tzname(self, dt):
|
||||||
|
return self.__name
|
||||||
|
|
||||||
|
def dst(self, dt):
|
||||||
|
return ZERO
|
||||||
|
|
||||||
|
|
||||||
|
utc = FixedOffset(0, "UTC")
|
||||||
|
"""Fixed offset timezone representing UTC."""
|
1404
src/xtquant/xtbson/bson37/__init__.py
Normal file
1404
src/xtquant/xtbson/bson37/__init__.py
Normal file
File diff suppressed because it is too large
Load Diff
41
src/xtquant/xtbson/bson37/_helpers.py
Normal file
41
src/xtquant/xtbson/bson37/_helpers.py
Normal file
@ -0,0 +1,41 @@
|
|||||||
|
# Copyright 2021-present MongoDB, Inc.
|
||||||
|
#
|
||||||
|
# Licensed under the Apache License, Version 2.0 (the "License");
|
||||||
|
# you may not use this file except in compliance with the License.
|
||||||
|
# You may obtain a copy of the License at
|
||||||
|
#
|
||||||
|
# http://www.apache.org/licenses/LICENSE-2.0
|
||||||
|
#
|
||||||
|
# Unless required by applicable law or agreed to in writing, software
|
||||||
|
# distributed under the License is distributed on an "AS IS" BASIS,
|
||||||
|
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
|
||||||
|
# See the License for the specific language governing permissions and
|
||||||
|
# limitations under the License.
|
||||||
|
|
||||||
|
"""Setstate and getstate functions for objects with __slots__, allowing
|
||||||
|
compatibility with default pickling protocol
|
||||||
|
"""
|
||||||
|
from typing import Any, Mapping
|
||||||
|
|
||||||
|
|
||||||
|
def _setstate_slots(self: Any, state: Any) -> None:
|
||||||
|
for slot, value in state.items():
|
||||||
|
setattr(self, slot, value)
|
||||||
|
|
||||||
|
|
||||||
|
def _mangle_name(name: str, prefix: str) -> str:
|
||||||
|
if name.startswith("__"):
|
||||||
|
prefix = "_" + prefix
|
||||||
|
else:
|
||||||
|
prefix = ""
|
||||||
|
return prefix + name
|
||||||
|
|
||||||
|
|
||||||
|
def _getstate_slots(self: Any) -> Mapping[Any, Any]:
|
||||||
|
prefix = self.__class__.__name__
|
||||||
|
ret = dict()
|
||||||
|
for name in self.__slots__:
|
||||||
|
mangled_name = _mangle_name(name, prefix)
|
||||||
|
if hasattr(self, mangled_name):
|
||||||
|
ret[mangled_name] = getattr(self, mangled_name)
|
||||||
|
return ret
|
364
src/xtquant/xtbson/bson37/binary.py
Normal file
364
src/xtquant/xtbson/bson37/binary.py
Normal file
@ -0,0 +1,364 @@
|
|||||||
|
# Copyright 2009-present MongoDB, Inc.
|
||||||
|
#
|
||||||
|
# Licensed under the Apache License, Version 2.0 (the "License");
|
||||||
|
# you may not use this file except in compliance with the License.
|
||||||
|
# You may obtain a copy of the License at
|
||||||
|
#
|
||||||
|
# http://www.apache.org/licenses/LICENSE-2.0
|
||||||
|
#
|
||||||
|
# Unless required by applicable law or agreed to in writing, software
|
||||||
|
# distributed under the License is distributed on an "AS IS" BASIS,
|
||||||
|
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
|
||||||
|
# See the License for the specific language governing permissions and
|
||||||
|
# limitations under the License.
|
||||||
|
|
||||||
|
from typing import TYPE_CHECKING, Any, Tuple, Type, Union
|
||||||
|
from uuid import UUID
|
||||||
|
|
||||||
|
"""Tools for representing BSON binary data.
|
||||||
|
"""
|
||||||
|
|
||||||
|
BINARY_SUBTYPE = 0
|
||||||
|
"""BSON binary subtype for binary data.
|
||||||
|
|
||||||
|
This is the default subtype for binary data.
|
||||||
|
"""
|
||||||
|
|
||||||
|
FUNCTION_SUBTYPE = 1
|
||||||
|
"""BSON binary subtype for functions.
|
||||||
|
"""
|
||||||
|
|
||||||
|
OLD_BINARY_SUBTYPE = 2
|
||||||
|
"""Old BSON binary subtype for binary data.
|
||||||
|
|
||||||
|
This is the old default subtype, the current
|
||||||
|
default is :data:`BINARY_SUBTYPE`.
|
||||||
|
"""
|
||||||
|
|
||||||
|
OLD_UUID_SUBTYPE = 3
|
||||||
|
"""Old BSON binary subtype for a UUID.
|
||||||
|
|
||||||
|
:class:`uuid.UUID` instances will automatically be encoded
|
||||||
|
by :mod:`bson` using this subtype when using
|
||||||
|
:data:`UuidRepresentation.PYTHON_LEGACY`,
|
||||||
|
:data:`UuidRepresentation.JAVA_LEGACY`, or
|
||||||
|
:data:`UuidRepresentation.CSHARP_LEGACY`.
|
||||||
|
|
||||||
|
.. versionadded:: 2.1
|
||||||
|
"""
|
||||||
|
|
||||||
|
UUID_SUBTYPE = 4
|
||||||
|
"""BSON binary subtype for a UUID.
|
||||||
|
|
||||||
|
This is the standard BSON binary subtype for UUIDs.
|
||||||
|
:class:`uuid.UUID` instances will automatically be encoded
|
||||||
|
by :mod:`bson` using this subtype when using
|
||||||
|
:data:`UuidRepresentation.STANDARD`.
|
||||||
|
"""
|
||||||
|
|
||||||
|
|
||||||
|
if TYPE_CHECKING:
|
||||||
|
from array import array as _array
|
||||||
|
from mmap import mmap as _mmap
|
||||||
|
|
||||||
|
|
||||||
|
class UuidRepresentation:
|
||||||
|
UNSPECIFIED = 0
|
||||||
|
"""An unspecified UUID representation.
|
||||||
|
|
||||||
|
When configured, :class:`uuid.UUID` instances will **not** be
|
||||||
|
automatically encoded to or decoded from :class:`~bson.binary.Binary`.
|
||||||
|
When encoding a :class:`uuid.UUID` instance, an error will be raised.
|
||||||
|
To encode a :class:`uuid.UUID` instance with this configuration, it must
|
||||||
|
be wrapped in the :class:`~bson.binary.Binary` class by the application
|
||||||
|
code. When decoding a BSON binary field with a UUID subtype, a
|
||||||
|
:class:`~bson.binary.Binary` instance will be returned instead of a
|
||||||
|
:class:`uuid.UUID` instance.
|
||||||
|
|
||||||
|
See :ref:`unspecified-representation-details` for details.
|
||||||
|
|
||||||
|
.. versionadded:: 3.11
|
||||||
|
"""
|
||||||
|
|
||||||
|
STANDARD = UUID_SUBTYPE
|
||||||
|
"""The standard UUID representation.
|
||||||
|
|
||||||
|
:class:`uuid.UUID` instances will automatically be encoded to
|
||||||
|
and decoded from . binary, using RFC-4122 byte order with
|
||||||
|
binary subtype :data:`UUID_SUBTYPE`.
|
||||||
|
|
||||||
|
See :ref:`standard-representation-details` for details.
|
||||||
|
|
||||||
|
.. versionadded:: 3.11
|
||||||
|
"""
|
||||||
|
|
||||||
|
PYTHON_LEGACY = OLD_UUID_SUBTYPE
|
||||||
|
"""The Python legacy UUID representation.
|
||||||
|
|
||||||
|
:class:`uuid.UUID` instances will automatically be encoded to
|
||||||
|
and decoded from . binary, using RFC-4122 byte order with
|
||||||
|
binary subtype :data:`OLD_UUID_SUBTYPE`.
|
||||||
|
|
||||||
|
See :ref:`python-legacy-representation-details` for details.
|
||||||
|
|
||||||
|
.. versionadded:: 3.11
|
||||||
|
"""
|
||||||
|
|
||||||
|
JAVA_LEGACY = 5
|
||||||
|
"""The Java legacy UUID representation.
|
||||||
|
|
||||||
|
:class:`uuid.UUID` instances will automatically be encoded to
|
||||||
|
and decoded from . binary subtype :data:`OLD_UUID_SUBTYPE`,
|
||||||
|
using the Java driver's legacy byte order.
|
||||||
|
|
||||||
|
See :ref:`java-legacy-representation-details` for details.
|
||||||
|
|
||||||
|
.. versionadded:: 3.11
|
||||||
|
"""
|
||||||
|
|
||||||
|
CSHARP_LEGACY = 6
|
||||||
|
"""The C#/.net legacy UUID representation.
|
||||||
|
|
||||||
|
:class:`uuid.UUID` instances will automatically be encoded to
|
||||||
|
and decoded from . binary subtype :data:`OLD_UUID_SUBTYPE`,
|
||||||
|
using the C# driver's legacy byte order.
|
||||||
|
|
||||||
|
See :ref:`csharp-legacy-representation-details` for details.
|
||||||
|
|
||||||
|
.. versionadded:: 3.11
|
||||||
|
"""
|
||||||
|
|
||||||
|
|
||||||
|
STANDARD = UuidRepresentation.STANDARD
|
||||||
|
"""An alias for :data:`UuidRepresentation.STANDARD`.
|
||||||
|
|
||||||
|
.. versionadded:: 3.0
|
||||||
|
"""
|
||||||
|
|
||||||
|
PYTHON_LEGACY = UuidRepresentation.PYTHON_LEGACY
|
||||||
|
"""An alias for :data:`UuidRepresentation.PYTHON_LEGACY`.
|
||||||
|
|
||||||
|
.. versionadded:: 3.0
|
||||||
|
"""
|
||||||
|
|
||||||
|
JAVA_LEGACY = UuidRepresentation.JAVA_LEGACY
|
||||||
|
"""An alias for :data:`UuidRepresentation.JAVA_LEGACY`.
|
||||||
|
|
||||||
|
.. versionchanged:: 3.6
|
||||||
|
BSON binary subtype 4 is decoded using RFC-4122 byte order.
|
||||||
|
.. versionadded:: 2.3
|
||||||
|
"""
|
||||||
|
|
||||||
|
CSHARP_LEGACY = UuidRepresentation.CSHARP_LEGACY
|
||||||
|
"""An alias for :data:`UuidRepresentation.CSHARP_LEGACY`.
|
||||||
|
|
||||||
|
.. versionchanged:: 3.6
|
||||||
|
BSON binary subtype 4 is decoded using RFC-4122 byte order.
|
||||||
|
.. versionadded:: 2.3
|
||||||
|
"""
|
||||||
|
|
||||||
|
ALL_UUID_SUBTYPES = (OLD_UUID_SUBTYPE, UUID_SUBTYPE)
|
||||||
|
ALL_UUID_REPRESENTATIONS = (
|
||||||
|
UuidRepresentation.UNSPECIFIED,
|
||||||
|
UuidRepresentation.STANDARD,
|
||||||
|
UuidRepresentation.PYTHON_LEGACY,
|
||||||
|
UuidRepresentation.JAVA_LEGACY,
|
||||||
|
UuidRepresentation.CSHARP_LEGACY,
|
||||||
|
)
|
||||||
|
UUID_REPRESENTATION_NAMES = {
|
||||||
|
UuidRepresentation.UNSPECIFIED: "UuidRepresentation.UNSPECIFIED",
|
||||||
|
UuidRepresentation.STANDARD: "UuidRepresentation.STANDARD",
|
||||||
|
UuidRepresentation.PYTHON_LEGACY: "UuidRepresentation.PYTHON_LEGACY",
|
||||||
|
UuidRepresentation.JAVA_LEGACY: "UuidRepresentation.JAVA_LEGACY",
|
||||||
|
UuidRepresentation.CSHARP_LEGACY: "UuidRepresentation.CSHARP_LEGACY",
|
||||||
|
}
|
||||||
|
|
||||||
|
MD5_SUBTYPE = 5
|
||||||
|
"""BSON binary subtype for an MD5 hash.
|
||||||
|
"""
|
||||||
|
|
||||||
|
COLUMN_SUBTYPE = 7
|
||||||
|
"""BSON binary subtype for columns.
|
||||||
|
|
||||||
|
.. versionadded:: 4.0
|
||||||
|
"""
|
||||||
|
|
||||||
|
USER_DEFINED_SUBTYPE = 128
|
||||||
|
"""BSON binary subtype for any user defined structure.
|
||||||
|
"""
|
||||||
|
|
||||||
|
|
||||||
|
class Binary(bytes):
|
||||||
|
"""Representation of BSON binary data.
|
||||||
|
|
||||||
|
This is necessary because we want to represent Python strings as
|
||||||
|
the BSON string type. We need to wrap binary data so we can tell
|
||||||
|
the difference between what should be considered binary data and
|
||||||
|
what should be considered a string when we encode to BSON.
|
||||||
|
|
||||||
|
Raises TypeError if `data` is not an instance of :class:`bytes`
|
||||||
|
(:class:`str` in python 2) or `subtype` is not an instance of
|
||||||
|
:class:`int`. Raises ValueError if `subtype` is not in [0, 256).
|
||||||
|
|
||||||
|
.. note::
|
||||||
|
In python 3 instances of Binary with subtype 0 will be decoded
|
||||||
|
directly to :class:`bytes`.
|
||||||
|
|
||||||
|
:Parameters:
|
||||||
|
- `data`: the binary data to represent. Can be any bytes-like type
|
||||||
|
that implements the buffer protocol.
|
||||||
|
- `subtype` (optional): the `binary subtype
|
||||||
|
<https://bsonspec.org/spec.html>`_
|
||||||
|
to use
|
||||||
|
|
||||||
|
.. versionchanged:: 3.9
|
||||||
|
Support any bytes-like type that implements the buffer protocol.
|
||||||
|
"""
|
||||||
|
|
||||||
|
_type_marker = 5
|
||||||
|
__subtype: int
|
||||||
|
|
||||||
|
def __new__(
|
||||||
|
cls: Type["Binary"],
|
||||||
|
data: Union[memoryview, bytes, "_mmap", "_array"],
|
||||||
|
subtype: int = BINARY_SUBTYPE,
|
||||||
|
) -> "Binary":
|
||||||
|
if not isinstance(subtype, int):
|
||||||
|
raise TypeError("subtype must be an instance of int")
|
||||||
|
if subtype >= 256 or subtype < 0:
|
||||||
|
raise ValueError("subtype must be contained in [0, 256)")
|
||||||
|
# Support any type that implements the buffer protocol.
|
||||||
|
self = bytes.__new__(cls, memoryview(data).tobytes())
|
||||||
|
self.__subtype = subtype
|
||||||
|
return self
|
||||||
|
|
||||||
|
@classmethod
|
||||||
|
def from_uuid(
|
||||||
|
cls: Type["Binary"], uuid: UUID, uuid_representation: int = UuidRepresentation.STANDARD
|
||||||
|
) -> "Binary":
|
||||||
|
"""Create a BSON Binary object from a Python UUID.
|
||||||
|
|
||||||
|
Creates a :class:`~bson.binary.Binary` object from a
|
||||||
|
:class:`uuid.UUID` instance. Assumes that the native
|
||||||
|
:class:`uuid.UUID` instance uses the byte-order implied by the
|
||||||
|
provided ``uuid_representation``.
|
||||||
|
|
||||||
|
Raises :exc:`TypeError` if `uuid` is not an instance of
|
||||||
|
:class:`~uuid.UUID`.
|
||||||
|
|
||||||
|
:Parameters:
|
||||||
|
- `uuid`: A :class:`uuid.UUID` instance.
|
||||||
|
- `uuid_representation`: A member of
|
||||||
|
:class:`~bson.binary.UuidRepresentation`. Default:
|
||||||
|
:const:`~bson.binary.UuidRepresentation.STANDARD`.
|
||||||
|
See :ref:`handling-uuid-data-example` for details.
|
||||||
|
|
||||||
|
.. versionadded:: 3.11
|
||||||
|
"""
|
||||||
|
if not isinstance(uuid, UUID):
|
||||||
|
raise TypeError("uuid must be an instance of uuid.UUID")
|
||||||
|
|
||||||
|
if uuid_representation not in ALL_UUID_REPRESENTATIONS:
|
||||||
|
raise ValueError(
|
||||||
|
"uuid_representation must be a value from .binary.UuidRepresentation"
|
||||||
|
)
|
||||||
|
|
||||||
|
if uuid_representation == UuidRepresentation.UNSPECIFIED:
|
||||||
|
raise ValueError(
|
||||||
|
"cannot encode native uuid.UUID with "
|
||||||
|
"UuidRepresentation.UNSPECIFIED. UUIDs can be manually "
|
||||||
|
"converted to bson.Binary instances using "
|
||||||
|
"bson.Binary.from_uuid() or a different UuidRepresentation "
|
||||||
|
"can be configured. See the documentation for "
|
||||||
|
"UuidRepresentation for more information."
|
||||||
|
)
|
||||||
|
|
||||||
|
subtype = OLD_UUID_SUBTYPE
|
||||||
|
if uuid_representation == UuidRepresentation.PYTHON_LEGACY:
|
||||||
|
payload = uuid.bytes
|
||||||
|
elif uuid_representation == UuidRepresentation.JAVA_LEGACY:
|
||||||
|
from_uuid = uuid.bytes
|
||||||
|
payload = from_uuid[0:8][::-1] + from_uuid[8:16][::-1]
|
||||||
|
elif uuid_representation == UuidRepresentation.CSHARP_LEGACY:
|
||||||
|
payload = uuid.bytes_le
|
||||||
|
else:
|
||||||
|
# uuid_representation == UuidRepresentation.STANDARD
|
||||||
|
subtype = UUID_SUBTYPE
|
||||||
|
payload = uuid.bytes
|
||||||
|
|
||||||
|
return cls(payload, subtype)
|
||||||
|
|
||||||
|
def as_uuid(self, uuid_representation: int = UuidRepresentation.STANDARD) -> UUID:
|
||||||
|
"""Create a Python UUID from this BSON Binary object.
|
||||||
|
|
||||||
|
Decodes this binary object as a native :class:`uuid.UUID` instance
|
||||||
|
with the provided ``uuid_representation``.
|
||||||
|
|
||||||
|
Raises :exc:`ValueError` if this :class:`~bson.binary.Binary` instance
|
||||||
|
does not contain a UUID.
|
||||||
|
|
||||||
|
:Parameters:
|
||||||
|
- `uuid_representation`: A member of
|
||||||
|
:class:`~bson.binary.UuidRepresentation`. Default:
|
||||||
|
:const:`~bson.binary.UuidRepresentation.STANDARD`.
|
||||||
|
See :ref:`handling-uuid-data-example` for details.
|
||||||
|
|
||||||
|
.. versionadded:: 3.11
|
||||||
|
"""
|
||||||
|
if self.subtype not in ALL_UUID_SUBTYPES:
|
||||||
|
raise ValueError("cannot decode subtype %s as a uuid" % (self.subtype,))
|
||||||
|
|
||||||
|
if uuid_representation not in ALL_UUID_REPRESENTATIONS:
|
||||||
|
raise ValueError(
|
||||||
|
"uuid_representation must be a value from .binary.UuidRepresentation"
|
||||||
|
)
|
||||||
|
|
||||||
|
if uuid_representation == UuidRepresentation.UNSPECIFIED:
|
||||||
|
raise ValueError("uuid_representation cannot be UNSPECIFIED")
|
||||||
|
elif uuid_representation == UuidRepresentation.PYTHON_LEGACY:
|
||||||
|
if self.subtype == OLD_UUID_SUBTYPE:
|
||||||
|
return UUID(bytes=self)
|
||||||
|
elif uuid_representation == UuidRepresentation.JAVA_LEGACY:
|
||||||
|
if self.subtype == OLD_UUID_SUBTYPE:
|
||||||
|
return UUID(bytes=self[0:8][::-1] + self[8:16][::-1])
|
||||||
|
elif uuid_representation == UuidRepresentation.CSHARP_LEGACY:
|
||||||
|
if self.subtype == OLD_UUID_SUBTYPE:
|
||||||
|
return UUID(bytes_le=self)
|
||||||
|
else:
|
||||||
|
# uuid_representation == UuidRepresentation.STANDARD
|
||||||
|
if self.subtype == UUID_SUBTYPE:
|
||||||
|
return UUID(bytes=self)
|
||||||
|
|
||||||
|
raise ValueError(
|
||||||
|
"cannot decode subtype %s to %s"
|
||||||
|
% (self.subtype, UUID_REPRESENTATION_NAMES[uuid_representation])
|
||||||
|
)
|
||||||
|
|
||||||
|
@property
|
||||||
|
def subtype(self) -> int:
|
||||||
|
"""Subtype of this binary data."""
|
||||||
|
return self.__subtype
|
||||||
|
|
||||||
|
def __getnewargs__(self) -> Tuple[bytes, int]: # type: ignore[override]
|
||||||
|
# Work around http://bugs.python.org/issue7382
|
||||||
|
data = super(Binary, self).__getnewargs__()[0]
|
||||||
|
if not isinstance(data, bytes):
|
||||||
|
data = data.encode("latin-1")
|
||||||
|
return data, self.__subtype
|
||||||
|
|
||||||
|
def __eq__(self, other: Any) -> bool:
|
||||||
|
if isinstance(other, Binary):
|
||||||
|
return (self.__subtype, bytes(self)) == (other.subtype, bytes(other))
|
||||||
|
# We don't return NotImplemented here because if we did then
|
||||||
|
# Binary("foo") == "foo" would return True, since Binary is a
|
||||||
|
# subclass of str...
|
||||||
|
return False
|
||||||
|
|
||||||
|
def __hash__(self) -> int:
|
||||||
|
return super(Binary, self).__hash__() ^ hash(self.__subtype)
|
||||||
|
|
||||||
|
def __ne__(self, other: Any) -> bool:
|
||||||
|
return not self == other
|
||||||
|
|
||||||
|
def __repr__(self):
|
||||||
|
return "Binary(%s, %s)" % (bytes.__repr__(self), self.__subtype)
|
101
src/xtquant/xtbson/bson37/code.py
Normal file
101
src/xtquant/xtbson/bson37/code.py
Normal file
@ -0,0 +1,101 @@
|
|||||||
|
# Copyright 2009-present MongoDB, Inc.
|
||||||
|
#
|
||||||
|
# Licensed under the Apache License, Version 2.0 (the "License");
|
||||||
|
# you may not use this file except in compliance with the License.
|
||||||
|
# You may obtain a copy of the License at
|
||||||
|
#
|
||||||
|
# http://www.apache.org/licenses/LICENSE-2.0
|
||||||
|
#
|
||||||
|
# Unless required by applicable law or agreed to in writing, software
|
||||||
|
# distributed under the License is distributed on an "AS IS" BASIS,
|
||||||
|
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
|
||||||
|
# See the License for the specific language governing permissions and
|
||||||
|
# limitations under the License.
|
||||||
|
|
||||||
|
"""Tools for representing JavaScript code in BSON.
|
||||||
|
"""
|
||||||
|
|
||||||
|
from collections.abc import Mapping as _Mapping
|
||||||
|
from typing import Any, Mapping, Optional, Type, Union
|
||||||
|
|
||||||
|
|
||||||
|
class Code(str):
|
||||||
|
"""BSON's JavaScript code type.
|
||||||
|
|
||||||
|
Raises :class:`TypeError` if `code` is not an instance of
|
||||||
|
:class:`basestring` (:class:`str` in python 3) or `scope`
|
||||||
|
is not ``None`` or an instance of :class:`dict`.
|
||||||
|
|
||||||
|
Scope variables can be set by passing a dictionary as the `scope`
|
||||||
|
argument or by using keyword arguments. If a variable is set as a
|
||||||
|
keyword argument it will override any setting for that variable in
|
||||||
|
the `scope` dictionary.
|
||||||
|
|
||||||
|
:Parameters:
|
||||||
|
- `code`: A string containing JavaScript code to be evaluated or another
|
||||||
|
instance of Code. In the latter case, the scope of `code` becomes this
|
||||||
|
Code's :attr:`scope`.
|
||||||
|
- `scope` (optional): dictionary representing the scope in which
|
||||||
|
`code` should be evaluated - a mapping from identifiers (as
|
||||||
|
strings) to values. Defaults to ``None``. This is applied after any
|
||||||
|
scope associated with a given `code` above.
|
||||||
|
- `**kwargs` (optional): scope variables can also be passed as
|
||||||
|
keyword arguments. These are applied after `scope` and `code`.
|
||||||
|
|
||||||
|
.. versionchanged:: 3.4
|
||||||
|
The default value for :attr:`scope` is ``None`` instead of ``{}``.
|
||||||
|
|
||||||
|
"""
|
||||||
|
|
||||||
|
_type_marker = 13
|
||||||
|
__scope: Union[Mapping[str, Any], None]
|
||||||
|
|
||||||
|
def __new__(
|
||||||
|
cls: Type["Code"],
|
||||||
|
code: Union[str, "Code"],
|
||||||
|
scope: Optional[Mapping[str, Any]] = None,
|
||||||
|
**kwargs: Any
|
||||||
|
) -> "Code":
|
||||||
|
if not isinstance(code, str):
|
||||||
|
raise TypeError("code must be an instance of str")
|
||||||
|
|
||||||
|
self = str.__new__(cls, code)
|
||||||
|
|
||||||
|
try:
|
||||||
|
self.__scope = code.scope # type: ignore
|
||||||
|
except AttributeError:
|
||||||
|
self.__scope = None
|
||||||
|
|
||||||
|
if scope is not None:
|
||||||
|
if not isinstance(scope, _Mapping):
|
||||||
|
raise TypeError("scope must be an instance of dict")
|
||||||
|
if self.__scope is not None:
|
||||||
|
self.__scope.update(scope) # type: ignore
|
||||||
|
else:
|
||||||
|
self.__scope = scope
|
||||||
|
|
||||||
|
if kwargs:
|
||||||
|
if self.__scope is not None:
|
||||||
|
self.__scope.update(kwargs) # type: ignore
|
||||||
|
else:
|
||||||
|
self.__scope = kwargs
|
||||||
|
|
||||||
|
return self
|
||||||
|
|
||||||
|
@property
|
||||||
|
def scope(self) -> Optional[Mapping[str, Any]]:
|
||||||
|
"""Scope dictionary for this instance or ``None``."""
|
||||||
|
return self.__scope
|
||||||
|
|
||||||
|
def __repr__(self):
|
||||||
|
return "Code(%s, %r)" % (str.__repr__(self), self.__scope)
|
||||||
|
|
||||||
|
def __eq__(self, other: Any) -> bool:
|
||||||
|
if isinstance(other, Code):
|
||||||
|
return (self.__scope, str(self)) == (other.__scope, str(other))
|
||||||
|
return False
|
||||||
|
|
||||||
|
__hash__: Any = None
|
||||||
|
|
||||||
|
def __ne__(self, other: Any) -> bool:
|
||||||
|
return not self == other
|
462
src/xtquant/xtbson/bson37/codec_options.py
Normal file
462
src/xtquant/xtbson/bson37/codec_options.py
Normal file
@ -0,0 +1,462 @@
|
|||||||
|
# Copyright 2014-present MongoDB, Inc.
|
||||||
|
#
|
||||||
|
# Licensed under the Apache License, Version 2.0 (the "License");
|
||||||
|
# you may not use this file except in compliance with the License.
|
||||||
|
# You may obtain a copy of the License at
|
||||||
|
#
|
||||||
|
# http://www.apache.org/licenses/LICENSE-2.0
|
||||||
|
#
|
||||||
|
# Unless required by applicable law or agreed to in writing, software
|
||||||
|
# distributed under the License is distributed on an "AS IS" BASIS,
|
||||||
|
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
|
||||||
|
# See the License for the specific language governing permissions and
|
||||||
|
# limitations under the License.
|
||||||
|
|
||||||
|
"""Tools for specifying BSON codec options."""
|
||||||
|
|
||||||
|
import abc
|
||||||
|
import datetime
|
||||||
|
import enum
|
||||||
|
from collections.abc import MutableMapping as _MutableMapping
|
||||||
|
from typing import (
|
||||||
|
Any,
|
||||||
|
Callable,
|
||||||
|
Dict,
|
||||||
|
Iterable,
|
||||||
|
Mapping,
|
||||||
|
NamedTuple,
|
||||||
|
Optional,
|
||||||
|
Type,
|
||||||
|
TypeVar,
|
||||||
|
Union,
|
||||||
|
cast,
|
||||||
|
)
|
||||||
|
|
||||||
|
from .binary import (
|
||||||
|
ALL_UUID_REPRESENTATIONS,
|
||||||
|
UUID_REPRESENTATION_NAMES,
|
||||||
|
UuidRepresentation,
|
||||||
|
)
|
||||||
|
|
||||||
|
|
||||||
|
def _abstractproperty(func: Callable[..., Any]) -> property:
|
||||||
|
return property(abc.abstractmethod(func))
|
||||||
|
|
||||||
|
|
||||||
|
_RAW_BSON_DOCUMENT_MARKER = 101
|
||||||
|
|
||||||
|
|
||||||
|
def _raw_document_class(document_class: Any) -> bool:
|
||||||
|
"""Determine if a document_class is a RawBSONDocument class."""
|
||||||
|
marker = getattr(document_class, "_type_marker", None)
|
||||||
|
return marker == _RAW_BSON_DOCUMENT_MARKER
|
||||||
|
|
||||||
|
|
||||||
|
class TypeEncoder(abc.ABC):
|
||||||
|
"""Base class for defining type codec classes which describe how a
|
||||||
|
custom type can be transformed to one of the types BSON understands.
|
||||||
|
|
||||||
|
Codec classes must implement the ``python_type`` attribute, and the
|
||||||
|
``transform_python`` method to support encoding.
|
||||||
|
|
||||||
|
See :ref:`custom-type-type-codec` documentation for an example.
|
||||||
|
"""
|
||||||
|
|
||||||
|
@_abstractproperty
|
||||||
|
def python_type(self) -> Any:
|
||||||
|
"""The Python type to be converted into something serializable."""
|
||||||
|
pass
|
||||||
|
|
||||||
|
@abc.abstractmethod
|
||||||
|
def transform_python(self, value: Any) -> Any:
|
||||||
|
"""Convert the given Python object into something serializable."""
|
||||||
|
pass
|
||||||
|
|
||||||
|
|
||||||
|
class TypeDecoder(abc.ABC):
|
||||||
|
"""Base class for defining type codec classes which describe how a
|
||||||
|
BSON type can be transformed to a custom type.
|
||||||
|
|
||||||
|
Codec classes must implement the ``bson_type`` attribute, and the
|
||||||
|
``transform_bson`` method to support decoding.
|
||||||
|
|
||||||
|
See :ref:`custom-type-type-codec` documentation for an example.
|
||||||
|
"""
|
||||||
|
|
||||||
|
@_abstractproperty
|
||||||
|
def bson_type(self) -> Any:
|
||||||
|
"""The BSON type to be converted into our own type."""
|
||||||
|
pass
|
||||||
|
|
||||||
|
@abc.abstractmethod
|
||||||
|
def transform_bson(self, value: Any) -> Any:
|
||||||
|
"""Convert the given BSON value into our own type."""
|
||||||
|
pass
|
||||||
|
|
||||||
|
|
||||||
|
class TypeCodec(TypeEncoder, TypeDecoder):
|
||||||
|
"""Base class for defining type codec classes which describe how a
|
||||||
|
custom type can be transformed to/from one of the types :mod:`bson`
|
||||||
|
can already encode/decode.
|
||||||
|
|
||||||
|
Codec classes must implement the ``python_type`` attribute, and the
|
||||||
|
``transform_python`` method to support encoding, as well as the
|
||||||
|
``bson_type`` attribute, and the ``transform_bson`` method to support
|
||||||
|
decoding.
|
||||||
|
|
||||||
|
See :ref:`custom-type-type-codec` documentation for an example.
|
||||||
|
"""
|
||||||
|
|
||||||
|
pass
|
||||||
|
|
||||||
|
|
||||||
|
_Codec = Union[TypeEncoder, TypeDecoder, TypeCodec]
|
||||||
|
_Fallback = Callable[[Any], Any]
|
||||||
|
_DocumentType = TypeVar("_DocumentType", bound=Mapping[str, Any])
|
||||||
|
|
||||||
|
|
||||||
|
class TypeRegistry(object):
|
||||||
|
"""Encapsulates type codecs used in encoding and / or decoding BSON, as
|
||||||
|
well as the fallback encoder. Type registries cannot be modified after
|
||||||
|
instantiation.
|
||||||
|
|
||||||
|
``TypeRegistry`` can be initialized with an iterable of type codecs, and
|
||||||
|
a callable for the fallback encoder::
|
||||||
|
|
||||||
|
>>> from .codec_options import TypeRegistry
|
||||||
|
>>> type_registry = TypeRegistry([Codec1, Codec2, Codec3, ...],
|
||||||
|
... fallback_encoder)
|
||||||
|
|
||||||
|
See :ref:`custom-type-type-registry` documentation for an example.
|
||||||
|
|
||||||
|
:Parameters:
|
||||||
|
- `type_codecs` (optional): iterable of type codec instances. If
|
||||||
|
``type_codecs`` contains multiple codecs that transform a single
|
||||||
|
python or BSON type, the transformation specified by the type codec
|
||||||
|
occurring last prevails. A TypeError will be raised if one or more
|
||||||
|
type codecs modify the encoding behavior of a built-in :mod:`bson`
|
||||||
|
type.
|
||||||
|
- `fallback_encoder` (optional): callable that accepts a single,
|
||||||
|
unencodable python value and transforms it into a type that
|
||||||
|
:mod:`bson` can encode. See :ref:`fallback-encoder-callable`
|
||||||
|
documentation for an example.
|
||||||
|
"""
|
||||||
|
|
||||||
|
def __init__(
|
||||||
|
self,
|
||||||
|
type_codecs: Optional[Iterable[_Codec]] = None,
|
||||||
|
fallback_encoder: Optional[_Fallback] = None,
|
||||||
|
) -> None:
|
||||||
|
self.__type_codecs = list(type_codecs or [])
|
||||||
|
self._fallback_encoder = fallback_encoder
|
||||||
|
self._encoder_map: Dict[Any, Any] = {}
|
||||||
|
self._decoder_map: Dict[Any, Any] = {}
|
||||||
|
|
||||||
|
if self._fallback_encoder is not None:
|
||||||
|
if not callable(fallback_encoder):
|
||||||
|
raise TypeError("fallback_encoder %r is not a callable" % (fallback_encoder))
|
||||||
|
|
||||||
|
for codec in self.__type_codecs:
|
||||||
|
is_valid_codec = False
|
||||||
|
if isinstance(codec, TypeEncoder):
|
||||||
|
self._validate_type_encoder(codec)
|
||||||
|
is_valid_codec = True
|
||||||
|
self._encoder_map[codec.python_type] = codec.transform_python
|
||||||
|
if isinstance(codec, TypeDecoder):
|
||||||
|
is_valid_codec = True
|
||||||
|
self._decoder_map[codec.bson_type] = codec.transform_bson
|
||||||
|
if not is_valid_codec:
|
||||||
|
raise TypeError(
|
||||||
|
"Expected an instance of %s, %s, or %s, got %r instead"
|
||||||
|
% (TypeEncoder.__name__, TypeDecoder.__name__, TypeCodec.__name__, codec)
|
||||||
|
)
|
||||||
|
|
||||||
|
def _validate_type_encoder(self, codec: _Codec) -> None:
|
||||||
|
from . import _BUILT_IN_TYPES
|
||||||
|
|
||||||
|
for pytype in _BUILT_IN_TYPES:
|
||||||
|
if issubclass(cast(TypeCodec, codec).python_type, pytype):
|
||||||
|
err_msg = (
|
||||||
|
"TypeEncoders cannot change how built-in types are "
|
||||||
|
"encoded (encoder %s transforms type %s)" % (codec, pytype)
|
||||||
|
)
|
||||||
|
raise TypeError(err_msg)
|
||||||
|
|
||||||
|
def __repr__(self):
|
||||||
|
return "%s(type_codecs=%r, fallback_encoder=%r)" % (
|
||||||
|
self.__class__.__name__,
|
||||||
|
self.__type_codecs,
|
||||||
|
self._fallback_encoder,
|
||||||
|
)
|
||||||
|
|
||||||
|
def __eq__(self, other: Any) -> Any:
|
||||||
|
if not isinstance(other, type(self)):
|
||||||
|
return NotImplemented
|
||||||
|
return (
|
||||||
|
(self._decoder_map == other._decoder_map)
|
||||||
|
and (self._encoder_map == other._encoder_map)
|
||||||
|
and (self._fallback_encoder == other._fallback_encoder)
|
||||||
|
)
|
||||||
|
|
||||||
|
|
||||||
|
class DatetimeConversion(int, enum.Enum):
    """Options for decoding BSON datetimes."""

    DATETIME = 1
    """Decode a BSON UTC datetime as a :class:`datetime.datetime`.

    BSON UTC datetimes that cannot be represented as a
    :class:`~datetime.datetime` will raise an :class:`OverflowError`
    or a :class:`ValueError`.

    .. versionadded:: 4.3
    """

    DATETIME_CLAMP = 2
    """Decode a BSON UTC datetime as a :class:`datetime.datetime`, clamping
    to :attr:`~datetime.datetime.min` and :attr:`~datetime.datetime.max`.

    .. versionadded:: 4.3
    """

    DATETIME_MS = 3
    """Decode a BSON UTC datetime as a :class:`~bson.datetime_ms.DatetimeMS`
    object.

    .. versionadded:: 4.3
    """

    DATETIME_AUTO = 4
    """Decode a BSON UTC datetime as a :class:`datetime.datetime` if possible,
    and a :class:`~bson.datetime_ms.DatetimeMS` if not.

    .. versionadded:: 4.3
    """
|
||||||
|
|
||||||
|
|
||||||
|
class _BaseCodecOptions(NamedTuple):
    """Internal NamedTuple base carrying the fields of :class:`CodecOptions`."""

    document_class: Type[Mapping[str, Any]]
    tz_aware: bool
    uuid_representation: int
    unicode_decode_error_handler: str
    tzinfo: Optional[datetime.tzinfo]
    type_registry: TypeRegistry
    datetime_conversion: Optional[DatetimeConversion]
|
||||||
|
|
||||||
|
|
||||||
|
class CodecOptions(_BaseCodecOptions):
    """Encapsulates options used encoding and / or decoding BSON.

    The `document_class` option is used to define a custom type for use
    decoding BSON documents. Access to the underlying raw BSON bytes for
    a document is available using the :class:`~bson.raw_bson.RawBSONDocument`
    type::

      >>> from .raw_bson import RawBSONDocument
      >>> from .codec_options import CodecOptions
      >>> codec_options = CodecOptions(document_class=RawBSONDocument)
      >>> coll = db.get_collection('test', codec_options=codec_options)
      >>> doc = coll.find_one()
      >>> doc.raw
      '\\x16\\x00\\x00\\x00\\x07_id\\x00[0\\x165\\x91\\x10\\xea\\x14\\xe8\\xc5\\x8b\\x93\\x00'

    The document class can be any type that inherits from
    :class:`~collections.abc.MutableMapping`::

      >>> class AttributeDict(dict):
      ...     # A dict that supports attribute access.
      ...     def __getattr__(self, key):
      ...         return self[key]
      ...     def __setattr__(self, key, value):
      ...         self[key] = value
      ...
      >>> codec_options = CodecOptions(document_class=AttributeDict)
      >>> coll = db.get_collection('test', codec_options=codec_options)
      >>> doc = coll.find_one()
      >>> doc._id
      ObjectId('5b3016359110ea14e8c58b93')

    See :doc:`/examples/datetimes` for examples using the `tz_aware` and
    `tzinfo` options.

    See :doc:`/examples/uuid` for examples using the `uuid_representation`
    option.

    :Parameters:
      - `document_class`: BSON documents returned in queries will be decoded
        to an instance of this class. Must be a subclass of
        :class:`~collections.abc.MutableMapping`. Defaults to :class:`dict`.
      - `tz_aware`: If ``True``, BSON datetimes will be decoded to timezone
        aware instances of :class:`~datetime.datetime`. Otherwise they will be
        naive. Defaults to ``False``.
      - `uuid_representation`: The BSON representation to use when encoding
        and decoding instances of :class:`~uuid.UUID`. Defaults to
        :data:`~bson.binary.UuidRepresentation.UNSPECIFIED`. New
        applications should consider setting this to
        :data:`~bson.binary.UuidRepresentation.STANDARD` for cross language
        compatibility. See :ref:`handling-uuid-data-example` for details.
      - `unicode_decode_error_handler`: The error handler to apply when
        a Unicode-related error occurs during BSON decoding that would
        otherwise raise :exc:`UnicodeDecodeError`. Valid options include
        'strict', 'replace', 'backslashreplace', 'surrogateescape', and
        'ignore'. Defaults to 'strict'.
      - `tzinfo`: A :class:`~datetime.tzinfo` subclass that specifies the
        timezone to/from which :class:`~datetime.datetime` objects should be
        encoded/decoded.
      - `type_registry`: Instance of :class:`TypeRegistry` used to customize
        encoding and decoding behavior.
      - `datetime_conversion`: Specifies how UTC datetimes should be decoded
        within BSON. Valid options include 'datetime_ms' to return as a
        DatetimeMS, 'datetime' to return as a datetime.datetime and
        raising a ValueError for out-of-range values, 'datetime_auto' to
        return DatetimeMS objects when the underlying datetime is
        out-of-range and 'datetime_clamp' to clamp to the minimum and
        maximum possible datetimes. Defaults to 'datetime'.

    .. versionchanged:: 4.0
       The default for `uuid_representation` was changed from
       :const:`~bson.binary.UuidRepresentation.PYTHON_LEGACY` to
       :const:`~bson.binary.UuidRepresentation.UNSPECIFIED`.

    .. versionadded:: 3.8
       `type_registry` attribute.

    .. warning:: Care must be taken when changing
       `unicode_decode_error_handler` from its default value ('strict').
       The 'replace' and 'ignore' modes should not be used when documents
       retrieved from the server will be modified in the client application
       and stored back to the server.
    """

    def __new__(
        cls: Type["CodecOptions"],
        document_class: Optional[Type[Mapping[str, Any]]] = None,
        tz_aware: bool = False,
        uuid_representation: Optional[int] = UuidRepresentation.UNSPECIFIED,
        unicode_decode_error_handler: str = "strict",
        tzinfo: Optional[datetime.tzinfo] = None,
        type_registry: Optional[TypeRegistry] = None,
        datetime_conversion: Optional[DatetimeConversion] = DatetimeConversion.DATETIME,
    ) -> "CodecOptions":
        doc_class = document_class or dict
        # issubclass can raise TypeError for generic aliases like SON[str, Any].
        # In that case we can use the base class for the comparison.
        is_mapping = False
        try:
            is_mapping = issubclass(doc_class, _MutableMapping)
        except TypeError:
            if hasattr(doc_class, "__origin__"):
                is_mapping = issubclass(doc_class.__origin__, _MutableMapping)  # type: ignore[union-attr]
        if not (is_mapping or _raw_document_class(doc_class)):
            raise TypeError(
                "document_class must be dict, bson.son.SON, "
                "bson.raw_bson.RawBSONDocument, or a "
                "subclass of collections.abc.MutableMapping"
            )
        if not isinstance(tz_aware, bool):
            raise TypeError("tz_aware must be True or False")
        if uuid_representation not in ALL_UUID_REPRESENTATIONS:
            raise ValueError(
                "uuid_representation must be a value from .binary.UuidRepresentation"
            )
        if not isinstance(unicode_decode_error_handler, str):
            raise ValueError("unicode_decode_error_handler must be a string")
        if tzinfo is not None:
            if not isinstance(tzinfo, datetime.tzinfo):
                raise TypeError("tzinfo must be an instance of datetime.tzinfo")
            # A tzinfo without tz_aware would silently be ignored; reject it.
            if not tz_aware:
                raise ValueError("cannot specify tzinfo without also setting tz_aware=True")

        type_registry = type_registry or TypeRegistry()

        if not isinstance(type_registry, TypeRegistry):
            raise TypeError("type_registry must be an instance of TypeRegistry")

        # Element order must match the field order of _BaseCodecOptions.
        return tuple.__new__(
            cls,
            (
                doc_class,
                tz_aware,
                uuid_representation,
                unicode_decode_error_handler,
                tzinfo,
                type_registry,
                datetime_conversion,
            ),
        )

    def _arguments_repr(self) -> str:
        """Representation of the arguments used to create this object."""
        document_class_repr = "dict" if self.document_class is dict else repr(self.document_class)

        uuid_rep_repr = UUID_REPRESENTATION_NAMES.get(
            self.uuid_representation, self.uuid_representation
        )

        return (
            "document_class=%s, tz_aware=%r, uuid_representation=%s, "
            "unicode_decode_error_handler=%r, tzinfo=%r, "
            "type_registry=%r, datetime_conversion=%s"
            % (
                document_class_repr,
                self.tz_aware,
                uuid_rep_repr,
                self.unicode_decode_error_handler,
                self.tzinfo,
                self.type_registry,
                self.datetime_conversion,
            )
        )

    def _options_dict(self) -> Dict[str, Any]:
        """Dictionary of the arguments used to create this object."""
        # TODO: PYTHON-2442 use _asdict() instead
        return {
            "document_class": self.document_class,
            "tz_aware": self.tz_aware,
            "uuid_representation": self.uuid_representation,
            "unicode_decode_error_handler": self.unicode_decode_error_handler,
            "tzinfo": self.tzinfo,
            "type_registry": self.type_registry,
            "datetime_conversion": self.datetime_conversion,
        }

    def __repr__(self):
        return "%s(%s)" % (self.__class__.__name__, self._arguments_repr())

    def with_options(self, **kwargs: Any) -> "CodecOptions":
        """Make a copy of this CodecOptions, overriding some options::

            >>> from .codec_options import DEFAULT_CODEC_OPTIONS
            >>> DEFAULT_CODEC_OPTIONS.tz_aware
            False
            >>> options = DEFAULT_CODEC_OPTIONS.with_options(tz_aware=True)
            >>> options.tz_aware
            True

        .. versionadded:: 3.5
        """
        opts = self._options_dict()
        opts.update(kwargs)
        return CodecOptions(**opts)
|
||||||
|
|
||||||
|
|
||||||
|
# Module-wide default options: dict documents, naive datetimes, strict decoding.
DEFAULT_CODEC_OPTIONS = CodecOptions()
|
||||||
|
|
||||||
|
|
||||||
|
def _parse_codec_options(options: Any) -> CodecOptions:
    """Parse BSON codec options."""
    # Only these keys are forwarded; the legacy spelling "uuidrepresentation"
    # is remapped to the CodecOptions keyword "uuid_representation".
    recognized = {
        "document_class",
        "tz_aware",
        "uuidrepresentation",
        "unicode_decode_error_handler",
        "tzinfo",
        "type_registry",
        "datetime_conversion",
    }
    kwargs = {
        ("uuid_representation" if key == "uuidrepresentation" else key): options[key]
        for key in set(options) & recognized
    }
    return CodecOptions(**kwargs)
|
108
src/xtquant/xtbson/bson37/codec_options.pyi
Normal file
108
src/xtquant/xtbson/bson37/codec_options.pyi
Normal file
@ -0,0 +1,108 @@
|
|||||||
|
# Copyright 2022-present MongoDB, Inc.
|
||||||
|
#
|
||||||
|
# Licensed under the Apache License, Version 2.0 (the "License");
|
||||||
|
# you may not use this file except in compliance with the License.
|
||||||
|
# You may obtain a copy of the License at
|
||||||
|
#
|
||||||
|
# http://www.apache.org/licenses/LICENSE-2.0
|
||||||
|
#
|
||||||
|
# Unless required by applicable law or agreed to in writing, software
|
||||||
|
# distributed under the License is distributed on an "AS IS" BASIS,
|
||||||
|
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
|
||||||
|
# See the License for the specific language governing permissions and
|
||||||
|
# limitations under the License.
|
||||||
|
|
||||||
|
"""Workaround for https://bugs.python.org/issue43923.
|
||||||
|
Ideally we would have done this with a single class, but
|
||||||
|
generic subclasses *must* take a parameter, and prior to Python 3.9
|
||||||
|
or in Python 3.7 and 3.8 with `from __future__ import annotations`,
|
||||||
|
you get the error: "TypeError: 'type' object is not subscriptable".
|
||||||
|
"""
|
||||||
|
|
||||||
|
import datetime
|
||||||
|
import abc
|
||||||
|
import enum
|
||||||
|
from typing import Tuple, Generic, Optional, Mapping, Any, TypeVar, Type, Dict, Iterable, Tuple, MutableMapping, Callable, Union
|
||||||
|
|
||||||
|
|
||||||
|
# Stub: custom encoder mapping one Python type to a BSON-encodable value.
class TypeEncoder(abc.ABC, metaclass=abc.ABCMeta):
    @property
    @abc.abstractmethod
    def python_type(self) -> Any: ...
    @abc.abstractmethod
    def transform_python(self, value: Any) -> Any: ...
|
||||||
|
|
||||||
|
# Stub: custom decoder mapping one BSON type back to a Python value.
class TypeDecoder(abc.ABC, metaclass=abc.ABCMeta):
    @property
    @abc.abstractmethod
    def bson_type(self) -> Any: ...
    @abc.abstractmethod
    def transform_bson(self, value: Any) -> Any: ...
|
||||||
|
|
||||||
|
# Stub: a codec that both encodes and decodes one type.
class TypeCodec(TypeEncoder, TypeDecoder, metaclass=abc.ABCMeta): ...
|
||||||
|
|
||||||
|
# Type aliases matching the runtime module's _Codec / _Fallback.
Codec = Union[TypeEncoder, TypeDecoder, TypeCodec]
Fallback = Callable[[Any], Any]
|
||||||
|
|
||||||
|
# Stub of the runtime TypeRegistry: internal codec maps plus optional fallback.
class TypeRegistry:
    _decoder_map: Dict[Any, Any]
    _encoder_map: Dict[Any, Any]
    _fallback_encoder: Optional[Fallback]

    def __init__(self, type_codecs: Optional[Iterable[Codec]] = ..., fallback_encoder: Optional[Fallback] = ...) -> None: ...
    def __eq__(self, other: Any) -> Any: ...
|
||||||
|
|
||||||
|
|
||||||
|
# Document type variable parameterizing CodecOptions in this stub.
_DocumentType = TypeVar("_DocumentType", bound=Mapping[str, Any])
|
||||||
|
|
||||||
|
# Stub mirror of the runtime enum; concrete values elided.
class DatetimeConversion(int, enum.Enum):
    DATETIME = ...
    DATETIME_CLAMP = ...
    DATETIME_MS = ...
    DATETIME_AUTO = ...
|
||||||
|
|
||||||
|
# Stub for the runtime CodecOptions NamedTuple subclass, made generic over
# the document class (works around https://bugs.python.org/issue43923).
class CodecOptions(Tuple, Generic[_DocumentType]):
    document_class: Type[_DocumentType]
    tz_aware: bool
    uuid_representation: int
    unicode_decode_error_handler: Optional[str]
    tzinfo: Optional[datetime.tzinfo]
    type_registry: TypeRegistry
    datetime_conversion: Optional[int]

    def __new__(
        cls: Type[CodecOptions],
        document_class: Optional[Type[_DocumentType]] = ...,
        tz_aware: bool = ...,
        uuid_representation: Optional[int] = ...,
        unicode_decode_error_handler: Optional[str] = ...,
        tzinfo: Optional[datetime.tzinfo] = ...,
        type_registry: Optional[TypeRegistry] = ...,
        datetime_conversion: Optional[int] = ...,
    ) -> CodecOptions[_DocumentType]: ...

    # CodecOptions API
    def with_options(self, **kwargs: Any) -> CodecOptions[_DocumentType]: ...

    def _arguments_repr(self) -> str: ...

    def _options_dict(self) -> Dict[Any, Any]: ...

    # NamedTuple API
    @classmethod
    def _make(cls, obj: Iterable) -> CodecOptions[_DocumentType]: ...

    def _asdict(self) -> Dict[str, Any]: ...

    def _replace(self, **kwargs: Any) -> CodecOptions[_DocumentType]: ...

    _source: str
    _fields: Tuple[str]
|
||||||
|
|
||||||
|
|
||||||
|
# Module-level names exported by the runtime module.
DEFAULT_CODEC_OPTIONS: CodecOptions[MutableMapping[str, Any]]
_RAW_BSON_DOCUMENT_MARKER: int

def _raw_document_class(document_class: Any) -> bool: ...

def _parse_codec_options(options: Any) -> CodecOptions: ...
|
158
src/xtquant/xtbson/bson37/datetime_ms.py
Normal file
158
src/xtquant/xtbson/bson37/datetime_ms.py
Normal file
@ -0,0 +1,158 @@
|
|||||||
|
# Copyright 2022-present MongoDB, Inc.
|
||||||
|
#
|
||||||
|
# Licensed under the Apache License, Version 2.0 (the "License"); you
|
||||||
|
# may not use this file except in compliance with the License. You
|
||||||
|
# may obtain a copy of the License at
|
||||||
|
#
|
||||||
|
# http://www.apache.org/licenses/LICENSE-2.0
|
||||||
|
#
|
||||||
|
# Unless required by applicable law or agreed to in writing, software
|
||||||
|
# distributed under the License is distributed on an "AS IS" BASIS,
|
||||||
|
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or
|
||||||
|
# implied. See the License for the specific language governing
|
||||||
|
# permissions and limitations under the License.
|
||||||
|
|
||||||
|
"""Tools for representing the BSON datetime type.
|
||||||
|
|
||||||
|
.. versionadded:: 4.3
|
||||||
|
"""
|
||||||
|
|
||||||
|
import calendar
|
||||||
|
import datetime
|
||||||
|
import functools
|
||||||
|
from typing import Any, Union, cast
|
||||||
|
|
||||||
|
from .codec_options import DEFAULT_CODEC_OPTIONS, CodecOptions, DatetimeConversion
|
||||||
|
from .tz_util import utc
|
||||||
|
|
||||||
|
# Unix epoch reference points: one timezone-aware (UTC), one naive.
EPOCH_AWARE = datetime.datetime.fromtimestamp(0, utc)
EPOCH_NAIVE = datetime.datetime.utcfromtimestamp(0)
|
||||||
|
|
||||||
|
|
||||||
|
class DatetimeMS:
    """Represents a BSON UTC datetime."""

    __slots__ = ("_value",)

    def __init__(self, value: Union[int, datetime.datetime]):
        """Represents a BSON UTC datetime.

        BSON UTC datetimes are defined as an int64 of milliseconds since the
        Unix epoch. The principal use of DatetimeMS is to represent
        datetimes outside the range of the Python builtin
        :class:`~datetime.datetime` class when
        encoding/decoding BSON.

        To decode UTC datetimes as a ``DatetimeMS``, `datetime_conversion` in
        :class:`~bson.CodecOptions` must be set to 'datetime_ms' or
        'datetime_auto'. See :ref:`handling-out-of-range-datetimes` for
        details.

        :Parameters:
          - `value`: An instance of :class:`datetime.datetime` to be
            represented as milliseconds since the Unix epoch, or int of
            milliseconds since the Unix epoch.
        """
        if isinstance(value, int):
            # Must fit in a signed 64-bit BSON datetime field.
            if not (-(2**63) <= value <= 2**63 - 1):
                raise OverflowError("Must be a 64-bit integer of milliseconds")
            self._value = value
        elif isinstance(value, datetime.datetime):
            self._value = _datetime_to_millis(value)
        else:
            raise TypeError(f"{type(value)} is not a valid type for DatetimeMS")

    def __hash__(self) -> int:
        return hash(self._value)

    def __repr__(self) -> str:
        return type(self).__name__ + "(" + str(self._value) + ")"

    # Comparisons delegate to int comparison: against an int directly,
    # against another DatetimeMS via the reflected operator on its value.
    def __lt__(self, other: Union["DatetimeMS", int]) -> bool:
        return self._value < other

    def __le__(self, other: Union["DatetimeMS", int]) -> bool:
        return self._value <= other

    def __eq__(self, other: Any) -> bool:
        if isinstance(other, DatetimeMS):
            return self._value == other._value
        return False

    def __ne__(self, other: Any) -> bool:
        if isinstance(other, DatetimeMS):
            return self._value != other._value
        return True

    def __gt__(self, other: Union["DatetimeMS", int]) -> bool:
        return self._value > other

    def __ge__(self, other: Union["DatetimeMS", int]) -> bool:
        return self._value >= other

    # BSON type marker for UTC datetime.
    _type_marker = 9

    def as_datetime(self, codec_options: CodecOptions = DEFAULT_CODEC_OPTIONS) -> datetime.datetime:
        """Create a Python :class:`~datetime.datetime` from this DatetimeMS object.

        :Parameters:
          - `codec_options`: A CodecOptions instance for specifying how the
            resulting DatetimeMS object will be formatted using ``tz_aware``
            and ``tz_info``. Defaults to
            :const:`~bson.codec_options.DEFAULT_CODEC_OPTIONS`.
        """
        return cast(datetime.datetime, _millis_to_datetime(self._value, codec_options))

    def __int__(self) -> int:
        return self._value
|
||||||
|
|
||||||
|
|
||||||
|
# Inclusive and exclusive min and max for timezones.
|
||||||
|
# Timezones are hashed by their offset, which is a timedelta
|
||||||
|
# and therefore there are more than 24 possible timezones.
|
||||||
|
@functools.lru_cache(maxsize=None)
def _min_datetime_ms(tz=datetime.timezone.utc):
    # Smallest millis value representable as a datetime in `tz` (cached per tz).
    return _datetime_to_millis(datetime.datetime.min.replace(tzinfo=tz))
|
||||||
|
|
||||||
|
|
||||||
|
@functools.lru_cache(maxsize=None)
def _max_datetime_ms(tz=datetime.timezone.utc):
    # Largest millis value representable as a datetime in `tz` (cached per tz).
    return _datetime_to_millis(datetime.datetime.max.replace(tzinfo=tz))
|
||||||
|
|
||||||
|
|
||||||
|
def _millis_to_datetime(millis: int, opts: CodecOptions) -> Union[datetime.datetime, DatetimeMS]:
    """Convert milliseconds since epoch UTC to datetime."""
    if (
        opts.datetime_conversion == DatetimeConversion.DATETIME
        or opts.datetime_conversion == DatetimeConversion.DATETIME_CLAMP
        or opts.datetime_conversion == DatetimeConversion.DATETIME_AUTO
    ):
        tz = opts.tzinfo or datetime.timezone.utc
        if opts.datetime_conversion == DatetimeConversion.DATETIME_CLAMP:
            # Clamp into the datetime-representable range for this tz.
            millis = max(_min_datetime_ms(tz), min(millis, _max_datetime_ms(tz)))
        elif opts.datetime_conversion == DatetimeConversion.DATETIME_AUTO:
            # Out-of-range values fall back to the raw DatetimeMS wrapper.
            if not (_min_datetime_ms(tz) <= millis <= _max_datetime_ms(tz)):
                return DatetimeMS(millis)

        # Split millis into non-negative sub-second remainder and whole
        # seconds, so pre-epoch (negative) values floor correctly.
        diff = ((millis % 1000) + 1000) % 1000
        seconds = (millis - diff) // 1000
        micros = diff * 1000

        if opts.tz_aware:
            dt = EPOCH_AWARE + datetime.timedelta(seconds=seconds, microseconds=micros)
            if opts.tzinfo:
                dt = dt.astimezone(tz)
            return dt
        else:
            return EPOCH_NAIVE + datetime.timedelta(seconds=seconds, microseconds=micros)
    elif opts.datetime_conversion == DatetimeConversion.DATETIME_MS:
        return DatetimeMS(millis)
    else:
        raise ValueError("datetime_conversion must be an element of DatetimeConversion")
|
||||||
|
|
||||||
|
|
||||||
|
def _datetime_to_millis(dtm: datetime.datetime) -> int:
|
||||||
|
"""Convert datetime to milliseconds since epoch UTC."""
|
||||||
|
if dtm.utcoffset() is not None:
|
||||||
|
dtm = dtm - dtm.utcoffset() # type: ignore
|
||||||
|
return int(calendar.timegm(dtm.timetuple()) * 1000 + dtm.microsecond // 1000)
|
133
src/xtquant/xtbson/bson37/dbref.py
Normal file
133
src/xtquant/xtbson/bson37/dbref.py
Normal file
@ -0,0 +1,133 @@
|
|||||||
|
# Copyright 2009-2015 MongoDB, Inc.
|
||||||
|
#
|
||||||
|
# Licensed under the Apache License, Version 2.0 (the "License");
|
||||||
|
# you may not use this file except in compliance with the License.
|
||||||
|
# You may obtain a copy of the License at
|
||||||
|
#
|
||||||
|
# http://www.apache.org/licenses/LICENSE-2.0
|
||||||
|
#
|
||||||
|
# Unless required by applicable law or agreed to in writing, software
|
||||||
|
# distributed under the License is distributed on an "AS IS" BASIS,
|
||||||
|
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
|
||||||
|
# See the License for the specific language governing permissions and
|
||||||
|
# limitations under the License.
|
||||||
|
|
||||||
|
"""Tools for manipulating DBRefs (references to MongoDB documents)."""
|
||||||
|
|
||||||
|
from copy import deepcopy
|
||||||
|
from typing import Any, Mapping, Optional
|
||||||
|
|
||||||
|
from ._helpers import _getstate_slots, _setstate_slots
|
||||||
|
from .son import SON
|
||||||
|
|
||||||
|
|
||||||
|
class DBRef(object):
    """A reference to a document stored in MongoDB."""

    __slots__ = "__collection", "__id", "__database", "__kwargs"
    __getstate__ = _getstate_slots
    __setstate__ = _setstate_slots
    # DBRef isn't actually a BSON "type" so this number was arbitrarily chosen.
    _type_marker = 100

    def __init__(
        self,
        collection: str,
        id: Any,
        database: Optional[str] = None,
        _extra: Optional[Mapping[str, Any]] = None,
        **kwargs: Any
    ) -> None:
        """Initialize a new :class:`DBRef`.

        Raises :class:`TypeError` if `collection` or `database` is not
        an instance of :class:`basestring` (:class:`str` in python 3).
        `database` is optional and allows references to documents to work
        across databases. Any additional keyword arguments will create
        additional fields in the resultant embedded document.

        :Parameters:
          - `collection`: name of the collection the document is stored in
          - `id`: the value of the document's ``"_id"`` field
          - `database` (optional): name of the database to reference
          - `**kwargs` (optional): additional keyword arguments will
            create additional, custom fields

        .. seealso:: The MongoDB documentation on `dbrefs <https://dochub.mongodb.org/core/dbrefs>`_.
        """
        if not isinstance(collection, str):
            raise TypeError("collection must be an instance of str")
        if database is not None and not isinstance(database, str):
            raise TypeError("database must be an instance of str")

        self.__collection = collection
        self.__id = id
        self.__database = database
        # Entries from `_extra` overwrite duplicate keys passed via **kwargs.
        kwargs.update(_extra or {})
        self.__kwargs = kwargs

    @property
    def collection(self) -> str:
        """Get the name of this DBRef's collection."""
        return self.__collection

    @property
    def id(self) -> Any:
        """Get this DBRef's _id."""
        return self.__id

    @property
    def database(self) -> Optional[str]:
        """Get the name of this DBRef's database.

        Returns None if this DBRef doesn't specify a database.
        """
        return self.__database

    def __getattr__(self, key: Any) -> Any:
        # Expose the extra custom fields as attributes.
        try:
            return self.__kwargs[key]
        except KeyError:
            raise AttributeError(key)

    def as_doc(self) -> SON[str, Any]:
        """Get the SON document representation of this DBRef.

        Generally not needed by application developers
        """
        doc = SON([("$ref", self.collection), ("$id", self.id)])
        if self.database is not None:
            doc["$db"] = self.database
        doc.update(self.__kwargs)
        return doc

    def __repr__(self):
        extra = "".join([", %s=%r" % (k, v) for k, v in self.__kwargs.items()])
        if self.database is None:
            return "DBRef(%r, %r%s)" % (self.collection, self.id, extra)
        return "DBRef(%r, %r, %r%s)" % (self.collection, self.id, self.database, extra)

    def __eq__(self, other: Any) -> bool:
        if isinstance(other, DBRef):
            us = (self.__database, self.__collection, self.__id, self.__kwargs)
            them = (other.__database, other.__collection, other.__id, other.__kwargs)
            return us == them
        return NotImplemented

    def __ne__(self, other: Any) -> bool:
        return not self == other

    def __hash__(self) -> int:
        """Get a hash value for this :class:`DBRef`."""
        # Kwargs are sorted so hashing is order-insensitive for extra fields.
        return hash(
            (self.__collection, self.__id, self.__database, tuple(sorted(self.__kwargs.items())))
        )

    def __deepcopy__(self, memo: Any) -> "DBRef":
        """Support function for `copy.deepcopy()`."""
        return DBRef(
            deepcopy(self.__collection, memo),
            deepcopy(self.__id, memo),
            deepcopy(self.__database, memo),
            deepcopy(self.__kwargs, memo),
        )
|
314
src/xtquant/xtbson/bson37/decimal128.py
Normal file
314
src/xtquant/xtbson/bson37/decimal128.py
Normal file
@ -0,0 +1,314 @@
|
|||||||
|
# Copyright 2016-present MongoDB, Inc.
|
||||||
|
#
|
||||||
|
# Licensed under the Apache License, Version 2.0 (the "License");
|
||||||
|
# you may not use this file except in compliance with the License.
|
||||||
|
# You may obtain a copy of the License at
|
||||||
|
#
|
||||||
|
# http://www.apache.org/licenses/LICENSE-2.0
|
||||||
|
#
|
||||||
|
# Unless required by applicable law or agreed to in writing, software
|
||||||
|
# distributed under the License is distributed on an "AS IS" BASIS,
|
||||||
|
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
|
||||||
|
# See the License for the specific language governing permissions and
|
||||||
|
# limitations under the License.
|
||||||
|
|
||||||
|
"""Tools for working with the BSON decimal128 type.
|
||||||
|
|
||||||
|
.. versionadded:: 3.4
|
||||||
|
"""
|
||||||
|
|
||||||
|
import decimal
|
||||||
|
import struct
|
||||||
|
from typing import Any, Sequence, Tuple, Type, Union
|
||||||
|
|
||||||
|
# Little-endian unsigned 64-bit packers used for the two BID halves.
_PACK_64 = struct.Struct("<Q").pack
_UNPACK_64 = struct.Struct("<Q").unpack

# IEEE 754-2008 decimal128 (BID encoding) layout constants.
_EXPONENT_MASK = 3 << 61
_EXPONENT_BIAS = 6176
_EXPONENT_MAX = 6144
_EXPONENT_MIN = -6143
_MAX_DIGITS = 34

# Special-value bit patterns occupying the high 64 bits.
_INF = 0x7800000000000000
_NAN = 0x7C00000000000000
_SNAN = 0x7E00000000000000
_SIGN = 0x8000000000000000

# (high, low) pairs for the signed infinity / NaN variants.
_NINF = (_INF + _SIGN, 0)
_PINF = (_INF, 0)
_NNAN = (_NAN + _SIGN, 0)
_PNAN = (_NAN, 0)
_NSNAN = (_SNAN + _SIGN, 0)
_PSNAN = (_SNAN, 0)

# Strict decimal context options matching decimal128 semantics.
_CTX_OPTIONS = {
    "prec": _MAX_DIGITS,
    "rounding": decimal.ROUND_HALF_EVEN,
    "Emin": _EXPONENT_MIN,
    "Emax": _EXPONENT_MAX,
    "capitals": 1,
    "flags": [],
    "traps": [decimal.InvalidOperation, decimal.Overflow, decimal.Inexact],
    "clamp": 1,
}

_DEC128_CTX = decimal.Context(**_CTX_OPTIONS.copy())  # type: ignore
# Accepted constructor value types for Decimal128.
_VALUE_OPTIONS = Union[decimal.Decimal, float, str, Tuple[int, Sequence[int], int]]
|
||||||
|
|
||||||
|
|
||||||
|
def create_decimal128_context() -> decimal.Context:
    """Returns an instance of :class:`decimal.Context` appropriate
    for working with IEEE-754 128-bit decimal floating point values.
    """
    # Same options as the strict internal context, but with no traps
    # enabled so user-facing arithmetic never raises on those signals.
    options = dict(_CTX_OPTIONS, traps=[])
    return decimal.Context(**options)  # type: ignore
|
||||||
|
|
||||||
|
|
||||||
|
def _decimal_to_128(value: _VALUE_OPTIONS) -> Tuple[int, int]:
    """Converts a decimal.Decimal to BID (high bits, low bits).

    :Parameters:
      - `value`: An instance of decimal.Decimal
    """
    # Validate/normalize through the decimal128 context; out-of-range or
    # inexact inputs raise here because the module context traps them.
    with decimal.localcontext(_DEC128_CTX) as ctx:
        value = ctx.create_decimal(value)

    if value.is_infinite():
        return _NINF if value.is_signed() else _PINF

    sign, digits, exponent = value.as_tuple()

    if value.is_nan():
        # BID NaN payloads are not round-tripped by this implementation.
        if digits:
            raise ValueError("NaN with debug payload is not supported")
        if value.is_snan():
            return _NSNAN if value.is_signed() else _PSNAN
        return _NNAN if value.is_signed() else _PNAN

    # Reassemble the base-10 digit tuple into a single integer significand.
    significand = int("".join([str(digit) for digit in digits]))
    bit_length = significand.bit_length()

    high = 0
    low = 0
    # Low 64 bits of the significand go into `low`, the remainder into `high`.
    for i in range(min(64, bit_length)):
        if significand & (1 << i):
            low |= 1 << i

    for i in range(64, bit_length):
        if significand & (1 << i):
            high |= 1 << (i - 64)

    biased_exponent = exponent + _EXPONENT_BIAS

    if high >> 49 == 1:
        # Large-significand form: set the combination bits and store the
        # 14-bit biased exponent shifted two positions lower (bit 47).
        high = high & 0x7FFFFFFFFFF
        high |= _EXPONENT_MASK
        high |= (biased_exponent & 0x3FFF) << 47
    else:
        # Small-significand form: exponent occupies bits 49..62.
        high |= biased_exponent << 49

    if sign:
        high |= _SIGN

    return high, low
class Decimal128(object):
    """BSON Decimal128 type::

        >>> Decimal128(Decimal("0.0005"))
        Decimal128('0.0005')
        >>> Decimal128("0.0005")
        Decimal128('0.0005')
        >>> Decimal128((3474527112516337664, 5))
        Decimal128('0.0005')

    :Parameters:
      - `value`: An instance of :class:`decimal.Decimal`, string, or tuple of
        (high bits, low bits) from Binary Integer Decimal (BID) format.

    .. note:: :class:`~Decimal128` uses an instance of :class:`decimal.Context`
       configured for IEEE-754 Decimal128 when validating parameters.
       Signals like :class:`decimal.InvalidOperation`, :class:`decimal.Inexact`,
       and :class:`decimal.Overflow` are trapped and raised as exceptions::

         >>> Decimal128(".13.1")
         Traceback (most recent call last):
           File "<stdin>", line 1, in <module>
           ...
         decimal.InvalidOperation: [<class 'decimal.ConversionSyntax'>]
         >>>
         >>> Decimal128("1E-6177")
         Traceback (most recent call last):
           File "<stdin>", line 1, in <module>
           ...
         decimal.Inexact: [<class 'decimal.Inexact'>]
         >>>
         >>> Decimal128("1E6145")
         Traceback (most recent call last):
           File "<stdin>", line 1, in <module>
           ...
         decimal.Overflow: [<class 'decimal.Overflow'>, <class 'decimal.Rounded'>]

       To ensure the result of a calculation can always be stored as BSON
       Decimal128 use the context returned by
       :func:`create_decimal128_context`::

         >>> import decimal
         >>> decimal128_ctx = create_decimal128_context()
         >>> with decimal.localcontext(decimal128_ctx) as ctx:
         ...     Decimal128(ctx.create_decimal(".13.3"))
         ...
         Decimal128('NaN')
         >>>
         >>> with decimal.localcontext(decimal128_ctx) as ctx:
         ...     Decimal128(ctx.create_decimal("1E-6177"))
         ...
         Decimal128('0E-6176')
         >>>
         >>> with decimal.localcontext(decimal128_ctx) as ctx:
         ...     Decimal128(ctx.create_decimal("1E6145"))
         ...
         Decimal128('Infinity')

       To match the behavior of MongoDB's Decimal128 implementation
       str(Decimal(value)) may not match str(Decimal128(value)) for NaN values::

         >>> Decimal128(Decimal('NaN'))
         Decimal128('NaN')
         >>> Decimal128(Decimal('-NaN'))
         Decimal128('NaN')
         >>> Decimal128(Decimal('sNaN'))
         Decimal128('NaN')
         >>> Decimal128(Decimal('-sNaN'))
         Decimal128('NaN')

       However, :meth:`~Decimal128.to_decimal` will return the exact value::

         >>> Decimal128(Decimal('NaN')).to_decimal()
         Decimal('NaN')
         >>> Decimal128(Decimal('-NaN')).to_decimal()
         Decimal('-NaN')
         >>> Decimal128(Decimal('sNaN')).to_decimal()
         Decimal('sNaN')
         >>> Decimal128(Decimal('-sNaN')).to_decimal()
         Decimal('-sNaN')

       Two instances of :class:`Decimal128` compare equal if their Binary
       Integer Decimal encodings are equal::

         >>> Decimal128('NaN') == Decimal128('NaN')
         True
         >>> Decimal128('NaN').bid == Decimal128('NaN').bid
         True

       This differs from :class:`decimal.Decimal` comparisons for NaN::

         >>> Decimal('NaN') == Decimal('NaN')
         False
    """

    # Only the two 64-bit halves of the BID encoding are stored.
    __slots__ = ("__high", "__low")

    # BSON type byte for decimal128 (0x13), used by the encoder for dispatch.
    _type_marker = 19

    def __init__(self, value: _VALUE_OPTIONS) -> None:
        if isinstance(value, (str, decimal.Decimal)):
            self.__high, self.__low = _decimal_to_128(value)
        elif isinstance(value, (list, tuple)):
            if len(value) != 2:
                raise ValueError(
                    "Invalid size for creation of Decimal128 "
                    "from list or tuple. Must have exactly 2 "
                    "elements."
                )
            self.__high, self.__low = value  # type: ignore
        else:
            raise TypeError("Cannot convert %r to Decimal128" % (value,))

    def to_decimal(self) -> decimal.Decimal:
        """Returns an instance of :class:`decimal.Decimal` for this
        :class:`Decimal128`.
        """
        high = self.__high
        low = self.__low
        sign = 1 if (high & _SIGN) else 0

        # Special values are fully determined by the high word.
        if (high & _SNAN) == _SNAN:
            return decimal.Decimal((sign, (), "N"))  # type: ignore
        elif (high & _NAN) == _NAN:
            return decimal.Decimal((sign, (), "n"))  # type: ignore
        elif (high & _INF) == _INF:
            return decimal.Decimal((sign, (), "F"))  # type: ignore

        if (high & _EXPONENT_MASK) == _EXPONENT_MASK:
            # Large-significand form.  Per the BID spec the significand would
            # exceed 34 digits here, so the value decodes as zero.
            exponent = ((high & 0x1FFFE00000000000) >> 47) - _EXPONENT_BIAS
            return decimal.Decimal((sign, (0,), exponent))
        else:
            exponent = ((high & 0x7FFF800000000000) >> 49) - _EXPONENT_BIAS

        # Assemble the 113-bit significand big-endian into a 15-byte buffer:
        # bytes 7..14 from `low`, bytes 1..6 from `high`, byte 0 from the
        # single significand bit above them.
        arr = bytearray(15)
        mask = 0x00000000000000FF
        for i in range(14, 6, -1):
            arr[i] = (low & mask) >> ((14 - i) << 3)
            mask = mask << 8

        mask = 0x00000000000000FF
        for i in range(6, 0, -1):
            arr[i] = (high & mask) >> ((6 - i) << 3)
            mask = mask << 8

        mask = 0x0001000000000000
        arr[0] = (high & mask) >> 48

        # cdecimal only accepts a tuple for digits.
        digits = tuple(int(digit) for digit in str(int.from_bytes(arr, "big")))

        with decimal.localcontext(_DEC128_CTX) as ctx:
            return ctx.create_decimal((sign, digits, exponent))

    @classmethod
    def from_bid(cls: Type["Decimal128"], value: bytes) -> "Decimal128":
        """Create an instance of :class:`Decimal128` from Binary Integer
        Decimal string.

        :Parameters:
          - `value`: 16 byte string (128-bit IEEE 754-2008 decimal floating
            point in Binary Integer Decimal (BID) format).
        """
        if not isinstance(value, bytes):
            raise TypeError("value must be an instance of bytes")
        if len(value) != 16:
            raise ValueError("value must be exactly 16 bytes")
        # Wire order is little-endian low word first; the constructor takes
        # (high, low).
        return cls((_UNPACK_64(value[8:])[0], _UNPACK_64(value[:8])[0]))  # type: ignore

    @property
    def bid(self) -> bytes:
        """The Binary Integer Decimal (BID) encoding of this instance."""
        return _PACK_64(self.__low) + _PACK_64(self.__high)

    def __str__(self) -> str:
        dec = self.to_decimal()
        if dec.is_nan():
            # Required by the drivers spec to match MongoDB behavior.
            return "NaN"
        return str(dec)

    def __repr__(self):
        return "Decimal128('%s')" % (str(self),)

    def __setstate__(self, value: Tuple[int, int]) -> None:
        self.__high, self.__low = value

    def __getstate__(self) -> Tuple[int, int]:
        return self.__high, self.__low

    def __eq__(self, other: Any) -> bool:
        if isinstance(other, Decimal128):
            return self.bid == other.bid
        return NotImplemented

    def __ne__(self, other: Any) -> bool:
        # `not self == other` would apply `not` to NotImplemented when
        # comparing against a foreign type (a TypeError on Python 3.12+).
        # Propagate NotImplemented so the interpreter can try the reflected
        # comparison instead.
        result = self.__eq__(other)
        if result is NotImplemented:
            return result
        return not result
35
src/xtquant/xtbson/bson37/errors.py
Normal file
35
src/xtquant/xtbson/bson37/errors.py
Normal file
@ -0,0 +1,35 @@
|
|||||||
|
# Copyright 2009-present MongoDB, Inc.
|
||||||
|
#
|
||||||
|
# Licensed under the Apache License, Version 2.0 (the "License");
|
||||||
|
# you may not use this file except in compliance with the License.
|
||||||
|
# You may obtain a copy of the License at
|
||||||
|
#
|
||||||
|
# http://www.apache.org/licenses/LICENSE-2.0
|
||||||
|
#
|
||||||
|
# Unless required by applicable law or agreed to in writing, software
|
||||||
|
# distributed under the License is distributed on an "AS IS" BASIS,
|
||||||
|
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
|
||||||
|
# See the License for the specific language governing permissions and
|
||||||
|
# limitations under the License.
|
||||||
|
|
||||||
|
"""Exceptions raised by the BSON package."""
|
||||||
|
|
||||||
|
|
||||||
|
class BSONError(Exception):
    """Base class for all BSON exceptions raised by this package."""
class InvalidBSON(BSONError):
    """Raised when trying to create a BSON object from invalid data."""
class InvalidStringData(BSONError):
    """Raised when trying to encode a string containing non-UTF8 data."""
class InvalidDocument(BSONError):
    """Raised when trying to create a BSON object from an invalid document."""
class InvalidId(BSONError):
    """Raised when trying to create an ObjectId from invalid data."""
39
src/xtquant/xtbson/bson37/int64.py
Normal file
39
src/xtquant/xtbson/bson37/int64.py
Normal file
@ -0,0 +1,39 @@
|
|||||||
|
# Copyright 2014-2015 MongoDB, Inc.
|
||||||
|
#
|
||||||
|
# Licensed under the Apache License, Version 2.0 (the "License");
|
||||||
|
# you may not use this file except in compliance with the License.
|
||||||
|
# You may obtain a copy of the License at
|
||||||
|
#
|
||||||
|
# http://www.apache.org/licenses/LICENSE-2.0
|
||||||
|
#
|
||||||
|
# Unless required by applicable law or agreed to in writing, software
|
||||||
|
# distributed under the License is distributed on an "AS IS" BASIS,
|
||||||
|
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
|
||||||
|
# See the License for the specific language governing permissions and
|
||||||
|
# limitations under the License.
|
||||||
|
|
||||||
|
"""A BSON wrapper for long (int in python3)"""
|
||||||
|
|
||||||
|
from typing import Any
|
||||||
|
|
||||||
|
|
||||||
|
class Int64(int):
    """Representation of the BSON int64 type.

    This is necessary because every integral number is an :class:`int` in
    Python 3. Small integral numbers are encoded to BSON int32 by default,
    but Int64 numbers will always be encoded to BSON int64.

    :Parameters:
      - `value`: the numeric value to represent
    """

    # No per-instance dict: Int64 carries no state beyond the int value.
    __slots__ = ()

    # BSON type byte for int64 (0x12), used by the encoder for dispatch.
    _type_marker = 18

    def __getstate__(self) -> Any:
        # All state lives in the int value itself; nothing extra to pickle.
        return {}

    def __setstate__(self, state: Any) -> None:
        # Nothing to restore; the int value is rebuilt by int's machinery.
        pass
903
src/xtquant/xtbson/bson37/json_util.py
Normal file
903
src/xtquant/xtbson/bson37/json_util.py
Normal file
@ -0,0 +1,903 @@
|
|||||||
|
# Copyright 2009-present MongoDB, Inc.
|
||||||
|
#
|
||||||
|
# Licensed under the Apache License, Version 2.0 (the "License");
|
||||||
|
# you may not use this file except in compliance with the License.
|
||||||
|
# You may obtain a copy of the License at
|
||||||
|
#
|
||||||
|
# http://www.apache.org/licenses/LICENSE-2.0
|
||||||
|
#
|
||||||
|
# Unless required by applicable law or agreed to in writing, software
|
||||||
|
# distributed under the License is distributed on an "AS IS" BASIS,
|
||||||
|
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
|
||||||
|
# See the License for the specific language governing permissions and
|
||||||
|
# limitations under the License.
|
||||||
|
|
||||||
|
"""Tools for using Python's :mod:`json` module with BSON documents.
|
||||||
|
|
||||||
|
This module provides two helper methods `dumps` and `loads` that wrap the
|
||||||
|
native :mod:`json` methods and provide explicit BSON conversion to and from
|
||||||
|
JSON. :class:`~bson.json_util.JSONOptions` provides a way to control how JSON
|
||||||
|
is emitted and parsed, with the default being the Relaxed Extended JSON format.
|
||||||
|
:mod:`~bson.json_util` can also generate Canonical or legacy `Extended JSON`_
|
||||||
|
when :const:`CANONICAL_JSON_OPTIONS` or :const:`LEGACY_JSON_OPTIONS` is
|
||||||
|
provided, respectively.
|
||||||
|
|
||||||
|
.. _Extended JSON: https://github.com/mongodb/specifications/blob/master/source/extended-json.rst
|
||||||
|
|
||||||
|
Example usage (deserialization):
|
||||||
|
|
||||||
|
.. doctest::
|
||||||
|
|
||||||
|
>>> from .json_util import loads
|
||||||
|
>>> loads('[{"foo": [1, 2]}, {"bar": {"hello": "world"}}, {"code": {"$scope": {}, "$code": "function x() { return 1; }"}}, {"bin": {"$type": "80", "$binary": "AQIDBA=="}}]')
|
||||||
|
[{'foo': [1, 2]}, {'bar': {'hello': 'world'}}, {'code': Code('function x() { return 1; }', {})}, {'bin': Binary(b'...', 128)}]
|
||||||
|
|
||||||
|
Example usage with :const:`RELAXED_JSON_OPTIONS` (the default):
|
||||||
|
|
||||||
|
.. doctest::
|
||||||
|
|
||||||
|
>>> from . import Binary, Code
|
||||||
|
>>> from .json_util import dumps
|
||||||
|
>>> dumps([{'foo': [1, 2]},
|
||||||
|
... {'bar': {'hello': 'world'}},
|
||||||
|
... {'code': Code("function x() { return 1; }")},
|
||||||
|
... {'bin': Binary(b"\x01\x02\x03\x04")}])
|
||||||
|
'[{"foo": [1, 2]}, {"bar": {"hello": "world"}}, {"code": {"$code": "function x() { return 1; }"}}, {"bin": {"$binary": {"base64": "AQIDBA==", "subType": "00"}}}]'
|
||||||
|
|
||||||
|
Example usage (with :const:`CANONICAL_JSON_OPTIONS`):
|
||||||
|
|
||||||
|
.. doctest::
|
||||||
|
|
||||||
|
>>> from . import Binary, Code
|
||||||
|
>>> from .json_util import dumps, CANONICAL_JSON_OPTIONS
|
||||||
|
>>> dumps([{'foo': [1, 2]},
|
||||||
|
... {'bar': {'hello': 'world'}},
|
||||||
|
... {'code': Code("function x() { return 1; }")},
|
||||||
|
... {'bin': Binary(b"\x01\x02\x03\x04")}],
|
||||||
|
... json_options=CANONICAL_JSON_OPTIONS)
|
||||||
|
'[{"foo": [{"$numberInt": "1"}, {"$numberInt": "2"}]}, {"bar": {"hello": "world"}}, {"code": {"$code": "function x() { return 1; }"}}, {"bin": {"$binary": {"base64": "AQIDBA==", "subType": "00"}}}]'
|
||||||
|
|
||||||
|
Example usage (with :const:`LEGACY_JSON_OPTIONS`):
|
||||||
|
|
||||||
|
.. doctest::
|
||||||
|
|
||||||
|
>>> from . import Binary, Code
|
||||||
|
>>> from .json_util import dumps, LEGACY_JSON_OPTIONS
|
||||||
|
>>> dumps([{'foo': [1, 2]},
|
||||||
|
... {'bar': {'hello': 'world'}},
|
||||||
|
... {'code': Code("function x() { return 1; }", {})},
|
||||||
|
... {'bin': Binary(b"\x01\x02\x03\x04")}],
|
||||||
|
... json_options=LEGACY_JSON_OPTIONS)
|
||||||
|
'[{"foo": [1, 2]}, {"bar": {"hello": "world"}}, {"code": {"$code": "function x() { return 1; }", "$scope": {}}}, {"bin": {"$binary": "AQIDBA==", "$type": "00"}}]'
|
||||||
|
|
||||||
|
Alternatively, you can manually pass the `default` to :func:`json.dumps`.
|
||||||
|
It won't handle :class:`~bson.binary.Binary` and :class:`~bson.code.Code`
|
||||||
|
instances (as they are extended strings you can't provide custom defaults),
|
||||||
|
but it will be faster as there is less recursion.
|
||||||
|
|
||||||
|
.. note::
|
||||||
|
If your application does not need the flexibility offered by
|
||||||
|
:class:`JSONOptions` and spends a large amount of time in the `json_util`
|
||||||
|
module, look to
|
||||||
|
`python-bsonjs <https://pypi.python.org/pypi/python-bsonjs>`_ for a nice
|
||||||
|
performance improvement. `python-bsonjs` is a fast BSON to MongoDB
|
||||||
|
Extended JSON converter for Python built on top of
|
||||||
|
`libbson <https://github.com/mongodb/libbson>`_. `python-bsonjs` works best
|
||||||
|
with PyMongo when using :class:`~bson.raw_bson.RawBSONDocument`.
|
||||||
|
"""
|
||||||
|
|
||||||
|
import base64
|
||||||
|
import datetime
|
||||||
|
import json
|
||||||
|
import math
|
||||||
|
import re
|
||||||
|
import uuid
|
||||||
|
from typing import Any, Dict, Mapping, Optional, Sequence, Tuple, Type, Union, cast
|
||||||
|
|
||||||
|
from .binary import ALL_UUID_SUBTYPES, UUID_SUBTYPE, Binary, UuidRepresentation
|
||||||
|
from .code import Code
|
||||||
|
from .codec_options import CodecOptions, DatetimeConversion
|
||||||
|
from .datetime_ms import (
|
||||||
|
EPOCH_AWARE,
|
||||||
|
DatetimeMS,
|
||||||
|
_datetime_to_millis,
|
||||||
|
_max_datetime_ms,
|
||||||
|
_millis_to_datetime,
|
||||||
|
)
|
||||||
|
from .dbref import DBRef
|
||||||
|
from .decimal128 import Decimal128
|
||||||
|
from .int64 import Int64
|
||||||
|
from .max_key import MaxKey
|
||||||
|
from .min_key import MinKey
|
||||||
|
from .objectid import ObjectId
|
||||||
|
from .regex import Regex
|
||||||
|
from .son import RE_TYPE, SON
|
||||||
|
from .timestamp import Timestamp
|
||||||
|
from .tz_util import utc
|
||||||
|
|
||||||
|
# Maps MongoDB Extended JSON regex option letters to Python `re` flags.
_RE_OPT_TABLE = {
    "i": re.I,
    "l": re.L,
    "m": re.M,
    "s": re.S,
    "u": re.U,
    "x": re.X,
}
class DatetimeRepresentation:
    """Enumeration of the formats available for encoding
    :class:`datetime.datetime` instances to MongoDB Extended JSON."""

    LEGACY = 0
    """Legacy MongoDB Extended JSON datetime representation.

    :class:`datetime.datetime` instances will be encoded to JSON in the
    format `{"$date": <dateAsMilliseconds>}`, where `dateAsMilliseconds` is
    a 64-bit signed integer giving the number of milliseconds since the Unix
    epoch UTC. This was the default encoding before PyMongo version 3.4.

    .. versionadded:: 3.4
    """

    NUMBERLONG = 1
    """NumberLong datetime representation.

    :class:`datetime.datetime` instances will be encoded to JSON in the
    format `{"$date": {"$numberLong": "<dateAsMilliseconds>"}}`,
    where `dateAsMilliseconds` is the string representation of a 64-bit signed
    integer giving the number of milliseconds since the Unix epoch UTC.

    .. versionadded:: 3.4
    """

    ISO8601 = 2
    """ISO-8601 datetime representation.

    :class:`datetime.datetime` instances greater than or equal to the Unix
    epoch UTC will be encoded to JSON in the format `{"$date": "<ISO-8601>"}`.
    :class:`datetime.datetime` instances before the Unix epoch UTC will be
    encoded as if the datetime representation is
    :const:`~DatetimeRepresentation.NUMBERLONG`.

    .. versionadded:: 3.4
    """
class JSONMode:
    """Enumeration of the Extended JSON dialects that :func:`dumps` can
    emit and :func:`loads` can parse."""

    LEGACY = 0
    """Legacy Extended JSON representation.

    In this mode, :func:`~bson.json_util.dumps` produces PyMongo's legacy
    non-standard JSON output. Consider using
    :const:`~bson.json_util.JSONMode.RELAXED` or
    :const:`~bson.json_util.JSONMode.CANONICAL` instead.

    .. versionadded:: 3.5
    """

    RELAXED = 1
    """Relaxed Extended JSON representation.

    In this mode, :func:`~bson.json_util.dumps` produces Relaxed Extended JSON,
    a mostly JSON-like format. Consider using this for things like a web API,
    where one is sending a document (or a projection of a document) that only
    uses ordinary JSON type primitives. In particular, the ``int``,
    :class:`~bson.int64.Int64`, and ``float`` numeric types are represented in
    the native JSON number format. This output is also the most human readable
    and is useful for debugging and documentation.

    .. seealso:: The specification for Relaxed `Extended JSON`_.

    .. versionadded:: 3.5
    """

    CANONICAL = 2
    """Canonical Extended JSON representation.

    In this mode, :func:`~bson.json_util.dumps` produces Canonical Extended
    JSON, a type preserving format. Consider using this for things like
    testing, where one has to precisely specify expected types in JSON. In
    particular, the ``int``, :class:`~bson.int64.Int64`, and ``float`` numeric
    types are encoded with type wrappers.

    .. seealso:: The specification for Canonical `Extended JSON`_.

    .. versionadded:: 3.5
    """
class JSONOptions(CodecOptions):
    """Encapsulates JSON options for :func:`dumps` and :func:`loads`.

    :Parameters:
      - `strict_number_long`: If ``True``, :class:`~bson.int64.Int64` objects
        are encoded to MongoDB Extended JSON's *Strict mode* type
        `NumberLong`, ie ``'{"$numberLong": "<number>" }'``. Otherwise they
        will be encoded as an `int`. Defaults to ``False``.
      - `datetime_representation`: The representation to use when encoding
        instances of :class:`datetime.datetime`. Defaults to
        :const:`~DatetimeRepresentation.LEGACY`.
      - `strict_uuid`: If ``True``, :class:`uuid.UUID` object are encoded to
        MongoDB Extended JSON's *Strict mode* type `Binary`. Otherwise it
        will be encoded as ``'{"$uuid": "<hex>" }'``. Defaults to ``False``.
      - `json_mode`: The :class:`JSONMode` to use when encoding BSON types to
        Extended JSON. Defaults to :const:`~JSONMode.LEGACY`.
      - `document_class`: BSON documents returned by :func:`loads` will be
        decoded to an instance of this class. Must be a subclass of
        :class:`collections.MutableMapping`. Defaults to :class:`dict`.
      - `uuid_representation`: The :class:`~bson.binary.UuidRepresentation`
        to use when encoding and decoding instances of :class:`uuid.UUID`.
        Defaults to :const:`~bson.binary.UuidRepresentation.UNSPECIFIED`.
      - `tz_aware`: If ``True``, MongoDB Extended JSON's *Strict mode* type
        `Date` will be decoded to timezone aware instances of
        :class:`datetime.datetime`. Otherwise they will be naive. Defaults
        to ``False``.
      - `tzinfo`: A :class:`datetime.tzinfo` subclass that specifies the
        timezone from which :class:`~datetime.datetime` objects should be
        decoded. Defaults to :const:`~bson.tz_util.utc`.
      - `datetime_conversion`: Specifies how UTC datetimes should be decoded
        within BSON. Valid options include 'datetime_ms' to return as a
        DatetimeMS, 'datetime' to return as a datetime.datetime and
        raising a ValueError for out-of-range values, 'datetime_auto' to
        return DatetimeMS objects when the underlying datetime is
        out-of-range and 'datetime_clamp' to clamp to the minimum and
        maximum possible datetimes. Defaults to 'datetime'. See
        :ref:`handling-out-of-range-datetimes` for details.
      - `args`: arguments to :class:`~bson.codec_options.CodecOptions`
      - `kwargs`: arguments to :class:`~bson.codec_options.CodecOptions`

    .. seealso:: The specification for Relaxed and Canonical `Extended JSON`_.

    .. versionchanged:: 4.0
       The default for `json_mode` was changed from :const:`JSONMode.LEGACY`
       to :const:`JSONMode.RELAXED`.
       The default for `uuid_representation` was changed from
       :const:`~bson.binary.UuidRepresentation.PYTHON_LEGACY` to
       :const:`~bson.binary.UuidRepresentation.UNSPECIFIED`.

    .. versionchanged:: 3.5
       Accepts the optional parameter `json_mode`.

    .. versionchanged:: 4.0
       Changed default value of `tz_aware` to False.
    """

    json_mode: int
    strict_number_long: bool
    datetime_representation: int
    strict_uuid: bool

    def __new__(
        cls: Type["JSONOptions"],
        strict_number_long: Optional[bool] = None,
        datetime_representation: Optional[int] = None,
        strict_uuid: Optional[bool] = None,
        json_mode: int = JSONMode.RELAXED,
        *args: Any,
        **kwargs: Any
    ) -> "JSONOptions":
        kwargs["tz_aware"] = kwargs.get("tz_aware", False)
        if kwargs["tz_aware"]:
            kwargs["tzinfo"] = kwargs.get("tzinfo", utc)
        if datetime_representation not in (
            DatetimeRepresentation.LEGACY,
            DatetimeRepresentation.NUMBERLONG,
            DatetimeRepresentation.ISO8601,
            None,
        ):
            raise ValueError(
                "JSONOptions.datetime_representation must be one of LEGACY, "
                "NUMBERLONG, or ISO8601 from DatetimeRepresentation."
            )
        self = cast(JSONOptions, super(JSONOptions, cls).__new__(cls, *args, **kwargs))
        if json_mode not in (JSONMode.LEGACY, JSONMode.RELAXED, JSONMode.CANONICAL):
            raise ValueError(
                "JSONOptions.json_mode must be one of LEGACY, RELAXED, "
                "or CANONICAL from JSONMode."
            )
        self.json_mode = json_mode
        if self.json_mode == JSONMode.RELAXED:
            if strict_number_long:
                raise ValueError("Cannot specify strict_number_long=True with JSONMode.RELAXED")
            if datetime_representation not in (None, DatetimeRepresentation.ISO8601):
                raise ValueError(
                    "datetime_representation must be DatetimeRepresentation."
                    "ISO8601 or omitted with JSONMode.RELAXED"
                )
            if strict_uuid not in (None, True):
                raise ValueError("Cannot specify strict_uuid=False with JSONMode.RELAXED")
            self.strict_number_long = False
            self.datetime_representation = DatetimeRepresentation.ISO8601
            self.strict_uuid = True
        elif self.json_mode == JSONMode.CANONICAL:
            # Fixed: these three error messages previously said
            # "JSONMode.RELAXED" even though this is the CANONICAL branch.
            if strict_number_long not in (None, True):
                raise ValueError("Cannot specify strict_number_long=False with JSONMode.CANONICAL")
            if datetime_representation not in (None, DatetimeRepresentation.NUMBERLONG):
                raise ValueError(
                    "datetime_representation must be DatetimeRepresentation."
                    "NUMBERLONG or omitted with JSONMode.CANONICAL"
                )
            if strict_uuid not in (None, True):
                raise ValueError("Cannot specify strict_uuid=False with JSONMode.CANONICAL")
            self.strict_number_long = True
            self.datetime_representation = DatetimeRepresentation.NUMBERLONG
            self.strict_uuid = True
        else:  # JSONMode.LEGACY
            self.strict_number_long = False
            self.datetime_representation = DatetimeRepresentation.LEGACY
            self.strict_uuid = False
        # Explicit keyword arguments override the mode defaults.  LEGACY mode
        # permits any combination; RELAXED/CANONICAL were validated above.
        if strict_number_long is not None:
            self.strict_number_long = strict_number_long
        if datetime_representation is not None:
            self.datetime_representation = datetime_representation
        if strict_uuid is not None:
            self.strict_uuid = strict_uuid
        return self

    def _arguments_repr(self) -> str:
        # Extends the base class repr with the JSON-specific options.
        return (
            "strict_number_long=%r, "
            "datetime_representation=%r, "
            "strict_uuid=%r, json_mode=%r, %s"
            % (
                self.strict_number_long,
                self.datetime_representation,
                self.strict_uuid,
                self.json_mode,
                super(JSONOptions, self)._arguments_repr(),
            )
        )

    def _options_dict(self) -> Dict[Any, Any]:
        # TODO: PYTHON-2442 use _asdict() instead
        options_dict = super(JSONOptions, self)._options_dict()
        options_dict.update(
            {
                "strict_number_long": self.strict_number_long,
                "datetime_representation": self.datetime_representation,
                "strict_uuid": self.strict_uuid,
                "json_mode": self.json_mode,
            }
        )
        return options_dict

    def with_options(self, **kwargs: Any) -> "JSONOptions":
        """
        Make a copy of this JSONOptions, overriding some options::

            >>> from .json_util import CANONICAL_JSON_OPTIONS
            >>> CANONICAL_JSON_OPTIONS.tz_aware
            True
            >>> json_options = CANONICAL_JSON_OPTIONS.with_options(tz_aware=False, tzinfo=None)
            >>> json_options.tz_aware
            False

        .. versionadded:: 3.12
        """
        opts = self._options_dict()
        for opt in ("strict_number_long", "datetime_representation", "strict_uuid", "json_mode"):
            opts[opt] = kwargs.get(opt, getattr(self, opt))
        opts.update(kwargs)
        return JSONOptions(**opts)
# Shared, ready-made JSONOptions instances, one per JSONMode.
LEGACY_JSON_OPTIONS: JSONOptions = JSONOptions(json_mode=JSONMode.LEGACY)
""":class:`JSONOptions` for encoding to PyMongo's legacy JSON format.

.. seealso:: The documentation for :const:`bson.json_util.JSONMode.LEGACY`.

.. versionadded:: 3.5
"""

CANONICAL_JSON_OPTIONS: JSONOptions = JSONOptions(json_mode=JSONMode.CANONICAL)
""":class:`JSONOptions` for Canonical Extended JSON.

.. seealso:: The documentation for :const:`bson.json_util.JSONMode.CANONICAL`.

.. versionadded:: 3.5
"""

RELAXED_JSON_OPTIONS: JSONOptions = JSONOptions(json_mode=JSONMode.RELAXED)
""":class:`JSONOptions` for Relaxed Extended JSON.

.. seealso:: The documentation for :const:`bson.json_util.JSONMode.RELAXED`.

.. versionadded:: 3.5
"""

DEFAULT_JSON_OPTIONS: JSONOptions = RELAXED_JSON_OPTIONS
"""The default :class:`JSONOptions` for JSON encoding/decoding.

The same as :const:`RELAXED_JSON_OPTIONS`.

.. versionchanged:: 4.0
   Changed from :const:`LEGACY_JSON_OPTIONS` to
   :const:`RELAXED_JSON_OPTIONS`.

.. versionadded:: 3.4
"""
def dumps(obj: Any, *args: Any, **kwargs: Any) -> str:
    """Wrap :func:`json.dumps` with BSON-aware conversion.

    Recursively converts all BSON types, including
    :class:`~bson.binary.Binary` and :class:`~bson.code.Code`, before
    delegating to the standard JSON encoder.

    :Parameters:
      - `json_options`: A :class:`JSONOptions` instance used to modify the
        encoding of MongoDB Extended JSON types. Defaults to
        :const:`DEFAULT_JSON_OPTIONS`.

    .. versionchanged:: 4.0
       Now outputs MongoDB Relaxed Extended JSON by default (using
       :const:`DEFAULT_JSON_OPTIONS`).

    .. versionchanged:: 3.4
       Accepts optional parameter `json_options`. See :class:`JSONOptions`.
    """
    opts = kwargs.pop("json_options", DEFAULT_JSON_OPTIONS)
    converted = _json_convert(obj, opts)
    return json.dumps(converted, *args, **kwargs)
|
||||||
|
|
||||||
|
|
||||||
|
def loads(s: str, *args: Any, **kwargs: Any) -> Any:
    """Wrap :func:`json.loads` with BSON-aware decoding.

    Installs an ``object_pairs_hook`` so Extended JSON type wrappers are
    converted back into BSON types.

    Raises ``TypeError``, ``ValueError``, ``KeyError``, or
    :exc:`~bson.errors.InvalidId` on invalid MongoDB Extended JSON.

    :Parameters:
      - `json_options`: A :class:`JSONOptions` instance used to modify the
        decoding of MongoDB Extended JSON types. Defaults to
        :const:`DEFAULT_JSON_OPTIONS`.

    .. versionchanged:: 4.0
       Now loads :class:`datetime.datetime` instances as naive by default. To
       load timezone aware instances utilize the `json_options` parameter.
       See :ref:`tz_aware_default_change` for an example.

    .. versionchanged:: 3.5
       Parses Relaxed and Canonical Extended JSON as well as PyMongo's legacy
       format. Now raises ``TypeError`` or ``ValueError`` when parsing JSON
       type wrappers with values of the wrong type or any extra keys.

    .. versionchanged:: 3.4
       Accepts optional parameter `json_options`. See :class:`JSONOptions`.
    """
    opts = kwargs.pop("json_options", DEFAULT_JSON_OPTIONS)

    def _pairs_hook(pairs):
        return object_pairs_hook(pairs, opts)

    kwargs["object_pairs_hook"] = _pairs_hook
    return json.loads(s, *args, **kwargs)
|
||||||
|
|
||||||
|
|
||||||
|
def _json_convert(obj: Any, json_options: JSONOptions = DEFAULT_JSON_OPTIONS) -> Any:
    """Recursively convert BSON types into JSON-serializable equivalents.

    Mappings and iterables are walked depth-first; leaf values are handed to
    :func:`default`, which raises ``TypeError`` for plain JSON types — in
    that case the value is returned unchanged.
    """
    if hasattr(obj, "items"):
        # Mapping-like: rebuild as SON to preserve key order.
        return SON((key, _json_convert(value, json_options)) for key, value in obj.items())
    if hasattr(obj, "__iter__") and not isinstance(obj, (str, bytes)):
        # Sequence-like (but not text/bytes): convert element-wise.
        return [_json_convert(item, json_options) for item in obj]
    try:
        return default(obj, json_options)
    except TypeError:
        # Already JSON-serializable (int, str, None, ...): pass through.
        return obj
|
||||||
|
|
||||||
|
|
||||||
|
def object_pairs_hook(
    pairs: Sequence[Tuple[str, Any]], json_options: JSONOptions = DEFAULT_JSON_OPTIONS
) -> Any:
    """Build a document from decoded JSON pairs, then apply Extended JSON decoding."""
    document = json_options.document_class(pairs)
    return object_hook(document, json_options)
|
||||||
|
|
||||||
|
|
||||||
|
def object_hook(dct: Mapping[str, Any], json_options: JSONOptions = DEFAULT_JSON_OPTIONS) -> Any:
    # Dispatch on Extended JSON "$"-prefixed wrapper keys. The check order
    # follows the upstream driver; note the DBRef check runs early and that
    # $binary distinguishes the legacy ($type present) and canonical forms.
    # Mappings with no recognized wrapper are returned unchanged.
    if "$oid" in dct:
        return _parse_canonical_oid(dct)
    if (
        isinstance(dct.get("$ref"), str)
        and "$id" in dct
        and isinstance(dct.get("$db"), (str, type(None)))
    ):
        return _parse_canonical_dbref(dct)
    if "$date" in dct:
        return _parse_canonical_datetime(dct, json_options)
    if "$regex" in dct:
        return _parse_legacy_regex(dct)
    if "$minKey" in dct:
        return _parse_canonical_minkey(dct)
    if "$maxKey" in dct:
        return _parse_canonical_maxkey(dct)
    if "$binary" in dct:
        if "$type" in dct:
            # Legacy form: {"$binary": "<b64>", "$type": "<hex subtype>"}
            return _parse_legacy_binary(dct, json_options)
        else:
            # Canonical form: {"$binary": {"base64": ..., "subType": ...}}
            return _parse_canonical_binary(dct, json_options)
    if "$code" in dct:
        return _parse_canonical_code(dct)
    if "$uuid" in dct:
        return _parse_legacy_uuid(dct, json_options)
    if "$undefined" in dct:
        # Deprecated BSON Undefined decodes to None.
        return None
    if "$numberLong" in dct:
        return _parse_canonical_int64(dct)
    if "$timestamp" in dct:
        tsp = dct["$timestamp"]
        return Timestamp(tsp["t"], tsp["i"])
    if "$numberDecimal" in dct:
        return _parse_canonical_decimal128(dct)
    if "$dbPointer" in dct:
        return _parse_canonical_dbpointer(dct)
    if "$regularExpression" in dct:
        return _parse_canonical_regex(dct)
    if "$symbol" in dct:
        return _parse_canonical_symbol(dct)
    if "$numberInt" in dct:
        return _parse_canonical_int32(dct)
    if "$numberDouble" in dct:
        return _parse_canonical_double(dct)
    # No wrapper key recognized: return the mapping as-is.
    return dct
|
||||||
|
|
||||||
|
|
||||||
|
def _parse_legacy_regex(doc: Any) -> Any:
    """Decode a legacy ``{"$regex": ..., "$options": ...}`` document to a Regex.

    If ``$regex`` is not a string/bytes value this is the MongoDB ``$regex``
    query operator rather than a regex value, and *doc* is returned untouched.
    """
    pattern = doc["$regex"]
    if not isinstance(pattern, (str, bytes)):
        # $regex query operator: pass the document through unchanged.
        return doc
    # PyMongo always adds $options but some other tools may not.
    flags = 0
    for char in doc.get("$options", ""):
        flags |= _RE_OPT_TABLE.get(char, 0)
    return Regex(pattern, flags)
|
||||||
|
|
||||||
|
|
||||||
|
def _parse_legacy_uuid(doc: Any, json_options: JSONOptions) -> Union[Binary, uuid.UUID]:
    """Decode a JSON legacy ``{"$uuid": "<hex>"}`` wrapper.

    Returns a :class:`uuid.UUID`, or a :class:`Binary` when the configured
    UUID representation is ``UNSPECIFIED``.
    """
    if len(doc) != 1:
        raise TypeError("Bad $uuid, extra field(s): %s" % (doc,))
    if not isinstance(doc["$uuid"], str):
        raise TypeError("$uuid must be a string: %s" % (doc,))
    value = uuid.UUID(doc["$uuid"])
    if json_options.uuid_representation == UuidRepresentation.UNSPECIFIED:
        return Binary.from_uuid(value)
    return value
|
||||||
|
|
||||||
|
|
||||||
|
def _binary_or_uuid(data: Any, subtype: int, json_options: JSONOptions) -> Union[Binary, uuid.UUID]:
    # Turn decoded $binary payloads into Binary, or UUID for UUID subtypes.
    # special handling for UUID
    if subtype in ALL_UUID_SUBTYPES:
        uuid_representation = json_options.uuid_representation
        binary_value = Binary(data, subtype)
        if uuid_representation == UuidRepresentation.UNSPECIFIED:
            # No representation configured: hand back the raw Binary.
            return binary_value
        if subtype == UUID_SUBTYPE:
            # Legacy behavior: use STANDARD with binary subtype 4.
            uuid_representation = UuidRepresentation.STANDARD
        elif uuid_representation == UuidRepresentation.STANDARD:
            # subtype == OLD_UUID_SUBTYPE
            # Legacy behavior: STANDARD is the same as PYTHON_LEGACY.
            uuid_representation = UuidRepresentation.PYTHON_LEGACY
        return binary_value.as_uuid(uuid_representation)

    if subtype == 0:
        # NOTE(review): subtype 0 data is cast straight to UUID here —
        # presumably the caller only reaches this with an already-built UUID;
        # confirm against call sites.
        return cast(uuid.UUID, data)
    return Binary(data, subtype)
|
||||||
|
|
||||||
|
|
||||||
|
def _parse_legacy_binary(doc: Any, json_options: JSONOptions) -> Union[Binary, uuid.UUID]:
    """Decode a legacy ``{"$binary": "<b64>", "$type": ...}`` document."""
    if isinstance(doc["$type"], int):
        # Some producers emit the subtype as an int; normalize to 2-digit hex.
        doc["$type"] = "%02x" % doc["$type"]
    subtype = int(doc["$type"], 16)
    if subtype >= 0xFFFFFF80:  # Handle mongoexport values
        # mongoexport wrote sign-extended 32-bit subtypes; keep the low byte.
        subtype = int(doc["$type"][6:], 16)
    data = base64.b64decode(doc["$binary"].encode())
    return _binary_or_uuid(data, subtype, json_options)
|
||||||
|
|
||||||
|
|
||||||
|
def _parse_canonical_binary(doc: Any, json_options: JSONOptions) -> Union[Binary, uuid.UUID]:
    """Decode a canonical ``{"$binary": {"base64": ..., "subType": ...}}`` document."""
    wrapper = doc["$binary"]
    payload = wrapper["base64"]
    subtype_hex = wrapper["subType"]
    # Validate shape before decoding.
    if not isinstance(payload, str):
        raise TypeError("$binary base64 must be a string: %s" % (doc,))
    if not isinstance(subtype_hex, str) or len(subtype_hex) > 2:
        raise TypeError("$binary subType must be a string at most 2 characters: %s" % (doc,))
    if len(wrapper) != 2:
        raise TypeError('$binary must include only "base64" and "subType" components: %s' % (doc,))

    raw = base64.b64decode(payload.encode())
    return _binary_or_uuid(raw, int(subtype_hex, 16), json_options)
|
||||||
|
|
||||||
|
|
||||||
|
def _parse_canonical_datetime(
    doc: Any, json_options: JSONOptions
) -> Union[datetime.datetime, DatetimeMS]:
    """Decode a JSON datetime to python datetime.datetime."""
    dtm = doc["$date"]
    if len(doc) != 1:
        raise TypeError("Bad $date, extra field(s): %s" % (doc,))
    # mongoexport 2.6 and newer emits an ISO8601 string; older/canonical
    # forms carry milliseconds-since-epoch (handled at the bottom).
    if isinstance(dtm, str):
        # Parse offset: trailing "Z", (+|-)HH:MM, (+|-)HHMM, or (+|-)HH.
        if dtm[-1] == "Z":
            dt = dtm[:-1]
            offset = "Z"
        elif dtm[-6] in ("+", "-") and dtm[-3] == ":":
            # (+|-)HH:MM
            dt = dtm[:-6]
            offset = dtm[-6:]
        elif dtm[-5] in ("+", "-"):
            # (+|-)HHMM
            dt = dtm[:-5]
            offset = dtm[-5:]
        elif dtm[-3] in ("+", "-"):
            # (+|-)HH
            dt = dtm[:-3]
            offset = dtm[-3:]
        else:
            dt = dtm
            offset = ""

        # Parse the optional fractional seconds portion.
        dot_index = dt.rfind(".")
        microsecond = 0
        if dot_index != -1:
            microsecond = int(float(dt[dot_index:]) * 1000000)
            dt = dt[:dot_index]

        # Interpret the remaining "YYYY-mm-ddTHH:MM:SS" as UTC for now;
        # the explicit offset (if any) is subtracted below.
        aware = datetime.datetime.strptime(dt, "%Y-%m-%dT%H:%M:%S").replace(
            microsecond=microsecond, tzinfo=utc
        )

        if offset and offset != "Z":
            if len(offset) == 6:
                hours, minutes = offset[1:].split(":")
                secs = int(hours) * 3600 + int(minutes) * 60
            elif len(offset) == 5:
                secs = int(offset[1:3]) * 3600 + int(offset[3:]) * 60
            elif len(offset) == 3:
                secs = int(offset[1:3]) * 3600
            if offset[0] == "-":
                secs *= -1
            # Shift back to true UTC by removing the local offset.
            aware = aware - datetime.timedelta(seconds=secs)

        if json_options.tz_aware:
            if json_options.tzinfo:
                aware = aware.astimezone(json_options.tzinfo)
            if json_options.datetime_conversion == DatetimeConversion.DATETIME_MS:
                return DatetimeMS(aware)
            return aware
        else:
            # Caller wants naive datetimes: strip the tzinfo (value stays UTC).
            aware_tzinfo_none = aware.replace(tzinfo=None)
            if json_options.datetime_conversion == DatetimeConversion.DATETIME_MS:
                return DatetimeMS(aware_tzinfo_none)
            return aware_tzinfo_none
    # Numeric form: milliseconds since the Unix epoch.
    return _millis_to_datetime(int(dtm), json_options)
|
||||||
|
|
||||||
|
|
||||||
|
def _parse_canonical_oid(doc: Any) -> ObjectId:
    """Decode a JSON ``{"$oid": "<hex>"}`` wrapper to :class:`ObjectId`."""
    if len(doc) != 1:
        raise TypeError("Bad $oid, extra field(s): %s" % (doc,))
    return ObjectId(doc["$oid"])
|
||||||
|
|
||||||
|
|
||||||
|
def _parse_canonical_symbol(doc: Any) -> str:
|
||||||
|
"""Decode a JSON symbol to Python string."""
|
||||||
|
symbol = doc["$symbol"]
|
||||||
|
if len(doc) != 1:
|
||||||
|
raise TypeError("Bad $symbol, extra field(s): %s" % (doc,))
|
||||||
|
return str(symbol)
|
||||||
|
|
||||||
|
|
||||||
|
def _parse_canonical_code(doc: Any) -> Code:
    """Decode a JSON ``$code`` (with optional ``$scope``) to :class:`Code`."""
    unexpected = [key for key in doc if key not in ("$code", "$scope")]
    if unexpected:
        raise TypeError("Bad $code, extra field(s): %s" % (doc,))
    return Code(doc["$code"], scope=doc.get("$scope"))
|
||||||
|
|
||||||
|
|
||||||
|
def _parse_canonical_regex(doc: Any) -> Regex:
|
||||||
|
"""Decode a JSON regex to bson.regex.Regex."""
|
||||||
|
regex = doc["$regularExpression"]
|
||||||
|
if len(doc) != 1:
|
||||||
|
raise TypeError("Bad $regularExpression, extra field(s): %s" % (doc,))
|
||||||
|
if len(regex) != 2:
|
||||||
|
raise TypeError(
|
||||||
|
'Bad $regularExpression must include only "pattern"'
|
||||||
|
'and "options" components: %s' % (doc,)
|
||||||
|
)
|
||||||
|
opts = regex["options"]
|
||||||
|
if not isinstance(opts, str):
|
||||||
|
raise TypeError(
|
||||||
|
"Bad $regularExpression options, options must be string, was type %s" % (type(opts))
|
||||||
|
)
|
||||||
|
return Regex(regex["pattern"], opts)
|
||||||
|
|
||||||
|
|
||||||
|
def _parse_canonical_dbref(doc: Any) -> DBRef:
    """Decode a JSON DBRef to bson.dbref.DBRef.

    NOTE: mutates *doc* in place — ``$ref``/``$id``/``$db`` are popped and
    any remaining keys are forwarded as extra DBRef fields.
    """
    return DBRef(doc.pop("$ref"), doc.pop("$id"), database=doc.pop("$db", None), **doc)
|
||||||
|
|
||||||
|
|
||||||
|
def _parse_canonical_dbpointer(doc: Any) -> Any:
    """Decode a JSON (deprecated) DBPointer to bson.dbref.DBRef."""
    dbref = doc["$dbPointer"]
    if len(doc) != 1:
        raise TypeError("Bad $dbPointer, extra field(s): %s" % (doc,))
    if isinstance(dbref, DBRef):
        # The inner value arrives already decoded as a DBRef (presumably by
        # the nested object hook); validate it is a legal DBPointer payload.
        dbref_doc = dbref.as_doc()
        # DBPointer must not contain $db in its value.
        if dbref.database is not None:
            raise TypeError("Bad $dbPointer, extra field $db: %s" % (dbref_doc,))
        if not isinstance(dbref.id, ObjectId):
            raise TypeError("Bad $dbPointer, $id must be an ObjectId: %s" % (dbref_doc,))
        if len(dbref_doc) != 2:
            raise TypeError("Bad $dbPointer, extra field(s) in DBRef: %s" % (dbref_doc,))
        return dbref
    else:
        raise TypeError("Bad $dbPointer, expected a DBRef: %s" % (doc,))
|
||||||
|
|
||||||
|
|
||||||
|
def _parse_canonical_int32(doc: Any) -> int:
|
||||||
|
"""Decode a JSON int32 to python int."""
|
||||||
|
i_str = doc["$numberInt"]
|
||||||
|
if len(doc) != 1:
|
||||||
|
raise TypeError("Bad $numberInt, extra field(s): %s" % (doc,))
|
||||||
|
if not isinstance(i_str, str):
|
||||||
|
raise TypeError("$numberInt must be string: %s" % (doc,))
|
||||||
|
return int(i_str)
|
||||||
|
|
||||||
|
|
||||||
|
def _parse_canonical_int64(doc: Any) -> Int64:
    """Decode a JSON ``{"$numberLong": ...}`` wrapper to :class:`Int64`."""
    raw = doc["$numberLong"]
    if len(doc) != 1:
        raise TypeError("Bad $numberLong, extra field(s): %s" % (doc,))
    return Int64(raw)
|
||||||
|
|
||||||
|
|
||||||
|
def _parse_canonical_double(doc: Any) -> float:
|
||||||
|
"""Decode a JSON double to python float."""
|
||||||
|
d_str = doc["$numberDouble"]
|
||||||
|
if len(doc) != 1:
|
||||||
|
raise TypeError("Bad $numberDouble, extra field(s): %s" % (doc,))
|
||||||
|
if not isinstance(d_str, str):
|
||||||
|
raise TypeError("$numberDouble must be string: %s" % (doc,))
|
||||||
|
return float(d_str)
|
||||||
|
|
||||||
|
|
||||||
|
def _parse_canonical_decimal128(doc: Any) -> Decimal128:
    """Decode a JSON ``{"$numberDecimal": "<str>"}`` wrapper to :class:`Decimal128`."""
    raw = doc["$numberDecimal"]
    if len(doc) != 1:
        raise TypeError("Bad $numberDecimal, extra field(s): %s" % (doc,))
    if not isinstance(raw, str):
        raise TypeError("$numberDecimal must be string: %s" % (doc,))
    return Decimal128(raw)
|
||||||
|
|
||||||
|
|
||||||
|
def _parse_canonical_minkey(doc: Any) -> MinKey:
    """Decode a JSON ``{"$minKey": 1}`` wrapper to :class:`MinKey`."""
    value = doc["$minKey"]
    # type() (not isinstance) rejects bool(True), which equals 1.
    if type(value) is not int or value != 1:
        raise TypeError("$minKey value must be 1: %s" % (doc,))
    if len(doc) != 1:
        raise TypeError("Bad $minKey, extra field(s): %s" % (doc,))
    return MinKey()
|
||||||
|
|
||||||
|
|
||||||
|
def _parse_canonical_maxkey(doc: Any) -> MaxKey:
|
||||||
|
"""Decode a JSON MaxKey to bson.max_key.MaxKey."""
|
||||||
|
if type(doc["$maxKey"]) is not int or doc["$maxKey"] != 1:
|
||||||
|
raise TypeError("$maxKey value must be 1: %s", (doc,))
|
||||||
|
if len(doc) != 1:
|
||||||
|
raise TypeError("Bad $minKey, extra field(s): %s" % (doc,))
|
||||||
|
return MaxKey()
|
||||||
|
|
||||||
|
|
||||||
|
def _encode_binary(data: bytes, subtype: int, json_options: JSONOptions) -> Any:
    """Encode raw bytes + subtype as legacy or canonical Extended JSON."""
    if json_options.json_mode == JSONMode.LEGACY:
        # Legacy form: {"$binary": "<b64>", "$type": "<2-digit hex>"}
        return SON([("$binary", base64.b64encode(data).decode()), ("$type", "%02x" % subtype)])
    # Canonical/relaxed form: {"$binary": {"base64": ..., "subType": ...}}
    return {
        "$binary": SON([("base64", base64.b64encode(data).decode()), ("subType", "%02x" % subtype)])
    }
|
||||||
|
|
||||||
|
|
||||||
|
def default(obj: Any, json_options: JSONOptions = DEFAULT_JSON_OPTIONS) -> Any:
    """Encode a single BSON value as a JSON-serializable Extended JSON object.

    Raises ``TypeError`` for values with no Extended JSON mapping; callers
    (e.g. ``_json_convert``) treat that as "leave the value as-is".
    """
    # We preserve key order when rendering SON, DBRef, etc. as JSON by
    # returning a SON for those types instead of a dict.
    if isinstance(obj, ObjectId):
        return {"$oid": str(obj)}
    if isinstance(obj, DBRef):
        return _json_convert(obj.as_doc(), json_options=json_options)
    if isinstance(obj, datetime.datetime):
        if json_options.datetime_representation == DatetimeRepresentation.ISO8601:
            if not obj.tzinfo:
                # Naive datetimes are rendered as if they were UTC.
                obj = obj.replace(tzinfo=utc)
            assert obj.tzinfo is not None
            if obj >= EPOCH_AWARE:
                off = obj.tzinfo.utcoffset(obj)
                if (off.days, off.seconds, off.microseconds) == (0, 0, 0):  # type: ignore
                    tz_string = "Z"
                else:
                    tz_string = obj.strftime("%z")
                # Millisecond precision only; omit fraction when zero.
                millis = int(obj.microsecond / 1000)
                fracsecs = ".%03d" % (millis,) if millis else ""
                return {
                    "$date": "%s%s%s" % (obj.strftime("%Y-%m-%dT%H:%M:%S"), fracsecs, tz_string)
                }

        # Pre-epoch ISO8601 and the non-ISO representations fall through
        # to the milliseconds-since-epoch forms.
        millis = _datetime_to_millis(obj)
        if json_options.datetime_representation == DatetimeRepresentation.LEGACY:
            return {"$date": millis}
        return {"$date": {"$numberLong": str(millis)}}
    if isinstance(obj, DatetimeMS):
        if (
            json_options.datetime_representation == DatetimeRepresentation.ISO8601
            and 0 <= int(obj) <= _max_datetime_ms()
        ):
            # Representable as a datetime: reuse the datetime branch above.
            return default(obj.as_datetime(), json_options)
        elif json_options.datetime_representation == DatetimeRepresentation.LEGACY:
            return {"$date": str(int(obj))}
        return {"$date": {"$numberLong": str(int(obj))}}
    if json_options.strict_number_long and isinstance(obj, Int64):
        return {"$numberLong": str(obj)}
    if isinstance(obj, (RE_TYPE, Regex)):
        # Translate re module flags to the MongoDB regex option letters.
        flags = ""
        if obj.flags & re.IGNORECASE:
            flags += "i"
        if obj.flags & re.LOCALE:
            flags += "l"
        if obj.flags & re.MULTILINE:
            flags += "m"
        if obj.flags & re.DOTALL:
            flags += "s"
        if obj.flags & re.UNICODE:
            flags += "u"
        if obj.flags & re.VERBOSE:
            flags += "x"
        if isinstance(obj.pattern, str):
            pattern = obj.pattern
        else:
            pattern = obj.pattern.decode("utf-8")
        if json_options.json_mode == JSONMode.LEGACY:
            return SON([("$regex", pattern), ("$options", flags)])
        return {"$regularExpression": SON([("pattern", pattern), ("options", flags)])}
    if isinstance(obj, MinKey):
        return {"$minKey": 1}
    if isinstance(obj, MaxKey):
        return {"$maxKey": 1}
    if isinstance(obj, Timestamp):
        return {"$timestamp": SON([("t", obj.time), ("i", obj.inc)])}
    if isinstance(obj, Code):
        if obj.scope is None:
            return {"$code": str(obj)}
        return SON([("$code", str(obj)), ("$scope", _json_convert(obj.scope, json_options))])
    if isinstance(obj, Binary):
        return _encode_binary(obj, obj.subtype, json_options)
    if isinstance(obj, bytes):
        return _encode_binary(obj, 0, json_options)
    if isinstance(obj, uuid.UUID):
        if json_options.strict_uuid:
            binval = Binary.from_uuid(obj, uuid_representation=json_options.uuid_representation)
            return _encode_binary(binval, binval.subtype, json_options)
        else:
            return {"$uuid": obj.hex}
    if isinstance(obj, Decimal128):
        return {"$numberDecimal": str(obj)}
    if isinstance(obj, bool):
        # bool must be handled before the int branch (bool subclasses int).
        return obj
    if json_options.json_mode == JSONMode.CANONICAL and isinstance(obj, int):
        if -(2**31) <= obj < 2**31:
            return {"$numberInt": str(obj)}
        return {"$numberLong": str(obj)}
    if json_options.json_mode != JSONMode.LEGACY and isinstance(obj, float):
        if math.isnan(obj):
            return {"$numberDouble": "NaN"}
        elif math.isinf(obj):
            representation = "Infinity" if obj > 0 else "-Infinity"
            return {"$numberDouble": representation}
        elif json_options.json_mode == JSONMode.CANONICAL:
            # repr() will return the shortest string guaranteed to produce the
            # original value, when float() is called on it.
            return {"$numberDouble": str(repr(obj))}
    raise TypeError("%r is not JSON serializable" % obj)
|
55
src/xtquant/xtbson/bson37/max_key.py
Normal file
55
src/xtquant/xtbson/bson37/max_key.py
Normal file
@ -0,0 +1,55 @@
|
|||||||
|
# Copyright 2010-present MongoDB, Inc.
|
||||||
|
#
|
||||||
|
# Licensed under the Apache License, Version 2.0 (the "License");
|
||||||
|
# you may not use this file except in compliance with the License.
|
||||||
|
# You may obtain a copy of the License at
|
||||||
|
#
|
||||||
|
# http://www.apache.org/licenses/LICENSE-2.0
|
||||||
|
#
|
||||||
|
# Unless required by applicable law or agreed to in writing, software
|
||||||
|
# distributed under the License is distributed on an "AS IS" BASIS,
|
||||||
|
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
|
||||||
|
# See the License for the specific language governing permissions and
|
||||||
|
# limitations under the License.
|
||||||
|
|
||||||
|
"""Representation for the MongoDB internal MaxKey type.
|
||||||
|
"""
|
||||||
|
from typing import Any
|
||||||
|
|
||||||
|
|
||||||
|
class MaxKey(object):
    """MongoDB internal MaxKey type.

    Compares greater than every other value; two MaxKey instances are
    always equal to each other.
    """

    __slots__ = ()

    _type_marker = 127

    def __getstate__(self) -> Any:
        # Stateless: nothing to pickle.
        return {}

    def __setstate__(self, state: Any) -> None:
        # Stateless: nothing to restore.
        pass

    def __eq__(self, other: Any) -> bool:
        return isinstance(other, MaxKey)

    def __ne__(self, other: Any) -> bool:
        return not self == other

    def __hash__(self) -> int:
        return hash(self._type_marker)

    def __lt__(self, _other: Any) -> bool:
        # MaxKey never sorts before anything.
        return False

    def __le__(self, other: Any) -> bool:
        # <= holds only against another MaxKey (the single equal value).
        return isinstance(other, MaxKey)

    def __gt__(self, other: Any) -> bool:
        # Strictly greater than everything except another MaxKey.
        return not isinstance(other, MaxKey)

    def __ge__(self, _other: Any) -> bool:
        # MaxKey is >= everything.
        return True

    def __repr__(self):
        return "MaxKey()"
|
55
src/xtquant/xtbson/bson37/min_key.py
Normal file
55
src/xtquant/xtbson/bson37/min_key.py
Normal file
@ -0,0 +1,55 @@
|
|||||||
|
# Copyright 2010-present MongoDB, Inc.
|
||||||
|
#
|
||||||
|
# Licensed under the Apache License, Version 2.0 (the "License");
|
||||||
|
# you may not use this file except in compliance with the License.
|
||||||
|
# You may obtain a copy of the License at
|
||||||
|
#
|
||||||
|
# http://www.apache.org/licenses/LICENSE-2.0
|
||||||
|
#
|
||||||
|
# Unless required by applicable law or agreed to in writing, software
|
||||||
|
# distributed under the License is distributed on an "AS IS" BASIS,
|
||||||
|
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
|
||||||
|
# See the License for the specific language governing permissions and
|
||||||
|
# limitations under the License.
|
||||||
|
|
||||||
|
"""Representation for the MongoDB internal MinKey type.
|
||||||
|
"""
|
||||||
|
from typing import Any
|
||||||
|
|
||||||
|
|
||||||
|
class MinKey(object):
    """MongoDB internal MinKey type.

    Compares less than every other value; two MinKey instances are
    always equal to each other.
    """

    __slots__ = ()

    _type_marker = 255

    def __getstate__(self) -> Any:
        # Stateless: nothing to pickle.
        return {}

    def __setstate__(self, state: Any) -> None:
        # Stateless: nothing to restore.
        pass

    def __eq__(self, other: Any) -> bool:
        return isinstance(other, MinKey)

    def __ne__(self, other: Any) -> bool:
        return not self == other

    def __hash__(self) -> int:
        return hash(self._type_marker)

    def __lt__(self, other: Any) -> bool:
        # Strictly less than everything except another MinKey.
        return not isinstance(other, MinKey)

    def __le__(self, _other: Any) -> bool:
        # MinKey is <= everything.
        return True

    def __gt__(self, _other: Any) -> bool:
        # MinKey never sorts after anything.
        return False

    def __ge__(self, other: Any) -> bool:
        # >= holds only against another MinKey (the single equal value).
        return isinstance(other, MinKey)

    def __repr__(self):
        return "MinKey()"
|
286
src/xtquant/xtbson/bson37/objectid.py
Normal file
286
src/xtquant/xtbson/bson37/objectid.py
Normal file
@ -0,0 +1,286 @@
|
|||||||
|
# Copyright 2009-2015 MongoDB, Inc.
|
||||||
|
#
|
||||||
|
# Licensed under the Apache License, Version 2.0 (the "License");
|
||||||
|
# you may not use this file except in compliance with the License.
|
||||||
|
# You may obtain a copy of the License at
|
||||||
|
#
|
||||||
|
# http://www.apache.org/licenses/LICENSE-2.0
|
||||||
|
#
|
||||||
|
# Unless required by applicable law or agreed to in writing, software
|
||||||
|
# distributed under the License is distributed on an "AS IS" BASIS,
|
||||||
|
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
|
||||||
|
# See the License for the specific language governing permissions and
|
||||||
|
# limitations under the License.
|
||||||
|
|
||||||
|
"""Tools for working with MongoDB ObjectIds.
|
||||||
|
"""
|
||||||
|
|
||||||
|
import binascii
|
||||||
|
import calendar
|
||||||
|
import datetime
|
||||||
|
import os
|
||||||
|
import struct
|
||||||
|
import threading
|
||||||
|
import time
|
||||||
|
from random import SystemRandom
|
||||||
|
from typing import Any, NoReturn, Optional, Type, Union
|
||||||
|
|
||||||
|
from .errors import InvalidId
|
||||||
|
from .tz_util import utc
|
||||||
|
|
||||||
|
_MAX_COUNTER_VALUE = 0xFFFFFF
|
||||||
|
|
||||||
|
|
||||||
|
def _raise_invalid_id(oid: str) -> NoReturn:
    """Raise :class:`InvalidId` with the standard message for a malformed oid."""
    raise InvalidId(
        "%r is not a valid ObjectId, it must be a 12-byte input"
        " or a 24-character hex string" % oid
    )
|
||||||
|
|
||||||
|
|
||||||
|
def _random_bytes() -> bytes:
    """Get the 5-byte random field of an ObjectId."""
    # Fork safety is handled by ObjectId._random(), which regenerates this
    # value when the process id changes.
    return os.urandom(5)
|
||||||
|
|
||||||
|
|
||||||
|
class ObjectId(object):
|
||||||
|
"""A MongoDB ObjectId."""
|
||||||
|
|
||||||
|
_pid = os.getpid()
|
||||||
|
|
||||||
|
_inc = SystemRandom().randint(0, _MAX_COUNTER_VALUE)
|
||||||
|
_inc_lock = threading.Lock()
|
||||||
|
|
||||||
|
__random = _random_bytes()
|
||||||
|
|
||||||
|
__slots__ = ("__id",)
|
||||||
|
|
||||||
|
_type_marker = 7
|
||||||
|
|
||||||
|
    def __init__(self, oid: Optional[Union[str, "ObjectId", bytes]] = None) -> None:
        """Initialize a new ObjectId.

        An ObjectId is a 12-byte unique identifier consisting of:

        - a 4-byte value representing the seconds since the Unix epoch,
        - a 5-byte random value,
        - a 3-byte counter, starting with a random value.

        By default, ``ObjectId()`` creates a new unique identifier. The
        optional parameter `oid` can be an :class:`ObjectId`, or any 12
        :class:`bytes`.

        For example, the 12 bytes b'foo-bar-quux' do not follow the ObjectId
        specification but they are acceptable input::

          >>> ObjectId(b'foo-bar-quux')
          ObjectId('666f6f2d6261722d71757578')

        `oid` can also be a :class:`str` of 24 hex digits::

          >>> ObjectId('0123456789ab0123456789ab')
          ObjectId('0123456789ab0123456789ab')

        Raises :class:`~bson.errors.InvalidId` if `oid` is not 12 bytes nor
        24 hex digits, or :class:`TypeError` if `oid` is not an accepted type.

        :Parameters:
          - `oid` (optional): a valid ObjectId.

        .. seealso:: The MongoDB documentation on `ObjectIds <http://dochub.mongodb.org/core/objectids>`_.

        .. versionchanged:: 3.8
           :class:`~bson.objectid.ObjectId` now implements the `ObjectID
           specification version 0.2
           <https://github.com/mongodb/specifications/blob/master/source/
           objectid.rst>`_.
        """
        if oid is None:
            # No input: generate a fresh unique id.
            self.__generate()
        elif isinstance(oid, bytes) and len(oid) == 12:
            # Raw 12-byte value: adopt as-is.
            self.__id = oid
        else:
            # str / ObjectId / anything else: validate (raises on bad input).
            self.__validate(oid)
|
||||||
|
|
||||||
|
    @classmethod
    def from_datetime(cls: Type["ObjectId"], generation_time: datetime.datetime) -> "ObjectId":
        """Create a dummy ObjectId instance with a specific generation time.

        This method is useful for doing range queries on a field
        containing :class:`ObjectId` instances.

        .. warning::
           It is not safe to insert a document containing an ObjectId
           generated using this method. This method deliberately
           eliminates the uniqueness guarantee that ObjectIds
           generally provide. ObjectIds generated with this method
           should be used exclusively in queries.

        `generation_time` will be converted to UTC. Naive datetime
        instances will be treated as though they already contain UTC.

        An example using this helper to get documents where ``"_id"``
        was generated before January 1, 2010 would be:

        >>> gen_time = datetime.datetime(2010, 1, 1)
        >>> dummy_id = ObjectId.from_datetime(gen_time)
        >>> result = collection.find({"_id": {"$lt": dummy_id}})

        :Parameters:
          - `generation_time`: :class:`~datetime.datetime` to be used
            as the generation time for the resulting ObjectId.
        """
        offset = generation_time.utcoffset()
        if offset is not None:
            # Normalize aware datetimes to UTC before taking the timestamp.
            generation_time = generation_time - offset
        timestamp = calendar.timegm(generation_time.timetuple())
        # 4-byte big-endian timestamp followed by 8 zero bytes in place of
        # the random and counter fields.
        oid = struct.pack(">I", int(timestamp)) + b"\x00\x00\x00\x00\x00\x00\x00\x00"
        return cls(oid)
|
||||||
|
|
||||||
|
    @classmethod
    def is_valid(cls: Type["ObjectId"], oid: Any) -> bool:
        """Checks if a `oid` string is valid or not.

        :Parameters:
          - `oid`: the object id to validate

        .. versionadded:: 2.3
        """
        if not oid:
            return False

        # EAFP: attempt construction; InvalidId/TypeError mean "not valid".
        try:
            ObjectId(oid)
            return True
        except (InvalidId, TypeError):
            return False
|
||||||
|
|
||||||
|
    @classmethod
    def _random(cls) -> bytes:
        """Generate a 5-byte random number once per process."""
        pid = os.getpid()
        if pid != cls._pid:
            # Regenerate after a fork so child processes do not share the
            # parent's random field.
            cls._pid = pid
            cls.__random = _random_bytes()
        return cls.__random
|
||||||
|
|
||||||
|
def __generate(self) -> None:
    """Populate ``self.__id`` with a fresh 12-byte ObjectId value.

    Layout: 4-byte big-endian timestamp, 5 random bytes (fixed per
    process), 3-byte big-endian counter.
    """
    parts = [
        struct.pack(">I", int(time.time())),  # 4 bytes current time
        ObjectId._random(),                   # 5 bytes random
    ]
    # 3 bytes inc, taken under the lock so concurrent generators never
    # reuse a counter value.
    with ObjectId._inc_lock:
        parts.append(struct.pack(">I", ObjectId._inc)[1:4])
        ObjectId._inc = (ObjectId._inc + 1) % (_MAX_COUNTER_VALUE + 1)

    self.__id = b"".join(parts)
|
||||||
|
|
||||||
|
def __validate(self, oid: Any) -> None:
    """Validate *oid* and adopt its 12-byte value for this ObjectId.

    Raises TypeError if *oid* is not a str, bytes, or ObjectId, and
    InvalidId if it is a string that is not a valid 24-character hex id.

    :Parameters:
      - `oid`: a valid ObjectId
    """
    if isinstance(oid, ObjectId):
        self.__id = oid.binary
        return
    if not isinstance(oid, str):
        raise TypeError(
            "id must be an instance of (bytes, str, ObjectId), not %s" % (type(oid),)
        )
    # A textual id must be exactly 24 hex characters (12 bytes).
    if len(oid) != 24:
        _raise_invalid_id(oid)
    try:
        self.__id = bytes.fromhex(oid)
    except (TypeError, ValueError):
        _raise_invalid_id(oid)
|
||||||
|
|
||||||
|
@property
def binary(self) -> bytes:
    """The raw 12-byte representation of this ObjectId."""
    return self.__id
|
||||||
|
|
||||||
|
@property
def generation_time(self) -> datetime.datetime:
    """When this :class:`ObjectId` was generated.

    Returned as a timezone-aware :class:`datetime.datetime` in UTC,
    precise to the second (the id only stores whole seconds).
    """
    seconds = struct.unpack(">I", self.__id[0:4])[0]
    return datetime.datetime.fromtimestamp(seconds, utc)
|
||||||
|
|
||||||
|
def __getstate__(self) -> bytes:
    """Pickling support; required explicitly because of ``__slots__``."""
    return self.__id
|
||||||
|
|
||||||
|
def __setstate__(self, value: Any) -> None:
    """Restore pickled state, accepting legacy pickle formats.

    pymongo <= 1.9 pickled a dict rather than raw state; Python 2
    pickles carry ``str`` instead of ``bytes``, which is re-encoded as
    latin-1 to recover the original byte values.
    """
    oid = value["_ObjectId__id"] if isinstance(value, dict) else value
    self.__id = oid.encode("latin-1") if isinstance(oid, str) else oid
|
||||||
|
|
||||||
|
def __str__(self) -> str:
    """24-character lowercase hex rendering of the 12 id bytes."""
    return self.__id.hex()
|
||||||
|
|
||||||
|
def __repr__(self):
    """Eval-able representation, e.g. ``ObjectId('...')``."""
    return "ObjectId('%s')" % (str(self),)
|
||||||
|
|
||||||
|
def __eq__(self, other: Any) -> bool:
    """Equal when the 12 raw bytes of both ObjectIds match."""
    if not isinstance(other, ObjectId):
        return NotImplemented
    return self.__id == other.binary
|
||||||
|
|
||||||
|
def __ne__(self, other: Any) -> bool:
    """Inverse of byte-level equality against another ObjectId."""
    if not isinstance(other, ObjectId):
        return NotImplemented
    return self.__id != other.binary
|
||||||
|
|
||||||
|
def __lt__(self, other: Any) -> bool:
    """Lexicographic byte-order comparison against another ObjectId."""
    if not isinstance(other, ObjectId):
        return NotImplemented
    return self.__id < other.binary
|
||||||
|
|
||||||
|
def __le__(self, other: Any) -> bool:
    """Lexicographic byte-order comparison against another ObjectId."""
    if not isinstance(other, ObjectId):
        return NotImplemented
    return self.__id <= other.binary
|
||||||
|
|
||||||
|
def __gt__(self, other: Any) -> bool:
    """Lexicographic byte-order comparison against another ObjectId."""
    if not isinstance(other, ObjectId):
        return NotImplemented
    return self.__id > other.binary
|
||||||
|
|
||||||
|
def __ge__(self, other: Any) -> bool:
    """Lexicographic byte-order comparison against another ObjectId."""
    if not isinstance(other, ObjectId):
        return NotImplemented
    return self.__id >= other.binary
|
||||||
|
|
||||||
|
def __hash__(self) -> int:
    """Hash of the underlying 12 bytes, so equal ids hash equal."""
    return hash(self.__id)
|
2
src/xtquant/xtbson/bson37/py.typed
Normal file
2
src/xtquant/xtbson/bson37/py.typed
Normal file
@ -0,0 +1,2 @@
|
|||||||
|
# PEP-561 Support File.
|
||||||
|
# "Package maintainers who wish to support type checking of their code MUST add a marker file named py.typed to their package supporting typing".
|
196
src/xtquant/xtbson/bson37/raw_bson.py
Normal file
196
src/xtquant/xtbson/bson37/raw_bson.py
Normal file
@ -0,0 +1,196 @@
|
|||||||
|
# Copyright 2015-present MongoDB, Inc.
|
||||||
|
#
|
||||||
|
# Licensed under the Apache License, Version 2.0 (the "License");
|
||||||
|
# you may not use this file except in compliance with the License.
|
||||||
|
# You may obtain a copy of the License at
|
||||||
|
#
|
||||||
|
# http://www.apache.org/licenses/LICENSE-2.0
|
||||||
|
#
|
||||||
|
# Unless required by applicable law or agreed to in writing, software
|
||||||
|
# distributed under the License is distributed on an "AS IS" BASIS,
|
||||||
|
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
|
||||||
|
# See the License for the specific language governing permissions and
|
||||||
|
# limitations under the License.
|
||||||
|
|
||||||
|
"""Tools for representing raw BSON documents.
|
||||||
|
|
||||||
|
Inserting and Retrieving RawBSONDocuments
|
||||||
|
=========================================
|
||||||
|
|
||||||
|
Example: Moving a document between different databases/collections
|
||||||
|
|
||||||
|
.. doctest::
|
||||||
|
|
||||||
|
>>> import bson
|
||||||
|
>>> from pymongo import MongoClient
|
||||||
|
>>> from .raw_bson import RawBSONDocument
|
||||||
|
>>> client = MongoClient(document_class=RawBSONDocument)
|
||||||
|
>>> client.drop_database('db')
|
||||||
|
>>> client.drop_database('replica_db')
|
||||||
|
>>> db = client.db
|
||||||
|
>>> result = db.test.insert_many([{'_id': 1, 'a': 1},
|
||||||
|
... {'_id': 2, 'b': 1},
|
||||||
|
... {'_id': 3, 'c': 1},
|
||||||
|
... {'_id': 4, 'd': 1}])
|
||||||
|
>>> replica_db = client.replica_db
|
||||||
|
>>> for doc in db.test.find():
|
||||||
|
... print(f"raw document: {doc.raw}")
|
||||||
|
... print(f"decoded document: {bson.decode(doc.raw)}")
|
||||||
|
... result = replica_db.test.insert_one(doc)
|
||||||
|
raw document: b'...'
|
||||||
|
decoded document: {'_id': 1, 'a': 1}
|
||||||
|
raw document: b'...'
|
||||||
|
decoded document: {'_id': 2, 'b': 1}
|
||||||
|
raw document: b'...'
|
||||||
|
decoded document: {'_id': 3, 'c': 1}
|
||||||
|
raw document: b'...'
|
||||||
|
decoded document: {'_id': 4, 'd': 1}
|
||||||
|
|
||||||
|
For use cases like moving documents across different databases or writing binary
|
||||||
|
blobs to disk, using raw BSON documents provides better speed and avoids the
|
||||||
|
overhead of decoding or encoding BSON.
|
||||||
|
"""
|
||||||
|
|
||||||
|
from typing import Any, ItemsView, Iterator, Mapping, Optional
|
||||||
|
|
||||||
|
from . import _get_object_size, _raw_to_dict
|
||||||
|
from .codec_options import _RAW_BSON_DOCUMENT_MARKER
|
||||||
|
from .codec_options import DEFAULT_CODEC_OPTIONS as DEFAULT
|
||||||
|
from .codec_options import CodecOptions
|
||||||
|
from .son import SON
|
||||||
|
|
||||||
|
|
||||||
|
def _inflate_bson(
    bson_bytes: bytes, codec_options: CodecOptions, raw_array: bool = False
) -> Mapping[Any, Any]:
    """Decode only the top-level fields of a BSON document.

    :Parameters:
      - `bson_bytes`: the BSON bytes that compose this document
      - `codec_options`: An instance of
        :class:`~bson.codec_options.CodecOptions` whose ``document_class``
        must be :class:`RawBSONDocument`.
    """
    # SON keeps elements in their on-the-wire order. Offsets skip the
    # 4-byte length header and the trailing NUL terminator.
    return _raw_to_dict(
        bson_bytes, 4, len(bson_bytes) - 1, codec_options, SON(), raw_array=raw_array
    )
|
||||||
|
|
||||||
|
|
||||||
|
class RawBSONDocument(Mapping[str, Any]):
    """Read-only view of a MongoDB document backed by its raw BSON bytes.

    Decoding is lazy: the bytes are only inflated when a field is
    actually accessed or modified within the document.
    """

    __slots__ = ("__raw", "__inflated_doc", "__codec_options")
    _type_marker = _RAW_BSON_DOCUMENT_MARKER

    def __init__(self, bson_bytes: bytes, codec_options: Optional[CodecOptions] = None) -> None:
        """Create a new :class:`RawBSONDocument`.

        Implements the ``Mapping`` abstract base class, so it can be used
        like a read-only ``dict``::

            >>> from . import encode
            >>> raw_doc = RawBSONDocument(encode({'_id': 'my_doc'}))
            >>> raw_doc.raw
            b'...'
            >>> raw_doc['_id']
            'my_doc'

        :Parameters:
          - `bson_bytes`: the BSON bytes that compose this document
          - `codec_options` (optional): An instance of
            :class:`~bson.codec_options.CodecOptions` whose ``document_class``
            must be :class:`RawBSONDocument`. The default is
            :attr:`DEFAULT_RAW_BSON_OPTIONS`.

        .. versionchanged:: 3.8
           Validates that ``bson_bytes`` represents a single bson document.

        .. versionchanged:: 3.5
           A supplied :class:`~bson.codec_options.CodecOptions` must use
           :class:`RawBSONDocument` as its `document_class`.
        """
        self.__raw = bson_bytes
        self.__inflated_doc: Optional[Mapping[str, Any]] = None
        # DEFAULT_RAW_BSON_OPTIONS refers to this very class, so it cannot
        # appear as a default value in the signature above.
        if codec_options is None:
            codec_options = DEFAULT_RAW_BSON_OPTIONS
        elif not issubclass(codec_options.document_class, RawBSONDocument):
            raise TypeError(
                "RawBSONDocument cannot use CodecOptions with document "
                "class %s" % (codec_options.document_class,)
            )
        self.__codec_options = codec_options
        # Fail fast on malformed input: must be exactly one bson object.
        _get_object_size(bson_bytes, 0, len(bson_bytes))

    @property
    def raw(self) -> bytes:
        """The raw BSON bytes composing this document."""
        return self.__raw

    def items(self) -> ItemsView[str, Any]:
        """Lazily decode and iterate elements in this document."""
        return self.__inflated.items()

    @property
    def __inflated(self) -> Mapping[str, Any]:
        # Decode on first access only; the object size was already
        # validated in __init__, so no need to re-check here.
        if self.__inflated_doc is None:
            self.__inflated_doc = self._inflate_bson(self.__raw, self.__codec_options)
        return self.__inflated_doc

    @staticmethod
    def _inflate_bson(bson_bytes: bytes, codec_options: CodecOptions) -> Mapping[Any, Any]:
        # Subclasses override this to control how deeply fields inflate.
        return _inflate_bson(bson_bytes, codec_options)

    def __getitem__(self, item: str) -> Any:
        return self.__inflated[item]

    def __iter__(self) -> Iterator[str]:
        return iter(self.__inflated)

    def __len__(self) -> int:
        return len(self.__inflated)

    def __eq__(self, other: Any) -> bool:
        # Byte-level comparison: equal fields in a different key order do
        # NOT compare equal.
        if not isinstance(other, RawBSONDocument):
            return NotImplemented
        return self.__raw == other.raw

    def __repr__(self):
        return "%s(%r, codec_options=%r)" % (
            self.__class__.__name__,
            self.raw,
            self.__codec_options,
        )
|
||||||
|
|
||||||
|
|
||||||
|
class _RawArrayBSONDocument(RawBSONDocument):
    """RawBSONDocument variant that keeps sub-documents and arrays raw
    until they are accessed."""

    @staticmethod
    def _inflate_bson(bson_bytes: bytes, codec_options: CodecOptions) -> Mapping[Any, Any]:
        # Same top-level inflation as the base class, but raw_array=True
        # leaves nested containers as raw bytes.
        return _inflate_bson(bson_bytes, codec_options, raw_array=True)
|
||||||
|
|
||||||
|
|
||||||
|
# These codec options reference the classes above, so they must be
# defined after them, at the bottom of the module.
DEFAULT_RAW_BSON_OPTIONS: CodecOptions = DEFAULT.with_options(document_class=RawBSONDocument)
_RAW_ARRAY_BSON_OPTIONS: CodecOptions = DEFAULT.with_options(document_class=_RawArrayBSONDocument)
"""The default :class:`~bson.codec_options.CodecOptions` for
:class:`RawBSONDocument`.
"""
|
135
src/xtquant/xtbson/bson37/regex.py
Normal file
135
src/xtquant/xtbson/bson37/regex.py
Normal file
@ -0,0 +1,135 @@
|
|||||||
|
# Copyright 2013-present MongoDB, Inc.
|
||||||
|
#
|
||||||
|
# Licensed under the Apache License, Version 2.0 (the "License");
|
||||||
|
# you may not use this file except in compliance with the License.
|
||||||
|
# You may obtain a copy of the License at
|
||||||
|
#
|
||||||
|
# http://www.apache.org/licenses/LICENSE-2.0
|
||||||
|
#
|
||||||
|
# Unless required by applicable law or agreed to in writing, software
|
||||||
|
# distributed under the License is distributed on an "AS IS" BASIS,
|
||||||
|
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
|
||||||
|
# See the License for the specific language governing permissions and
|
||||||
|
# limitations under the License.
|
||||||
|
|
||||||
|
"""Tools for representing MongoDB regular expressions.
|
||||||
|
"""
|
||||||
|
|
||||||
|
import re
|
||||||
|
from typing import Any, Generic, Pattern, Type, TypeVar, Union
|
||||||
|
|
||||||
|
from ._helpers import _getstate_slots, _setstate_slots
|
||||||
|
from .son import RE_TYPE
|
||||||
|
|
||||||
|
|
||||||
|
def str_flags_to_int(str_flags: str) -> int:
    """Translate a MongoDB regex flag string (e.g. ``"im"``) into the
    equivalent :mod:`re` module flag bitmask.

    Unrecognized characters are silently ignored.
    """
    char_to_flag = {
        "i": re.IGNORECASE,
        "l": re.LOCALE,
        "m": re.MULTILINE,
        "s": re.DOTALL,
        "u": re.UNICODE,
        "x": re.VERBOSE,
    }
    flags = 0
    for ch, flag in char_to_flag.items():
        if ch in str_flags:
            flags |= flag
    return flags
|
||||||
|
|
||||||
|
|
||||||
|
_T = TypeVar("_T", str, bytes)


class Regex(Generic[_T]):
    """BSON regular expression data."""

    __slots__ = ("pattern", "flags")

    __getstate__ = _getstate_slots
    __setstate__ = _setstate_slots

    _type_marker = 11

    @classmethod
    def from_native(cls: Type["Regex"], regex: "Pattern[_T]") -> "Regex[_T]":
        """Wrap a compiled Python regular expression as a ``Regex``.

        In Python 3 a pattern compiled from a :class:`str` carries the
        ``re.UNICODE`` flag; unset it first if it should not be stored::

            >>> pattern = re.compile('.*')
            >>> regex = Regex.from_native(pattern)
            >>> regex.flags ^= re.UNICODE
            >>> db.collection.insert_one({'pattern': regex})

        :Parameters:
          - `regex`: A regular expression object from ``re.compile()``.

        .. warning::
           Python regular expressions use a different syntax and different
           set of flags than MongoDB, which uses `PCRE`_. A pattern
           retrieved from the server may not compile in Python, or may
           match a different set of strings.

        .. _PCRE: http://www.pcre.org/
        """
        if not isinstance(regex, RE_TYPE):
            raise TypeError("regex must be a compiled regular expression, not %s" % type(regex))

        return Regex(regex.pattern, regex.flags)

    def __init__(self, pattern: _T, flags: Union[str, int] = 0) -> None:
        """BSON regular expression data.

        Useful for storing and retrieving patterns that are incompatible
        with Python's regular expression dialect.

        :Parameters:
          - `pattern`: string
          - `flags`: (optional) an integer bitmask, or a string of flag
            characters like "im" for IGNORECASE and MULTILINE
        """
        if not isinstance(pattern, (str, bytes)):
            raise TypeError("pattern must be a string, not %s" % type(pattern))
        self.pattern: _T = pattern

        # str and int are disjoint types, so check order is immaterial.
        if isinstance(flags, int):
            self.flags = flags
        elif isinstance(flags, str):
            self.flags = str_flags_to_int(flags)
        else:
            raise TypeError("flags must be a string or int, not %s" % type(flags))

    def __eq__(self, other: Any) -> bool:
        if not isinstance(other, Regex):
            return NotImplemented
        return self.pattern == other.pattern and self.flags == other.flags

    # Mutable (__slots__ are writable), hence unhashable.
    __hash__ = None  # type: ignore

    def __ne__(self, other: Any) -> bool:
        return not self == other

    def __repr__(self):
        return "Regex(%r, %r)" % (self.pattern, self.flags)

    def try_compile(self) -> "Pattern[_T]":
        """Compile this :class:`Regex` with Python's :mod:`re` module.

        .. warning::
           MongoDB uses `PCRE`_, whose syntax and flags differ from
           Python's; compilation may raise :exc:`re.error`, or produce a
           pattern that matches a different set of strings.

        .. _PCRE: http://www.pcre.org/
        """
        return re.compile(self.pattern, self.flags)
|
208
src/xtquant/xtbson/bson37/son.py
Normal file
208
src/xtquant/xtbson/bson37/son.py
Normal file
@ -0,0 +1,208 @@
|
|||||||
|
# Copyright 2009-present MongoDB, Inc.
|
||||||
|
#
|
||||||
|
# Licensed under the Apache License, Version 2.0 (the "License");
|
||||||
|
# you may not use this file except in compliance with the License.
|
||||||
|
# You may obtain a copy of the License at
|
||||||
|
#
|
||||||
|
# http://www.apache.org/licenses/LICENSE-2.0
|
||||||
|
#
|
||||||
|
# Unless required by applicable law or agreed to in writing, software
|
||||||
|
# distributed under the License is distributed on an "AS IS" BASIS,
|
||||||
|
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
|
||||||
|
# See the License for the specific language governing permissions and
|
||||||
|
# limitations under the License.
|
||||||
|
|
||||||
|
"""Tools for creating and manipulating SON, the Serialized Ocument Notation.
|
||||||
|
|
||||||
|
Regular dictionaries can be used instead of SON objects, but not when the order
|
||||||
|
of keys is important. A SON object can be used just like a normal Python
|
||||||
|
dictionary."""
|
||||||
|
|
||||||
|
import copy
|
||||||
|
import re
|
||||||
|
from collections.abc import Mapping as _Mapping
|
||||||
|
from typing import (
|
||||||
|
Any,
|
||||||
|
Dict,
|
||||||
|
Iterable,
|
||||||
|
Iterator,
|
||||||
|
List,
|
||||||
|
Mapping,
|
||||||
|
Optional,
|
||||||
|
Pattern,
|
||||||
|
Tuple,
|
||||||
|
Type,
|
||||||
|
TypeVar,
|
||||||
|
Union,
|
||||||
|
)
|
||||||
|
|
||||||
|
# This sort of sucks, but seems to be as good as it gets...
|
||||||
|
# This is essentially the same as re._pattern_type
|
||||||
|
# This sort of sucks, but seems to be as good as it gets...
# Essentially the same as re._pattern_type.
RE_TYPE: Type[Pattern[Any]] = type(re.compile(""))

_Key = TypeVar("_Key")
_Value = TypeVar("_Value")
_T = TypeVar("_T")


class SON(Dict[_Key, _Value]):
    """SON data.

    A ``dict`` subclass that preserves key insertion order and offers a
    few SON-specific conveniences; the API resembles
    ``collections.OrderedDict``.
    """

    # Parallel list of keys in insertion order.
    __keys: List[Any]

    def __init__(
        self,
        data: Optional[Union[Mapping[_Key, _Value], Iterable[Tuple[_Key, _Value]]]] = None,
        **kwargs: Any
    ) -> None:
        self.__keys = []
        dict.__init__(self)
        self.update(data)
        self.update(kwargs)

    def __new__(cls: Type["SON[_Key, _Value]"], *args: Any, **kwargs: Any) -> "SON[_Key, _Value]":
        # Initialize __keys here as well, so construction paths that
        # bypass __init__ still see a key list.
        instance = super(SON, cls).__new__(cls, *args, **kwargs)
        instance.__keys = []
        return instance

    def __repr__(self):
        pairs = ("(%r, %r)" % (key, self[key]) for key in self.__keys)
        return "SON([%s])" % ", ".join(pairs)

    def __setitem__(self, key: _Key, value: _Value) -> None:
        if key not in self.__keys:
            self.__keys.append(key)
        dict.__setitem__(self, key, value)

    def __delitem__(self, key: _Key) -> None:
        self.__keys.remove(key)
        dict.__delitem__(self, key)

    def copy(self) -> "SON[_Key, _Value]":
        duplicate: SON[_Key, _Value] = SON()
        duplicate.update(self)
        return duplicate

    # TODO this is all from UserDict.DictMixin. it could probably be made more
    # efficient.
    # second level definitions support higher levels
    def __iter__(self) -> Iterator[_Key]:
        yield from self.__keys

    def has_key(self, key: _Key) -> bool:
        return key in self.__keys

    def iterkeys(self) -> Iterator[_Key]:
        return self.__iter__()

    # fourth level uses definitions from lower levels
    def itervalues(self) -> Iterator[_Value]:
        for _, v in self.items():
            yield v

    def values(self) -> List[_Value]:  # type: ignore[override]
        return [v for _, v in self.items()]

    def clear(self) -> None:
        self.__keys = []
        super(SON, self).clear()

    def setdefault(self, key: _Key, default: _Value) -> _Value:  # type: ignore[override]
        try:
            return self[key]
        except KeyError:
            self[key] = default
            return default

    def pop(self, key: _Key, *args: Union[_Value, _T]) -> Union[_Value, _T]:
        if len(args) > 1:
            raise TypeError("pop expected at most 2 arguments, got " + repr(1 + len(args)))
        try:
            value = self[key]
        except KeyError:
            if args:
                return args[0]
            raise
        del self[key]
        return value

    def popitem(self) -> Tuple[_Key, _Value]:
        # Removes the FIRST inserted item (unlike dict.popitem).
        try:
            k, v = next(iter(self.items()))
        except StopIteration:
            raise KeyError("container is empty")
        del self[k]
        return (k, v)

    def update(self, other: Optional[Any] = None, **kwargs: _Value) -> None:  # type: ignore[override]
        # Make progressively weaker assumptions about "other".
        if other is None:
            pass
        elif hasattr(other, "items"):
            for k, v in other.items():
                self[k] = v
        elif hasattr(other, "keys"):
            for k in other.keys():
                self[k] = other[k]
        else:
            for k, v in other:
                self[k] = v
        if kwargs:
            self.update(kwargs)

    def get(self, key: _Key, default: Optional[Union[_Value, _T]] = None) -> Union[_Value, _T, None]:  # type: ignore[override]
        try:
            return self[key]
        except KeyError:
            return default

    def __eq__(self, other: Any) -> bool:
        """Order-sensitive against another SON; order-insensitive against
        a regular dictionary.
        """
        if isinstance(other, SON):
            return len(self) == len(other) and list(self.items()) == list(other.items())
        return self.to_dict() == other

    def __ne__(self, other: Any) -> bool:
        return not self == other

    def __len__(self) -> int:
        return len(self.__keys)

    def to_dict(self) -> Dict[_Key, _Value]:
        """Recursively convert this SON into a plain Python ``dict``
        (nested mappings and lists included).
        """

        def transform_value(value: Any) -> Any:
            if isinstance(value, list):
                return [transform_value(v) for v in value]
            if isinstance(value, _Mapping):
                return {k: transform_value(v) for k, v in value.items()}
            return value

        return transform_value(dict(self))

    def __deepcopy__(self, memo: Dict[int, "SON[_Key, _Value]"]) -> "SON[_Key, _Value]":
        out: SON[_Key, _Value] = SON()
        val_id = id(self)
        if val_id in memo:
            return memo[val_id]
        memo[val_id] = out
        for k, v in self.items():
            # Compiled regex objects are not deep-copied.
            if not isinstance(v, RE_TYPE):
                v = copy.deepcopy(v, memo)
            out[k] = v
        return out
|
124
src/xtquant/xtbson/bson37/timestamp.py
Normal file
124
src/xtquant/xtbson/bson37/timestamp.py
Normal file
@ -0,0 +1,124 @@
|
|||||||
|
# Copyright 2010-2015 MongoDB, Inc.
|
||||||
|
#
|
||||||
|
# Licensed under the Apache License, Version 2.0 (the "License");
|
||||||
|
# you may not use this file except in compliance with the License.
|
||||||
|
# You may obtain a copy of the License at
|
||||||
|
#
|
||||||
|
# http://www.apache.org/licenses/LICENSE-2.0
|
||||||
|
#
|
||||||
|
# Unless required by applicable law or agreed to in writing, software
|
||||||
|
# distributed under the License is distributed on an "AS IS" BASIS,
|
||||||
|
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
|
||||||
|
# See the License for the specific language governing permissions and
|
||||||
|
# limitations under the License.
|
||||||
|
|
||||||
|
"""Tools for representing MongoDB internal Timestamps.
|
||||||
|
"""
|
||||||
|
|
||||||
|
import calendar
|
||||||
|
import datetime
|
||||||
|
from typing import Any, Union
|
||||||
|
|
||||||
|
from ._helpers import _getstate_slots, _setstate_slots
|
||||||
|
from .tz_util import utc
|
||||||
|
|
||||||
|
UPPERBOUND = 4294967296  # 2**32; time and inc are unsigned 32-bit values


class Timestamp(object):
    """MongoDB internal timestamps used in the opLog."""

    __slots__ = ("__time", "__inc")

    __getstate__ = _getstate_slots
    __setstate__ = _setstate_slots

    _type_marker = 17

    def __init__(self, time: Union[datetime.datetime, int], inc: int) -> None:
        """Create a new :class:`Timestamp`.

        Only for use with the MongoDB opLog; store ordinary timestamps as
        a :class:`~datetime.datetime` instead.

        Raises :class:`TypeError` unless `time` is an :class:`int` or
        :class:`~datetime.datetime` and `inc` is an :class:`int`; raises
        :class:`ValueError` unless both values fall in [0, 2**32).

        :Parameters:
          - `time`: time in seconds since epoch UTC, or a naive UTC
            :class:`~datetime.datetime`, or an aware
            :class:`~datetime.datetime`
          - `inc`: the incrementing counter
        """
        if isinstance(time, datetime.datetime):
            # Normalize aware datetimes to UTC, then to epoch seconds.
            offset = time.utcoffset()
            if offset is not None:
                time = time - offset
            time = int(calendar.timegm(time.timetuple()))
        if not isinstance(time, int):
            raise TypeError("time must be an instance of int")
        if not isinstance(inc, int):
            raise TypeError("inc must be an instance of int")
        if not 0 <= time < UPPERBOUND:
            raise ValueError("time must be contained in [0, 2**32)")
        if not 0 <= inc < UPPERBOUND:
            raise ValueError("inc must be contained in [0, 2**32)")

        self.__time = time
        self.__inc = inc

    @property
    def time(self) -> int:
        """Get the time portion of this :class:`Timestamp`."""
        return self.__time

    @property
    def inc(self) -> int:
        """Get the inc portion of this :class:`Timestamp`."""
        return self.__inc

    def __eq__(self, other: Any) -> bool:
        if not isinstance(other, Timestamp):
            return NotImplemented
        return self.__time == other.time and self.__inc == other.inc

    def __hash__(self) -> int:
        return hash(self.time) ^ hash(self.inc)

    def __ne__(self, other: Any) -> bool:
        return not self == other

    def __lt__(self, other: Any) -> bool:
        if not isinstance(other, Timestamp):
            return NotImplemented
        return (self.time, self.inc) < (other.time, other.inc)

    def __le__(self, other: Any) -> bool:
        if not isinstance(other, Timestamp):
            return NotImplemented
        return (self.time, self.inc) <= (other.time, other.inc)

    def __gt__(self, other: Any) -> bool:
        if not isinstance(other, Timestamp):
            return NotImplemented
        return (self.time, self.inc) > (other.time, other.inc)

    def __ge__(self, other: Any) -> bool:
        if not isinstance(other, Timestamp):
            return NotImplemented
        return (self.time, self.inc) >= (other.time, other.inc)

    def __repr__(self):
        return "Timestamp(%s, %s)" % (self.__time, self.__inc)

    def as_datetime(self) -> datetime.datetime:
        """Return the time portion as a timezone-aware UTC
        :class:`~datetime.datetime`."""
        return datetime.datetime.fromtimestamp(self.__time, utc)
|
52
src/xtquant/xtbson/bson37/tz_util.py
Normal file
52
src/xtquant/xtbson/bson37/tz_util.py
Normal file
@ -0,0 +1,52 @@
|
|||||||
|
# Copyright 2010-2015 MongoDB, Inc.
|
||||||
|
#
|
||||||
|
# Licensed under the Apache License, Version 2.0 (the "License");
|
||||||
|
# you may not use this file except in compliance with the License.
|
||||||
|
# You may obtain a copy of the License at
|
||||||
|
#
|
||||||
|
# http://www.apache.org/licenses/LICENSE-2.0
|
||||||
|
#
|
||||||
|
# Unless required by applicable law or agreed to in writing, software
|
||||||
|
# distributed under the License is distributed on an "AS IS" BASIS,
|
||||||
|
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
|
||||||
|
# See the License for the specific language governing permissions and
|
||||||
|
# limitations under the License.
|
||||||
|
|
||||||
|
"""Timezone related utilities for BSON."""
|
||||||
|
|
||||||
|
from datetime import datetime, timedelta, tzinfo
|
||||||
|
from typing import Optional, Tuple, Union
|
||||||
|
|
||||||
|
ZERO: timedelta = timedelta(0)
|
||||||
|
|
||||||
|
|
||||||
|
class FixedOffset(tzinfo):
|
||||||
|
"""Fixed offset timezone, in minutes east from UTC.
|
||||||
|
|
||||||
|
Implementation based from the Python `standard library documentation
|
||||||
|
<http://docs.python.org/library/datetime.html#tzinfo-objects>`_.
|
||||||
|
Defining __getinitargs__ enables pickling / copying.
|
||||||
|
"""
|
||||||
|
|
||||||
|
def __init__(self, offset: Union[float, timedelta], name: str) -> None:
|
||||||
|
if isinstance(offset, timedelta):
|
||||||
|
self.__offset = offset
|
||||||
|
else:
|
||||||
|
self.__offset = timedelta(minutes=offset)
|
||||||
|
self.__name = name
|
||||||
|
|
||||||
|
def __getinitargs__(self) -> Tuple[timedelta, str]:
|
||||||
|
return self.__offset, self.__name
|
||||||
|
|
||||||
|
def utcoffset(self, dt: Optional[datetime]) -> timedelta:
|
||||||
|
return self.__offset
|
||||||
|
|
||||||
|
def tzname(self, dt: Optional[datetime]) -> str:
|
||||||
|
return self.__name
|
||||||
|
|
||||||
|
def dst(self, dt: Optional[datetime]) -> timedelta:
|
||||||
|
return ZERO
|
||||||
|
|
||||||
|
|
||||||
|
utc: FixedOffset = FixedOffset(0, "UTC")
|
||||||
|
"""Fixed offset timezone representing UTC."""
|
196
src/xtquant/xtconn.py
Normal file
196
src/xtquant/xtconn.py
Normal file
@ -0,0 +1,196 @@
|
|||||||
|
#coding:utf-8
|
||||||
|
|
||||||
|
from .xtdatacenter import try_create_client
|
||||||
|
|
||||||
|
### config
|
||||||
|
localhost = '127.0.0.1'
|
||||||
|
|
||||||
|
### function
|
||||||
|
status_callback = None
|
||||||
|
|
||||||
|
def try_create_connection(addr):
|
||||||
|
'''
|
||||||
|
addr: 'localhost:58610'
|
||||||
|
'''
|
||||||
|
ip, port = addr.split(':')
|
||||||
|
if not ip:
|
||||||
|
ip = localhost
|
||||||
|
if not port:
|
||||||
|
raise Exception('invalid port')
|
||||||
|
|
||||||
|
cl = try_create_client()
|
||||||
|
cl.set_config_addr(addr)
|
||||||
|
|
||||||
|
global status_callback
|
||||||
|
if status_callback:
|
||||||
|
cl.registerCommonControlCallback("watchxtquantstatus", status_callback)
|
||||||
|
|
||||||
|
ec, msg = cl.connect()
|
||||||
|
if ec < 0:
|
||||||
|
raise Exception((ec, msg))
|
||||||
|
return cl
|
||||||
|
|
||||||
|
|
||||||
|
def create_connection(addr):
|
||||||
|
try:
|
||||||
|
return try_create_connection(addr)
|
||||||
|
except Exception as e:
|
||||||
|
return None
|
||||||
|
|
||||||
|
|
||||||
|
def scan_all_server_instance():
|
||||||
|
'''
|
||||||
|
扫描当前环境下所有XTQuant服务实例
|
||||||
|
|
||||||
|
return: list
|
||||||
|
[ config1, config2,... ]
|
||||||
|
|
||||||
|
config: dict
|
||||||
|
{
|
||||||
|
'ip': '127.0.0.1', 'port': 58610,
|
||||||
|
'is_running': False,
|
||||||
|
'client_type': 'research',
|
||||||
|
'data_dir': 'xtquant_server/datadir',
|
||||||
|
}
|
||||||
|
'''
|
||||||
|
|
||||||
|
import os, sys
|
||||||
|
import json
|
||||||
|
|
||||||
|
result = []
|
||||||
|
|
||||||
|
try:
|
||||||
|
config_dir = os.path.abspath(os.path.join(os.environ['USERPROFILE'], '.xtquant'))
|
||||||
|
|
||||||
|
for f in os.scandir(config_dir):
|
||||||
|
full_path = f.path
|
||||||
|
|
||||||
|
f_xtdata_cfg = os.path.join(full_path, 'xtdata.cfg')
|
||||||
|
if not os.path.exists(f_xtdata_cfg):
|
||||||
|
continue
|
||||||
|
|
||||||
|
try:
|
||||||
|
config = json.load(open(f_xtdata_cfg, 'r', encoding = 'utf-8'))
|
||||||
|
|
||||||
|
ip = config.get('ip', None)
|
||||||
|
if not ip:
|
||||||
|
config['ip'] = localhost
|
||||||
|
|
||||||
|
port = config.get('port', None)
|
||||||
|
if not port:
|
||||||
|
continue
|
||||||
|
|
||||||
|
except Exception as e:
|
||||||
|
continue
|
||||||
|
|
||||||
|
is_running = False
|
||||||
|
|
||||||
|
f_running_status = os.path.join(full_path, 'running_status')
|
||||||
|
if os.path.exists(f_running_status):
|
||||||
|
try:
|
||||||
|
os.remove(f_running_status)
|
||||||
|
except PermissionError:
|
||||||
|
is_running = True
|
||||||
|
except Exception as e:
|
||||||
|
pass
|
||||||
|
|
||||||
|
config['is_running'] = is_running
|
||||||
|
|
||||||
|
result.append(config)
|
||||||
|
|
||||||
|
except Exception as e:
|
||||||
|
pass
|
||||||
|
|
||||||
|
return result
|
||||||
|
|
||||||
|
|
||||||
|
def get_internal_server_addr():
|
||||||
|
'''
|
||||||
|
获取内部XTQuant服务地址
|
||||||
|
|
||||||
|
return: str
|
||||||
|
'127.0.0.1:58610'
|
||||||
|
'''
|
||||||
|
try:
|
||||||
|
from .xtdatacenter import get_local_server_port
|
||||||
|
local_server_port = get_local_server_port()
|
||||||
|
if local_server_port:
|
||||||
|
return f'127.0.0.1:{local_server_port}'
|
||||||
|
except:
|
||||||
|
pass
|
||||||
|
return None
|
||||||
|
|
||||||
|
|
||||||
|
def scan_available_server_addr():
|
||||||
|
'''
|
||||||
|
扫描当前环境下可用的XTQuant服务实例
|
||||||
|
|
||||||
|
return: list
|
||||||
|
[ '0.0.0.0:58610', '0.0.0.0:58611', ... ]
|
||||||
|
'''
|
||||||
|
|
||||||
|
import os, sys
|
||||||
|
import json
|
||||||
|
|
||||||
|
result = []
|
||||||
|
|
||||||
|
internal_server_addr = get_internal_server_addr()
|
||||||
|
if internal_server_addr:
|
||||||
|
result.append(internal_server_addr)
|
||||||
|
|
||||||
|
try:
|
||||||
|
result_scan = []
|
||||||
|
|
||||||
|
inst_list = scan_all_server_instance()
|
||||||
|
|
||||||
|
for config in inst_list:
|
||||||
|
try:
|
||||||
|
if not config.get('is_running', False):
|
||||||
|
continue
|
||||||
|
|
||||||
|
ip = config.get('ip', None)
|
||||||
|
port = config.get('port', None)
|
||||||
|
if not ip or not port:
|
||||||
|
continue
|
||||||
|
|
||||||
|
addr = f'{ip}:{port}'
|
||||||
|
|
||||||
|
root_dir = os.path.normpath(config.get('root_dir', ''))
|
||||||
|
if root_dir and os.path.normpath(sys.executable).find(root_dir) == 0:
|
||||||
|
result_scan.insert(0, addr)
|
||||||
|
else:
|
||||||
|
result_scan.append(addr)
|
||||||
|
|
||||||
|
except Exception as e:
|
||||||
|
continue
|
||||||
|
|
||||||
|
except Exception as e:
|
||||||
|
pass
|
||||||
|
|
||||||
|
result += result_scan
|
||||||
|
|
||||||
|
result = list(dict.fromkeys(result))
|
||||||
|
|
||||||
|
return result
|
||||||
|
|
||||||
|
|
||||||
|
def connect_any(addr_list, start_port, end_port):
|
||||||
|
'''
|
||||||
|
addr_list: [ addr, ... ]
|
||||||
|
addr: 'localhost:58610'
|
||||||
|
'''
|
||||||
|
for addr in addr_list:
|
||||||
|
try:
|
||||||
|
port = int(addr.split(':')[1])
|
||||||
|
if start_port > port or port > end_port:
|
||||||
|
continue
|
||||||
|
|
||||||
|
cl = create_connection(addr)
|
||||||
|
if cl:
|
||||||
|
return cl
|
||||||
|
except Exception as e:
|
||||||
|
continue
|
||||||
|
|
||||||
|
return None
|
||||||
|
|
||||||
|
|
1173
src/xtquant/xtconstant.py
Normal file
1173
src/xtquant/xtconstant.py
Normal file
File diff suppressed because it is too large
Load Diff
17
src/xtquant/xtdata.ini
Normal file
17
src/xtquant/xtdata.ini
Normal file
@ -0,0 +1,17 @@
|
|||||||
|
[app]
|
||||||
|
appName=IPythonApiClient
|
||||||
|
netThreadNum=8
|
||||||
|
dispatcherThreadNum=8
|
||||||
|
logPath=xtdata.log4cxx
|
||||||
|
logWatch=0
|
||||||
|
reportSeconds=20
|
||||||
|
appendDate=1
|
||||||
|
|
||||||
|
[client_xtdata]
|
||||||
|
tagTemplate=xtdata
|
||||||
|
address=127.0.0.1:58610
|
||||||
|
timeoutSecond=0
|
||||||
|
keepAliveCheckSecond=0
|
||||||
|
reconnectSecond=3
|
||||||
|
requestTimeoutSecond=150
|
||||||
|
watchlog=1
|
Some files were not shown because too many files have changed in this diff Show More
Loading…
x
Reference in New Issue
Block a user