mns-scheduler 1.1.6.5__tar.gz → 1.1.6.8__tar.gz
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Potentially problematic release.
This version of mns-scheduler might be problematic. Click here for more details.
- {mns_scheduler-1.1.6.5 → mns_scheduler-1.1.6.8}/PKG-INFO +1 -1
- {mns_scheduler-1.1.6.5 → mns_scheduler-1.1.6.8}/mns_scheduler/concept/clean/kpl_concept_clean_api.py +3 -0
- {mns_scheduler-1.1.6.5 → mns_scheduler-1.1.6.8}/mns_scheduler/concept/clean/ths_concept_clean_api.py +6 -1
- {mns_scheduler-1.1.6.5 → mns_scheduler-1.1.6.8}/mns_scheduler/concept/ths/common/ths_concept_sync_common_api.py +10 -2
- mns_scheduler-1.1.6.8/mns_scheduler/db/script/sync/local_mongo_util.py +231 -0
- mns_scheduler-1.1.6.8/mns_scheduler/db/script/sync/remote_data_sync_to_local.py +52 -0
- mns_scheduler-1.1.6.8/mns_scheduler/db/script/sync/remote_mongo_util.py +306 -0
- {mns_scheduler-1.1.6.5 → mns_scheduler-1.1.6.8}/mns_scheduler/kpl/selection/symbol/sync_best_choose_symbol.py +3 -0
- {mns_scheduler-1.1.6.5 → mns_scheduler-1.1.6.8}/mns_scheduler/kpl/selection/total/sync_kpl_best_total_sync_api.py +2 -2
- mns_scheduler-1.1.6.8/mns_scheduler/zt/script/__init__.py +7 -0
- {mns_scheduler-1.1.6.5 → mns_scheduler-1.1.6.8}/mns_scheduler/zt/zt_pool/em_zt_pool_sync_api.py +1 -1
- {mns_scheduler-1.1.6.5 → mns_scheduler-1.1.6.8}/mns_scheduler.egg-info/PKG-INFO +1 -1
- {mns_scheduler-1.1.6.5 → mns_scheduler-1.1.6.8}/mns_scheduler.egg-info/SOURCES.txt +4 -0
- {mns_scheduler-1.1.6.5 → mns_scheduler-1.1.6.8}/setup.py +1 -1
- {mns_scheduler-1.1.6.5 → mns_scheduler-1.1.6.8}/README.md +0 -0
- {mns_scheduler-1.1.6.5 → mns_scheduler-1.1.6.8}/mns_scheduler/2014-2015-test/2014_2015_chg_statistics.py +0 -0
- {mns_scheduler-1.1.6.5 → mns_scheduler-1.1.6.8}/mns_scheduler/2014-2015-test/__init__.py +0 -0
- {mns_scheduler-1.1.6.5 → mns_scheduler-1.1.6.8}/mns_scheduler/__init__.py +0 -0
- {mns_scheduler-1.1.6.5 → mns_scheduler-1.1.6.8}/mns_scheduler/big_deal/__init__.py +0 -0
- {mns_scheduler-1.1.6.5 → mns_scheduler-1.1.6.8}/mns_scheduler/big_deal/ths_big_deal_sync.py +0 -0
- {mns_scheduler-1.1.6.5 → mns_scheduler-1.1.6.8}/mns_scheduler/company_info/__init__.py +0 -0
- {mns_scheduler-1.1.6.5 → mns_scheduler-1.1.6.8}/mns_scheduler/company_info/base/__init__.py +0 -0
- {mns_scheduler-1.1.6.5 → mns_scheduler-1.1.6.8}/mns_scheduler/company_info/base/sync_company_base_info_api.py +0 -0
- {mns_scheduler-1.1.6.5 → mns_scheduler-1.1.6.8}/mns_scheduler/company_info/base/sync_company_hold_info_api.py +0 -0
- {mns_scheduler-1.1.6.5 → mns_scheduler-1.1.6.8}/mns_scheduler/company_info/clean/__init__.py +0 -0
- {mns_scheduler-1.1.6.5 → mns_scheduler-1.1.6.8}/mns_scheduler/company_info/clean/company_info_clean_api.py +0 -0
- {mns_scheduler-1.1.6.5 → mns_scheduler-1.1.6.8}/mns_scheduler/company_info/constant/__init__.py +0 -0
- {mns_scheduler-1.1.6.5 → mns_scheduler-1.1.6.8}/mns_scheduler/company_info/constant/company_constant_data.py +0 -0
- {mns_scheduler-1.1.6.5 → mns_scheduler-1.1.6.8}/mns_scheduler/company_info/de_list_stock/__init__.py +0 -0
- {mns_scheduler-1.1.6.5 → mns_scheduler-1.1.6.8}/mns_scheduler/company_info/de_list_stock/de_list_stock_service.py +0 -0
- {mns_scheduler-1.1.6.5 → mns_scheduler-1.1.6.8}/mns_scheduler/company_info/remark/__init__.py +0 -0
- {mns_scheduler-1.1.6.5 → mns_scheduler-1.1.6.8}/mns_scheduler/company_info/remark/company_remark_info_sync.py +0 -0
- {mns_scheduler-1.1.6.5 → mns_scheduler-1.1.6.8}/mns_scheduler/concept/__init__.py +0 -0
- {mns_scheduler-1.1.6.5 → mns_scheduler-1.1.6.8}/mns_scheduler/concept/clean/__init__.py +0 -0
- {mns_scheduler-1.1.6.5 → mns_scheduler-1.1.6.8}/mns_scheduler/concept/ths/__init__.py +0 -0
- {mns_scheduler-1.1.6.5 → mns_scheduler-1.1.6.8}/mns_scheduler/concept/ths/common/__init__.py +0 -0
- {mns_scheduler-1.1.6.5 → mns_scheduler-1.1.6.8}/mns_scheduler/concept/ths/common/ths_concept_update_common_api.py +0 -0
- {mns_scheduler-1.1.6.5 → mns_scheduler-1.1.6.8}/mns_scheduler/concept/ths/detaill/__init__.py +0 -0
- {mns_scheduler-1.1.6.5 → mns_scheduler-1.1.6.8}/mns_scheduler/concept/ths/detaill/ths_concept_detail_api.py +0 -0
- {mns_scheduler-1.1.6.5 → mns_scheduler-1.1.6.8}/mns_scheduler/concept/ths/sync_new_index/__init__.py +0 -0
- {mns_scheduler-1.1.6.5 → mns_scheduler-1.1.6.8}/mns_scheduler/concept/ths/sync_new_index/sync_ths_concept_new_index_api.py +0 -0
- {mns_scheduler-1.1.6.5 → mns_scheduler-1.1.6.8}/mns_scheduler/concept/ths/update_concept_info/__init__.py +0 -0
- {mns_scheduler-1.1.6.5 → mns_scheduler-1.1.6.8}/mns_scheduler/concept/ths/update_concept_info/sync_one_concept_all_symbols_api.py +0 -0
- {mns_scheduler-1.1.6.5 → mns_scheduler-1.1.6.8}/mns_scheduler/concept/ths/update_concept_info/sync_one_symbol_all_concepts_api.py +0 -0
- {mns_scheduler-1.1.6.5 → mns_scheduler-1.1.6.8}/mns_scheduler/db/__init__.py +0 -0
- {mns_scheduler-1.1.6.5 → mns_scheduler-1.1.6.8}/mns_scheduler/db/col_move_service.py +0 -0
- {mns_scheduler-1.1.6.5 → mns_scheduler-1.1.6.8}/mns_scheduler/db/db_status.py +0 -0
- {mns_scheduler-1.1.6.5 → mns_scheduler-1.1.6.8}/mns_scheduler/db/real_time_task_check.py +0 -0
- {mns_scheduler-1.1.6.5 → mns_scheduler-1.1.6.8}/mns_scheduler/db/script/__init__.py +0 -0
- {mns_scheduler-1.1.6.5 → mns_scheduler-1.1.6.8}/mns_scheduler/db/script/col_move_script.py +0 -0
- {mns_scheduler-1.1.6.5/mns_scheduler/debt → mns_scheduler-1.1.6.8/mns_scheduler/db/script/sync}/__init__.py +0 -0
- {mns_scheduler-1.1.6.5/mns_scheduler/finance → mns_scheduler-1.1.6.8/mns_scheduler/debt}/__init__.py +0 -0
- {mns_scheduler-1.1.6.5 → mns_scheduler-1.1.6.8}/mns_scheduler/debt/kzz_bond_info_sync.py +0 -0
- {mns_scheduler-1.1.6.5 → mns_scheduler-1.1.6.8}/mns_scheduler/dt/__init__.py +0 -0
- {mns_scheduler-1.1.6.5 → mns_scheduler-1.1.6.8}/mns_scheduler/dt/stock_dt_pool_sync.py +0 -0
- {mns_scheduler-1.1.6.5/mns_scheduler/hk → mns_scheduler-1.1.6.8/mns_scheduler/finance}/__init__.py +0 -0
- {mns_scheduler-1.1.6.5 → mns_scheduler-1.1.6.8}/mns_scheduler/finance/em_financial_asset_liability_sync_service_api.py +0 -0
- {mns_scheduler-1.1.6.5 → mns_scheduler-1.1.6.8}/mns_scheduler/finance/em_financial_profit_sync_service_api.py +0 -0
- {mns_scheduler-1.1.6.5 → mns_scheduler-1.1.6.8}/mns_scheduler/finance/finance_common_api.py +0 -0
- {mns_scheduler-1.1.6.5 → mns_scheduler-1.1.6.8}/mns_scheduler/finance/sync_financial_report_service_api.py +0 -0
- {mns_scheduler-1.1.6.5/mns_scheduler/irm → mns_scheduler-1.1.6.8/mns_scheduler/hk}/__init__.py +0 -0
- {mns_scheduler-1.1.6.5 → mns_scheduler-1.1.6.8}/mns_scheduler/hk/hk_company_info_sync_service_api.py +0 -0
- {mns_scheduler-1.1.6.5 → mns_scheduler-1.1.6.8}/mns_scheduler/hk/hk_industry_info_sync_service_api.py +0 -0
- {mns_scheduler-1.1.6.5/mns_scheduler/irm/api → mns_scheduler-1.1.6.8/mns_scheduler/irm}/__init__.py +0 -0
- {mns_scheduler-1.1.6.5/mns_scheduler/k_line/clean/daily → mns_scheduler-1.1.6.8/mns_scheduler/irm/api}/__init__.py +0 -0
- {mns_scheduler-1.1.6.5 → mns_scheduler-1.1.6.8}/mns_scheduler/irm/api/sh_stock_sns_sse_info_api.py +0 -0
- {mns_scheduler-1.1.6.5 → mns_scheduler-1.1.6.8}/mns_scheduler/irm/api/sz_stock_sns_sse_info_api.py +0 -0
- {mns_scheduler-1.1.6.5 → mns_scheduler-1.1.6.8}/mns_scheduler/irm/stock_irm_cninfo_service.py +0 -0
- {mns_scheduler-1.1.6.5 → mns_scheduler-1.1.6.8}/mns_scheduler/k_line/__init__.py +0 -0
- {mns_scheduler-1.1.6.5 → mns_scheduler-1.1.6.8}/mns_scheduler/k_line/clean/__init__.py +0 -0
- {mns_scheduler-1.1.6.5/mns_scheduler/k_line/clean/week_month → mns_scheduler-1.1.6.8/mns_scheduler/k_line/clean/daily}/__init__.py +0 -0
- {mns_scheduler-1.1.6.5 → mns_scheduler-1.1.6.8}/mns_scheduler/k_line/clean/daily/daily_k_line_clean_common_service.py +0 -0
- {mns_scheduler-1.1.6.5 → mns_scheduler-1.1.6.8}/mns_scheduler/k_line/clean/daily/daily_k_line_service.py +0 -0
- {mns_scheduler-1.1.6.5 → mns_scheduler-1.1.6.8}/mns_scheduler/k_line/clean/k_line_info_clean_impl.py +0 -0
- {mns_scheduler-1.1.6.5 → mns_scheduler-1.1.6.8}/mns_scheduler/k_line/clean/k_line_info_clean_task.py +0 -0
- {mns_scheduler-1.1.6.5/mns_scheduler/k_line/hot_stocks → mns_scheduler-1.1.6.8/mns_scheduler/k_line/clean/week_month}/__init__.py +0 -0
- {mns_scheduler-1.1.6.5 → mns_scheduler-1.1.6.8}/mns_scheduler/k_line/clean/week_month/normal_week_month_k_line_service.py +0 -0
- {mns_scheduler-1.1.6.5 → mns_scheduler-1.1.6.8}/mns_scheduler/k_line/clean/week_month/sub_new_week_month_k_line_service.py +0 -0
- {mns_scheduler-1.1.6.5/mns_scheduler/k_line/test → mns_scheduler-1.1.6.8/mns_scheduler/k_line/hot_stocks}/__init__.py +0 -0
- {mns_scheduler-1.1.6.5 → mns_scheduler-1.1.6.8}/mns_scheduler/k_line/hot_stocks/recent_hot_stocks_clean_service.py +0 -0
- {mns_scheduler-1.1.6.5 → mns_scheduler-1.1.6.8}/mns_scheduler/k_line/sync/__init__.py +0 -0
- {mns_scheduler-1.1.6.5 → mns_scheduler-1.1.6.8}/mns_scheduler/k_line/sync/bfq_k_line_sync.py +0 -0
- {mns_scheduler-1.1.6.5 → mns_scheduler-1.1.6.8}/mns_scheduler/k_line/sync/daily_week_month_line_sync.py +0 -0
- {mns_scheduler-1.1.6.5/mns_scheduler/lhb → mns_scheduler-1.1.6.8/mns_scheduler/k_line/test}/__init__.py +0 -0
- {mns_scheduler-1.1.6.5 → mns_scheduler-1.1.6.8}/mns_scheduler/k_line/test/k_line_info_clean_his_data.py +0 -0
- {mns_scheduler-1.1.6.5 → mns_scheduler-1.1.6.8}/mns_scheduler/kpl/__init__.py +0 -0
- {mns_scheduler-1.1.6.5 → mns_scheduler-1.1.6.8}/mns_scheduler/kpl/selection/__init__.py +0 -0
- {mns_scheduler-1.1.6.5 → mns_scheduler-1.1.6.8}/mns_scheduler/kpl/selection/index/__init__.py +0 -0
- {mns_scheduler-1.1.6.5 → mns_scheduler-1.1.6.8}/mns_scheduler/kpl/selection/index/sync_best_choose_his_index.py +0 -0
- {mns_scheduler-1.1.6.5 → mns_scheduler-1.1.6.8}/mns_scheduler/kpl/selection/index/sync_best_choose_index.py +0 -0
- {mns_scheduler-1.1.6.5 → mns_scheduler-1.1.6.8}/mns_scheduler/kpl/selection/symbol/__init__.py +0 -0
- {mns_scheduler-1.1.6.5 → mns_scheduler-1.1.6.8}/mns_scheduler/kpl/selection/total/__init__.py +0 -0
- {mns_scheduler-1.1.6.5/mns_scheduler/open → mns_scheduler-1.1.6.8/mns_scheduler/lhb}/__init__.py +0 -0
- {mns_scheduler-1.1.6.5 → mns_scheduler-1.1.6.8}/mns_scheduler/lhb/stock_lhb_sync_service.py +0 -0
- {mns_scheduler-1.1.6.5/mns_scheduler/risk → mns_scheduler-1.1.6.8/mns_scheduler/open}/__init__.py +0 -0
- {mns_scheduler-1.1.6.5 → mns_scheduler-1.1.6.8}/mns_scheduler/open/sync_one_day_open_data_to_db_service.py +0 -0
- {mns_scheduler-1.1.6.5/mns_scheduler/risk/test → mns_scheduler-1.1.6.8/mns_scheduler/risk}/__init__.py +0 -0
- {mns_scheduler-1.1.6.5 → mns_scheduler-1.1.6.8}/mns_scheduler/risk/compliance/__init__.py +0 -0
- {mns_scheduler-1.1.6.5 → mns_scheduler-1.1.6.8}/mns_scheduler/risk/compliance/undisclosed_annual_report_api.py +0 -0
- {mns_scheduler-1.1.6.5 → mns_scheduler-1.1.6.8}/mns_scheduler/risk/financial/__init__.py +0 -0
- {mns_scheduler-1.1.6.5 → mns_scheduler-1.1.6.8}/mns_scheduler/risk/financial/annual_report_audit_check_api.py +0 -0
- {mns_scheduler-1.1.6.5 → mns_scheduler-1.1.6.8}/mns_scheduler/risk/financial/net_assets_check_api.py +0 -0
- {mns_scheduler-1.1.6.5 → mns_scheduler-1.1.6.8}/mns_scheduler/risk/financial/profit_income_check_api.py +0 -0
- {mns_scheduler-1.1.6.5 → mns_scheduler-1.1.6.8}/mns_scheduler/risk/financial/stock_equity_mortgage_check_api.py +0 -0
- {mns_scheduler-1.1.6.5 → mns_scheduler-1.1.6.8}/mns_scheduler/risk/financial_report_risk_check_api.py +0 -0
- {mns_scheduler-1.1.6.5 → mns_scheduler-1.1.6.8}/mns_scheduler/risk/major_violations/__init__.py +0 -0
- {mns_scheduler-1.1.6.5 → mns_scheduler-1.1.6.8}/mns_scheduler/risk/major_violations/register_and_investigate_stock_sync_api.py +0 -0
- {mns_scheduler-1.1.6.5 → mns_scheduler-1.1.6.8}/mns_scheduler/risk/self/__init__.py +0 -0
- {mns_scheduler-1.1.6.5 → mns_scheduler-1.1.6.8}/mns_scheduler/risk/self/wei_pan_stock_api.py +0 -0
- {mns_scheduler-1.1.6.5/mns_scheduler/self_choose → mns_scheduler-1.1.6.8/mns_scheduler/risk/test}/__init__.py +0 -0
- {mns_scheduler-1.1.6.5 → mns_scheduler-1.1.6.8}/mns_scheduler/risk/test/fix_blask_list.py +0 -0
- {mns_scheduler-1.1.6.5 → mns_scheduler-1.1.6.8}/mns_scheduler/risk/transactions/__init__.py +0 -0
- {mns_scheduler-1.1.6.5 → mns_scheduler-1.1.6.8}/mns_scheduler/risk/transactions/transactions_check_api.py +0 -0
- {mns_scheduler-1.1.6.5/mns_scheduler/zt/script → mns_scheduler-1.1.6.8/mns_scheduler/self_choose}/__init__.py +0 -0
- {mns_scheduler-1.1.6.5 → mns_scheduler-1.1.6.8}/mns_scheduler/self_choose/ths_self_choose_service.py +0 -0
- {mns_scheduler-1.1.6.5 → mns_scheduler-1.1.6.8}/mns_scheduler/trade/__init__.py +0 -0
- {mns_scheduler-1.1.6.5 → mns_scheduler-1.1.6.8}/mns_scheduler/trade/auto_ipo_buy_api.py +0 -0
- {mns_scheduler-1.1.6.5 → mns_scheduler-1.1.6.8}/mns_scheduler/trade/auto_login/__init__.py +0 -0
- {mns_scheduler-1.1.6.5 → mns_scheduler-1.1.6.8}/mns_scheduler/trade/auto_login/trader_auto_service.py +0 -0
- {mns_scheduler-1.1.6.5 → mns_scheduler-1.1.6.8}/mns_scheduler/trade/auto_sell_service_api.py +0 -0
- {mns_scheduler-1.1.6.5 → mns_scheduler-1.1.6.8}/mns_scheduler/trade/sync_position_api.py +0 -0
- {mns_scheduler-1.1.6.5 → mns_scheduler-1.1.6.8}/mns_scheduler/trade/task/__init__.py +0 -0
- {mns_scheduler-1.1.6.5 → mns_scheduler-1.1.6.8}/mns_scheduler/trade/task/trader_task_service.py +0 -0
- {mns_scheduler-1.1.6.5 → mns_scheduler-1.1.6.8}/mns_scheduler/zb/__init__.py +0 -0
- {mns_scheduler-1.1.6.5 → mns_scheduler-1.1.6.8}/mns_scheduler/zb/stock_zb_pool_sync.py +0 -0
- {mns_scheduler-1.1.6.5 → mns_scheduler-1.1.6.8}/mns_scheduler/zt/__init__.py +0 -0
- {mns_scheduler-1.1.6.5 → mns_scheduler-1.1.6.8}/mns_scheduler/zt/connected_boards/__init__.py +0 -0
- {mns_scheduler-1.1.6.5 → mns_scheduler-1.1.6.8}/mns_scheduler/zt/connected_boards/zt_five_boards_sync_api.py +0 -0
- {mns_scheduler-1.1.6.5 → mns_scheduler-1.1.6.8}/mns_scheduler/zt/export/__init__.py +0 -0
- {mns_scheduler-1.1.6.5 → mns_scheduler-1.1.6.8}/mns_scheduler/zt/export/export_kcx_high_chg_open_data_to_excel.py +0 -0
- {mns_scheduler-1.1.6.5 → mns_scheduler-1.1.6.8}/mns_scheduler/zt/high_chg/__init__.py +0 -0
- {mns_scheduler-1.1.6.5 → mns_scheduler-1.1.6.8}/mns_scheduler/zt/high_chg/sync_high_chg_pool_service.py +0 -0
- {mns_scheduler-1.1.6.5 → mns_scheduler-1.1.6.8}/mns_scheduler/zt/high_chg/sync_high_chg_real_time_quotes_service.py +0 -0
- {mns_scheduler-1.1.6.5 → mns_scheduler-1.1.6.8}/mns_scheduler/zt/open_data/__init__.py +0 -0
- {mns_scheduler-1.1.6.5 → mns_scheduler-1.1.6.8}/mns_scheduler/zt/open_data/kcx_high_chg_open_data_sync.py +0 -0
- {mns_scheduler-1.1.6.5 → mns_scheduler-1.1.6.8}/mns_scheduler/zt/script/kcx_high_chg_open_his_data_handle.py +0 -0
- {mns_scheduler-1.1.6.5 → mns_scheduler-1.1.6.8}/mns_scheduler/zt/script/sync_high_chg_pool_his_data.py +0 -0
- {mns_scheduler-1.1.6.5 → mns_scheduler-1.1.6.8}/mns_scheduler/zt/script/sync_now_higt_chg_zt.py +0 -0
- {mns_scheduler-1.1.6.5 → mns_scheduler-1.1.6.8}/mns_scheduler/zt/zt_pool/__init__.py +0 -0
- {mns_scheduler-1.1.6.5 → mns_scheduler-1.1.6.8}/mns_scheduler/zt/zt_pool/ths_zt_pool_sync_api.py +0 -0
- {mns_scheduler-1.1.6.5 → mns_scheduler-1.1.6.8}/mns_scheduler/zt/zt_pool/update_null_zt_reason_api.py +0 -0
- {mns_scheduler-1.1.6.5 → mns_scheduler-1.1.6.8}/mns_scheduler/zz_task/__init__.py +0 -0
- {mns_scheduler-1.1.6.5 → mns_scheduler-1.1.6.8}/mns_scheduler/zz_task/data_sync_task.py +0 -0
- {mns_scheduler-1.1.6.5 → mns_scheduler-1.1.6.8}/mns_scheduler.egg-info/dependency_links.txt +0 -0
- {mns_scheduler-1.1.6.5 → mns_scheduler-1.1.6.8}/mns_scheduler.egg-info/top_level.txt +0 -0
- {mns_scheduler-1.1.6.5 → mns_scheduler-1.1.6.8}/setup.cfg +0 -0
{mns_scheduler-1.1.6.5 → mns_scheduler-1.1.6.8}/mns_scheduler/concept/clean/kpl_concept_clean_api.py
RENAMED
|
@@ -70,6 +70,9 @@ def update_kpl_concept_info():
|
|
|
70
70
|
kpl_best_choose_index_detail_df['second_relevance_industry'] = second_relevance_industry
|
|
71
71
|
|
|
72
72
|
mongodb_util.save_mongo(kpl_best_choose_index_one_df, db_name_constant.KPL_BEST_CHOOSE_INDEX)
|
|
73
|
+
kpl_best_choose_index_detail_df['grade'] = 1
|
|
74
|
+
kpl_best_choose_index_detail_df['remark'] = ''
|
|
75
|
+
kpl_best_choose_index_detail_df['remark_flag'] = ''
|
|
73
76
|
mongodb_util.save_mongo(kpl_best_choose_index_detail_df, db_name_constant.KPL_BEST_CHOOSE_INDEX_DETAIL)
|
|
74
77
|
|
|
75
78
|
except Exception as e:
|
{mns_scheduler-1.1.6.5 → mns_scheduler-1.1.6.8}/mns_scheduler/concept/clean/ths_concept_clean_api.py
RENAMED
|
@@ -77,7 +77,12 @@ def update_one_concept_relevance_industry(concept_code):
|
|
|
77
77
|
ths_stock_concept_detail_df['second_relevance_industry'] = second_relevance_industry
|
|
78
78
|
|
|
79
79
|
mongodb_util.save_mongo(ths_concept_list_one_df, db_name_constant.THS_CONCEPT_LIST)
|
|
80
|
-
|
|
80
|
+
|
|
81
|
+
ths_stock_concept_detail_df['grade'] = 1
|
|
82
|
+
# 详细标识
|
|
83
|
+
ths_stock_concept_detail_df['remark'] = ''
|
|
84
|
+
# 简单标识
|
|
85
|
+
ths_stock_concept_detail_df['remark_flag'] = ''
|
|
81
86
|
mongodb_util.save_mongo(ths_stock_concept_detail_df, db_name_constant.THS_STOCK_CONCEPT_DETAIL)
|
|
82
87
|
|
|
83
88
|
|
|
@@ -135,7 +135,11 @@ def save_ths_concept_detail(new_concept_symbol_df,
|
|
|
135
135
|
query_detail = {"concept_code": int(concept_code)}
|
|
136
136
|
exist_concept_detail = mongodb_util.find_query_data(db_name_constant.THS_STOCK_CONCEPT_DETAIL, query_detail)
|
|
137
137
|
if exist_concept_detail is None or exist_concept_detail.shape[0] == 0:
|
|
138
|
-
new_concept_symbol_df['
|
|
138
|
+
new_concept_symbol_df['grade'] = 1
|
|
139
|
+
# 详细标识
|
|
140
|
+
new_concept_symbol_df['remark'] = ''
|
|
141
|
+
# 简单标识
|
|
142
|
+
new_concept_symbol_df['remark_flag'] = ''
|
|
139
143
|
mongodb_util.save_mongo(new_concept_symbol_df, db_name_constant.THS_STOCK_CONCEPT_DETAIL)
|
|
140
144
|
# 保存到当日新增概念列表
|
|
141
145
|
new_concept_symbol_df['concept_type'] = 'ths'
|
|
@@ -145,7 +149,11 @@ def save_ths_concept_detail(new_concept_symbol_df,
|
|
|
145
149
|
new_concept_symbol_df = new_concept_symbol_df.loc[~(
|
|
146
150
|
new_concept_symbol_df['symbol'].isin(exist_concept_detail_symbol_list))]
|
|
147
151
|
if new_concept_symbol_df.shape[0] > 0:
|
|
148
|
-
new_concept_symbol_df['
|
|
152
|
+
new_concept_symbol_df['grade'] = 1
|
|
153
|
+
# 详细标识
|
|
154
|
+
new_concept_symbol_df['remark'] = ''
|
|
155
|
+
# 简单标识
|
|
156
|
+
new_concept_symbol_df['remark_flag'] = ''
|
|
149
157
|
mongodb_util.save_mongo(new_concept_symbol_df, db_name_constant.THS_STOCK_CONCEPT_DETAIL)
|
|
150
158
|
# 保存到当日新增概念列表
|
|
151
159
|
new_concept_symbol_df['concept_type'] = 'ths'
|
|
import sys
import os

file_path = os.path.abspath(__file__)
end = file_path.index('mns') + 14
project_path = file_path[0:end]
sys.path.append(project_path)
import pandas as pd
import pymongo
from mns_common.utils.async_fun import async_fun
from loguru import logger
import warnings

warnings.filterwarnings("ignore")


class LocalMongodbUtil:
    """Thin helper around the local MongoDB 'patience' database.

    Wraps common find/save/update/aggregate operations and returns query
    results as pandas DataFrames (or plain lists where noted).
    """

    def __init__(self, port):
        # Port of the local mongod.  Historically get_db() ignored this and
        # hard-coded 27017; it is now honoured (callers already pass '27017',
        # so behavior for existing call sites is unchanged).
        self.port = port
        # One MongoClient per util instance, created lazily and reused.
        self._client = None

    def get_db(self):
        """Return the 'patience' database handle.

        Bug fix: the original built a brand-new MongoClient on every call,
        leaking connection pools, and ignored self.port.  The client is now
        cached on the instance and the configured port is used.
        """
        if self._client is None:
            self._client = pymongo.MongoClient(
                "mongodb://127.0.0.1:" + str(self.port) + "/patience")
        return self._client.patience

    def group(self, query, coll_name):
        """Run an aggregation pipeline and return the result as a DataFrame."""
        db = self.get_db()
        collection = db[coll_name]
        data = collection.aggregate(query)
        return pd.DataFrame(list(data))

    def remove_data(self, query, coll_name):
        """Delete all documents matching query; returns the DeleteResult."""
        db = self.get_db()
        collection = db[coll_name]
        return collection.delete_many(query)

    def exist_data_query(self, coll_name, query):
        """Return True if at least one document matches query."""
        db = self.get_db()
        collection = db[coll_name]
        # Collection.count() was removed in PyMongo 4; count_documents with
        # limit=1 is the supported equivalent and stops at the first match.
        return collection.count_documents(query, limit=1) > 0

    def find_one(self, coll_name, _id):
        """Return the raw document with the given _id (or None)."""
        db = self.get_db()
        collection = db[coll_name]
        return collection.find_one({'_id': _id})

    def find_one_query(self, coll_name, query):
        """Return the first matching document as a one-row DataFrame.

        NOTE(review): if nothing matches, find_one returns None and the
        DataFrame constructor raises — callers appear to rely on a match
        existing; confirm before hardening.
        """
        db = self.get_db()
        collection = db[coll_name]
        return pd.DataFrame(collection.find_one(query), index=[0])

    def find_all_data(self, coll_name):
        """Return every document in the collection as a DataFrame."""
        db = self.get_db()
        collection = db[coll_name]
        rows = collection.find({})
        df = pd.DataFrame(list(rows))
        return df

    def find_query_data(self, coll_name, query):
        """Return all documents matching query as a DataFrame."""
        db = self.get_db()
        collection = db[coll_name]
        rows = collection.find(query)
        df = pd.DataFrame(list(rows))
        return df

    def find_query_data_choose_field(self, coll_name, query, query_field):
        """Return matching documents, projected to query_field, as a DataFrame."""
        db = self.get_db()
        collection = db[coll_name]
        rows = collection.find(query, query_field)
        df = pd.DataFrame(list(rows))
        return df

    # Bug fix: this method was defined twice with identical bodies; the
    # duplicate (which silently shadowed the first) has been removed.
    def find_query_data_list(self, coll_name, query):
        """Return all documents matching query as a plain list of dicts."""
        db = self.get_db()
        collection = db[coll_name]
        rows = collection.find(query)
        return list(rows)

    def remove_all_data(self, database):
        """Delete every document in the collection; returns the DeleteResult."""
        db = self.get_db()
        collection = db[database]
        # Matches every real document (no _id equals the string "null").
        query = {"_id": {"$ne": "null"}}
        return collection.delete_many(query)

    def drop_collection(self, database):
        """Drop the whole collection, including its indexes."""
        db = self.get_db()
        collection = db[database]
        collection.drop()

    def ascend_query(self, query, coll_name, field, num):
        """Return up to num matches sorted ascending by field, as a DataFrame."""
        db = self.get_db()
        collection = db[coll_name]
        return pd.DataFrame(list(collection.find(query).sort(field, 1).skip(0).limit(num)))

    def descend_query(self, query, coll_name, field, num):
        """Return up to num matches sorted descending by field, as a DataFrame."""
        db = self.get_db()
        collection = db[coll_name]
        return pd.DataFrame(list(collection.find(query).sort(field, -1).skip(0).limit(num)))

    def count(self, query, coll_name):
        """Return the number of documents matching query."""
        db = self.get_db()
        collection = db[coll_name]
        return collection.count_documents(query)

    def query_max(self, query, coll_name, field, num):
        """Return the num matches with the largest field values, as a DataFrame."""
        db = self.get_db()
        collection = db[coll_name]
        return pd.DataFrame(list(collection.find(query).sort(field, -1).skip(0).limit(num)))

    def query_min(self, query, coll_name, field):
        """Return the single match with the smallest field value, as a DataFrame."""
        db = self.get_db()
        collection = db[coll_name]
        return pd.DataFrame(list(collection.find(query).sort(field, 1).skip(0).limit(1)))

    def insert_mongo(self, df, database):
        """Bulk-insert a DataFrame (deduplicated) into the collection."""
        db = self.get_db()
        if df is None or len(df) == 0:
            return
        collection = db[database]
        # convert format
        try:
            df = df.drop_duplicates()
            # df = df.T.drop_duplicates().T
            records = df.to_dict('records')
            collection.insert_many(records)
        except BaseException as e:
            logger.error("插入数据异常:{}", e)

    def insert_mongo_json(self, json, database):
        """Bulk-insert an iterable of dicts into the collection."""
        db = self.get_db()
        collection = db[database]
        # convert format
        try:
            collection.insert_many(json)
        except BaseException as e:
            logger.error("插入数据异常:{}", e)

    @staticmethod
    def _upsert_record(collection, record):
        # Replacement for Collection.save(), which was removed in PyMongo 4:
        # upsert by _id when the record carries one, plain insert otherwise
        # (the old save() semantics).
        if '_id' in record:
            collection.replace_one({'_id': record['_id']}, record, upsert=True)
        else:
            collection.insert_one(record)

    def save_mongo_json(self, json, database):
        """Upsert each dict in json individually, logging (not raising) failures."""
        db = self.get_db()
        collection = db[database]
        for record in json:
            try:
                self._upsert_record(collection, record)
            except BaseException as e:
                logger.error("保存数据出现异常:{}", e)

    def save_mongo(self, df, database):
        """Upsert each DataFrame row individually, logging (not raising) failures."""
        db = self.get_db()
        if df is None or len(df) == 0:
            return
        collection = db[database]
        # df = df.T.drop_duplicates().T
        # convert format
        records = df.to_dict('records')
        for record in records:
            try:
                self._upsert_record(collection, record)
            except BaseException as e:
                logger.error("保存数据出现异常:{},{}", record, e)

    def save_mongo_no_catch_exception(self, df, database):
        """Upsert each DataFrame row; lets the first failure propagate."""
        db = self.get_db()
        if df is None or len(df) == 0:
            return
        collection = db[database]
        # df = df.T.drop_duplicates().T
        # convert format
        records = df.to_dict('records')
        for record in records:
            self._upsert_record(collection, record)

    def update_one(self, df, database):
        """Replace the document whose _id matches the first row of df.

        Bug fix: the empty-df guard now runs before df['_id'] is indexed
        (the original raised IndexError on an empty frame), and the removed
        Collection.update() call is replaced with replace_one using the first
        row's dict, reproducing the old full-document replace semantics.
        """
        db = self.get_db()
        if len(df) == 0:
            return
        condition = {'_id': list(df['_id'])[0]}
        collection = db[database]
        collection.replace_one(condition, df.to_dict('records')[0])

    def update_many(self, query, new_values, database):
        """Apply the new_values update to every document matching query."""
        db = self.get_db()
        collection = db[database]
        x = collection.update_many(query, new_values)
        return x

    @async_fun
    def update_one_query(self, query, new_values, database):
        """Asynchronously apply new_values to the first document matching query."""
        db = self.get_db()
        collection = db[database]
        # Collection.update() was removed in PyMongo 4; it updated a single
        # document by default, which update_one preserves.
        x = collection.update_one(query, new_values)
        return x

    def distinct_field(self, database, field, query):
        """Return the distinct values of field among documents matching query."""
        db = self.get_db()
        collection = db[database]
        return collection.distinct(field, query)

    def create_index(self, database, index):
        """Create an index on the collection from an index specification."""
        db = self.get_db()
        collection = db[database]
        collection.create_index(
            index)

    def aggregate(self, pipeline, database):
        """Run an aggregation pipeline and return the result as a DataFrame."""
        db = self.get_db()
        collection = db[database]
        data = collection.aggregate(pipeline)
        return pd.DataFrame(list(data))

    def get_col_keys(self, database):
        """Return the field names of an arbitrary document in the collection.

        NOTE(review): raises AttributeError on an empty collection
        (find_one() returns None) — confirm callers only use populated
        collections before guarding.
        """
        db = self.get_db()
        collection = db[database]
        keys = collection.find_one().keys()
        return keys

    # Paged query; descend toggles descending (True) vs ascending sort.
    def find_page_skip_data(self, coll_name, page_query, page, page_number, field, descend):
        """Return page `page` (1-based) of matches, page_number rows per page."""
        db = self.get_db()
        collection = db[coll_name]
        if descend:
            sort_tag = -1
        else:
            sort_tag = 1
        rows = collection.find(page_query).sort(field, sort_tag).skip((page - 1) * page_number).limit(page_number)
        df = pd.DataFrame(list(rows))
        return df
import sys
import os

file_path = os.path.abspath(__file__)
end = file_path.index('mns') + 16
project_path = file_path[0:end]
sys.path.append(project_path)
import mns_common.utils.data_frame_util as data_frame_util
from mns_scheduler.db.script.sync.remote_mongo_util import RemoteMongodbUtil
from mns_scheduler.db.script.sync.local_mongo_util import LocalMongodbUtil
from loguru import logger

remote_mongodb_util = RemoteMongodbUtil('27017')
local_mongodb_util = LocalMongodbUtil('27017')

# Collections mirrored from the remote MongoDB to the local one.
# Bug fix: 'industry_concept_remark' was listed twice, so that collection was
# cleared and re-copied twice per run; the duplicate entry is removed.
col_list = [
    'company_remark_info',
    'company_holding_info',
    'industry_concept_remark',
    'trade_date_list',
    'company_info',
    'de_list_stock',
    'kpl_best_choose_index',
    'kpl_best_choose_index_detail',
    'realtime_quotes_now_zt_new_kc_open',
    'self_black_stock',
    'self_choose_plate',
    'self_choose_stock',
    'stock_account_info',
    'ths_concept_list',
    'stock_zt_pool',
    'ths_stock_concept_detail'
]


def remote_data():
    """Mirror every collection in col_list from the remote instance to local.

    For each collection: fetch all remote documents; only when data came back,
    clear the local collection and, if the delete was acknowledged, re-save
    the rows.  Each collection is wrapped in its own try/except so one failing
    collection is logged without stopping the rest of the sync.
    """
    for col in col_list:
        try:
            col_df = remote_mongodb_util.find_all_data(col)
            if data_frame_util.is_not_empty(col_df):
                result = local_mongodb_util.remove_all_data(col)
                if result.acknowledged:
                    local_mongodb_util.save_mongo(col_df, col)

            logger.info("同步集合完成:{}", col)
        except BaseException as e:
            logger.error("同步失败:{},{}", e, col)


if __name__ == '__main__':
    remote_data()
|
@@ -0,0 +1,306 @@
|
|
|
1
|
+
import sys
|
|
2
|
+
import os
|
|
3
|
+
|
|
4
|
+
file_path = os.path.abspath(__file__)
|
|
5
|
+
end = file_path.index('mns') + 14
|
|
6
|
+
project_path = file_path[0:end]
|
|
7
|
+
sys.path.append(project_path)
|
|
8
|
+
import pandas as pd
|
|
9
|
+
import pymongo
|
|
10
|
+
from mns_common.utils.async_fun import async_fun
|
|
11
|
+
from loguru import logger
|
|
12
|
+
import warnings
|
|
13
|
+
|
|
14
|
+
warnings.filterwarnings("ignore")
|
|
15
|
+
|
|
16
|
+
|
|
17
|
+
class RemoteMongodbUtil:
|
|
18
|
+
def __init__(self, port):
|
|
19
|
+
self.port = port
|
|
20
|
+
|
|
21
|
+
def get_db(self):
|
|
22
|
+
client = pymongo.MongoClient("mongodb://100.87.2.149:" + '27017' + "/patience")
|
|
23
|
+
return client.patience
|
|
24
|
+
|
|
25
|
+
def group(self, query, coll_name):
|
|
26
|
+
db = self.get_db()
|
|
27
|
+
collection = db[coll_name]
|
|
28
|
+
data = collection.aggregate(query)
|
|
29
|
+
return pd.DataFrame(list(data))
|
|
30
|
+
|
|
31
|
+
def remove_data(self, query, coll_name):
|
|
32
|
+
db = self.get_db()
|
|
33
|
+
collection = db[coll_name]
|
|
34
|
+
return collection.delete_many(query)
|
|
35
|
+
|
|
36
|
+
def exist_data_query(self, coll_name, query):
|
|
37
|
+
db = self.get_db()
|
|
38
|
+
collection = db[coll_name]
|
|
39
|
+
return collection.count(query, limit=1) > 0
|
|
40
|
+
|
|
41
|
+
def find_one(self, coll_name, _id):
|
|
42
|
+
db = self.get_db()
|
|
43
|
+
collection = db[coll_name]
|
|
44
|
+
return collection.find_one({'_id': _id})
|
|
45
|
+
|
|
46
|
+
def find_one_query(self, coll_name, query):
|
|
47
|
+
db = self.get_db()
|
|
48
|
+
collection = db[coll_name]
|
|
49
|
+
return pd.DataFrame(collection.find_one(query), index=[0])
|
|
50
|
+
|
|
51
|
+
def find_all_data(self, coll_name):
|
|
52
|
+
db = self.get_db()
|
|
53
|
+
collection = db[coll_name]
|
|
54
|
+
rows = collection.find({})
|
|
55
|
+
df = pd.DataFrame([basic for basic in rows])
|
|
56
|
+
return df
|
|
57
|
+
|
|
58
|
+
def find_query_data(self, coll_name, query):
|
|
59
|
+
db = self.get_db()
|
|
60
|
+
collection = db[coll_name]
|
|
61
|
+
rows = collection.find(query)
|
|
62
|
+
df = pd.DataFrame(list(rows))
|
|
63
|
+
return df
|
|
64
|
+
|
|
65
|
+
def find_query_data_choose_field(self, coll_name, query, query_field):
|
|
66
|
+
db = self.get_db()
|
|
67
|
+
collection = db[coll_name]
|
|
68
|
+
rows = collection.find(query, query_field)
|
|
69
|
+
df = pd.DataFrame(list(rows))
|
|
70
|
+
return df
|
|
71
|
+
|
|
72
|
+
def find_query_data_list(self, coll_name, query):
|
|
73
|
+
db = self.get_db()
|
|
74
|
+
collection = db[coll_name]
|
|
75
|
+
rows = collection.find(query)
|
|
76
|
+
return list(rows)
|
|
77
|
+
|
|
78
|
+
def find_query_data_list(self, coll_name, query):
|
|
79
|
+
db = self.get_db()
|
|
80
|
+
collection = db[coll_name]
|
|
81
|
+
rows = collection.find(query)
|
|
82
|
+
return list(rows)
|
|
83
|
+
|
|
84
|
+
def remove_all_data(self, database):
    """Delete every document in collection *database* (collection itself kept).

    Fix: the previous filter ``{"_id": {"$ne": "null"}}`` compared ``_id``
    against the literal *string* ``"null"`` — any document whose _id happened
    to be that string was silently skipped.  An empty filter unconditionally
    matches all documents, which is what "remove all" means.
    """
    db = self.get_db()
    collection = db[database]
    collection.delete_many({})
|
|
89
|
+
|
|
90
|
+
def drop_collection(self, database):
    """Drop collection *database* entirely (documents and indexes)."""
    self.get_db()[database].drop()
|
|
94
|
+
|
|
95
|
+
def ascend_query(self, query, coll_name, field, num):
    """Return at most *num* documents matching *query*, sorted ascending by *field*."""
    cursor = self.get_db()[coll_name].find(query).sort(field, 1).limit(num)
    return pd.DataFrame(list(cursor))
|
|
99
|
+
|
|
100
|
+
def descend_query(self, query, coll_name, field, num):
    """Return at most *num* documents matching *query*, sorted descending by *field*."""
    cursor = self.get_db()[coll_name].find(query).sort(field, -1).limit(num)
    return pd.DataFrame(list(cursor))
|
|
104
|
+
|
|
105
|
+
def count(self, query, coll_name):
    """Return the number of documents in *coll_name* matching *query*."""
    return self.get_db()[coll_name].count_documents(query)
|
|
109
|
+
|
|
110
|
+
def query_max(self, query, coll_name, field, num):
    """Return the *num* documents with the largest *field* values among matches
    of *query* (descending sort, then limit)."""
    cursor = self.get_db()[coll_name].find(query).sort(field, -1).limit(num)
    return pd.DataFrame(list(cursor))
|
|
114
|
+
|
|
115
|
+
def query_min(self, query, coll_name, field):
    """Return the single document with the smallest *field* value among
    matches of *query*, as a one-row DataFrame."""
    cursor = self.get_db()[coll_name].find(query).sort(field, 1).limit(1)
    return pd.DataFrame(list(cursor))
|
|
119
|
+
|
|
120
|
+
def insert_mongo(self, df, database):
    """Bulk-insert the rows of DataFrame *df* into collection *database*.

    Rows are de-duplicated first; a None or empty frame is a no-op.  Insert
    failures are logged and swallowed (best-effort semantics kept from the
    original).

    Fix: catch ``Exception`` instead of ``BaseException`` so that
    KeyboardInterrupt / SystemExit are not silently swallowed.
    """
    if df is None or len(df) == 0:
        return
    collection = self.get_db()[database]
    try:
        df = df.drop_duplicates()
        records = df.to_dict('records')
        collection.insert_many(records)
    except Exception as e:
        logger.error("插入数据异常:{}", e)
|
|
133
|
+
|
|
134
|
+
def insert_mongo_json(self, json, database):
    """Bulk-insert *json* (an iterable of dicts) into collection *database*.

    Failures are logged and swallowed (best-effort semantics kept).

    Fix: catch ``Exception`` instead of ``BaseException`` so that
    KeyboardInterrupt / SystemExit are not silently swallowed.
    """
    db = self.get_db()
    collection = db[database]
    try:
        collection.insert_many(json)
    except Exception as e:
        logger.error("插入数据异常:{}", e)
|
|
142
|
+
|
|
143
|
+
def save_mongo_json(self, json, database):
    """Upsert each record of *json* (iterable of dicts) into *database*.

    Fix: ``Collection.save()`` was removed in PyMongo 4.x.  Its documented
    replacement is ``replace_one`` keyed on ``_id`` with ``upsert=True``;
    records without an ``_id`` are plain-inserted, matching save()'s old
    behaviour.  Also narrows ``BaseException`` to ``Exception``.
    """
    db = self.get_db()
    collection = db[database]
    for record in json:
        try:
            if '_id' in record:
                collection.replace_one({'_id': record['_id']}, record, upsert=True)
            else:
                collection.insert_one(record)
        except Exception as e:
            logger.error("保存数据出现异常:{}", e)
|
|
151
|
+
|
|
152
|
+
def save_mongo(self, df, database):
    """Upsert each row of DataFrame *df* into collection *database*.

    A None or empty frame is a no-op; per-row failures are logged and
    swallowed (best-effort semantics kept).

    Fix: ``Collection.save()`` was removed in PyMongo 4.x — replaced with
    ``replace_one`` keyed on ``_id`` with ``upsert=True`` (plain insert when
    the row carries no ``_id``).  Also narrows ``BaseException`` to
    ``Exception``.
    """
    if df is None or len(df) == 0:
        return
    collection = self.get_db()[database]
    records = df.to_dict('records')
    for record in records:
        try:
            if '_id' in record:
                collection.replace_one({'_id': record['_id']}, record, upsert=True)
            else:
                collection.insert_one(record)
        except Exception as e:
            logger.error("保存数据出现异常:{},{}", record, e)
|
|
165
|
+
|
|
166
|
+
def save_mongo_no_catch_exception(self, df, database):
    """Upsert each row of *df* into *database*, letting any error propagate.

    Unlike ``save_mongo`` there is deliberately no try/except: callers rely
    on failures raising.

    Fix: ``Collection.save()`` was removed in PyMongo 4.x — replaced with
    ``replace_one`` keyed on ``_id`` with ``upsert=True`` (plain insert when
    the row carries no ``_id``).
    """
    if df is None or len(df) == 0:
        return
    collection = self.get_db()[database]
    records = df.to_dict('records')
    for record in records:
        if '_id' in record:
            collection.replace_one({'_id': record['_id']}, record, upsert=True)
        else:
            collection.insert_one(record)
|
|
176
|
+
|
|
177
|
+
def update_one(self, df, database):
    """Replace the document whose ``_id`` matches the first row of *df*.

    Fixes two defects:
    * the empty-frame guard now runs *before* ``df['_id']`` is read — the
      original built the condition first and raised on an empty frame;
    * the row is converted to a dict before being sent — the original passed
      the raw DataFrame to ``Collection.update()`` (itself removed in
      PyMongo 4.x), which is not a valid replacement document.
    """
    if df is None or len(df) == 0:
        return
    record = df.to_dict('records')[0]
    condition = {'_id': record['_id']}
    collection = self.get_db()[database]
    collection.replace_one(condition, record)
|
|
184
|
+
|
|
185
|
+
def update_many(self, query, new_values, database):
    """Apply update document *new_values* to every match of *query* in
    collection *database*; return the pymongo UpdateResult."""
    collection = self.get_db()[database]
    return collection.update_many(query, new_values)
|
|
190
|
+
|
|
191
|
+
@async_fun
def update_one_query(self, query, new_values, database):
    """Asynchronously apply *new_values* to the first match of *query* in
    collection *database*.

    NOTE(review): ``Collection.update`` was deprecated in PyMongo 3.x and
    removed in 4.x — this should migrate to ``update_one``.  The return value
    is presumably discarded since ``@async_fun`` runs this in the background —
    TODO confirm before changing the call.
    """
    db = self.get_db()
    collection = db[database]
    x = collection.update(query, new_values)
    return x
|
|
197
|
+
|
|
198
|
+
def distinct_field(self, database, field, query):
    """Return the distinct values of *field* among documents of *database*
    matching *query*."""
    collection = self.get_db()[database]
    return collection.distinct(field, query)
|
|
202
|
+
|
|
203
|
+
def create_index(self, database, index):
    """Create *index* (a pymongo key specification, e.g. ``[("symbol", 1)]``)
    on collection *database*."""
    self.get_db()[database].create_index(index)
|
|
208
|
+
|
|
209
|
+
def aggregate(self, pipeline, database):
    """Run aggregation *pipeline* on collection *database* and return the
    result set as a DataFrame."""
    cursor = self.get_db()[database].aggregate(pipeline)
    return pd.DataFrame(list(cursor))
|
|
214
|
+
|
|
215
|
+
def get_col_keys(self, database):
    """Return the field names of one sample document from *database*.

    Fix: on an empty collection ``find_one()`` returns None and the original
    raised AttributeError; an empty key view is returned instead.
    """
    sample = self.get_db()[database].find_one()
    return sample.keys() if sample is not None else {}.keys()
|
|
220
|
+
|
|
221
|
+
# Paged query; *descend* selects descending sort order.
def find_page_skip_data(self, coll_name, page_query, page, page_number, field, descend):
    """Return page *page* (1-based, *page_number* rows per page) of the
    documents matching *page_query*, sorted by *field* — descending when
    *descend* is truthy — as a DataFrame."""
    sort_direction = -1 if descend else 1
    cursor = (self.get_db()[coll_name]
              .find(page_query)
              .sort(field, sort_direction)
              .skip((page - 1) * page_number)
              .limit(page_number))
    return pd.DataFrame(list(cursor))
|
|
232
|
+
|
|
233
|
+
|
|
234
|
+
# if __name__ == '__main__':
|
|
235
|
+
# symbol = '002992'
|
|
236
|
+
# query = {'symbol': symbol,
|
|
237
|
+
# '$and': [{'str_day': {'$gte': '2022-07-06'}}, {'str_day': {'$lte': '2022-11-06'}}]}
|
|
238
|
+
# mongodb_util = MongodbUtil('27017')
|
|
239
|
+
# # num = mongodb_util.count(query, 'stock_zt_pool')
|
|
240
|
+
# # print(num)
|
|
241
|
+
# key = mongodb_util.get_col_keys('stock_zt_pool')
|
|
242
|
+
# print(key)
|
|
243
|
+
#
|
|
244
|
+
# # num = mongodb_util.count(query, 'stock_zt_pool')
|
|
245
|
+
# # print(num)
|
|
246
|
+
#
|
|
247
|
+
# pipeline = [
|
|
248
|
+
# {'$match': {
|
|
249
|
+
# "classification": {'$in': ["K", "C"]},
|
|
250
|
+
# "str_day": {'$gte': "2022-03-16"}}},
|
|
251
|
+
# {'$group': {'_id': "$flow_mv_level", 'count': {'$sum': 1}}}
|
|
252
|
+
# ]
|
|
253
|
+
# result = mongodb_util.aggregate(pipeline, 'realtime_quotes_now_zt_new_kc_open')
|
|
254
|
+
#
|
|
255
|
+
# result = result.sort_values(by=['_id'], ascending=True)
|
|
256
|
+
# print(result)
|
|
257
|
+
from io import StringIO
|
|
258
|
+
import re
|
|
259
|
+
|
|
260
|
+
if __name__ == '__main__':
    # Ad-hoc sandbox for exercising the utility against a local instance.
    mongodb_util = RemoteMongodbUtil('27017')
    key_word = '高速连接'
    EXCLUDE_INFO_KEY = '股东人数'
    # Exploratory snippets (regex search over stock_interactive_question,
    # aggregation / index-creation examples, concept-detail dumps) lived here
    # commented out; condensed away — recover them from VCS history if needed.
|
@@ -121,6 +121,9 @@ def save_one_plate_detail_data(plate_code, plate_name, index_class, first_plate_
|
|
|
121
121
|
new_df['create_day'] = str_day
|
|
122
122
|
new_df['create_time'] = str_now_date
|
|
123
123
|
new_df = new_df[choose_field]
|
|
124
|
+
new_df['grade'] = 1
|
|
125
|
+
new_df['remark'] = ''
|
|
126
|
+
new_df['remark_flag'] = ''
|
|
124
127
|
mongodb_util.insert_mongo(new_df, 'kpl_best_choose_index_detail')
|
|
125
128
|
|
|
126
129
|
# 保存到当日新增概念列表
|