Commit 3eb4047a authored by Takhir Fakhrutdinov

new loader

parent 1d29e705
!*.tc
!*.res
!*.mpc
!*.log
!*.csv
!*orb*
!*.report
!*.json
!*.met.no.req
!*eopc04_iau2000
!*.list
!*daily_iau2000*
\ No newline at end of file
2272060800 10 # 1 Jan 1972
2287785600 11 # 1 Jul 1972
2303683200 12 # 1 Jan 1973
2335219200 13 # 1 Jan 1974
2366755200 14 # 1 Jan 1975
2398291200 15 # 1 Jan 1976
2429913600 16 # 1 Jan 1977
2461449600 17 # 1 Jan 1978
2492985600 18 # 1 Jan 1979
2524521600 19 # 1 Jan 1980
2571782400 20 # 1 Jul 1981
2603318400 21 # 1 Jul 1982
2634854400 22 # 1 Jul 1983
2698012800 23 # 1 Jul 1985
2776982400 24 # 1 Jan 1988
2840140800 25 # 1 Jan 1990
2871676800 26 # 1 Jan 1991
2918937600 27 # 1 Jul 1992
2950473600 28 # 1 Jul 1993
2982009600 29 # 1 Jul 1994
3029443200 30 # 1 Jan 1996
3076704000 31 # 1 Jul 1997
3124137600 32 # 1 Jan 1999
3345062400 33 # 1 Jan 2006
3439756800 34 # 1 Jan 2009
3550089600 35 # 1 Jul 2012
3644697600 36 # 1 Jul 2015
3692217600 37 # 1 Jan 2017
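Each row pairs a moment in time, given as seconds since the NTP epoch of 1900-01-01, with the TAI-UTC offset that takes effect on that date (2272060800 s after 1900-01-01 is exactly 1 Jan 1972). A minimal sketch, assuming only this two-column layout, of how such a table can be read; the real loader is leapFile together with parsers.leap, which this page does not show in full:

from datetime import datetime, timedelta

NTP_EPOCH = datetime(1900, 1, 1)

def read_leap_table(fd):
    """Yield (utc_datetime, tai_minus_utc) pairs from a leap-second table."""
    for line in fd:
        line = line.strip()
        if not line or line.startswith('#'):
            continue
        secs, offset = line.split()[:2]   # trailing '# 1 Jan 1972' comments are ignored
        yield NTP_EPOCH + timedelta(seconds=int(secs)), int(offset)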
# -*- coding: utf-8 -*-
from .xFile import baseFile
import logging
import io
from datetime import datetime
logger = logging.getLogger('')
class baseEphemFile(baseFile):
""" Загрузка файлов орбит КО, полученных из внешних источников.
"""
async def current_constellation(self):
""" Метод. Получение номера соотвествия номеров prn номерам КА в БД.
Args:
sat_system str: система GNSS
Return:
dict: таблица перевода номеров prn
.. literalinclude:: ../../../../fte/lib/libpy/ephFile.py
:language: python
:lines: 26,40-44
:linenos:
:caption: obj2nko
"""
r = await self.fetchval("""
select json_object_agg(coalesce(gps,substr(prn,2)),norad)
from (
select distinct on (prn) prn,norad
from main.t_gnss_const
where satsys = $1
and tmr @> 'now'::timestamp
order by prn,created desc
) r left join main.t_gnss_gps2qzss on (prn = qzss)
""",self.satSystem)
return r
async def load(self,fd,args):
""" Метод. Загрузка файла измерений в БД.
.. literalinclude:: ../../../../fte/lib/libpy/msFile.py
:language: python
:lines: 194,203-293
:linenos:
:caption: load_data
"""
logger.info('Загрузка файла...')
await super().load(fd,args)
self.fid = await self.getfid()
prn2nko = None
if hasattr(self,'satSystem'):
prn2nko = await self.current_constellation()
orblist = self.parse_file(fd)
self.tbeg,self.tend = datetime.max,datetime.min
fo = io.BytesIO()
for r in orblist:
self.tbeg,self.tend = min(self.tbeg,r.tm),max(self.tend,r.tm)
norad = prn2nko.get(r.prn) if prn2nko else r.norad
if norad:
self.write(fo,(self.fid,norad,r.bdt,r.sec,r.rec,self.ephtp,))
else:
if prn2nko: logger.warning(f"Неизвестный объект prn[{r.prn}], строка пропускается.")
self.fileattr['ko'] = len(orblist)
fo.seek(0)
async with self.transaction():
await self.create_doc()
await self.copy_to_table('t_ephemeris',source=fo,schema_name='main',delimiter='\t',columns=('fid','nko','bdt','sec','params','tp',))
if self.task_enabled:
task = await self.fetchval("select * from dbq.put_task(_tp=>$1,_payload=>json_build_object('fid',$2::bigint))",self.tasktyp,self.fid)
if task: logger.info(f"Создана задача {task} конвертации орбит, файл {self.args.fname} ({self.fid}).")
else: logger.warning(f"Не создана задача конвертации орбит, файл {self.args.fname} ({self.fid}).")
else: logger.info(f"Конвертация орбит запрещена, файл {self.args.fname} ({self.fid}).")
class yumaFile(baseEphemFile):
""" Загрузка файлов орбит КО, полученных из внешних источников.
"""
def __init__(self,*args,**kwargs):
""" Конструктор.
.. literalinclude:: ../../../../fte/lib/libpy/msFile.py
:language: python
:lines: 297,306-311
:linenos:
:caption: __init__
"""
from .parsers import yuma
super().__init__(*args,**kwargs)
self.parser = yuma.parsefile
def parser_args(self,fd):
""" Метод. Удаление обработчика для формирования журнала загрузки файла.
.. literalinclude:: ../../../../fte/lib/libpy/xFile.py
:language: python
:lines: 118,127-129
:linenos:
:caption: remove_loghandler
"""
return (fd,self.startDate)
class gpsAlmFile(yumaFile):
""" Загрузка файлов орбит КА в формате альманаха GPS.
"""
def __init__(self,*args,**kwargs):
""" Конструктор.
.. literalinclude:: ../../../../fte/lib/libpy/ephFile.py
:language: python
:lines: 167,176-184
:linenos:
:caption: __init__
"""
super().__init__(*args,**kwargs)
self.satSystem = 'GPS'
self.startDate = datetime(1980,1,6)
self.dbtype = 'Ga'
self.tasktyp = 53
self.task_enabled = True
self.ephtp = 'Y'
class qzssAlmFile(yumaFile):
""" Загрузка файла орбит КА QZSS.
"""
def __init__(self,*args,**kwargs):
""" Конструктор.
.. literalinclude:: ../../../../fte/lib/libpy/ephFile.py
:language: python
:lines: 190,199-207
:linenos:
:caption: __init__
"""
super().__init__(*args,**kwargs)
self.satSystem = 'QZSS'
self.startDate = datetime(1980,1,6)
self.dbtype = 'Ga'
self.tasktyp = 53
self.task_enabled = True
self.ephtp = 'Y'
#class bduAlmFile(yumaFile):
# """ Загрузка файла орбит навигационных КА BEDOU.
#
# """
# def __init__(self,*args,**kwargs):
# """ Конструктор.
#
# .. literalinclude:: ../../../../fte/lib/libpy/ephFile.py
# :language: python
# :lines: 143,152-161
# :linenos:
# :caption: __init__
# """
# super().__init__(*args,**kwargs)
# self.satSystem = 'BDS'
# self.startDate = datetime(2006,1,1)
# self.dbtype = 'Gb'
# self.tasktyp = 55
# self.task_enabled = True
# self.ephtp = 'B'
class ctleFile(baseEphemFile):
""" Загрузка файла орбит КА по данным IRLS в формате TLE.
"""
def __init__(self,*args,**kwargs):
""" Конструктор.
.. literalinclude:: ../../../../fte/lib/libpy/ephFile.py
:language: python
:lines: 121,130-137
:linenos:
:caption: __init__
"""
from .parsers import ctle
super().__init__(*args,**kwargs)
self.parser = ctle.parsefile
self.dbtype = 'tl'
self.tasktyp = 45
self.task_enabled = True
self.ephtp = 'S'
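For reference, current_constellation() collapses main.t_gnss_const into a plain dict keyed by the PRN with the system letter stripped (or by the matching GPS PRN for QZSS) and valued by the NORAD number, and load() uses it to resolve each parsed record. A sketch with invented numbers, purely to illustrate the shape of the data:

# Illustrative only; the real mapping comes from main.t_gnss_const.
prn2nko = {'01': 24876, '02': 28474}   # prn (without the system letter) -> NORAD number
norad = prn2nko.get('01')              # None would mean an unknown object, and the line is skipped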
......@@ -3,7 +3,7 @@ import re
import json
import jsonschema as js
def
import asyncpg as pg
class Factory_t:
""" Создание экземпляра класса загрузчика.
......@@ -56,6 +56,14 @@ class Factory_t:
raise ValueError(format)
return creator()
async def connect(self,format,dsn):
creator = self._creators.get(format)
if not creator:
raise ValueError(format)
cn = await pg.connect(dsn,connection_class=creator())
await cn.add_pg_types()
return cn
def _detect_json(self,fd):
fd.seek(0)
data = json.load(fd)
......@@ -84,19 +92,253 @@ class Factory_t:
def _create_tcFile():
from .msFile import tcFile
return tcFile()
return tcFile
def _create_resFile():
from .msFile import resFile
return resFile()
return resFile
def _create_tdmFile():
from .msFile import tdmFile
return tdmFile
def _create_eopFile():
from .msFile import eopFile
return eopFile
def _create_lcvFile():
from .lcvFile import lcvFile
return lcvFile
def _create_iersFile():
from .iersFile import iersFile
return iersFile
def _create_leapFile():
from .iersFile import leapFile
return leapFile
def _create_solarFile():
from .iersFile import solarFile
return solarFile
def _create_eopc04File():
from .iersFile import eopc04File
return eopc04File
def _create_mpcCodeFile():
from .iersFile import mpcCodeFile
return mpcCodeFile
def _create_forecastFile():
from .weaFile import forecastFile
return forecastFile
def _create_constFile():
from .gnssFile import constFile
return constFile
def _create_mpcFile():
from .msFile import mpcFile
return mpcFile()
def _create_ephemFile():
from .gnssFile import ephemFile
return ephemFile
def _create_gpsAlmFile():
from .ephFile import gpsAlmFile
return gpsAlmFile
def _create_qzssAlmFile():
from .ephFile import qzssAlmFile
return qzssAlmFile
def _create_ctleFile():
from .ephFile import ctleFile
return ctleFile
def _create_cwFile():
from .tmsdbFile import cwFile
return cwFile
def _create_vproFile():
from .tmsdbFile import vproFile
return vproFile
def _create_reqFile():
from .reqFile import reqFile
return reqFile
def _create_hwlogFile():
from .hwlogFile import hwlogFile
return hwlogFile
def _create_zrFile():
from .zFile import zrFile
return zrFile
def _create_zoFile():
from .zFile import zoFile
return zoFile
def _create_satcatFile():
from .tleFile import satcatFile
return satcatFile
def _create_tleFile():
from .tleFile import tleFile
return tleFile
factory = Factory_t()
factory.register_file('TC', _create_tcFile, pattern=re.compile(r"^TC(MSM|12406)\.\d+",re.U))
factory.register_file('RES', _create_resFile, pattern=re.compile(r"^((СИСТ|СТ|ИСТ|SYST|SIST)\s)?\d{5}\s\d+$",re.U))
factory.register_file('MPC', _create_mpcFile, pattern=re.compile(r"^COD\s\w{3}$",re.U))
factory.register_file('LCV', _create_lcvFile, pattern=re.compile(r"^\d{5}\s[0-4](\s+[+-]?\d+([.]\d+)?){5}\s[0RVBINU](\s+[-]?\d+\.\d+){4}",re.U))
factory.register_file('IERS', _create_iersFile, pattern=re.compile(r"^[ 0-9]{6}\s\d+\.\d+\s[IP]\s*[-+ ]?\d+\.\d{6}.{30}[IP]",re.U))
factory.register_file('LP', _create_leapFile, pattern=re.compile(r"^2272060800\x0910\x09# 1 Jan 1972",re.U))
factory.register_file('SI', _create_solarFile, pattern=re.compile(r"^[RF]\,\d{4}\-\d{2}\-\d{2}(\,\d+\.\d+){3}",re.U))
factory.register_file('C04', _create_eopc04File, pattern=re.compile(r"EARTH ORIENTATION PARAMETER \(EOP\) PRODUCT CENTER CENTER \(PARIS OBSERVATORY\)",re.U))
factory.register_file('OBS', _create_mpcCodeFile, pattern=re.compile(r"^\w{3}\t\d+\.\d+\t\d+\.\d+\t(\d+)?\t.*$",re.U))
factory.register_file('WF', _create_forecastFile,pattern=re.compile(r"^\d{4}\-\d{2}\-\d{2}T\d{2}\:\d{2}\:\d{2}Z\,[-]?\d{2,3}\,[-]?\d{2,3}\,[-]?\d+\,\d+\,",re.U))
factory.register_file('GNC', _create_constFile, pattern=re.compile(r'^[A-Z]+\t\d+\t\d+\t[A-Z]\d{2}\t\["\d{4}\-\d{2}\-\d{2}\s\d{2}:\d{2}:\d{2}"\,("\d{4}\-\d{2}\-\d{2}\s\d{2}:\d{2}:\d{2}")?\)',re.U))
factory.register_file('GLE', _create_ephemFile, pattern=re.compile(r"^[A-Z]\d{2}\s+\d+\s+[-]?\d+\s\d{2}\/\d{2}\/\d{2}\s+\d+(\s+\d+\.\d+){6}",re.U))
factory.register_file('GPS', _create_gpsAlmFile, pattern=re.compile(r"^GPS YUMA ALMANAC$",re.U))
factory.register_file('QZS', _create_qzssAlmFile, pattern=re.compile(r"^QZSS ALMANAC$",re.U))
factory.register_file('CTL', _create_ctleFile, pattern=re.compile(r"^#CELESTRAK SUPPLEMENTAL",re.U))
factory.register_file('HW', _create_hwlogFile, pattern=re.compile(r"^(?P<dt>[A-Z][a-z]{2}\s+\d+\s\d{2}:\d{2}:\d{2})\s(?P<cl>.+)\shwstats:\s(?P<tp>\w+)(.(?P<num>\d+))?(\.)(?P<prm>.+)\=(?P<v>.+$)",re.U))
factory.register_file('ZR', _create_zrFile, pattern=re.compile(r"^(Измерения НП |Обзор НП )\w{4} за \d{1,2}\.\d{1,2}\.\d{4}",re.U))
factory.register_file('ZO', _create_zoFile, pattern=re.compile(r"^МАССИВ ОРБИТ НА (\d{2}\.){2}\d{4}\s(\d{2}\:){2}\d{2}",re.U))
factory.register_file('CAT', _create_satcatFile, pattern=re.compile(r"^INTLDES\,NORAD_CAT_ID\,OBJECT_TYPE\,SATNAME\,COUNTRY\,LAUNCH",re.U))
factory.register_file('TLE', _create_tleFile, pattern=re.compile(r"^FILE\,TLE_LINE1\,TLE_LINE2$",re.U))
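Each register_file() call above binds a short format code to a creator plus either a first-line regular expression or a JSON schema, so the factory can both recognise an incoming file and build the matching loader. A hedged usage sketch of the new connect() helper (the DSN is a placeholder; 'GPS' resolves to gpsAlmFile per the registration above):

import asyncio

async def main():
    cn = await factory.connect('GPS', 'postgresql://user:password@localhost/fte')
    # cn is an asyncpg connection whose connection_class is the gpsAlmFile loader
    await cn.close()

asyncio.run(main())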
tdm = {
"required": ["header", "tdm"],
"type": "object",
"properties": {
"header": {
"required": ["originator", "creation_date"],
"type": "object",
"properties": {
"originator": {"type": "string"},
"time_system": {"type": "string"},
"creation_date": {"type": "string","pattern": r"\d{4}\-\d{2}\-\d{2}[ T]\d{2}\:\d{2}\:\d{2}\.\d+"}
}
},
"tdm": {
"required": ["data", "metadata"],
"type": "object",
"properties": {
"data": {
"type": "array",
"items": { "$ref": "#/$defs/track" }
},
"metadata": {
"required": [
"angle_format",
"time_system",
"data_type",
"ref_frame",
"sensor",
"angle_type"
],
"type": "object",
"properties": {
"angle_format":{"enum": ["DECIMAL","DEGREE","HMS"] },
"time_system" :{"enum": ["UTC"] },
"data_type" :{"type": "string" },
"ref_frame" :{"enum": ["EME2000"] },
"sensor" :{"type": "integer"},
"angle_type" :{"enum": ["RADEC","AZEL"] }
}
}
}
}
}
,"$defs":{
"track":{
"required": ["trackid", "meas"],
"properties": {
"trackid":{"type": "integer"},
"meas": {
"type": "array",
"items":{
"type":"array",
"prefixItems":[
{"type":"string","pattern": r"\d{4}\-\d{2}\-\d{2}[ T]\d{2}\:\d{2}\:\d{2}\.\d+"},
{"type":"number"}
],
"items":True
}
}
}
}
}
}
factory.register_file('TDM', _create_tdmFile, schema=tdm)
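A minimal payload that satisfies the tdm schema above (all values invented), checked explicitly with the jsonschema module that this file already imports as js (the prefixItems keyword assumes a Draft 2020-12 aware jsonschema):

sample_tdm = {
    "header": {"originator": "SITE-1", "creation_date": "2023-05-01 12:00:00.000"},
    "tdm": {
        "metadata": {
            "angle_format": "DECIMAL", "time_system": "UTC", "data_type": "OBS",
            "ref_frame": "EME2000", "sensor": 1, "angle_type": "RADEC"
        },
        "data": [
            {"trackid": 1, "meas": [["2023-05-01 12:00:00.000", 123.456, -12.345]]}
        ]
    }
}
js.validate(sample_tdm, tdm)  # raises jsonschema.ValidationError if the payload does not conform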
eop = {
"type": "object",
"properties": {
"tracks": {"type": "array","items":{"$ref":"#/$defs/track" }},
"siteid": {"type": "integer"},
"nightid": {"type": "integer"}
},
"required": ["siteid","nightid","tracks"],
"$defs":{
"track":{
"properties": {
"trackid": {"type":"integer"},
"meas": {"type": "array", "items":{"$ref":"#/$defs/ms"}},
},
"required": ["trackid","meas"],
},
"ms":{
"properties":{
"utc":{"type":"string","pattern": r"\d{2}\/\d{2}\/\d{4}\s\d{2}\:\d{2}\:\d{2}(\.\d+)?"},
"ra_j2000":{"type":"string","pattern":r"[-+]?[0-9]*\.?[0-9]+([eE][-+]?[0-9]+)?"},
"dec_j2000":{"type":"string","pattern":r"[-+]?[0-9]*\.?[0-9]+([eE][-+]?[0-9]+)?"},
"mag":{"type":"string","pattern":r"[-+]?[0-9]*\.?[0-9]+([eE][-+]?[0-9]+)?"},
"ra_j2000_full_error":{"type":"string","pattern":r"[-+]?[0-9]*\.?[0-9]+([eE][-+]?[0-9]+)?"},
"dec_j2000_full_error":{"type":"string","pattern":r"[-+]?[0-9]*\.?[0-9]+([eE][-+]?[0-9]+)?"},
"mag_error":{"type":"string","pattern":r"[-+]?[0-9]*\.?[0-9]+([eE][-+]?[0-9]+)?"},
"crops":{"type":"string","media":{"binaryEncoding":"base64","type":"image/png"}}
},
"required": ["utc","ra_j2000","dec_j2000","mag","ra_j2000_full_error","dec_j2000_full_error","mag_error","crops"]
}
}
}
factory.register_file('EOP', _create_eopFile, schema=eop)
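In the eop schema every measurement is a flat object of string-encoded numbers plus a base64 PNG crop; one illustrative ms record (all values invented):

ms_example = {
    "utc": "01/05/2023 21:14:03.125",   # note DD/MM/YYYY here, unlike the TDM timestamps
    "ra_j2000": "123.456789", "dec_j2000": "-12.345678", "mag": "11.2",
    "ra_j2000_full_error": "0.35", "dec_j2000_full_error": "0.41", "mag_error": "0.10",
    "crops": "iVBORw0KGgo="             # base64 of the PNG cut-out, truncated here
}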
met = {
"type": "object",
"required": ["DavisVantage"],
"properties": {
"DavisVantage": {"type": "array"}
}
}
factory.register_file('MET', _create_vproFile, schema=met)
sky = {
"type": "object",
"required": ["CloudWatcher"],
"properties": {
"CloudWatcher": {"type": "array"}
}
}
factory.register_file('SKY', _create_cwFile, schema=sky)
req = {
"type": "object",
"properties": {
"TaskNum": {
"type": "object",
"properties": {
"Num": {"type": "integer"},
"TaskDescription": {"type": "string"},
"TaskEpoch": {"type": "string", "pattern": r"\d{4}\-\d{2}\-\d{2}\s\d{2}\:\d{2}\:\d{2}"},
"ShiftBoss": {"type": "string"}
},
"required": ["Num","TaskDescription","TaskEpoch","ShiftBoss"]
},
"Id": {"type": "integer"},
"Directive": {"type":"object"}
},
"required": ["TaskNum","Id","Directive"]
}
factory.register_file('RQ', _create_reqFile, schema=req)
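And a minimal request object accepted by the req schema (field values invented):

req_example = {
    "Id": 1,
    "TaskNum": {
        "Num": 42, "TaskDescription": "observation request",
        "TaskEpoch": "2023-05-01 21:00:00", "ShiftBoss": "duty officer"
    },
    "Directive": {}
}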
# -*- coding: utf-8 -*-
from .xFile import baseFile
import logging
import io
from datetime import datetime
logger = logging.getLogger('')
class ephemFile(baseFile):
""" Загрузка файлов орбит КО, полученных из внешних источников.
"""
def __init__(self,*args,**kwargs):
""" Конструктор.
.. literalinclude:: ../../../../fte/lib/libpy/msFile.py
:language: python
:lines: 297,306-311
:linenos:
:caption: __init__
"""
from .parsers import gloephem
super().__init__(*args,**kwargs)
self.parser = gloephem.parsefile
self.dbtype = 'oe'
def parser_args(self,fd):
""" Метод. Удаление обработчика для формирования журнала загрузки файла.
.. literalinclude:: ../../../../fte/lib/libpy/xFile.py
:language: python
:lines: 118,127-129
:linenos:
:caption: remove_loghandler
"""
return (fd,self.fid)
async def load(self,fd,args):
""" Метод. Загрузка файла измерений в БД.
.. literalinclude:: ../../../../fte/lib/libpy/msFile.py
:language: python
:lines: 194,203-293
:linenos:
:caption: load_data
"""
logger.info('Загрузка файла...')
await super().load(fd,args)
self.fid = await self.getfid()
self.tbeg,self.tend = datetime.max,datetime.min
ephem = self.parse_file(fd)
fo = io.BytesIO()
for r in ephem:
self.tbeg,self.tend = min(self.tbeg,r.tm),max(self.tend,r.tm)
self.write(fo,r.rec)
self.fileattr['ko'] = len(ephem)
fo.seek(0)
async with self.transaction():
await self.create_doc()
await self.copy_to_table('t_gnss_ephem',source=fo,schema_name='main',delimiter='\t',columns=('fid','svn','tm','params','prn',))
class constFile(baseFile):
""" Загрузка файлов орбит КО, полученных из внешних источников.
"""
def __init__(self,*args,**kwargs):
""" Конструктор.
.. literalinclude:: ../../../../fte/lib/libpy/msFile.py
:language: python
:lines: 297,306-311
:linenos:
:caption: __init__
"""
from .parsers import gnssconst
super().__init__(*args,**kwargs)
self.parser = gnssconst.parsefile
self.dbtype = 'gnc'
def parser_args(self,fd):
""" Метод. Удаление обработчика для формирования журнала загрузки файла.
.. literalinclude:: ../../../../fte/lib/libpy/xFile.py
:language: python
:lines: 118,127-129
:linenos:
:caption: remove_loghandler
"""
return (fd,self.fid)
async def load(self,fd,args):
""" Метод. Загрузка файла измерений в БД.
.. literalinclude:: ../../../../fte/lib/libpy/msFile.py
:language: python
:lines: 194,203-293
:linenos:
:caption: load_data
"""
logger.info('Загрузка файла...')
await super().load(fd,args)
self.fid = await self.getfid()
self.tbeg,self.tend = datetime.max,datetime.min
constelation = self.parse_file(fd)
fo = io.BytesIO()
for r in constelation:
self.tbeg,self.tend = min(self.tbeg,r.tm),max(self.tend,r.tm)
self.write(fo,r.rec)
fo.seek(0)
async with self.transaction():
await self.create_doc()
await self.copy_to_table('t_gnss_const',source=fo,schema_name='main',delimiter='\t',columns=('fid','satsys','svn','norad','prn','tmr'))
# -*- coding: utf-8 -*-
from .xFile import baseFile
import logging
import hashlib as hs
from datetime import datetime,timedelta
import re
import io
import sys
logger = logging.getLogger('')
class hwlogFile(baseFile):
months = dict(Jan=1,Feb=2,Mar=3,Apr=4,May=5,Jun=6,Jul=7,Aug=8,Sep=9,Oct=10,Nov=11,Dec=12)
states = dict(standby=2.0,online=3.0,suspend=1.0,offline=0.0)
values = {'open':1.0,'close':0.0,'false':0.0,'true':1.0}
""" Загрузка файла журнала работы ЭОС.
"""
def __init__(self,*args,**kwargs):
""" Конструктор.
.. literalinclude:: ../../../../fte/lib/libpy/reqFile.py
:language: python
:lines: 13,22-28
:linenos:
:caption: __init__
"""
super().__init__(*args,**kwargs)
self.tbeg,self.tend = datetime.max, datetime.min
self.dbtype = 'hwl'
self.log = io.StringIO()
async def get_device(self):
""" Метод. Определение идентификатора устройства.
Return:
(int): идентификатор
.. literalinclude:: ../../../../fte/lib/libpy/rrdFile.py
:language: python
:lines: 133,145-155
:linenos:
:caption: get_device
"""
# Determine the station identifier... it is embedded in the file name
telno = re.search(r'^\d+',self.args.fname).group(0)
atype = 510007
dev = await self.fetchval("select dev from main.t_devattr where atype=$1 and unq and dtr @> 'now'::date and v = $2",atype,telno)
if not dev:
raise RuntimeError('Не удалось идентифицировать ЭОП')
self.fileattr['dev'] = dev
return dev
def _gettm(self,dp,now,year):
""" Метод. Загрузка файла.
.. literalinclude:: ../../../../fte/lib/libpy/hwFile.py
:language: python
:lines: 261,270-
:linenos:
:caption: load_data
"""
mn,d,h,m,s = dp.groups()
tm = datetime(year,self.months[mn],int(d),int(h),int(m),int(s))
if (tm-now) > timedelta(days=1):
tm = datetime(year-1,self.months[mn],int(d),int(h),int(m),int(s))
return tm
async def load(self,fd,args):
""" Метод. Загрузка файла.
.. literalinclude:: ../../../../fte/lib/libpy/weaFile.py
:language: python
:lines: 27,36-
:linenos:
:caption: load_data
"""
logger.info('Загрузка файла...')
await super().load(fd,args)
sp = re.compile(r"^(?P<dt>[A-Z][a-z]{2}\s+\d+\s\d{2}:\d{2}:\d{2})\s(?P<cl>.+)\shwstats:\s(?P<tp>\w+)(.(?P<num>\d+))?(\.)(?P<prm>.+)\=(?P<v>.+$)",re.M)
dp = re.compile(r"^(?P<mon>[A-Z][a-z]{2})\s+(?P<d>\d+)\s(?P<h>\d{2}):(?P<m>\d{2}):(?P<s>\d{2})")
dev = await self.get_device()
self.devs = await self.fetchval("select array_agg(member order by member) from main.t_devmembers where dev = $1",dev)
metrix,clis,tps,prms = set(),dict(),dict(),dict()
sql = "select json_object_agg({0}name,{0}) from xd.t_devhardware_type_{0}"
clis = await self.fetchval(sql.format('cli'))
if not clis: clis = dict()
tps = await self.fetchval(sql.format('tp'))
if not tps: tps = dict()
prms = await self.fetchval(sql.format('prm'))
if not prms: prms = dict()
result = await self.fetch("select mrx from xd.t_devhardware_metrix where dev=$1",dev)
for r, in result:
metrix.add(r)
year = int(re.search(r'[_](\d{4})',self.args.fname).group(1))
now = datetime.utcnow()
fo = io.BytesIO()
hr = logging.StreamHandler(self.log)
hr.setLevel(logging.DEBUG)
hr.setFormatter(logging.Formatter(fmt="%(asctime)s %(levelname)s: %(message)s",datefmt="%Y-%m-%d %H:%M:%S"))
logger.addHandler(hr)
logger.debug('Начало разбора данных...')
matches = sp.finditer(fd.read())
for m in matches:
num = int(m.group('num')) if m.group('num') else 0
tm = self._gettm(dp.match(m.group('dt')),now,year)
self.tbeg,self.tend = min(self.tbeg,tm),max(self.tend,tm)
cliname,tpname,prmname,val = m.group('cl'),m.group('tp'),m.group('prm'),m.group('v')
if prmname == 'state':v = self.states[val]
elif prmname.startswith('serial'): continue # serial numbers are not used yet...
elif val.lower() in self.values: v = self.values[val.lower()]
else: v = float(val)
async with self.transaction():
# lookup dictionaries (clients, device types, parameters)
if cliname not in clis:
v = await self.fetchval("insert into xd.t_devhardware_type_cli(cliname) values($1) on conflict do nothing returning cli",cliname)
clis[cliname] = v
logger.info("Добавлен клиент %s",v)
if tpname not in tps:
v = await self.fetchval("insert into xd.t_devhardware_type_tp(tpname) values($1) on conflict do nothing returning tp",tpname)
tps[tpname] = v
logger.info("Добавлено устройство %s",v)
if prmname not in prms:
v = await self.fetchval("insert into xd.t_devhardware_type_prm(prmname) values($1) on conflict do nothing returning prm",prmname)
prms[prmname] = v
logger.info("Добавлен параметр %s",v)
mrx = hash((dev,clis[cliname],tps[tpname],num,prms[prmname],))
if mrx not in metrix:
await self.execute("insert into xd.t_devhardware_metrix(mrx,dev,cli,tp,num,prm) values($1,$2,$3,$4,$5,$6) on conflict do nothing",
mrx,dev,clis[cliname],tps[tpname],num,prms[prmname])
self.write(fo,(tm,mrx,v,))
logger.debug('Окончание разбора данных...')
logger.removeHandler(hr)
fo.seek(0)
self.fid = await self.getfid()
async with self.transaction():
await self.create_doc()
await self.copy_to_table('t_devhardware_log',source=fo,schema_name='xd',delimiter='\t')
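The sp expression expects syslog-style hwstats lines; an invented example of one and how it decomposes:

# Invented sample line matching the 'HW' pattern registered in fileFactory:
#   Jan 15 04:12:33 mount hwstats: drive.1.state=online
# It splits into dt='Jan 15 04:12:33', cl='mount', tp='drive', num='1', prm='state', v='online';
# because prm == 'state' the stored value is states['online'] == 3.0, and the metric key is
# hash((dev, clis['mount'], tps['drive'], 1, prms['state'])).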
# -*- coding: utf-8 -*-
from .xFile import baseFile
from io import BytesIO
import logging
logger = logging.getLogger('')
class leapFile(baseFile):
""" Загрузка значений высокосной секунды по данным IERS.
"""
def __init__(self,*args,**kwargs):
""" Конструктор.
.. literalinclude:: ../../../../fte/lib/libpy/msFile.py
:language: python
:lines: 373,382-387
:linenos:
:caption: __init__
"""
from .parsers import leap
super().__init__(*args,**kwargs)
self.parser = leap.parsefile
self.dbtype = 'lp'
async def load(self,fd,args):
""" Метод. Загрузка файла измерений в БД.
.. literalinclude:: ../../../../fte/lib/libpy/msFile.py
:language: python
:lines: 194,203-293
:linenos:
:caption: load_data
"""
logger.info('Загрузка файла...')
await super().load(fd,args)
leapseconds = self.parse_file(fd)
self.fid = await self.getfid()
async with self.transaction():
await self.executemany("""
insert into main.t_leapsec values($1,$2,$3,$4,$5)
on conflict(dt) do nothing""",leapseconds)
self.tbeg, self.tend = await self.fetchrow("select min(dt)::timestamp,max(dt)::timestamp from main.t_leapsec")
await self.create_doc()
class iersFile(baseFile):
""" Загрузка файла о параметрах движения Земли по данным IERS.
"""
def __init__(self,*args,**kwargs):
""" Конструктор.
.. literalinclude:: ../../../../fte/lib/libpy/msFile.py
:language: python
:lines: 373,382-387
:linenos:
:caption: __init__
"""
from .parsers import iersfinals
super().__init__(*args,**kwargs)
self.parser = iersfinals.parsefile
self.dbtype = 'iers'
async def load(self,fd,args):
""" Метод. Загрузка файла измерений в БД.
.. literalinclude:: ../../../../fte/lib/libpy/msFile.py
:language: python
:lines: 194,203-293
:linenos:
:caption: load_data
"""
logger.info('Загрузка файла...')
await super().load(fd,args)
iers = self.parse_file(fd)
fo = BytesIO()
for i in iers:
self.write(fo,(i[0].isoformat(),i[1],))
fo.seek(0)
self.fid = await self.getfid()
async with self.transaction():
await self.execute("truncate main.t_iers")
await self.copy_to_table('t_iers',source=fo,schema_name='main',delimiter='\t')
await self.execute('refresh materialized view public.v_iers')
self.tbeg, self.tend = await self.fetchrow("select min(dt)::timestamp,max(dt)::timestamp from main.t_iers")
await self.create_doc()
class eopc04File(baseFile):
""" Загрузка файла о параметрах движения Земли по данным IERS.
"""
def __init__(self,*args,**kwargs):
""" Конструктор.
.. literalinclude:: ../../../../fte/lib/libpy/msFile.py
:language: python
:lines: 373,382-387
:linenos:
:caption: __init__
"""
from .parsers import eopc04
super().__init__(*args,**kwargs)
self.parser = eopc04.parsefile
self.dbtype = 'c04'
async def load(self,fd,args):
""" Метод. Загрузка файла измерений в БД.
.. literalinclude:: ../../../../fte/lib/libpy/msFile.py
:language: python
:lines: 194,203-293
:linenos:
:caption: load_data
"""
logger.info('Загрузка файла...')
await super().load(fd,args)
c04 = self.parse_file(fd)
fo = BytesIO()
for l in c04:
self.write(fo,l)
fo.seek(0)
self.fid = await self.getfid()
async with self.transaction():
await self.execute("truncate main.t_iers_c04")
await self.copy_to_table('t_iers_c04',source=fo,schema_name='main',delimiter='\t')
await self.execute('refresh materialized view public.v_iers_c04')
self.tbeg, self.tend = await self.fetchrow("select min(dt)::timestamp,max(dt)::timestamp from main.t_iers_c04")
await self.create_doc()
class solarFile(baseFile):
""" Загрузка файла индексов Солнечной активности.
"""
def __init__(self,*args,**kwargs):
""" Конструктор.
.. literalinclude:: ../../../../fte/lib/libpy/msFile.py
:language: python
:lines: 373,382-387
:linenos:
:caption: __init__
"""
super().__init__(*args,**kwargs)
self.dbtype = 'si'
async def load(self,fd,args):
""" Метод. Загрузка файла измерений в БД.
.. literalinclude:: ../../../../fte/lib/libpy/msFile.py
:language: python
:lines: 194,203-293
:linenos:
:caption: load_data
"""
logger.info('Загрузка файла...')
await super().load(fd,args)
self.parse_file(fd)
fo = BytesIO()
for l in fd:
l = l.strip('\n')
if len(l):
self.write(fo,l)
fo.seek(0)
self.fid = await self.getfid()
async with self.transaction():
await self.execute("truncate main.t_solarindex")
await self.copy_to_table('t_solarindex',source=fo,schema_name='main',delimiter=',',columns=['type','dt','f_10_7','f_81','kp'])
await self.execute('refresh materialized view public.v_solarindex')
self.tbeg, self.tend = await self.fetchrow("select min(dt)::timestamp,max(dt)::timestamp from main.t_solarindex")
await self.create_doc()
class mpcCodeFile(baseFile):
""" Загрузка файла индексов Солнечной активности.
"""
def __init__(self,*args,**kwargs):
""" Конструктор.
.. literalinclude:: ../../../../fte/lib/libpy/msFile.py
:language: python
:lines: 373,382-387
:linenos:
:caption: __init__
"""
from datetime import datetime
super().__init__(*args,**kwargs)
self.dbtype = 'mcod'
self.tbeg = self.tend = datetime.utcnow()
async def load(self,fd,args):
""" Метод. Загрузка файла измерений в БД.
.. literalinclude:: ../../../../fte/lib/libpy/msFile.py
:language: python
:lines: 194,203-293
:linenos:
:caption: load_data
"""
logger.info('Загрузка файла...')
await super().load(fd,args)
self.parse_file(fd)
fo = BytesIO()
for l in fd:
l = l.strip('\n')
if len(l):
self.write(fo,l)
fo.seek(0)
self.fid = await self.getfid()
async with self.transaction():
await self.execute("truncate main.main.t_obscodes")
await self.copy_to_table('t_obscodes',source=fo,schema_name='main',delimiter='\t')
await self.create_doc()
# -*- coding: utf-8 -*-
from .xFile import measFile
import hashlib as hs
import logging
import json
import io
logger = logging.getLogger('')
class lcvFile(measFile):
def __init__(self,*args,**kwargs):
""" Конструктор.
.. literalinclude:: ../../../../fte/lib/libpy/msFile.py
:language: python
:lines: 297,306-311
:linenos:
:caption: __init__
"""
from .parsers import lcvfile
super().__init__(*args,**kwargs)
self.parser = lcvfile.parsefile
self.dbtype = 'lcv'
self.vc = 26
async def load(self,fd,args):
""" Метод. Загрузка файла измерений в БД.
.. literalinclude:: ../../../../fte/lib/libpy/msFile.py
:language: python
:lines: 194,203-293
:linenos:
:caption: load_data
"""
logger.info('Загрузка файла...')
await super().load(fd,args)
data = self.parse_file(fd)
self.tracks,self.tbeg,self.tend = data['tracks'],data['tbeg'],data['tend']
self.fileattr.update(dict(tr=data['tr'],ms=data['ms']))
if 'hdr' in data:
self.fileattr.update(data['hdr'])
siteid = data['siteid']
dev = await self.fetchval("select * from main.devget_id($1::integer,$2)",int(siteid),self.tbeg.date())
await self.check_priv_device(dev)
night = await self.fetchval("select * from main.devget_night($1,$2)",dev,self.tbeg)
self.devs = [dev]
self.fid = await self.getfid()
tracks_crc = await self.get_db_tracks_crc(dev)
# Build the CSV files
fo, fx, fn = io.BytesIO(),io.BytesIO(),io.BytesIO()
for t in self.tracks:
crc,tm = t['crc'],t['tm']
_loaded = crc not in tracks_crc
if _loaded:
head,utr,meas = json.dumps(t['prm'],separators=(',', ':',)),t['utr'],t['meas']
_id = await self.fetchval("select * from nextval('xd.t_tracks_track_seq'::regclass)")
self.write(fo,(_id,tm,dev,self.vc,utr,meas,head,night,))
self.write(fn,(_id,tm,t['nci']['utr'],t['nci']['data'],))
tracks_crc[crc] = _id
else:
_id = tracks_crc[crc]
# check whether НКИ data already exists for this track
nki = await self.fetchval("select true from xd.t_tracknci where track=$1 and tm=$2",_id,tm)
if not nki:
self.write(fn,(_id,tm,t['nci']['utr'],t['nci']['data'],))
self.write(fx,(_id,tm,self.fid,_loaded,))
self.state |= 256
fo.seek(0); fx.seek(0); fn.seek(0)
await self.partition_tracks()
async with self.transaction():
await self.execute("select * from template.partition_table('xd.t_tracknci'::regclass,$1)",self.tbeg)
await self.execute("select * from template.partition_table('xd.t_tracknci'::regclass,$1)",self.tend)
async with self.transaction():
await self.create_doc()
await self.copy_to_table('t_tracks',source=fo,schema_name='xd',delimiter='\t')
await self.copy_to_table('t_filetrack',source=fx,schema_name='xd',delimiter='\t',columns=['track','tm','fid','loaded'])
await self.copy_to_table('t_tracknci',source=fn,schema_name='xd',delimiter='\t')
task = await self.fetchval("select * from dbq.put_task_seance($1,$2)",self.fid,self.tasktyp)
if task: logger.info(f"Создана задача {task} идентификации измерений {self.args.fname} ({self.fid}).")
else: logger.warning(f"Не создана задача идентификации измерений {self.args.fname} ({self.fid}).")
# -*- coding: utf-8 -*-
from .xFile import xFile
from .xFile import measFile
import hashlib as hs
import logging
import json
from re import findall
import io
logger = logging.getLogger('')
class msFile(xFile):
def get_trackcrc(self,meas):
""" Метод. Получение контрольной суммы трека (проводки)
Args:
meas (list): массив измерений трека
Return:
(str): контрольная сумма
.. literalinclude:: ../../../../fte/lib/libpy/msFile.py
:language: python
:lines: 173,188-193
:linenos:
:caption: _get_trackcrc
"""
s = ''
for m in meas:
t = '%s%.2f%.2f' % (m['tm'].strftime('%Y-%m-%d %H:%M:%S.%f'),m['params'][0],m['params'][1],)
s += t
return hs.md5(s.encode('utf8')).hexdigest()
def _get_meas_t(self,meas):
""" Метод.
Args:
meas ():
Return:
.. literalinclude:: ../../../../fte/lib/libpy/msFile.py
:language: python
:lines: 153,167-172
:linenos:
:caption: _get_meas_t
"""
""" Формирование строки для типа meas_t """
ms = list()
for m in meas:
ms.append(r'"(\\"%s\\",\\"%s\\",)"' % (m['tm'],str(m['params']).replace('[','{').replace(']','}'),))
return '{'+','.join(ms)+'}'
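_get_meas_t serialises the measurements as a PostgreSQL array-of-composite literal for copy_to_table; a rough illustration of the output for one two-parameter measurement (the exact quoting is dictated by the meas_t column type, which is not part of this diff):

# For meas = [{'tm': '2020-01-01 00:00:00', 'params': [1.0, 2.0]}] the helper returns
# {"(\\"2020-01-01 00:00:00\\",\\"{1.0, 2.0}\\",)"}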
class msFile(measFile):
async def load(self,fd,args):
......@@ -60,77 +19,54 @@ class msFile(xFile):
:linenos:
:caption: load_data
"""
from datetime import datetime,date,time
_2json = lambda obj: (
obj.isoformat()
if isinstance(obj, (datetime,date,time,))
else str(obj) if isinstance(obj, Decimal)
else None
)
logger.info('Загрузка файла...')
super().load(fd,args)
data = self.parse_file(fd)
self.tracks,self.tbeg,self.tend = data['tracks'],data['tbeg'],data['tend']
self.fileattr.update(dict(tr=data['tr'],ms=data['ms']))
if 'hdr' in data:
self.fileattr.update(data['hdr'])
# Check the number of sensors in the file
tels = set()
for t in self.tracks: tels.add(t['telno'])
if len(tels) != 1:
raise RuntimeError("Недопустимое количество %s, телескопов в одном файле." % len(tels))
self.dev = await self.fetchval("select * from main.devget_devid($1,$2)",tels[0],self.tbeg.date())
await self.check_priv_device()
self.night = await self.fetchval("select * from main.devget_night($1,$2)",self.dev,self.tbeg)
self.devs = [self.dev]
self.fid = await self.getfid()
await super().load(fd,args)
tracks_crc = await self.fetchval("""
select json_object_agg(crc,track)
from (
select xd.track_crc(vc,meas) crc,track
from xd.t_tracks
where tm between $1 and $2
and utr && tsrange($1,$2,'[]')
and dev = $3
) r
where crc is not null""",self.tbeg,self.tend,self.dev)
if not tracks_crc: track_crc = dict()
vc,sensor,tracks,self.tbeg,self.tend,file_params = self.parse_file(fd)
self.fileattr.update(file_params)
dev = await self.fetchval("select * from main.devget_id($1::integer,$2)",sensor,self.tbeg.date())
await self.check_priv_device(dev)
night = await self.fetchval("select * from main.devget_night($1,$2)",dev,self.tbeg)
self.devs = [dev]
self.fid = await self.getfid()
tracks_crc = await self.get_db_tracks_crc(dev)
# Build the CSV files
fo, fx= io.BytesIO(),io.BytesIO()
for t in self.tracks:
tb,te = t['hdr'].pop('tb'),t['hdr'].pop('te')
utr = '["%s","%s"]' % (tb,te,)
params = t['hdr'].copy()
params['obj'] = t['obj']
crc = self._get_trackcrc(t['meas'])
_loaded = crc not in tracks_crc
for t in tracks:
_loaded = t.crc not in tracks_crc
if _loaded:
meas = self._get_meas_t(t['meas'])
head = json.dumps(params,default=_2json,separators=(',', ':',))
_id = await self.fetchval("select * from nextval('main.t_nip_res_trans_id_seq'::regclass)")
self.write(fo,(_id,te,self.dev,self.vc,utr,meas,head,night,))
tracks_crc[crc] = _id
head = json.dumps(t.params,default=_2json,separators=(',', ':',))
_id = await self.fetchval("select * from nextval('xd.t_tracks_track_seq'::regclass)")
self.write(fo,(_id,t.tm,dev,vc,t.utr,t.meas,head,night,))
tracks_crc[t.crc] = _id
else:
_id = tracks_crc[crc]
_id = tracks_crc[t.crc]
# write a row into the buffer for xd.t_filetrack
self.write(fx,(_id,te,self.fid,_loaded,))
self.write(fx,(_id,t.tm,self.fid,_loaded,))
self.state |= 256
fo.seek(0); fx.seek(0)
async with self.transaction():
state = await self.execute("""update main.t_files set state = 0
where ftype='zr' and tels && $1 and utr && tsrange($2,$3,'[]')
and utr != tsrange(null,null)""",self.devs,self.tbeg,self.tend)
rowcount, = findall(r'\d+',state)
if rowcount:
self.state |= 512
await self.partition_tracks()
async with self.transaction():
await self.create_doc()
await self.copy_to_table('t_tracks',source=fo,schema_name='xd',delimiter='\t')
await self.copy_to_table('t_filetrack',source=fx,schema_name='xd',delimiter='\t',columns=['track','tm','fid','loaded'])
body = json.dumps(self.args.comment.split('\n')) #???
await self.execute("select from dbq.put_tasks_mail(_pub=>1011,_p1=>$1,_event=>$2,_message=>$3::json)",self.dev,str(self.fid),body)
await self.execute("select from dbq.put_tasks_mail(_pub=>1010,_p1=>$1,_event=>$2)",self.dev,str(self.fid))
await self.execute("select from dbq.put_task_seance($1)",self.fid)
logger.info(f"Создана задача идентификации измерений. Номер файла: {self.fid}.")
task = await self.fetchval("select * from dbq.put_task_seance($1,$2)",self.fid,self.tasktyp)
if task: logger.info(f"Создана задача {task} идентификации измерений {self.args.fname} ({self.fid}).")
else: logger.warning(f"Не создана задача идентификации измерений {self.args.fname} ({self.fid}).")
class resFile(msFile):
......@@ -150,9 +86,9 @@ class resFile(msFile):
super().__init__(*args,**kwargs)
self.parser = resfile.parsefile
self.dbtype = 'om'
self.atype = [20010,500010]
self.vc = 1
class tcFile(msFile):
""" Запись файла измерений в формате TC в БД.
"""
......@@ -165,27 +101,51 @@ class tcFile(msFile):
:linenos:
:caption: __init__
"""
from .parsers import tcfile
super().__init__(*args,**kwargs)
self.parser = tcfile.parsefile
self.dbtype = 'tc'
self.atype = [20011,500011]
self.vc = 2
class mpcFile(msFile):
""" Запись файла измерений в формате MPC в БД.
class tdmFile(msFile):
""" Запись файла измерений в формате TC в БД.
"""
def __init__(self,*args,**kwargs):
""" Конструктор.
.. literalinclude:: ../../../../fte/lib/libpy/msFile.py
:language: python
:lines: 373,382-387
:lines: 335,344-349
:linenos:
:caption: __init__
"""
from .parsers import tdmfile
super().__init__(*args,**kwargs)
self.parser = mpcfile.parsefile
self.dbtype = 'mp'
self.atype = [20021]
self.vc = 3
\ No newline at end of file
self.parser = tdmfile.parsefile
self.dbtype = 'tdm'
class eopFile(msFile):
""" Запись файла измерений в формате TC в БД.
"""
def __init__(self,*args,**kwargs):
""" Конструктор.
.. literalinclude:: ../../../../fte/lib/libpy/msFile.py
:language: python
:lines: 335,344-349
:linenos:
:caption: __init__
"""
from .parsers import eopfile
super().__init__(*args,**kwargs)
self.parser = eopfile.parsefile
self.dbtype = 'eop'
self.vc = 4
# -*- coding: utf-8 -*-
"""
Not relevant at the moment.
If needed, the parser has to be finished
and the class added to msFile
class chirpFile(msFile):
def __init__(self,*args,**kwargs):
from .parsers import chirpfile1
super().__init__(*args,**kwargs)
self.parser = chirpfile.parsefile
self.dbtype = 'ch'
self.vc = 5
self.tasktyp = 68
async def get_parser_args(self):
# Hard-code ИРНР
m = re.search(r'\_(?P<dt>\d{8}\_\d{4})\_',self.args.fname)
dt = datetime.strptime(m.group('dt'),'%Y%m%d_%H%M')
obj = await self.fetchval("select * from ic.devseq_nextval($1,'obj',to_char(main.devget_night($1,$2),'YYYYDDD'))",152,dt)
return (self.f,dt,obj,80002)
add to fileFactory
re.compile(r'^N\tTime\tHours\tFreq\(kHz\)\tAmp\tKK\tR\(km\)\tAntAz\((gard|grad)\)\tAntEl\((gard|grad)\)'):'CHRIP1',
re.compile(r'^T\(hrs\)\tR\tEp\tGam'):'CHRIP2',
+ create the class
"""
from datetime import datetime
import hashlib as hs
......@@ -31,7 +63,7 @@ def _makerec(l,dt):
return dict(tm=datetime.combine(dt.date(),tm.time()),params=[az,el,r,amp,freq,kk])
def parsefile(f,dt,obj):
def parsefile(f,dt,obj,telno):
""" Функция. Обработка файла.
Args:
......@@ -71,7 +103,7 @@ def parsefile(f,dt,obj):
tb = min(tb,r['tm'])
te = max(te,r['tm'])
cnt = len(meas)
return dict(tracks=[dict(meas=meas,obj=obj,hdr=dict(tb=tb,te=te,cnt=cnt))],tbeg=tb,tend=te,ms=cnt,tr=1)
return dict(tracks=[dict(telno=telno,meas=meas,obj=obj,hdr=dict(tb=tb,te=te,cnt=cnt))],tbeg=tb,tend=te,ms=cnt,tr=1)
finally:
f.seek(0)
......
# -*- coding: utf-8 -*-
import re
from datetime import datetime, timedelta
import logging
import hashlib as hs
logger = logging.getLogger('')
## Measurements in CHIRP format
#
# Example file: @verbatim
# T(hrs) R Ep Gam
# #27597 12 RCS=17.2870 T,c= 14.696 Ravg= 836.306
# 6.759634 827.267 9.630 0.071
# 6.759204 828.877 10.409 1.013
# 6.758808 830.371 11.110 0.422
# 6.758389 832.180 11.904 0.497
# 6.757986 833.749 12.651 0.619
# 6.757599 835.556 13.307 1.405
# 6.757568 835.858 13.335 1.610
# 6.757147 837.959 14.074 0.942
# 6.756759 840.008 14.799 1.725
# 6.756392 842.082 15.455 1.646
# 6.755969 844.623 16.203 1.548
# 6.755551 847.139 16.913 1.484
# @endverbatim
#
_HEAD = re.compile(r"^[#]?(?P<obj>\d+)\t(\d+\tRCS=(?P<rcs>\d+\.\d+)\t)?T\,c",re.U)
def _makerec(l,dt):
""" Функция. Формирование данных для записи в БД.
Args:
l(str): исходная строка файла
dt(str): дата
Return:
dict: данные
.. literalinclude:: ../../../../fte/lib/libpy/parsers/chirpfile2.py
:language: python
:lines: 31,48-55
:linenos:
:caption: _makerec
"""
p = list(map(float,l.split('\t')))
# IMPORTANT: round the time to milliseconds
tm = datetime(1900,1,1) + timedelta(seconds=round(p[0]*3600,3))
az,el,r = p[2],p[3],p[1]
return dict(tm=datetime.combine(dt.date(),tm.time()),params=[az,el,r])
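The first column is the time of day in fractional hours; _makerec converts it through a timedelta rounded to milliseconds and combines it with the session date. Worked through for the first sample line above, assuming dt = datetime(2019, 11, 18) as in the __main__ block below:

# 6.759634 h * 3600 = 24334.6824 s, rounded to 24334.682 s, i.e. 06:45:34.682,
# so the record becomes
# {'tm': datetime(2019, 11, 18, 6, 45, 34, 682000), 'params': [9.630, 0.071, 827.267]}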
## Parsing of the source file with measurements
def parsefile(f,dt):
""" Функция. Обработка файла.
Args:
f (file): входной файл
dt(datetime): дата
Return:
dict: данные для записи в БД
.. literalinclude:: ../../../../fte/lib/libpy/parsers/chirpfile2.py
:language: python
:lines: 56,72-124
:linenos:
:caption: parsefile
"""
ms = 0
tbeg = datetime.max
tend = datetime.min
tracks = list()
crc = set()
f.seek(0)
try:
for n,l in enumerate(f):
if n:
l = l.strip('\r\n')
if len(l):
m = _HEAD.search(l)
if m:
# is there at least one track already?
if len(tracks):
cnt = len(tracks[-1]['meas'])
if cnt:
tracks[-1]['hdr'] = dict(cnt=cnt,tb=tb,te=te)
ms += cnt
tbeg, tend = min(tbeg,tb), max(tend,te)
else:
tracks.pop(-1)
logger.debug('%s:Пропускаем пустую проводку.',n)
# new track
tracks.append(dict(meas=list(),obj=int(m.group('obj'))))
tb,te = datetime.max,datetime.min
else:
_crc = hs.md5(l.encode('utf-8')).hexdigest()
if _crc in crc:
logger.debug('%s:Пропускаем дубль строки "%s"',n,l)
continue
else:
crc.add(_crc)
r = _makerec(l,dt)
tracks[-1]['meas'].append(r)
tb,te = min(tb,r['tm']), max(te,r['tm'])
# The last track
cnt = len(tracks[-1]['meas'])
if cnt:
tracks[-1]['hdr'] = dict(cnt=cnt,tb=tb,te=te)
ms += cnt
tbeg, tend = min(tbeg,tb), max(tend,te)
else:
tracks.pop(-1)
logger.debug('%s:Пропускаем пустую проводку.',n)
return dict(tracks=tracks,tbeg=tbeg,tend=tend,ms=ms,tr=len(tracks))
finally:
f.seek(0)
if __name__ == '__main__':
import sys
import io
import os
import logging.config
logging.config.dictConfig({
'version': 1,
'disable_existing_loggers': False, # this fixes the problem
'formatters': {
'standard': {
'format': '%(asctime)s %(levelname)s:%(module)s:%(lineno)s: %(message)s',
'datefmt':'%Y-%m-%d %H:%M:%S'
},
},
'handlers': {
'default': {
'level':'DEBUG',
'formatter':'standard',
'class':'logging.handlers.RotatingFileHandler',
'filename': os.path.splitext(__file__)[0]+'.log',
'maxBytes': 10485760,
'backupCount': 20,
},
},
'loggers': {
'': {
'handlers': ['default'],
'level': 'DEBUG',
'propagate': True
}
}
})
f = io.StringIO(sys.stdin.read())
prm = [f,datetime(2019,11,18),80002]
t = parsefile(*prm)
print(t)
......@@ -3,6 +3,7 @@
import re
from datetime import datetime,timedelta
import hashlib as hs
from collections import namedtuple
import logging
logger = logging.getLogger('')
......@@ -55,8 +56,7 @@ def parsefile(f):
:linenos:
:caption: parsefile
"""
orblist = list()
crc = set()
orblist,crc = list(),set()
f.seek(0)
try:
for no,l in enumerate(f):
......@@ -66,6 +66,7 @@ def parsefile(f):
if _crc in crc:
logger.debug('%s:Пропускаем дубль строки "%s"',no,l)
continue
crc.add(_crc)
m = _TL1.match(l)
if m:
......@@ -89,8 +90,10 @@ def parsefile(f):
prm['ma'] = float(r['ma'])
prm['mm'] = float(r['mm'])
rec = [prm[i] for i in ('mdot','mddot','bstar','etype','i','raan','e','argp','ma','mm') if i in prm]
orblist.append(dict(norad=norad,bdt=bdt,sec=sec,rec=rec))
continue
tm = bdt+timedelta(seconds=sec)
c = namedtuple('ctle_t',('tm','norad','bdt','sec','rec',))(tm,norad,bdt,sec,rec)
orblist.append(c)
return orblist
......
# -*- coding: utf-8 -*-
import logging
import json
from datetime import datetime
import hashlib as hs
import base64
from collections import namedtuple
logger = logging.getLogger()
def _hms(_angle):
""" Метод. Преобразование угла из градусной меры в часовую.
Args:
_angle (float): значение угла в градусах
Return:
(float): значение угла в ЧЧММСС.ссссс
.. literalinclude:: ../../../../fte/lib/libpy/eopFile.py
:language: python
:lines: 135,150-156
:linenos:
:caption: _hms
"""
hh,d = divmod(abs(_angle),1)
sec = d * 3600.0
mm,ss = divmod(sec,60.0)
res = hh*10000+mm*100+round(ss,2)
return res if _angle >= 0 else -res
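_hms keeps the integer part of the angle and re-expresses the fractional part as minutes and seconds, packing everything into one number; a quick worked example with an invented value:

# _hms(12.53125):
#   integer part 12, fractional part 0.53125 -> 0.53125 * 3600 = 1912.5 s -> 31 min 52.5 s
#   result: 12*10000 + 31*100 + 52.5 = 123152.5 (negative inputs keep their sign)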
def _get_trackcrc(meas):
""" Метод. Получение контрольной суммы трека (проводки)
Args:
meas (list): массив измерений трека
Return:
(str): контрольная сумма
.. literalinclude:: ../../../../fte/lib/libpy/msFile.py
:language: python
:lines: 157,172-184
:linenos:
:caption: _get_trackcrc
"""
s = ''
for m in meas:
tm = datetime.strptime(m['utc'],'%d/%m/%Y %H:%M:%S.%f')
t = '%s%.2f%.2f' % (
tm.strftime('%Y-%m-%d %H:%M:%S.%f'),
_hms(float( m['ra_j2000'])),
_hms(float( m['dec_j2000'])),
)
s += t
return hs.md5(s.encode('utf8')).hexdigest()
# Builds a string literal for the meas_t type
def _get_meas_t(meas):
""" Метод. Формирование строки для записи трека в БД
Args:
meas (dict): параметры измерения
Return:
(list): начало трека, окончание трекаб средний блеск, строка для записи трека в БД
.. literalinclude:: ../../../../fte/lib/libpy/eopFile.py
:language: python
:lines: 45,60-76
:linenos:
:caption: _get_meas_t
"""
_fields = ('ra_j2000','dec_j2000','mag','ra_j2000_full_error','dec_j2000_full_error','mag_error',)
ms,mags,tb,te = list(),list(),datetime.max, datetime.min
for m in meas:
tm = datetime.strptime(m['utc'],'%d/%m/%Y %H:%M:%S.%f')
tb,te = min(tb,tm),max(te,tm)
rec = [float(m.get(i,0.0)) for i in (_fields)]
crops = r'\\\\\\\\x%s' % base64.b64decode(m['crops']).hex()
mag = float(m.get('mag',0.0))
if mag != 0: mags.append(mag)
ms.append(r'"(\\"%s\\",\\"%s\\",\\"%s\\")"' % (tm.isoformat(),str(rec).replace('[','{').replace(']','}'),crops,))
avg_m = round(sum(mags)/len(mags),1) if len(mags) else 0.0
return tb,te,avg_m,'{'+','.join(ms)+'}'
def parsefile(f):
data = json.load(f)
tracks,tbeg,tend,mcnt = list(),datetime.max,datetime.min,0
for t in data['tracks']:
tb,te,avg_m,ms = _get_meas_t(t['meas'])
tbeg,tend,cnt = min(tbeg,tb),max(tend,te),len(t['meas'])
mcnt += cnt
# Build the track (pass)
params=dict(
cnt=cnt,mag=avg_m,obj=t['trackid'],bind=t.get('objectid'),
area=t['target']
)
tr = dict(
tm=te,
meas=ms,
utr='["%s","%s"]' % (tb,te,),
crc=_get_trackcrc(t['meas']),
params=params
)
tracks.append(namedtuple('track_t',tr.keys())(**tr))
return 4,data['siteid'],tracks,tbeg,tend,dict(tr=len(tracks),ms=mcnt)
......@@ -3,6 +3,7 @@
import re
from datetime import datetime,timedelta
import hashlib as hs
from collections import namedtuple
import logging
logger = logging.getLogger('')
......@@ -25,8 +26,7 @@ def parsefile(f,fid):
:linenos:
:caption: parsefile
"""
crc = set()
ephems = list()
ephems,crc = list(),set()
f.seek(0)
try:
for no,l in enumerate(f):
......@@ -36,13 +36,14 @@ def parsefile(f,fid):
if _crc in crc:
logger.debug('%s:Пропускаем дубль строки "%s"',no,l)
continue
crc.add(_crc)
m = _EPH.match(l)
if m:
r = dict(m.groupdict())
tm = datetime.strptime(r['dt'],'%y/%m/%d')+timedelta(seconds=float(r['sec']))
prn,svn,params = r['prn'],int(r['svn']),'{%s}'% ','.join(re.split('\s+',r['vc'].strip()))
ephems.append([fid,svn,tm,params,prn])
e = namedtuple('ephem_t',('tm','rec',))(tm,[fid,svn,tm,params,prn])
ephems.append(e)
return ephems
finally:
......
# -*- coding: utf-8 -*-
import re
from datetime import datetime,timedelta
from datetime import datetime
import hashlib as hs
from collections import namedtuple
import logging
logger = logging.getLogger('')
......@@ -46,8 +46,7 @@ def parsefile(f,fid):
:linenos:
:caption: parsefile
"""
crc = set()
constelation = list()
constelation,crc = list(),set()
f.seek(0)
try:
for no,l in enumerate(f):
......@@ -57,13 +56,15 @@ def parsefile(f,fid):
if _crc in crc:
logger.debug('%s:Пропускаем дубль строки "%s"',no,l)
continue
crc.add(_crc)
m = _CNST.match(l)
if m:
r = dict(m.groupdict())
sys,svn,norad,prn = r['sys'],int(r['svn']),int(_alpha5num(r['norad'])),r['prn']
tb,te = datetime.fromisoformat(r['tb']),datetime.fromisoformat(r['te']) if r.get('te') else None
constelation.append([fid,sys,svn,norad,prn,tb,te])
rng = '["{}",{})'.format(tb,'"{}"'.format(te) if te else '')
c = namedtuple('constelation_t',('tm','rec',))(tb,[fid,sys,svn,norad,prn,rng])
constelation.append(c)
return constelation
finally:
......
# -*- coding: utf-8 -*-
"""
Not relevant at the moment.
If needed, the parser has to be finished
and the class added to msFile
class mpcFile(msFile):
def __init__(self,*args,**kwargs):
from .parsers import mpcfile
super().__init__(*args,**kwargs)
self.parser = mpcfile.parsefile
self.dbtype = 'mp'
self.vc = 3
add to fileFactory
def _create_mpcFile():
from .msFile import mpcFile
return mpcFile
factory.register_file('MPC', _create_mpcFile, pattern=re.compile(r"^COD\s\w{3}$",re.U))
"""
import re
from datetime import datetime,time
......
......@@ -4,6 +4,7 @@ import re
from datetime import datetime
import hashlib as hs
import logging
from collections import namedtuple
logger = logging.getLogger('')
## Измерения в формате RES
......@@ -28,6 +29,49 @@ _MS = re.compile(r"^(?P<tm>\d{6}\s\d{8,9})\s(?P<ra>\d{8})\s(?P<dec>[+-]\d{8})\s
_HEAD = re.compile(r"^((СИСТ|СТ|ИСТ|SYST|SIST)\s)?(?P<telno>\d+)\s(?P<obj>\d+)$",re.U)
_END = re.compile(r"^(END|КОНЕЦ)|^$",re.U)
def get_trackcrc(meas):
""" Метод. Получение контрольной суммы трека (проводки)
Args:
meas (list): массив измерений трека
Return:
(str): контрольная сумма
.. literalinclude:: ../../../../fte/lib/libpy/msFile.py
:language: python
:lines: 173,188-193
:linenos:
:caption: get_trackcrc
"""
s = ''
for m in meas:
t = '%s%.2f%.2f' % (m.tm.strftime('%Y-%m-%d %H:%M:%S.%f'),m.params[0],m.params[1],)
s += t
return hs.md5(s.encode('utf8')).hexdigest()
def _get_meas_t(meas):
""" Метод.
Args:
meas ():
Return:
.. literalinclude:: ../../../../fte/lib/libpy/msFile.py
:language: python
:lines: 153,167-172
:linenos:
:caption: _get_meas_t
"""
ms = list()
for m in meas:
ms.append(r'"(\\"%s\\",\\"%s\\",)"' % (m.tm,str(m.params).replace('[','{').replace(']','}'),))
return '{'+','.join(ms)+'}'
def _makerec(m,no):
""" Функция. Формирование данных для записи единичного измерения в БД.
......@@ -56,9 +100,9 @@ def _makerec(m,no):
if er == 0: logger.debug(u'%s:СКО астрометрической ошибки 0',no)
mag = round(float(m.group('ma'))*0.1,1)
if mag == 0: logger.debug(u'%s:Средний блеск 0',no)
return dict(tm=tm,params=[ra,dec,mag,er])
return namedtuple('meas_t',('tm','params'))(tm,[ra,dec,mag,er])
def _avg_mag(track):
def _avg_mag(meas):
""" Функция. Вычисление среднего блеска трека (проводки)
Args:
......@@ -75,7 +119,7 @@ def _avg_mag(track):
"""
# Compute the average magnitude within the track (pass)
#print (track)
mags = [ m['params'][2] for m in track if m['params'][2] != 0]
mags = [ m.params[2] for m in meas if m.params[2] != 0]
if len(mags):
return round(sum(mags)/len(mags),1)
return 0
......@@ -96,37 +140,37 @@ def parsefile(f):
:linenos:
:caption: parsefile
"""
ms = 0
tbeg = datetime.max
tend = datetime.min
tracks = list()
crc = set()
last_track_crc = set()
f.seek(0)
try:
tracks,tbeg,tend,mcnt = list(),datetime.max,datetime.min,0
sensor,crc = set(),set()
for no,l in enumerate(f):
l = l.strip('\n')
if len(l):
if _END.match(l):
cnt = len(tracks[-1]['meas'])
if cnt == 0:
tracks.pop(-1)
logger.debug('%s:Пропускаем пустую проводку.',no)
cnt = len(meas)
if cnt:
tr = dict(
tm=te,
meas=_get_meas_t(meas),
params=dict(cnt=cnt,mag=_avg_mag(meas),obj=obj),
utr='["%s","%s"]' % (tb,te,),
crc=get_trackcrc(meas))
tracks.append(namedtuple('track_t',tr.keys())(**tr))
mcnt += cnt
tbeg,tend = min(tbeg,tb),max(tend,te)
else:
mag = _avg_mag(tracks[-1]['meas'])
tracks[-1]['hdr'] = dict(cnt=cnt,tb=tb,te=te,mag=mag)
ms += cnt
tbeg = min(tbeg,tb)
tend = max(tend,te)
logger.debug('%s:Пропускаем пустую проводку.',no)
continue
m = _HEAD.match(l)
if m:
tracks.append(
dict(meas=list(),telno=m.group('telno'),obj=int(m.group('obj')),hdr=None))
tb = datetime.max
te = datetime.min
last_track_crc = set()
sensor.add(int(m.group('telno')))
if len(sensor) > 1:
raise RuntimeError(f"{no}:Недопустимое количество телескопов в одном файле.")
meas,obj,tb,te,last_tm_crc = list(),int(m.group('obj')),datetime.max,datetime.min,set()
continue
m = _MS.match(l)
if m:
_crc = hs.md5(l.encode('utf-8')).hexdigest()
......@@ -137,22 +181,20 @@ def parsefile(f):
crc.add(_crc)
_lcrc = hs.md5(l.encode('utf-8')[:15]).hexdigest()
if _lcrc in last_track_crc:
if _lcrc in last_tm_crc:
logger.debug('%s:Пропускаем строку с дублем времени в одной проводке. "%s"',no,l)
continue
else:
last_track_crc.add(_lcrc)
last_tm_crc.add(_lcrc)
rec = _makerec(m,no)
if rec:
tracks[-1]['meas'].append(rec)
tb = min(tb,rec['tm'])
te = max(te,rec['tm'])
meas.append(rec)
tb,te = min(tb,rec.tm),max(te,rec.tm)
continue
return dict(tracks=tracks,tbeg=tbeg,tend=tend,ms=ms,tr=len(tracks))
finally:
f.seek(0)
return 1,sensor.pop(),tracks,tbeg,tend,dict(tr=len(tracks),ms=mcnt)
if __name__ == '__main__':
import sys
......
......@@ -3,6 +3,8 @@
import re
import hashlib as hs
import logging
from collections import namedtuple
from .tm import *
logger = logging.getLogger('')
......@@ -78,7 +80,8 @@ def _getrec(m,no):
vc += l[i].replace('+','') if i not in ('perr','tm_orb','tm_meas') else u'"%s"' % l[i]
vc += ','
vc = u'[%s]' % vc[:-1]
return dict(tm=tm,nko=nko,unko=unko,vc=vc)
orb = dict(tm=tm,nko=nko,unko=unko,vc=vc)
return namedtuple('orbit_t',orb.keys())(**orb)
## Parsing of the source file with the orbit list
def parsefile(f):
......