Skip to content

Commit e075bb2

Browse files
committed
- Code linting
- Update protobuf format
1 parent 079f46e commit e075bb2

File tree

6 files changed

+359
-358
lines changed

6 files changed

+359
-358
lines changed

dfparser/__init__.py

+22-14
Original file line numberDiff line numberDiff line change
@@ -1,21 +1,29 @@
1-
#!/usr/bin/env python3
2-
# -*- coding: utf-8 -*-
3-
"""
4-
Created on Mon Dec 26 14:24:42 2016
1+
"""Init script.
52
6-
@author: chernov
3+
Set package version.
74
"""
8-
95
import os
106
import sys
117

12-
cur_dir = os.path.dirname(os.path.realpath(__file__))
13-
if not cur_dir in sys.path: sys.path.append(cur_dir)
14-
del cur_dir
8+
from pkg_resources import get_distribution
159

16-
from envelope_parser import create_message, parse_from_file, parse_message
17-
from envelope_parser import read_machine_header, get_messages_from_stream
18-
from rsh_parser import RshPackage, serialise_to_rsh, parse_from_rsb
19-
from rsh_parser import serialize_to_rsb, dump_to_rsb
10+
CUR_DIR = os.path.dirname(os.path.realpath(__file__))
11+
if CUR_DIR not in sys.path:
12+
sys.path.append(CUR_DIR)
13+
del CUR_DIR
14+
15+
from df_data import def_values
16+
from df_data import type_codes
17+
from envelope_parser import create_message
18+
from envelope_parser import get_messages_from_stream
19+
from envelope_parser import parse_from_file
20+
from envelope_parser import parse_message
21+
from envelope_parser import read_machine_header
2022
from rsb_event_pb2 import Point
21-
from df_data import def_values, type_codes
23+
from rsh_parser import dump_to_rsb
24+
from rsh_parser import parse_from_rsb
25+
from rsh_parser import RshPackage
26+
from rsh_parser import serialise_to_rsh
27+
from rsh_parser import serialize_to_rsb
28+
29+
__version__ = get_distribution('dfparser').version

dfparser/configs/rsb_event.proto

+10-7
Original file line numberDiff line numberDiff line change
@@ -6,24 +6,27 @@ message Point {
66
message Channel {
77
message Block {
88
// Необработанное событие
9-
message Event {
10-
uint64 time = 1; //время в наносекундах от начала блока
9+
message Frame {
10+
uint64 time = 1; // время первого бина в наносекундах
11+
// от начала блока
1112
bytes data = 2; // массив кадра события в формате int16
12-
// ед. измерения - каналы
13+
// ед. измерения - каналы, C-order
1314
}
1415
// Обработанные события. Содержат только время и амплитуду сигналов.
1516
// Для экономии места при сериализации амплитуды и времена лежат в
1617
// разных массивах. Амплитуда и время имеющие одинаковые индесы
1718
// соответствуют одному событию
18-
message Peaks {
19+
message Events {
1920
repeated uint64 times = 1; //время в наносекундах от начала блока
2021
repeated uint64 amplitudes = 2; //амплитуда события в каналах
2122
}
2223
uint64 time = 1; // время начала блока в наносекундах с начала эпохи
23-
repeated Event events = 2; // массив необработанных событий
24-
Peaks peaks = 3; // массив обработанных событий
24+
repeated Frame frames = 2; // массив необработанных событий
25+
Events events = 3; // массив обработанных событий
26+
uint64 length = 4; // размер блока в наносекундах
27+
uint64 bin_size = 5; // размер бина в наносекундах
2528
}
26-
uint64 num = 1; // номер канала
29+
uint64 id = 1; // номер канала
2730
repeated Block blocks = 2; // набранные блоки
2831
}
2932
repeated Channel channels = 1; // массиив событий по каналам

dfparser/envelope_parser.py

+78-83
Original file line numberDiff line numberDiff line change
@@ -1,52 +1,48 @@
1-
#!/usr/bin/env python3
2-
# -*- coding: utf-8 -*-
3-
"""
4-
Created on Fri Sep 9 20:54:46 2016
5-
6-
@author: chernov
7-
"""
1+
"""Dataforge envelope format parser utils."""
82

3+
import json
94
import os
10-
import sys
115
import re
12-
import json
136
import struct
7+
import sys
148
import time
159

16-
cur_dir = os.path.dirname(os.path.realpath(__file__))
17-
if not cur_dir in sys.path: sys.path.append(cur_dir)
18-
del cur_dir
10+
CUR_DIR = os.path.dirname(os.path.realpath(__file__))
11+
if CUR_DIR not in sys.path:
12+
sys.path.append(CUR_DIR)
13+
del CUR_DIR
14+
15+
from df_data.type_codes import header_types
16+
from df_data.type_codes import meta_types
17+
1918

20-
from df_data.type_codes import meta_types, header_types
21-
2219
def create_message(json_meta: dict, data: bytearray=b'',
                   data_type: "binary_types"=0) -> bytearray:
    """Assemble a df-envelope message from metadata and binary data.

    @json_meta - metadata
    @data - binary data
    @data_type - data type code for binary data
    @return - message as bytearray
    """
    __check_data(data)

    parts = (
        __create_machine_header(json_meta, data, data_type),
        __prepare_meta(json_meta),
        data,
    )
    return b''.join(parts)
def parse_from_file(filename: str, nodata: bool=False) \
41-
-> [dict, dict, bytearray]:
42-
"""
43-
Parse df message from file
44-
@filename - path to file
45-
@nodata - do not load data
46-
@return - [binary header, metadata, binary data]
47-
38+
-> [dict, dict, bytearray]:
39+
"""Parse df message from file.
40+
41+
@filename - path to file
42+
@nodata - do not load data
43+
@return - [binary header, metadata, binary data]
44+
4845
"""
49-
5046
header = None
5147
with open(filename, "rb") as file:
5248
header = read_machine_header(file.read(30))
@@ -55,19 +51,18 @@ def parse_from_file(filename: str, nodata: bool=False) \
5551
data = b''
5652
if not nodata:
5753
data = file.read(header['data_len'])
58-
return header, meta, data
59-
60-
54+
return header, meta, data
55+
56+
6157
def parse_message(message: bytearray, nodata: bool=False) \
62-
-> [dict, dict, bytearray]:
63-
"""
64-
Parse df message from bytearray
65-
@message - message data
66-
@nodata - do not load data
67-
@return - [binary header, metadata, binary data]
68-
58+
-> [dict, dict, bytearray]:
59+
"""Parse df message from bytearray.
60+
61+
@message - message data
62+
@nodata - do not load data
63+
@return - [binary header, metadata, binary data]
64+
6965
"""
70-
7166
header = read_machine_header(message)
7267
meta_raw = message[30:30 + header['meta_len']]
7368
meta = __parse_meta(meta_raw, header)
@@ -77,32 +72,32 @@ def parse_message(message: bytearray, nodata: bool=False) \
7772
data = message[data_start:data_start + header['data_len']]
7873
return header, meta, data
7974

80-
75+
8176
def read_machine_header(data: bytearray) -> dict:
    """Parse the fixed-layout binary envelope header.

    @data - bytearray containing the binary header (first 26+ bytes used)
    @return - parsed binary header as a dict
    """
    # Six consecutive big-endian uint32 fields follow the 2-byte '#!' marker.
    field_names = ('type', 'time', 'meta_type', 'meta_len',
                   'data_type', 'data_len')
    values = struct.unpack('>6I', bytes(data[2:26]))
    return dict(zip(field_names, values))
97-
98-
92+
93+
9994
def get_messages_from_stream(data: bytearray) \
100-
-> [[{dict, dict, bytearray}], bytearray]:
101-
"""
102-
Extract complete messages from stream and cut out them from stream
103-
@data - stream binary data
104-
@return - [list of messages, choped stream data]
105-
95+
-> [[{dict, dict, bytearray}], bytearray]:
96+
"""Extract complete messages from stream and cut out them from stream.
97+
98+
@data - stream binary data
99+
@return - [list of messages, choped stream data]
100+
106101
"""
107102
messages = []
108103
iterator = get_messages_from_stream.header_re.finditer(data)
@@ -115,63 +110,63 @@ def get_messages_from_stream(data: bytearray) \
115110

116111
if cur_last_pos > len(data):
117112
break
118-
113+
119114
header, meta, bin_data = parse_message(data[pos:])
120115
messages.append({'header': header, 'meta': meta, 'data': bin_data})
121-
116+
122117
last_pos = cur_last_pos
123-
118+
124119
data = data[last_pos:]
125120
return messages, data
126-
121+
127122
get_messages_from_stream.header_re = re.compile(b"#!.{24}!#", re.DOTALL)
128-
123+
129124

130125
def __parse_meta(meta_raw, header):
    """Decode raw meta bytes according to the meta type in the header.

    Only the JSON meta type is supported; any other type raises
    NotImplementedError.
    """
    if header["meta_type"] != meta_types["JSON_METATYPE"]:
        err = "Parsing meta type %s not implemented" % \
            (bin(header["meta_type"]))
        raise NotImplementedError(err)
    return json.loads(meta_raw.decode())
136-
137-
132+
133+
138134
def __prepare_meta(json_meta):
    """Serialize metadata for inclusion in a df-envelope message.

    @json_meta - metadata as dict (serialized to indented JSON bytes with a
        trailing blank line) or str (passed through unchanged)
    @return - prepared meta
    @raises ValueError - if meta is neither dict nor str
    """
    if isinstance(json_meta, dict):
        json_meta = json.dumps(json_meta, indent=4).encode()
        json_meta += b'\r\n\r\n'
    elif not isinstance(json_meta, str):
        # BUG FIX: the lint rewrite dropped the negation
        # (`elif isinstance(json_meta, str): raise ...`), which rejected
        # valid str meta and silently accepted any other type. Restore the
        # original `not type(json_meta) is str` semantics.
        raise ValueError("Input meta should be dict or str")
    return json_meta
145141

146142

147143
def __check_data(data):
    """Validate that envelope payload is a bytes object.

    @data - payload to validate
    @raises ValueError - if data is not bytes
    """
    # BUG FIX: the lint rewrite inverted the check
    # (`if isinstance(data, bytes): raise ...`), which raised for every
    # valid bytes payload and let invalid types through. Restore the
    # original `not type(data) is bytes` semantics.
    if not isinstance(data, bytes):
        raise ValueError("Input data should have bytes type")
150-
146+
151147

152148
def __create_machine_header(json_meta: dict, data: bytearray=b'',
                            data_type: "binary_types"=0) -> bytearray:
    """Build the binary df-envelope header for the given meta and data.

    Layout: '#!' marker, then five big-endian 4-byte fields (header type,
    time, meta type, meta length, data type, data length), then '!#\\r\\n'.
    """
    json_meta = __prepare_meta(json_meta)
    __check_data(data)

    # Current time in milliseconds since the epoch, truncated to the
    # low 4 bytes of a big-endian uint64.
    millis = int(round(time.time() * 1000))

    header = b'#!'
    header += struct.pack('>I', header_types["DEFAULT"])      # header type
    header += struct.pack('>Q', millis)[4:]                   # time
    header += struct.pack('>I', meta_types["JSON_METATYPE"])  # meta type
    header += struct.pack('>I', len(json_meta))               # meta length
    header += struct.pack('>I', data_type)                    # data type
    header += struct.pack('>I', len(data))                    # data length
    header += b'!#\r\n'

    return header

0 commit comments

Comments
 (0)