
Commit f3ca0e2

Upgrade to 0.0.19
- Update readme. Add usage samples
- Add DF02 protocol version support
- Lint some files

1 parent b8d12a1

File tree

7 files changed (+268, -164)

.gitignore (+3)

@@ -1,3 +1,6 @@
+.spyproject
+MANIFEST
+
 # Byte-compiled / optimized / DLL files
 __pycache__/
 *.py[cod]

README.md (+71, -13)

@@ -1,19 +1,77 @@
 # python-df-parser
 
-Parser for (dataforge)[http://npm.mipt.ru/dataforge/] envelope format
+Python parser for the [dataforge envelope](http://npm.mipt.ru/dataforge/) format.
+The envelope format is designed to transport and store binary data together with its
+metadata in a single packet ([more info](http://npm.mipt.ru/dataforge/docs.html#envelope_format)).
 
-## Installation
-Latest version on PyPi can be installed by command:
+The implementation currently supports:
+- parsing and serializing metadata in JSON format;
+- the 0x14000 and DF02 protocol versions;
+- transparent binary data compression (only zlib is supported).
 
-pip3 install dfparser
-
-To install latest version use:
-
-pip3 install https://github.com/kapot65/python-df-parser/archive/master.zip
-
-## Build
-To update protobuf formats use:
+## Installation
+The latest version on PyPI can be installed with `pip3 install dfparser`.
 
-cd configs && protoc rsb_event.proto --python_out ../ && cd ..
+## Usage
+#### Create a simple message
+- 0x14000 protocol version
+>>> import dfparser
+>>> dfparser.create_message({"param": "abc"}, data=b'bnary')
+b'#!\x00\x01@\x00pY_2\x00\x01\x00\x00\x00\x00\x00\x1a\x00\x00\x00\x00\x00\x00\x00\x05!#\r\n{\n "param": "abc"\n}\r\n\r\nbnary'
+- DF02 version
+>>> import dfparser
+>>> dfparser.create_message({"param": "abc"}, b'binary', version=b'DF02')
+b'#~DF02JS\x00\x00\x00\x1a\x00\x00\x00\x06~#\r\n{\n "param": "abc"\n}\r\n\r\nbinary'
+
+#### Parse a message
+- From a file
+>>> import dfparser
+>>> header, meta, data = dfparser.parse_from_file("df02.df")
+>>> # Or read only the metadata
+>>> header, meta, _ = dfparser.parse_from_file("df02.df", nodata=True)
+- From bytes
+>>> import dfparser
+>>> data = b'#~DF02JS\x00\x00\x00\x1a\x00\x00\x00\x06~#\r\n{\n "param": "abc"\n}\r\n\r\nbinary'
+>>> dfparser.parse_message(data)
+({'data_len': 6, 'meta_len': 26, 'meta_type': b'JS', 'type': b'DF02'},
+ {'param': 'abc'},
+ b'binary')
+
+- From a stream
+>>> import dfparser
+>>> # stream.df contains multiple messages written sequentially
+>>> data = open("/home/chernov/stream.df", 'rb').read()
+>>> dfparser.get_messages_from_stream(data)
+([{'data': b'bnary',
+   'header': {'data_len': 5,
+              'data_type': 0,
+              'meta_len': 26,
+              'meta_type': 65536,
+              'time': 1885524830,
+              'type': b'\x00\x01@\x00'},
+   'meta': {'param': 'abc'}},
+  {'data': b'binary',
+   'header': {'data_len': 6,
+              'meta_len': 26,
+              'meta_type': b'JS',
+              'type': b'DF02'},
+   'meta': {'param': 'abc'}}],
+ b'')
 
-(Protobuf 3.2.0+)[https://github.com/google/protobuf/releases] should be installed
+### Transparent compression
+To apply transparent compression, the message meta should contain the field `"compression": "zlib"`:
+>>> import dfparser
+>>> data = b''.join(b'0' for _ in range(100))
+>>> compr = dfparser.create_message({"compression": "zlib"}, data)
+>>> compr
+b'#!\x00\x01@\x00pt\xf7\xa7\x00\x01\x00\x00\x00\x00\x00!\x00\x00\x00\x00\x00\x00\x00\x0c!#\r\n{\n "compression": "zlib"\n}\r\n\r\nx\x9c30\xa0=\x00\x00\xb3q\x12\xc1'
+>>> _, _, decompr = dfparser.parse_message(compr)
+>>> data == decompr
+True
+
+## Build
+# To update the protobuf formats, use:
+# [Protobuf 3.2.0+](https://github.com/google/protobuf/releases) should
+# be installed and be in $PATH
+cd configs && protoc rsb_event.proto --python_out ../ && cd ..
+python3 setup.py build
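
Taken together, the new usage samples describe a full round trip. The sketch below strings them into one runnable snippet; it assumes, as the samples above do, that `create_message`, `parse_message` and `get_messages_from_stream` are available at the top level of the `dfparser` package, and the expected values in the comments are taken from the samples.

```python
import dfparser

# 0x14000 envelope with a zlib-compressed payload: parse_message is expected
# to decompress transparently because the metadata carries
# "compression": "zlib" (see the compression sample above).
payload = b"0" * 100
packet = dfparser.create_message({"compression": "zlib"}, payload)
_, _, restored = dfparser.parse_message(packet)
assert restored == payload

# DF02 envelope, selected via the version argument.
df02 = dfparser.create_message({"param": "abc"}, b"binary", version=b"DF02")
header, meta, data = dfparser.parse_message(df02)
print(header["type"], meta["param"], data)   # b'DF02' abc b'binary'

# Concatenated envelopes can be split back into individual messages.
messages, tail = dfparser.get_messages_from_stream(packet + df02)
print(len(messages), tail)                   # 2 b''
```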

dfparser/df_data/def_values.py (+13, -13)

@@ -6,7 +6,7 @@
 @author: chernov
 """
 
-def_rsh_params = {
+DEF_RSH_PARAMS = {
     "filepath": "this",
     "num_blocks": 0,
     "aquisition_time": 1.0e+4,
@@ -17,7 +17,7 @@
     "synchro_channel": {
         "type": "SYNCHRO_PROGRAMM",
         "params": ["Default"],
-        "gain": 1
+        "gain": 1
     },
     "sample_freq": 3.125e+6,
     "pre_history": 8,
@@ -27,14 +27,14 @@
     "hysteresis": 0,
     "channel": [
         {
-            "params": ["Used", "Synchro", "Resist50Ohm"],
-            "adjustment": 0,
-            "gain": 1
-        },
-        {
-            "params": ["NotUsed"],
-            "adjustment": 0,
-            "gain": 1
-        }
-    ]
-}
+            "params": ["Used", "Synchro", "Resist50Ohm"],
+            "adjustment": 0,
+            "gain": 1
+        },
+        {
+            "params": ["NotUsed"],
+            "adjustment": 0,
+            "gain": 1
+        }
+    ]
+}
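
The lint change also promotes `def_rsh_params` to the module-level constant `DEF_RSH_PARAMS`. A minimal sketch of how a caller might consume it, assuming the module is importable as `dfparser.df_data.def_values` per the file tree above; the copy-and-override pattern and the chosen override values are illustrative only.

```python
import copy

from dfparser.df_data.def_values import DEF_RSH_PARAMS

# Treat the shipped defaults as read-only and copy them before tuning
# individual fields (all keys used below appear in the diff above).
params = copy.deepcopy(DEF_RSH_PARAMS)
params["aquisition_time"] = 5.0e+3
params["channel"][0]["params"] = ["Used", "Resist50Ohm"]

print(params["synchro_channel"]["type"])  # SYNCHRO_PROGRAMM
```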

dfparser/df_data/type_codes.py (+54, -46)

@@ -6,62 +6,70 @@
 @author: chernov
 """
 
-header_types = {
-    "DEFAULT": 0x00014000
-}
-
-meta_types = {
-    "UNDEFINED_METATYPE": 0x00000000,
-    "JSON_METATYPE": 0x00010000,
-    "QDATASTREAM_METATYPE": 0x00010007
-}
-
-binary_types = {
-    "UNDEFINED_BINARY": 0x00000000,
-    "POINT_DIRECT_BINARY": 0x00000100,
-    "POINT_QDATASTREAM_BINARY": 0x00000107,
-    "HV_BINARY": 0x00000200,
-    "HV_TEXT_BINARY": 0x00000201
-}
+ENVELOPE_HEADER_CODES = {
+    b'\x00\x01@\x00': {
+        "header_len": 30,
+        "meta_types": {
+            "UNDEFINED_METATYPE": 0x00000000,
+            "JSON_METATYPE": 0x00010000,
+            "QDATASTREAM_METATYPE": 0x00010007
+        },
+        "binary_types": {
+            "UNDEFINED_BINARY": 0x00000000,
+            "POINT_DIRECT_BINARY": 0x00000100,
+            "POINT_QDATASTREAM_BINARY": 0x00000107,
+            "HV_BINARY": 0x00000200,
+            "HV_TEXT_BINARY": 0x00000201
+        }
+    },
+    b"DF02": {
+        "header_len": 20,
+        "meta_types": {
+            "UNDEFINED_METATYPE": 0,
+            "JSON_METATYPE": b"JS",
+            "XML_METATYPE": b"XM"
+        },
+    }
+}
 
-#https://bitbucket.org/Kapot/lan10-12pci_base/src/e99ac0a81f952f597c90aa58b7df6a0798204775/Lan12_Params.h?at=master&fileviewer=file-view-default
+# https://bitbucket.org/Kapot/lan10-12pci_base/src/e99ac0a81f952f597c90aa58b7df6a0798204775/Lan12_Params.h?at=master&fileviewer=file-view-default
 synchro_channel_types = {
-    "SYNCHRO_EXTERNAL" : 0,
-    "SYNCHRO_INTERNAL" : 1,
-    "SYNCHRO_PROGRAMM" : 2
-};
+    "SYNCHRO_EXTERNAL": 0,
+    "SYNCHRO_INTERNAL": 1,
+    "SYNCHRO_PROGRAMM": 2
+}
 
 # http://www.rudshel.ru/soft/SDK2/Doc/CPP_USER_RU/html/struct_rsh_channel.html
-channel_control = {
-    "NotUsed" : 0x0,
-    "NoSynchro" : 0x0,
-    "Resist1MOhm" : 0x0,
+channel_control = {
+    "NotUsed": 0x0,
+    "NoSynchro": 0x0,
+    "Resist1MOhm": 0x0,
     "DC": 0x0,
-    "ICPPowerOff" : 0x0,
-    "Used" : 0x1,
-    "Synchro" : 0x2,
-    "AC" : 0x4,
-    "Resist50Ohm" : 0x8,
-    "ICPPowerOn" : 0x10,
-    "FirstChannel" : 0x10000
+    "ICPPowerOff": 0x0,
+    "Used": 0x1,
+    "Synchro": 0x2,
+    "AC": 0x4,
+    "Resist50Ohm": 0x8,
+    "ICPPowerOn": 0x10,
+    "FirstChannel": 0x10000
 }
 
 # http://www.rudshel.ru/soft/SDK2/Doc/CPP_USER_RU/html/struct_rsh_synchro_channel.html
 synchro_channel_control = {
-    "FilterOff" : 0x0,
-    "Resist1MOhm" : 0x0,
-    "DC" : 0x0,
-    "FilterLow" : 0x1,
-    "FilterHigh" : 0x2,
-    "AC" : 0x4,
-    "Resist50Ohm" : 0x8
+    "FilterOff": 0x0,
+    "Resist1MOhm": 0x0,
+    "DC": 0x0,
+    "FilterLow": 0x1,
+    "FilterHigh": 0x2,
+    "AC": 0x4,
+    "Resist50Ohm": 0x8
 }
 
 # http://www.rudshel.ru/soft/SDK2/Doc/CPP_USER_RU/html/struct_rsh_init_a_d_c.html
 synchro_control = {
-    "FrequencySwitchOff" : 0x0,
-    "SlopeFront" : 0x0,
-    "SlopeDecline" : 0x2,
-    "FrequencySwitchToMinimum" : 0x4,
-    "FrequencySwitchToMaximum" : 0x8
-}
+    "FrequencySwitchOff": 0x0,
+    "SlopeFront": 0x0,
+    "SlopeDecline": 0x2,
+    "FrequencySwitchToMinimum": 0x4,
+    "FrequencySwitchToMaximum": 0x8
+}
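
The new `ENVELOPE_HEADER_CODES` table keys per-protocol properties by the raw 4-byte tag that follows the envelope start sequence (`#!` for 0x14000 and `#~` for DF02 in the README samples). A hypothetical lookup helper under that assumption; only the dictionary itself comes from this commit.

```python
from dfparser.df_data.type_codes import ENVELOPE_HEADER_CODES


def envelope_props(raw: bytes) -> dict:
    """Illustrative helper: pick per-protocol properties for a raw envelope."""
    if not raw.startswith((b"#!", b"#~")):
        raise ValueError("not a dataforge envelope")
    tag = raw[2:6]  # 4-byte protocol tag right after the start sequence
    try:
        return ENVELOPE_HEADER_CODES[tag]
    except KeyError:
        raise ValueError("unknown protocol version %r" % tag)


df02 = b'#~DF02JS\x00\x00\x00\x1a\x00\x00\x00\x06~#\r\n{\n "param": "abc"\n}\r\n\r\nbinary'
print(envelope_props(df02)["header_len"])  # 20
```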
