
Commit b3ae0fe

Merge pull request #33 from hpuhr/devel
Devel
2 parents 7446909 + 53ce9ea commit b3ae0fe

4 files changed, +43 -23 lines changed


CMakeLists.txt

Lines changed: 2 additions & 2 deletions
@@ -6,8 +6,8 @@ set ( CMAKE_BUILD_TYPE RelWithDebInfo ) #Debug RelWithDebInfo Release
 
 project( jASTERIX )
 SET(CPACK_PACKAGE_VERSION_MAJOR "0")
-SET(CPACK_PACKAGE_VERSION_MINOR "0")
-SET(CPACK_PACKAGE_VERSION_PATCH "8")
+SET(CPACK_PACKAGE_VERSION_MINOR "1")
+SET(CPACK_PACKAGE_VERSION_PATCH "0")
 
 message(" System: ${CMAKE_SYSTEM}")
 set(CMAKE_ARCHIVE_OUTPUT_DIRECTORY ${CMAKE_BINARY_DIR}/lib)

analyze/data_items.py

Lines changed: 39 additions & 19 deletions
@@ -1,4 +1,4 @@
-#!/usr/bin/python
+#!/usr/bin/python3
 
 import sys
 import json
@@ -76,32 +76,52 @@ def print(self, count_parent=None):
             val_stat.print(self.count)
 
 class DataItemStatisticsCalculator:
-    def __init__(self):
+    def __init__(self, per_source):
+        self._per_source = per_source
         self.__num_records = 0
-        self.__statistics = {} # type: Dict[str, DataItemStatistic]
+        self.__statistics = {} # type: Dict[(int, int), Dict[str, DataItemStatistic]]
 
     @property
     def num_records(self):
         return self.__num_records
 
     def process_record(self, cat, record):
 
+        sac = None
+        sic = None
+
+        if self._per_source:
+            sac = find_value('010.SAC', record)
+            sic = find_value('010.SIC', record)
+
+            assert sac is not None and sic is not None
+
         self.__num_records += 1
 
         cat_str = str(cat).zfill(3)
 
-        if cat_str not in self.__statistics:
-            self.__statistics[cat_str] = DataItemStatistic(cat_str)
+        if (sac, sic) not in self.__statistics:
+            self.__statistics[(sac, sic)] = {}
+
+        if cat_str not in self.__statistics[(sac, sic)]:
+            self.__statistics[(sac, sic)][cat_str] = DataItemStatistic(cat_str)
 
-        self.__statistics[cat_str].process_object(record)
+        self.__statistics[(sac, sic)][cat_str].process_object(record)
 
     def print(self):
         print('num records {}'.format(self.__num_records))
 
-        print('data items')
-        for cat, stat in sorted(self.__statistics.items()):
-            print()
-            stat.print()
+        for (sac, sic), stat_dict in sorted(self.__statistics.items()):
+
+            if self._per_source:
+                print('data items for {}/{}'.format(sac, sic))
+            else:
+                print('data items')
+
+            for cat, stat in sorted(stat_dict.items()):
+                print()
+                stat.print()
+            print('\n\n')
 
 
 # filter functions return True if record should be skipped
@@ -122,32 +142,32 @@ def filter_cats(cat, record):
 def main(argv):
 
     parser = argparse.ArgumentParser(description='ASTERIX data item analysis')
-    parser.add_argument('--framing', help='Framing True or False', required=True)
+    parser.add_argument('--framing', help='Framing', default=False, action='store_true', required=False)
     parser.add_argument('--cats', help='ASTERIX categories to be analyzed as CSV', required=False)
+    parser.add_argument('--per_source', help='Whether to do analysis per SAC/SIC', default=False, action='store_true', required=False)
 
     args = parser.parse_args()
 
-    assert args.framing is not None
-    assert args.framing == 'True' or args.framing == 'False'
-    framing = args.framing == 'True'
+    #assert args.framing is not None
+    #assert args.framing == 'True' or args.framing == 'False'
+    print('framing {} '.format(args.framing))
 
     global cat_list
     if args.cats is not None:
         cat_list = args.cats.split(",")
         cat_list = [int(i) for i in cat_list]
 
-    print('framing {}'.format(framing))
     print('cats {}'.format(cat_list))
+    print('per-source {} '.format(args.per_source))
 
     num_blocks = 0
 
-    statistics_calc = DataItemStatisticsCalculator() # type: DataItemStatisticsCalculator
+    statistics_calc = DataItemStatisticsCalculator(args.per_source) # type: DataItemStatisticsCalculator
 
     if cat_list is None: # without filtering
-        record_extractor = RecordExtractor (framing, statistics_calc.process_record) # type: RecordExtractor
+        record_extractor = RecordExtractor (args.framing, statistics_calc.process_record) # type: RecordExtractor
     else: # with filtering lambda
-        record_extractor = RecordExtractor(framing, statistics_calc.process_record, filter_cats) # type: RecordExtractor
-
+        record_extractor = RecordExtractor(args.framing, statistics_calc.process_record, filter_cats) # type: RecordExtractor
 
     start_time = time.time()
 
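Note: with action='store_true', --framing and --per_source are now plain switches (present means True, absent means False) instead of the old --framing True / --framing False string argument, and when --per_source is given the statistics are kept in a nested dictionary keyed by the (SAC, SIC) tuple read from data item 010. Below is a minimal standalone sketch of that flag handling and keying; it does not use the real RecordExtractor / find_value helpers, and the record contents are made up for illustration.

#!/usr/bin/python3
# Minimal sketch of store_true flags and (SAC, SIC)-keyed counting;
# the record dicts are hypothetical stand-ins for decoded ASTERIX records.
import argparse
from collections import defaultdict

def main():
    parser = argparse.ArgumentParser(description='flag handling sketch')
    # store_true flags: giving the switch sets the value to True, omitting it leaves False
    parser.add_argument('--framing', default=False, action='store_true')
    parser.add_argument('--per_source', default=False, action='store_true')
    args = parser.parse_args()

    records = [  # hypothetical (category, record) pairs
        (48, {'010': {'SAC': 20, 'SIC': 1}}),
        (48, {'010': {'SAC': 20, 'SIC': 2}}),
    ]

    # counts[(sac, sic)][cat_str] mirrors the nested statistics layout
    counts = defaultdict(lambda: defaultdict(int))
    for cat, record in records:
        key = (record['010']['SAC'], record['010']['SIC']) if args.per_source else (None, None)
        counts[key][str(cat).zfill(3)] += 1

    for (sac, sic), per_cat in sorted(counts.items()):
        print('data items for {}/{}'.format(sac, sic) if args.per_source else 'data items')
        for cat_str, num in sorted(per_cat.items()):
            print('  cat {}: {} records'.format(cat_str, num))

if __name__ == '__main__':
    main()

In the real script the same change applies: the analysis is now started with --framing and --per_source as bare flags rather than with --framing True.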

definitions/categories/categories.json

Lines changed: 1 addition & 1 deletion
@@ -43,7 +43,7 @@
     },
     "010":{
         "comment":"Category 010 Transmission of Monosensor Surface Movement Data",
-        "default_edition":"0.24_sensis",
+        "default_edition":"0.31",
         "default_mapping":"",
         "editions":{
             "0.24_sensis":{

src/asterix/record.cpp

Lines changed: 1 addition & 1 deletion
@@ -314,7 +314,7 @@ size_t Record::parseItem(const char* data, size_t index, size_t size, size_t cur
 
     if (special_purpose_field_present)
     {
-        size_t re_bytes = static_cast<size_t>(data[index + parsed_bytes]);
+        size_t re_bytes = static_cast<unsigned char>(data[index + parsed_bytes]);
 
         parsed_bytes += 1; // read 1 len byte
         re_bytes -= 1; // includes 1 len byte
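Note: the cast change matters because data is a const char*. On platforms where plain char is signed, a length byte of 0x80 or higher reads as a negative char, and static_cast<size_t> of that negative value wraps to a huge length; casting through unsigned char first yields the intended 0-255 value. A small Python sketch of the same signed-versus-unsigned byte interpretation (using the standard struct module, not the jASTERIX code itself):

# Interpret the byte 0x82 as a signed vs. unsigned 8-bit value, mirroring
# static_cast<size_t>(char) vs. static_cast<unsigned char>(char) on a signed-char platform.
import struct

raw = b'\x82'                           # example length byte >= 0x80
signed = struct.unpack('b', raw)[0]     # 'b' = signed char   -> -126
unsigned = struct.unpack('B', raw)[0]   # 'B' = unsigned char -> 130

print(signed, unsigned)  # -126 130
# Converted to an unsigned size_t, -126 would wrap to an enormous length,
# while 130 is the value the length byte actually encodes.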
