-
Notifications
You must be signed in to change notification settings - Fork 0
/
Copy pathrun_survey.py
executable file
·120 lines (106 loc) · 3.13 KB
/
run_survey.py
1
2
3
4
5
6
7
8
9
10
11
12
13
14
15
16
17
18
19
20
21
22
23
24
25
26
27
28
29
30
31
32
33
34
35
36
37
38
39
40
41
42
43
44
45
46
47
48
49
50
51
52
53
54
55
56
57
58
59
60
61
62
63
64
65
66
67
68
69
70
71
72
73
74
75
76
77
78
79
80
81
82
83
84
85
86
87
88
89
90
91
92
93
94
95
96
97
98
99
100
101
102
103
104
105
106
107
108
109
110
111
112
113
114
115
116
117
118
119
120
#!/usr/bin/env python3
import json
import sys
import argparse
from datetime import datetime
from multicast_mpeg_scan.scan import Scan
# Parse cli arguments
arg_parser = argparse.ArgumentParser(description='Scan URLs for MPEG stream information')

# Where to persist (and reload) the channel database.
arg_parser.add_argument('-f', '--file', help='Filename where scan data is persisted')

# URL-generating patterns; expanded into concrete URLs further below.
arg_parser.add_argument(
    '-p', '--patterns',
    nargs='+',
    metavar='PATTERN',
    help='Pattern(s) to scan in the format of "foo{:d}bar@iterable" where you can generate multiple entries to scan by using a Python generator like "range" and the "{:d}" will be replaced by the iterated value (refer to Python documentation for advanced usage)'
)

# Tuning knobs for the scan itself.
arg_parser.add_argument('-c', '--concurrency', type=int, default=1, help='How many probes to run in parallel')
arg_parser.add_argument('-t', '--timeout', type=int, default=10, help='Timeout for a single probe')
arg_parser.add_argument('-v', '--verbosity', type=int, default=1, help='How much information to display during the scan')

arguments = arg_parser.parse_args()
# Instantiate the scanner with the user-supplied tuning parameters.
scanner = Scan(
    concurrency=arguments.concurrency,
    timeout=arguments.timeout,
    verbosity=arguments.verbosity,
)
# Build URL list from parameters
if arguments.patterns:
    for pattern_spec in arguments.patterns:
        # Split on the LAST '@' so patterns that themselves contain '@'
        # (e.g. credentials inside a URL) still work; the original
        # split('@') also crashed with IndexError when '@' was absent.
        pattern, separator, iterable_expr = pattern_spec.rpartition('@')
        if not separator:
            print('Error: pattern "' + pattern_spec + '" is missing the "@iterable" part.', file=sys.stderr)
            sys.exit(1)
        # SECURITY: eval() executes arbitrary Python from the command line.
        # This is by design (the --patterns help documents it), but never
        # feed untrusted input through this option.
        iterator = eval(iterable_expr)
        for i in iterator:
            # Substitute the iterated value into the "{:d}" placeholder.
            scanner.add(pattern.format(i))
# Build URL list from stdin
if not sys.stdin.isatty():
    # Stream stdin line by line (no need to materialize readlines()).
    for line in sys.stdin:
        scanner.add(line.strip())
# Run the scan
scan_results = scanner.run()

# Load the existing channel database, if any; start with an empty one
# when no file was given or the file does not exist yet (first run).
db = {}
if arguments.file:
    try:
        db_handle = open(arguments.file, mode='r')
    except FileNotFoundError:
        pass
    else:
        with db_handle:
            db = json.load(db_handle)
# Update channel data
for url in scanner.addresses.keys():
    # Skip if there is no data for the scan altogether
    if not scan_results[url]:
        print('Error: scan data for "' + url + '" was not present.', file=sys.stderr)
        continue
    # Skip if scan failed (non-zero probe exit status)
    if scan_results[url]['returncode']:
        continue
    # Skip if the scan didn't have a channel name and no name present in the database
    # Note: doesn't skip if channel not present in database altogether
    try:
        scan_results[url]['stdout']['programs'][0]['tags']['service_name']
    except (KeyError, IndexError, TypeError):
        # Narrowed from a bare `except Exception` so genuine bugs are not
        # silently swallowed; these three cover a missing key, an empty
        # program list, and a None somewhere along the path.
        if db.get(url) and \
           db[url].get('programs') and \
           db[url]['programs'][0].get('tags') and \
           db[url]['programs'][0]['tags'].get('service_name'):
            # .get() on the last step too: a tags dict without
            # 'service_name' used to raise KeyError *inside* this handler.
            continue
    # Create a dict if this URL is new
    if not db.get(url):
        db[url] = {}
    # Update timestamp on channel
    db[url]['last_updated'] = datetime.now().strftime('%Y-%m-%d %H:%M:%S')
    # Save scan data
    db[url]['scan_data'] = scan_results[url]
# Save channel data to file (or stdout when no file was given).
# The original opened the file without ever closing it; the context
# manager guarantees the handle is flushed and closed even on error,
# and stdout is deliberately never closed.
if arguments.file:
    with open(arguments.file, mode='w') as db_file:
        json.dump(db, db_file, indent=4)
else:
    json.dump(db, sys.stdout, indent=4)