#!/usr/bin/python3.5
################################################################################
# Copyright 2017 by David Brenner Jr <[email protected]>
#
# This program is free software; you can redistribute it and/or modify it under
# the terms of the GNU General Public License as published by the Free Software
# Foundation; either version 3 of the License, or (at your option) any later
# version.
#
# This program is distributed in the hope that it will be useful, but WITHOUT
# ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS
# FOR A PARTICULAR PURPOSE. See the GNU General Public License for more
# details.
#
# You should have received a copy of the GNU General Public License along with
# this program. If not, see <http://www.gnu.org/licenses/>.
################################################################################
# import required modules
try:
    import os
    import sys
    import time
    import calendar
    import json
    import re
    import syslog
except ImportError:
    # sys itself may have failed to import, so raise SystemExit rather than calling sys.exit()
    raise SystemExit("FAILURE: Failed to import required modules for b64_pycurl_forwarder_records")
# import required globals. a failed "from ... import ..." raises ImportError
try:
    from b64_pycurl_forwarder_check import CLIENT_LOG_PATHNAME
except ImportError:
    raise SystemExit("FAILURE: Failed to import required globals from module b64_pycurl_forwarder_check")
# open existing record file or create new record file
def create_record_file():
    # opening in append mode creates the file if it does not already exist
    fd = open("b64_pycurl_forwarder_records.json", "a")
    fd.close()
    # json.dump() writes bare objects, not a complete array, so a new (empty)
    # file is seeded with the opening '[' of the records array. afterwards the
    # file has a size of 2 bytes.
    if os.path.getsize("b64_pycurl_forwarder_records.json") == 0:
        fd = open("b64_pycurl_forwarder_records.json", "w")
        fd.write('[\n')
        fd.close()
# return next record number, highest numbered record plus one
def next_record_number():
    largest = 0
    fd = open("b64_pycurl_forwarder_records.json", "r")
    data = fd.readlines()
    fd.close()
    # record keys look like '"<number> size": <bytes>,' so the record number
    # can be recovered from the key part of every size line
    for line in data:
        if 'size"' in line:
            number, _ = line.split(":", 1)
            number = number.replace("size", "").strip()
            # drop the surrounding double quotes and any leftover space
            number = int(number[1:-1])
            # keep largest number
            if largest < number:
                largest = number
    return largest + 1
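# for example, given the line '    "12 size": 4096,' the split/replace/strip
# sequence above reduces the key '"12 size"' to '12 ' and yields record
# number 12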
# create records in file "b64_pycurl_forwarder_records.json" of all files in target
# directory specified by user. ignore hidden folders and their files.
def create_records():
    count = next_record_number()
    # read the existing contents once so already-recorded pathnames can be
    # skipped. (reading from the append handle below would return nothing,
    # because append mode positions the file offset at end-of-file.)
    fd = open("b64_pycurl_forwarder_records.json", "r")
    existing = fd.read()
    fd.close()
    fd = open("b64_pycurl_forwarder_records.json", "a")
    for root, dirs, files in os.walk(CLIENT_LOG_PATHNAME, topdown=True, onerror=None, followlinks=True):
        # prune hidden folders so their files are never visited
        dirs[:] = [d for d in dirs if not d.startswith(".")]
        # get list of files, resolved to real paths, with duplicates removed
        files_list = list(set(os.path.realpath(os.path.join(root, fname)) for fname in files))
        # create record for file
        for pathname in files_list:
            if not os.path.exists(pathname):
                continue
            # skip files that already have a record
            if pathname in existing:
                continue
            pathname_name = "%s pathname" % count
            size_name = "%s size" % count
            checked_name = "%s checked" % count
            sent_name = "%s sent" % count
            json.dump({pathname_name: pathname,
                       size_name: os.path.getsize(pathname),
                       checked_name: calendar.timegm(time.gmtime()),
                       sent_name: 0}, fd, indent=4)
            # json.dump() writes a bare object; add the record separator by hand
            fd.write(',\n')
            existing += pathname + "\n"
            count = count + 1
    # add trailing ']'
    fd.write(']\n')
    fd.close()
    # clean up stray ']]' caused by running twice
    fd = open("b64_pycurl_forwarder_records.json", "r")
    data = fd.read()
    fd.close()
    if "]]" in data:
        data = data[:-2]
    else:
        data = data[:-1]
    # drop the comma after the last record so the array is valid json: '},\n]' -> '}\n]'
    data = re.sub(r'},\n]', '}\n]', data)
    fd = open("b64_pycurl_forwarder_records.json", "w")
    fd.write(data)
    fd.close()
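# for illustration, a records file produced by create_record_file() followed
# by create_records() looks roughly like the sketch below. the pathname and
# the numeric values here are made up, and the key order within a record may
# vary on Python 3.5:
#
# [
# {
#     "1 pathname": "/var/log/example.log",
#     "1 size": 1024,
#     "1 checked": 1500000000,
#     "1 sent": 0
# }
# ]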
# update record file with latest sizes of every file
def update_file_sizes():
    count = next_record_number()
    # open records file
    fd = open("b64_pycurl_forwarder_records.json", "r")
    data = json.load(fd)
    fd.close()
    # use pathnames from json file to update sizes. records are numbered from
    # 1 while the loaded list is indexed from 0
    for counter in range(0, count - 1):
        record = data[counter]
        record_number = counter + 1
        # get size of file from pathname
        key = "%s pathname" % record_number
        size = os.path.getsize(record[key])
        # update size of file
        key = "%s size" % record_number
        record[key] = size
        # update timestamp
        key = "%s checked" % record_number
        record[key] = calendar.timegm(time.gmtime())
        # update data
        data[counter] = record
    # update data in json file
    fd = open("b64_pycurl_forwarder_records.json", "w")
    json.dump(data, fd, indent=4)
    fd.close()
    sys.exit()
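# a minimal usage sketch, assuming the module is driven standalone; the real
# caller is not shown in this file, so the entry point and call order below
# are an assumption based on the function definitions above
if __name__ == "__main__":
    create_record_file()   # ensure the records file exists and is seeded with '['
    create_records()       # record every file found under CLIENT_LOG_PATHNAME
    update_file_sizes()    # refresh sizes and timestamps, then exit the process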