# -*- coding: utf-8 -*-
import base64
import datetime
import getpass
import hashlib
import os
import pexpect
import pxssh
import settings
import sys
import tempfile
import thread
import utils
import web
from models import Communication, Crontab, File, FileMeta, FileAllow, FileDeny, Host, Resource, Tag, create_all
from multiprocessing import active_children, Pool, Process, Queue
try:
    from pygments import highlight
    from pygments.lexers import guess_lexer_for_filename
    from pygments.formatters import HtmlFormatter
except ImportError:
    pass
from sqlalchemy import create_engine, or_
from sqlalchemy.orm import eagerload, scoped_session, sessionmaker
from sqlalchemy.orm.exc import NoResultFound
from time import sleep
from web.background import background, backgrounder
if settings.DEBUG:
    if os.path.exists("%slogging.txt" % settings.DATA_DIRECTORY):
        import logging
        LOG_FILENAME = "%slogging.txt" % settings.DATA_DIRECTORY
        logging.basicConfig(filename=LOG_FILENAME, level=logging.DEBUG)
#import warnings
#warnings.simplefilter("error")
crawl_queue = Queue()
urls = (
    "/", "index",
    "/login", "login",
    "/logout", "logout",
    "/host/(\d{1,3}\.\d{1,3}\.\d{1,3}\.\d{1,3})", "HostDetail",
    "/host/add", "HostAdd",
    "/host/edit", "HostEdit",
    "/host/command/(.*)", "host_command",
    "/search", "Search",
    "/host/([0-9.]+)/explorer", "HostExplorer",
    "/file/([a-z0-9]+)", "FileDetail",
    "/file/base64/([a-z0-9]+)", "FileBase64Encoded",
    "/download/([a-z0-9]+)", "FileDownload",
    "/tag/add", "TagAdd",
    "/testa", "Testa",
)
app = web.application(urls, globals())
def load_sqla(handler):
    web.ctx.orm = scoped_session(sessionmaker(bind=engine))
    try:
        return handler()
    except web.HTTPError:
        web.ctx.orm.commit()
        raise
    except:
        web.ctx.orm.rollback()
        raise
    finally:
        web.ctx.orm.commit()
db_file_name = "%sdb.sqlite" % settings.DATA_DIRECTORY
if os.path.exists(db_file_name):
    engine = create_engine("sqlite:///%s" % db_file_name)
app.add_processor(load_sqla)
def get_host_meta(db_session):
    return db_session.query(Meta).filter(Meta.id == 1).one()
def get_remote_directory_contents(ip, directory):
    ssh_c = pxsshConnection().login(ip)
    output = ssh_c.execute("ls -aBl --full-time --color=never %s" % directory)
    output = output.split("\n")
    del output[0]  # Total XX
    return output
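# For reference, each line returned by "ls -aBl --full-time --color=never" looks
# roughly like this (layout assumed from GNU coreutils; fractional seconds may vary):
#   -rw-r--r-- 1 notroot notroot 21504 2011-03-15 20:14:07.000000000 +0000 proxpect.py
# i.e. mode, link count, owner, group, size, date, time, timezone and name -- the
# nine fields that HostExplorer.GET unpacks further down.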
def like_grep(q, text):
    import re
    new_text = []
    i = 1
    if text is None:
        return ""
    else:
        for line in text.split("\n"):
            line = web.htmlquote(line).strip()
            if q.strip() != "" and q.lower() in line.lower():
                css_class = "line_q"
            else:
                css_class = "line"
            line = "<div class=\"%s\">%s</div>\n" % (css_class, line)
            new_text.append(line)
            i = i + 1
        return "\n".join(new_text)
def links_from_directory(directory):
    link = []
    previous = None
    sep = directory.split("/")
    for v in sep:
        href = v
        if previous is not None:
            href = v
        link.append('<a href="directories?dir=/%s">%s</a>' % (href, v))
        previous = v
    return "/".join(link)
def download_file_link(file_meta):
    if file_meta.type == "-":
        return '<a href="/download/%s" class="file">download</a>' % file_meta.hash
    # TODO: Isn't "l" for links? Why am I treating it as a directory?
    elif file_meta.type == "l":
        return '<a href="/host/%s/explorer?directory=%s/%s" class="link">%s -> %s</a>' % (file_meta.file.host.ip, file_meta.file.path, file_meta.file.name, file_meta.file.name, file_meta.links_to)
def highlight_contents(file_meta):
    try:
        lexer = guess_lexer_for_filename(file_meta.file.name, file_meta.contents)
        formatter = HtmlFormatter(noclasses=True)
        result = highlight(file_meta.contents, lexer, formatter)
        return result
    except:
        return "<pre>%s</pre>" % file_meta.contents
def link_from_file_meta(file_meta):
    file = file_meta.file
    if file_meta.type == "-":
        return '<a href="/file/%s" class="file">%s</a>' % (file_meta.hash, file.name)
    elif file_meta.type == "d":
        if file.path == "/":
            path = ""
        else:
            path = file.path
        return '<a href="/host/%s/explorer?directory=%s/%s" class="directory">%s</a>' % (file.host.ip, path, file.name, file.name)
    # TODO: Isn't "l" for links? Why am I treating it as a directory?
    elif file_meta.type == "l":
        return '<a href="/host/%s/explorer?directory=%s/%s" class="link">%s -> %s</a>' % (file.host.ip, file.path, file.name, file.name, file_meta.links_to)
def navigate_backwards(directory):
    # I am pretty sure there's a better way of doing this. 100% sure.
    # (See the sketch after this function.)
    sep = directory.split("/")
    backwards = []
    i = 1
    while i < len(sep):
        if i == 1:
            backwards.append("/")
        else:
            backwards.append("/".join(sep[:i]))
        i = i + 1
    backwards.append("/".join(sep))
    return backwards
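# One possibly simpler alternative (untested sketch), accumulating with os.path.join:
#   def navigate_backwards(directory):
#       backwards = ["/"]
#       for part in directory.split("/")[1:]:
#           backwards.append(os.path.join(backwards[-1], part))
#       return backwards
# e.g. "/home/user/dev" -> ["/", "/home", "/home/user", "/home/user/dev"]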
def tag_list():
    return web.ctx.orm.query(Tag).distinct().order_by(Tag.tag)
render = web.template.render("templates/")
web.template.Template.globals.update(dict(
    render=render,
    download_file_link=download_file_link,
    highlight_contents=highlight_contents,
    like_grep=like_grep,
    link_from_file_meta=link_from_file_meta,
    navigate_backwards=navigate_backwards,
    tag_list=tag_list,
    size_h=utils.size_h,
))
session_dir = tempfile.mkdtemp()
# web.config.debug = False
# session = web.session.Session(app, web.session.DiskStore(session_dir))
if web.config.get("_session") is None:
    session = web.session.Session(app, web.session.DiskStore(session_dir))
    web.config._session = session
else:
    session = web.config._session
# http://code.activestate.com/recipes/52558/#c10
class pxsshConnection(object):
    """Implement Pattern: SINGLETON"""
    __lockObj = thread.allocate_lock()  # lock object
    __instance = None  # the unique instance
    def __new__(cls, *args, **kargs):
        return cls.getInstance(cls, *args, **kargs)
    def getInstance(cls, *args, **kargs):
        """Static method to have a reference to **THE UNIQUE** instance"""
        # Critical section start
        cls.__lockObj.acquire()
        try:
            if cls.__instance is None:
                # (Some exception may be thrown...)
                # Initialize **the unique** instance
                # cls.__instance = object.__new__(cls, *args, **kargs)  # Deprecation warning: __new__() takes no arguments
                cls.__instance = object.__new__(cls)
                '''Initialize object **here**, as you would do in __init__()...'''
        finally:
            # Exit from critical section whatever happens
            cls.__lockObj.release()
        # Critical section end
        return cls.__instance
    getInstance = classmethod(getInstance)
    def __init__(self):
        pass
    def login(self, ip):
        host = web.ctx.orm.query(Host).filter(Host.ip==ip).one()
        if not hasattr(self, "_connected") or not self._connected or self._connected != ip:
            try:
                self._s = pxssh.pxssh(logfile=utils.logfile(ip))
                self._s.login(ip, host.username, host.password)
                self._connected = ip
                # Commands can get truncated because of the terminal size, rendering the .replace() in self.execute() useless.
                # If we send, for example, "ls -and -a -bunch -of -other -parameters /on/some/directory", what really happens is
                #   login@host:/some/dir$ ls -and -a -bunch -of -other -parameters /on/some/directory
                # which has 82 characters, above the usual 80 (type "stty size" in a terminal to see yours).
                # Therefore, for an ls, self._s.before will be something like (notice the carriage return two characters before the end of the line):
                #   ls -and -a -bunch -of -other -parameters /on/some/directo\rry
                #   total 44
                #   ... rest of ls
                # And if we print self._s.before, this is what appears:
                #   ss -aBl --full-time --color=never /home/notroot/dev/proxpect/template
                #   total 44
                #   ... rest of ls
                # So we increase the terminal size!
                # 1000 is pretty random. Will we ever send a command larger than 1000 characters? Should I increase it dynamically based on len(command)?
                self._s.sendline("stty rows 24 cols 1000")
                self._s.prompt()
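                # A possible refinement for the question above (untested sketch): size the
                # terminal per command from execute() instead of a fixed 1000 columns, e.g.
                #   self._s.sendline("stty rows 24 cols %d" % (len(command) + 100))
                #   self._s.prompt()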
print "connecting to " + ip
web.ctx.orm.add(Communication(host, True, datetime.datetime.today()))
output = self.execute("crontab -l")
output_hash = hashlib.md5(output).hexdigest()
try:
crontab = web.ctx.orm.query(Crontab).filter_by(hash=output_hash).one()
except NoResultFound:
crontab = None
if crontab is None:
web.ctx.orm.add(Crontab(output_hash, host, output, datetime.datetime.today()))
except pxssh.ExceptionPxssh, e:
self._connected = False
print "pxssh failed on login."
print str(e)
web.ctx.orm.add(Communication(host, False, datetime.datetime.today()))
else:
print "already connected to " + str(self._connected)
web.ctx.orm.add(Communication(host, True, datetime.datetime.today()))
return self
def execute(self, command):
self._s.sendline(command) # TODO: cat /etc/lsb-release works better on Ubuntu (and probably lsb distros?)
self._s.prompt()
return self._s.before.replace(command, "").strip()
def logout(self):
s.logout()
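# Typical use (as in get_remote_directory_contents and HostExplorer.GET):
#   ssh_c = pxsshConnection().login(ip)
#   output = ssh_c.execute("pwd")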
def get_files_meta_hash(host):
    files_meta = set()
    # join(File) so the File.host filter applies to each FileMeta's own file
    current_files_meta = web.ctx.orm.query(FileMeta).join(File).filter(File.host==host).all()
    for file in current_files_meta:
        files_meta.add(file.hash)
    return files_meta
def login_required(f):
    def wrapped(*args, **kargs):
        if "loggedin" not in session:
            raise web.seeother("/login")
        return f(*args, **kargs)
    return wrapped
def host_list():
    return [host.ip for host in web.ctx.orm.query(Host.ip).order_by(Host.ip)]
def secret_word():
    f = open("%s.secret_word" % settings.DATA_DIRECTORY)
    data = f.read()
    f.close()
    return data
class FileBase64Encoded:
    @login_required
    def GET(self, hash):
        file = web.ctx.orm.query(File).filter_by(hash=hash).one()
        if file.contents is not None:
            if file.contents == "":
                print "not empty"
        # We have the file in the db
        if file.contents is not None and file.contents != "":
            contents = file.contents
        # We don't have it... download and serve its contents
        else:
            file.download()
            contents = open("data/files/%s" % file.name).read()
            file.remove_local()
        return "data:%s;base64,%s" % (file.mime_type, base64.b64encode(contents))
class FileDetail:
    @login_required
    def GET(self, hash):
        file_meta = web.ctx.orm.query(FileMeta).filter_by(hash=hash).one()
        other_versions = web.ctx.orm.query(FileMeta).filter_by(file_id=file_meta.file_id).order_by(FileMeta.last_modified).all()
        return render.file_detail(file_meta, other_versions)
class FileDownload:
    @login_required
    def GET(self, hash):
        file_meta = web.ctx.orm.query(FileMeta).filter(FileMeta.hash==hash).one()
        # We have the file in the db
        if file_meta.contents is not None and file_meta.contents != "":
            contents = file_meta.contents
        # We don't have it... download and serve its contents
        else:
            file_meta.file.download()
            contents = open("data/files/%s" % file_meta.file.name).read()
            file_meta.file.remove_local()
        # http://php.net/manual/en/function.header.php#83384
        web.header("Pragma", "public")
        web.header("Expires", "0")
        web.header("Cache-Control", "must-revalidate, post-check=0, pre-check=0")
        web.header("Content-Type", "application/force-download")
        web.header("Content-Type", "application/octet-stream")
        web.header("Content-Type", "application/download")
        # Double quotes around the file name are needed if it has spaces
        # http://php.net/manual/en/function.header.php#87449
        web.header("Content-Disposition", 'attachment; filename="%s";' % file_meta.file.name)
        web.header("Content-Transfer-Encoding", "binary")
        web.header("Content-Length", file_meta.size)
        return contents
class HostAdd:
    def POST(self):
        i = web.input()
        host = Host(i.ip)
        web.ctx.orm.add(host)
        raise web.seeother("/")
class HostDetail:
    @login_required
    def GET(self, ip):
        host = web.ctx.orm.query(Host).filter(Host.ip==ip).one()
        return render.host_detail(host_list(), host)
class HostEdit:
    @login_required
    def POST(self):
        i = web.input()
        host = web.ctx.orm.query(Host).filter(Host.ip==i.ip).one()
        host.username = i.username
        host.password = i.password
        if "sudo" in i:
            host.sudo = True
        else:
            host.sudo = False
        web.ctx.orm.add(host)
        raise web.seeother("/host/%s" % i.ip)
class HostExplorer:
    @login_required
    def GET(self, ip):
        i = web.input()
        host = web.ctx.orm.query(Host).filter_by(ip=ip).one()
        ssh_c = pxsshConnection().login(ip)
        if "directory" in i:  # Specified directory
            current_directory = i.directory
        else:  # Wherever we go when logged in by ssh
            current_directory = ssh_c.execute("pwd").split().pop()
        if not isinstance(current_directory, unicode):
            current_directory = unicode(current_directory, "utf-8")
        current_files = {}
        for file in host.files:
            current_files["%s/%s" % (file.path, file.name)] = file
        current_files_meta = get_files_meta_hash(host)
        output = get_remote_directory_contents(ip, current_directory)
        for o in output:
            partes = o.split()
            if o[0] == "-" or o[0] == "d":
                if len(partes) == 9:
                    mode, links, owner, group, size, Ymd, time, tz, name = partes
                else:
                    # http://www.nabble.com/Re%3A-Is-there-any-nice-way-to-unpack-a-list-of-unknown-size---p19483627.html
                    mode, links, owner, group, size, Ymd, time, tz = partes[:8]
                    name = " ".join(partes[8:])
                links_to = None
            elif o[0] == "l":
                mode, links, owner, group, size, Ymd, time, tz, name, arrow, links_to = partes
            if name == "." or name == "..":
                continue
            path_name = current_directory + "/" + name
            file_type = mode[0]
            permission = mode[1:]
            Y, m, d = Ymd.split("-")
            H, i, s = time.split(":")
            last_modified = datetime.datetime(int(Y), int(m), int(d), int(H), int(i), int(s[0:2]))
            hash = utils.file_hash(host.ip, current_directory, name, permission, str(last_modified))
            if path_name in current_files:
                file = current_files[path_name]
            else:
                file = File(host, current_directory, name)
            if hash not in current_files_meta:
                file_meta = FileMeta(file, file_type, links_to, permission, owner, group, last_modified)
                file_meta.hash = hash
                file_meta.crawl()
                # TODO: Download the files in parallel, limiting to 4
                #p_crawl = Process(target=file.crawl)
                #p_crawl.start()
                #p_crawl.join()
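                # One way to tackle the TODO above (untested sketch): collect the new
                # FileMeta objects during the loop and crawl them after it with a pool
                # of 4 workers, e.g.
                #   pool = Pool(processes=4)
                #   pool.map(crawl_one, new_files_meta)  # crawl_one / new_files_meta are hypothetical helpers
                #   pool.close(); pool.join()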
        # Get the latest files (order by last modified and group by name)
        files_meta = web.ctx.orm.query(FileMeta).join(File).filter(File.path==current_directory).order_by(File.name).group_by(FileMeta.file_id).all()
        return render.host_explorer(current_directory, files_meta)
class Testa:
    @backgrounder
    def GET(self):
        print "Started!"
        print "hit F5 to refresh!"
        longrunning()
@background
def longrunning():
    for i in range(10):
        sleep(1)
        print "%s: %s" % (i, datetime.datetime.now())
class Search:
    @login_required
    def GET(self):
        i = web.input()
        q = i.q.strip()
        tag = i.tag.strip()
        if q == "" and tag == "":
            raise web.seeother("/")
        query = web.ctx.orm.query(FileMeta).join(File)
        if q != "":
            query = query.filter(or_(FileMeta.contents.ilike("%" + i.q + "%"),
                                     File.name.ilike("%" + i.q + "%")))
        if tag != "":
            query = query.join(Tag).filter(Tag.tag==tag)
        results = query.order_by(File.name).order_by(FileMeta.last_modified.desc()).all()
        return render.search(i.q, results)
class TagAdd:
    @login_required
    def POST(self):
        i = web.input()
        try:
            file = web.ctx.orm.query(File).filter_by(id=i.id).one()
        except NoResultFound:
            return None
        tag = Tag(file, i.tag)
        web.ctx.orm.add(tag)
        return i.tag
class index:
    @login_required
    def GET(self):
        return render.index(host_list())
class login:
    def GET(self):
        if "loggedin" in session:
            raise web.seeother("/")
        return render.login()
    def POST(self):
        if "loggedin" in session:
            raise web.seeother("/")
        i = web.input()
        if hashlib.sha512(i.secret_word).hexdigest() == secret_word():
            session.loggedin = True
            raise web.seeother("/")
        else:
            raise web.seeother("/login")
class logout:
    def GET(self):
        session.kill()
        raise web.seeother("/")
if __name__ == "__main__":
    if not os.path.exists(settings.DATA_DIRECTORY):
        os.mkdir(settings.DATA_DIRECTORY)
    if not os.path.exists("%sfiles" % settings.DATA_DIRECTORY):
        os.mkdir("%sfiles" % settings.DATA_DIRECTORY)
    # Ask the user to create a secret word (to access the app).
    # This should only happen once, the first time Proxpect runs.
    # LOW LAYER OF SECURITY FTW! It's not actually *that* bad, but I should
    # warn the users that the point here is not to provide the best possible
    # security ever. It's just one layer. They should SSH properly and SUDO
    # and IPTABLES the whole thing.
    if os.path.exists("%s.secret_word" % settings.DATA_DIRECTORY) and os.path.exists(db_file_name):
        p_app_run = Process(target=app.run)
        p_app_run.start()
        # session = scoped_session(sessionmaker(bind=engine))
        # def get_uncrawled():
        #     return session.query(File).filter_by(type="d", crawled=False).order_by(File.name, File.last_modified).group_by(File.name).all()
        # def crawl(file):
        #     file.crawl()
        #     session.commit()
        #     print file
        # uncrawled_files = get_uncrawled()
        # for file in uncrawled_files:
        #     Process(target=crawl, args=(file,)).start()
        #     sleep(0.5)
    else:
        secret_word = ""
        print "It looks like this is the first time you're running Proxpect. Please set the secret word, which will be used to access Proxpect.\n"
        try:
            while 1:
                if not secret_word:
                    secret_word = getpass.getpass("Secret word: ")
                    secret_word2 = getpass.getpass("Secret word (again): ")
                    if secret_word != secret_word2:
                        sys.stderr.write("Error: Your secret words didn't match.\n")
                        secret_word = None
                        continue
                if secret_word.strip() == "":
                    sys.stderr.write("Error: Blank secret words aren't allowed.\n")
                    secret_word = None
                    continue
                break
        except KeyboardInterrupt:
            sys.stderr.write("\nOperation cancelled.\n")
            sys.exit(1)
        # Store the secret word (hashed)
        fp = open("%s.secret_word" % settings.DATA_DIRECTORY, "wb")
        fp.write(hashlib.sha512(secret_word).hexdigest())
        fp.close()
        engine = create_engine("sqlite:///%s" % db_file_name)
        create_all(engine)
        print "You're good to go. Start the app again."